// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * Support for StarFive hardware cryptographic engine.
 * Copyright (c) 2022 StarFive Technology
 *
 */

#include <crypto/engine.h>
#include "jh7110-cryp.h"
#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/spinlock.h>

#define DRIVER_NAME	"jh7110-crypto"

struct starfive_dev_list {
	struct list_head dev_list;
	spinlock_t lock; /* protect dev_list */
};

static struct starfive_dev_list dev_list = {
	.dev_list = LIST_HEAD_INIT(dev_list.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(dev_list.lock),
};

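/*
 * Return the hardware instance to use for a transform: reuse the device
 * already cached in the context, otherwise pick the first entry on the
 * global device list and cache it.
 */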
struct starfive_cryp_dev *starfive_cryp_find_dev(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = NULL, *tmp;

	spin_lock_bh(&dev_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &dev_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&dev_list.lock);

	return cryp;
}

static u16 side_chan;
module_param(side_chan, ushort, 0);
MODULE_PARM_DESC(side_chan, "Enable side channel mitigation for AES module.\n"
		 "Enabling this feature will reduce speed performance.\n"
		 " 0 - Disabled\n"
		 " other - Enabled");

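/* Request the "tx" and "rx" DMA channels named in the device tree node. */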
static int starfive_dma_init(struct starfive_cryp_dev *cryp)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	cryp->tx = dma_request_chan(cryp->dev, "tx");
	if (IS_ERR(cryp->tx))
		return dev_err_probe(cryp->dev, PTR_ERR(cryp->tx),
				     "Error requesting tx dma channel.\n");

	cryp->rx = dma_request_chan(cryp->dev, "rx");
	if (IS_ERR(cryp->rx)) {
		dma_release_channel(cryp->tx);
		return dev_err_probe(cryp->dev, PTR_ERR(cryp->rx),
				     "Error requesting rx dma channel.\n");
	}

	return 0;
}

static void starfive_dma_cleanup(struct starfive_cryp_dev *cryp)
{
	dma_release_channel(cryp->tx);
	dma_release_channel(cryp->rx);
}

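/*
 * Completion interrupt handler: mask the interrupt source that fired and
 * defer the rest of the request processing to the matching tasklet.
 */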
static irqreturn_t starfive_cryp_irq(int irq, void *priv)
{
	u32 status;
	u32 mask;
	struct starfive_cryp_dev *cryp = (struct starfive_cryp_dev *)priv;

	mask = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
	status = readl(cryp->base + STARFIVE_IE_FLAG_OFFSET);
	if (status & STARFIVE_IE_FLAG_AES_DONE) {
		mask |= STARFIVE_IE_MASK_AES_DONE;
		writel(mask, cryp->base + STARFIVE_IE_MASK_OFFSET);
		tasklet_schedule(&cryp->aes_done);
	}

	if (status & STARFIVE_IE_FLAG_HASH_DONE) {
		mask |= STARFIVE_IE_MASK_HASH_DONE;
		writel(mask, cryp->base + STARFIVE_IE_MASK_OFFSET);
		tasklet_schedule(&cryp->hash_done);
	}

	return IRQ_HANDLED;
}

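/*
 * Probe: map the register space, acquire clocks, reset line, IRQ and DMA
 * channels, then start the crypto engine and register the AES, hash and
 * RSA algorithms.
 */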
static int starfive_cryp_probe(struct platform_device *pdev)
{
	struct starfive_cryp_dev *cryp;
	struct resource *res;
	int irq;
	int ret;

	cryp = devm_kzalloc(&pdev->dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	platform_set_drvdata(pdev, cryp);
	cryp->dev = &pdev->dev;

	cryp->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
	if (IS_ERR(cryp->base))
		return dev_err_probe(&pdev->dev, PTR_ERR(cryp->base),
				     "Error remapping memory for platform device\n");

	tasklet_init(&cryp->aes_done, starfive_aes_done_task, (unsigned long)cryp);
	tasklet_init(&cryp->hash_done, starfive_hash_done_task, (unsigned long)cryp);

	cryp->phys_base = res->start;
	cryp->dma_maxburst = 32;
	cryp->side_chan = side_chan;

	cryp->hclk = devm_clk_get(&pdev->dev, "hclk");
	if (IS_ERR(cryp->hclk))
		return dev_err_probe(&pdev->dev, PTR_ERR(cryp->hclk),
				     "Error getting hardware reference clock\n");

	cryp->ahb = devm_clk_get(&pdev->dev, "ahb");
	if (IS_ERR(cryp->ahb))
		return dev_err_probe(&pdev->dev, PTR_ERR(cryp->ahb),
				     "Error getting ahb reference clock\n");

	cryp->rst = devm_reset_control_get_shared(cryp->dev, NULL);
	if (IS_ERR(cryp->rst))
		return dev_err_probe(&pdev->dev, PTR_ERR(cryp->rst),
				     "Error getting hardware reset line\n");

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_irq(&pdev->dev, irq, starfive_cryp_irq, 0, pdev->name,
			       (void *)cryp);
	if (ret)
		return dev_err_probe(&pdev->dev, ret,
				     "Failed to register interrupt handler\n");

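	/* Enable the bus clocks before releasing the block from reset. */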
	clk_prepare_enable(cryp->hclk);
	clk_prepare_enable(cryp->ahb);
	reset_control_deassert(cryp->rst);

	spin_lock(&dev_list.lock);
	list_add(&cryp->list, &dev_list.dev_list);
	spin_unlock(&dev_list.lock);

	ret = starfive_dma_init(cryp);
	if (ret)
		goto err_dma_init;

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(&pdev->dev, 1);
	if (!cryp->engine) {
		ret = -ENOMEM;
		goto err_engine;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret)
		goto err_engine_start;

	ret = starfive_aes_register_algs();
	if (ret)
		goto err_algs_aes;

	ret = starfive_hash_register_algs();
	if (ret)
		goto err_algs_hash;

	ret = starfive_rsa_register_algs();
	if (ret)
		goto err_algs_rsa;

	return 0;

err_algs_rsa:
	starfive_hash_unregister_algs();
err_algs_hash:
	starfive_aes_unregister_algs();
err_algs_aes:
	crypto_engine_stop(cryp->engine);
err_engine_start:
	crypto_engine_exit(cryp->engine);
err_engine:
	starfive_dma_cleanup(cryp);
err_dma_init:
	spin_lock(&dev_list.lock);
	list_del(&cryp->list);
	spin_unlock(&dev_list.lock);

	clk_disable_unprepare(cryp->hclk);
	clk_disable_unprepare(cryp->ahb);
	reset_control_assert(cryp->rst);

	tasklet_kill(&cryp->aes_done);
	tasklet_kill(&cryp->hash_done);

	return ret;
}

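/*
 * Unregister the algorithms first so no new requests are queued, then tear
 * the rest down roughly in reverse order of probe.
 */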
static void starfive_cryp_remove(struct platform_device *pdev)
{
	struct starfive_cryp_dev *cryp = platform_get_drvdata(pdev);

	starfive_aes_unregister_algs();
	starfive_hash_unregister_algs();
	starfive_rsa_unregister_algs();

	tasklet_kill(&cryp->aes_done);
	tasklet_kill(&cryp->hash_done);

	crypto_engine_stop(cryp->engine);
	crypto_engine_exit(cryp->engine);

	starfive_dma_cleanup(cryp);

	spin_lock(&dev_list.lock);
	list_del(&cryp->list);
	spin_unlock(&dev_list.lock);

	clk_disable_unprepare(cryp->hclk);
	clk_disable_unprepare(cryp->ahb);
	reset_control_assert(cryp->rst);
}

static const struct of_device_id starfive_dt_ids[] __maybe_unused = {
	{ .compatible = "starfive,jh7110-crypto", .data = NULL},
	{},
};
MODULE_DEVICE_TABLE(of, starfive_dt_ids);

static struct platform_driver starfive_cryp_driver = {
	.probe = starfive_cryp_probe,
	.remove_new = starfive_cryp_remove,
	.driver = {
		.name = DRIVER_NAME,
		.of_match_table = starfive_dt_ids,
	},
};

module_platform_driver(starfive_cryp_driver);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("StarFive JH7110 Cryptographic Module");