Lines matching "eip93" in the EIP93 crypto driver's main source file (only matching lines are shown; each is prefixed with its line number in that file)

21 #include "eip93-main.h"
22 #include "eip93-regs.h"
23 #include "eip93-common.h"
24 #include "eip93-cipher.h"
25 #include "eip93-aes.h"
26 #include "eip93-des.h"
27 #include "eip93-aead.h"
28 #include "eip93-hash.h"
65 inline void eip93_irq_disable(struct eip93_device *eip93, u32 mask)
67 __raw_writel(mask, eip93->base + EIP93_REG_MASK_DISABLE);
70 inline void eip93_irq_enable(struct eip93_device *eip93, u32 mask)
72 __raw_writel(mask, eip93->base + EIP93_REG_MASK_ENABLE);
75 inline void eip93_irq_clear(struct eip93_device *eip93, u32 mask)
77 __raw_writel(mask, eip93->base + EIP93_REG_INT_CLR);
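Because only lines containing "eip93" are listed, the braces around these one-line helpers (lines 65-77) are not shown. Each helper is a single register write behind the named mask register; the following reconstruction of eip93_irq_disable() is a sketch that assumes the body holds nothing beyond the matched line:

inline void eip93_irq_disable(struct eip93_device *eip93, u32 mask)
{
	/* mask off the given interrupt sources; the enable and clear
	 * helpers differ only in the register they target */
	__raw_writel(mask, eip93->base + EIP93_REG_MASK_DISABLE);
}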
99 static int eip93_register_algs(struct eip93_device *eip93, u32 supported_algo_flags)
107 eip93_algs[i]->eip93 = eip93;
175 static void eip93_handle_result_descriptor(struct eip93_device *eip93)
188 left = readl(eip93->base + EIP93_REG_PE_RD_COUNT) & EIP93_PE_RD_COUNT;
191 eip93_irq_clear(eip93, EIP93_INT_RDR_THRESH);
192 eip93_irq_enable(eip93, EIP93_INT_RDR_THRESH);
199 scoped_guard(spinlock_irqsave, &eip93->ring->read_lock)
200 rdesc = eip93_get_descriptor(eip93);
202 dev_err(eip93->dev, "Ndesc: %d nreq: %d\n",
225 writel(1, eip93->base + EIP93_REG_PE_RD_COUNT);
226 eip93_irq_clear(eip93, EIP93_INT_RDR_THRESH);
241 scoped_guard(spinlock_bh, &eip93->ring->idr_lock) {
242 async = idr_find(&eip93->ring->crypto_async_idr, crypto_idr);
243 idr_remove(&eip93->ring->crypto_async_idr, crypto_idr);
247 err = eip93_parse_ctrl_stat_err(eip93, err);
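The fragments of eip93_handle_result_descriptor() (lines 175-247) show the completion side of request tracking: a result descriptor is popped under ring->read_lock, the owning request is recovered from crypto_async_idr by its ID and removed, and the hardware status word is translated into an errno. A condensed sketch of that pattern follows; the descriptor fields carrying the ID and status, and the final completion call, are assumptions rather than the driver's verbatim code:

	struct eip93_descriptor *rdesc;
	struct crypto_async_request *async;
	int crypto_idr, err;

	scoped_guard(spinlock_irqsave, &eip93->ring->read_lock)
		rdesc = eip93_get_descriptor(eip93);
	if (IS_ERR(rdesc))
		return;

	/* hypothetical fields: the submit path stored the IDR id and the
	 * hardware wrote its status word into the result descriptor */
	crypto_idr = rdesc->user_id;
	err = rdesc->pe_ctrl_stat;

	scoped_guard(spinlock_bh, &eip93->ring->idr_lock) {
		async = idr_find(&eip93->ring->crypto_async_idr, crypto_idr);
		idr_remove(&eip93->ring->crypto_async_idr, crypto_idr);
	}

	err = eip93_parse_ctrl_stat_err(eip93, err);
	if (async)
		crypto_request_complete(async, err);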
263 struct eip93_device *eip93 = (struct eip93_device *)data;
265 eip93_handle_result_descriptor(eip93);
270 struct eip93_device *eip93 = data;
273 irq_status = readl(eip93->base + EIP93_REG_INT_MASK_STAT);
275 eip93_irq_disable(eip93, EIP93_INT_RDR_THRESH);
276 tasklet_schedule(&eip93->ring->done_task);
281 eip93_irq_clear(eip93, irq_status);
283 eip93_irq_disable(eip93, irq_status);
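Lines 263-283 split interrupt handling into a tasklet bottom half (eip93_done_task, which runs eip93_handle_result_descriptor) and a top half that reads EIP93_REG_INT_MASK_STAT, masks the result-ring threshold source, and defers to the tasklet; the tasklet re-arms that interrupt once the ring is drained (lines 191-192). A sketch of such a top half; the return values and the handling of unexpected sources are assumptions:

static irqreturn_t example_irq_handler(int irq, void *data)
{
	struct eip93_device *eip93 = data;
	u32 irq_status;

	irq_status = readl(eip93->base + EIP93_REG_INT_MASK_STAT);
	if (irq_status & EIP93_INT_RDR_THRESH) {
		/* mask the source; the done tasklet re-enables it later */
		eip93_irq_disable(eip93, EIP93_INT_RDR_THRESH);
		tasklet_schedule(&eip93->ring->done_task);
		return IRQ_HANDLED;
	}

	/* unexpected source: acknowledge and mask it */
	eip93_irq_clear(eip93, irq_status);
	eip93_irq_disable(eip93, irq_status);
	return IRQ_NONE;
}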
288 static void eip93_initialize(struct eip93_device *eip93, u32 supported_algo_flags)
297 writel(val, eip93->base + EIP93_REG_PE_CONFIG);
303 val = readl(eip93->base + EIP93_REG_PE_CONFIG);
305 writel(val, eip93->base + EIP93_REG_PE_CONFIG);
317 writel(val, eip93->base + EIP93_REG_PE_CLOCK_CTRL);
322 writel(val, eip93->base + EIP93_REG_PE_BUF_THRESH);
325 eip93_irq_clear(eip93, EIP93_INT_ALL);
326 eip93_irq_disable(eip93, EIP93_INT_ALL);
335 writel(val, eip93->base + EIP93_REG_PE_RING_THRESH);
338 static void eip93_desc_free(struct eip93_device *eip93)
340 writel(0, eip93->base + EIP93_REG_PE_RING_CONFIG);
341 writel(0, eip93->base + EIP93_REG_PE_CDR_BASE);
342 writel(0, eip93->base + EIP93_REG_PE_RDR_BASE);
345 static int eip93_set_ring(struct eip93_device *eip93, struct eip93_desc_ring *ring)
348 ring->base = dmam_alloc_coherent(eip93->dev,
361 static int eip93_desc_init(struct eip93_device *eip93)
363 struct eip93_desc_ring *cdr = &eip93->ring->cdr;
364 struct eip93_desc_ring *rdr = &eip93->ring->rdr;
368 ret = eip93_set_ring(eip93, cdr);
372 ret = eip93_set_ring(eip93, rdr);
376 writel((u32 __force)cdr->base_dma, eip93->base + EIP93_REG_PE_CDR_BASE);
377 writel((u32 __force)rdr->base_dma, eip93->base + EIP93_REG_PE_RDR_BASE);
380 writel(val, eip93->base + EIP93_REG_PE_RING_CONFIG);
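eip93_set_ring() and eip93_desc_init() (lines 345-380) allocate the command and result descriptor rings from device-managed coherent DMA memory and program their bus addresses into EIP93_REG_PE_CDR_BASE and EIP93_REG_PE_RDR_BASE. A minimal sketch of the allocation step, assuming a ring-depth constant and read/write cursor fields that are not part of this listing:

static int example_set_ring(struct eip93_device *eip93,
			    struct eip93_desc_ring *ring)
{
	/* EIP93_RING_NUM is a hypothetical name for the ring depth */
	size_t len = EIP93_RING_NUM * sizeof(struct eip93_descriptor);

	ring->base = dmam_alloc_coherent(eip93->dev, len,
					 &ring->base_dma, GFP_KERNEL);
	if (!ring->base)
		return -ENOMEM;

	/* both cursors start at the beginning of the zeroed ring */
	ring->write = ring->base;
	ring->read = ring->base;

	return 0;
}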
385 static void eip93_cleanup(struct eip93_device *eip93)
387 tasklet_kill(&eip93->ring->done_task);
390 eip93_irq_clear(eip93, EIP93_INT_ALL);
391 eip93_irq_disable(eip93, EIP93_INT_ALL);
393 writel(0, eip93->base + EIP93_REG_PE_CLOCK_CTRL);
395 eip93_desc_free(eip93);
397 idr_destroy(&eip93->ring->crypto_async_idr);
403 struct eip93_device *eip93;
407 eip93 = devm_kzalloc(dev, sizeof(*eip93), GFP_KERNEL);
408 if (!eip93)
411 eip93->dev = dev;
412 platform_set_drvdata(pdev, eip93);
414 eip93->base = devm_platform_ioremap_resource(pdev, 0);
415 if (IS_ERR(eip93->base))
416 return PTR_ERR(eip93->base);
418 eip93->irq = platform_get_irq(pdev, 0);
419 if (eip93->irq < 0)
420 return eip93->irq;
422 ret = devm_request_threaded_irq(eip93->dev, eip93->irq, eip93_irq_handler,
424 dev_name(eip93->dev), eip93);
426 eip93->ring = devm_kcalloc(eip93->dev, 1, sizeof(*eip93->ring), GFP_KERNEL);
427 if (!eip93->ring)
430 ret = eip93_desc_init(eip93);
435 tasklet_init(&eip93->ring->done_task, eip93_done_task, (unsigned long)eip93);
437 spin_lock_init(&eip93->ring->read_lock);
438 spin_lock_init(&eip93->ring->write_lock);
440 spin_lock_init(&eip93->ring->idr_lock);
441 idr_init(&eip93->ring->crypto_async_idr);
443 algo_flags = readl(eip93->base + EIP93_REG_PE_OPTION_1);
445 eip93_initialize(eip93, algo_flags);
448 eip93_irq_enable(eip93, EIP93_INT_RDR_THRESH);
450 ret = eip93_register_algs(eip93, algo_flags);
452 eip93_cleanup(eip93);
456 ver = readl(eip93->base + EIP93_REG_PE_REVISION);
458 dev_info(eip93->dev, "EIP%lu:%lx:%lx:%lx,PE(0x%x:0x%x)\n",
464 readl(eip93->base + EIP93_REG_PE_OPTION_0));
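The probe path initializes crypto_async_idr and idr_lock (lines 440-441), and the result handler later retires entries from that IDR (lines 242-243). The submit side is not part of this listing, but it presumably allocates an ID per request under the same lock; a hedged sketch of what such a helper could look like (the function, its parameters, and the descriptor field are hypothetical):

static int example_track_request(struct eip93_device *eip93,
				 struct crypto_async_request *async,
				 struct eip93_descriptor *cdesc)
{
	int id;

	scoped_guard(spinlock_bh, &eip93->ring->idr_lock)
		id = idr_alloc(&eip93->ring->crypto_async_idr, async,
			       0, 0, GFP_ATOMIC);
	if (id < 0)
		return id;

	/* hypothetical field, read back by the result handler */
	cdesc->user_id = id;

	return 0;
}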
471 struct eip93_device *eip93 = platform_get_drvdata(pdev);
474 eip93_cleanup(eip93);
492 .name = "inside-secure-eip93",
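Line 492 shows only the .name field of the driver's platform_driver definition. For orientation, a common shape for such registration glue is sketched below; the compatible string, table and variable names, and the use of module_platform_driver() are assumptions rather than the driver's verbatim code:

static const struct of_device_id example_eip93_of_match[] = {
	/* hypothetical compatible string, for illustration only */
	{ .compatible = "inside-secure,safexcel-eip93", },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, example_eip93_of_match);

static struct platform_driver example_eip93_driver = {
	.probe = eip93_probe,
	.remove = eip93_remove,
	.driver = {
		.name = "inside-secure-eip93",
		.of_match_table = example_eip93_of_match,
	},
};
module_platform_driver(example_eip93_driver);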