Lines matching full:eip93
15 #include "eip93-cipher.h"
16 #include "eip93-hash.h"
17 #include "eip93-main.h"
18 #include "eip93-common.h"
19 #include "eip93-regs.h"
26 struct eip93_device *eip93 = ctx->eip93; in eip93_hash_free_data_blocks() local
30 dma_unmap_single(eip93->dev, block->data_dma, in eip93_hash_free_data_blocks()
38 dma_unmap_single(eip93->dev, rctx->data_dma, in eip93_hash_free_data_blocks()
48 struct eip93_device *eip93 = ctx->eip93; in eip93_hash_free_sa_record() local
51 dma_unmap_single(eip93->dev, rctx->sa_record_hmac_base, in eip93_hash_free_sa_record()
54 dma_unmap_single(eip93->dev, rctx->sa_record_base, in eip93_hash_free_sa_record()
66 struct eip93_device *eip93 = ctx->eip93; in eip93_hash_handle_result() local
69 dma_unmap_single(eip93->dev, rctx->sa_state_base, in eip93_hash_handle_result()
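The fragments above (lines 26-69) show each cleanup and result path undoing a per-request DMA mapping: the data blocks, the HMAC sa_record, and the regular sa_record and sa_state are all unmapped once the hardware is done with them. Below is a minimal sketch of the map/check/unmap pairing those paths rely on; the field names, buffer sizes and DMA directions are assumptions, and only the dma_map_single()/dma_mapping_error()/dma_unmap_single() calls mirror the listing.

#include <linux/dma-mapping.h>
#include <linux/errno.h>

/* Hypothetical request-context fields, loosely mirroring the fragments above. */
struct hash_reqctx_sketch {
	void *sa_state;
	void *sa_record;
	size_t sa_state_len;
	size_t sa_record_len;
	dma_addr_t sa_state_base;
	dma_addr_t sa_record_base;
};

/* Map both blocks before queuing the request; undo on any failure. */
static int sketch_map_request(struct device *dev, struct hash_reqctx_sketch *rctx)
{
	rctx->sa_state_base = dma_map_single(dev, rctx->sa_state,
					     rctx->sa_state_len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, rctx->sa_state_base))
		return -ENOMEM;

	rctx->sa_record_base = dma_map_single(dev, rctx->sa_record,
					      rctx->sa_record_len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, rctx->sa_record_base)) {
		dma_unmap_single(dev, rctx->sa_state_base,
				 rctx->sa_state_len, DMA_TO_DEVICE);
		return -ENOMEM;
	}

	return 0;
}

/* Result/cleanup path: every successful dma_map_single() gets exactly one unmap. */
static void sketch_unmap_request(struct device *dev, struct hash_reqctx_sketch *rctx)
{
	dma_unmap_single(dev, rctx->sa_record_base, rctx->sa_record_len,
			 DMA_TO_DEVICE);
	dma_unmap_single(dev, rctx->sa_state_base, rctx->sa_state_len,
			 DMA_TO_DEVICE);
}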
133 * EIP93 has special handling for state_byte_cnt in sa_state. in eip93_hash_export_sa_state()
136 * primitive). This is problematic with export/import as EIP93 in eip93_hash_export_sa_state()
180 * EIP93 applies the outer hash. in __eip93_hash_init()
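The comment at lines 133-136 notes that the engine keeps the running byte count as a 64-bit value spread over the first two 32-bit words of the sa_state, which is what makes export/import awkward. Purely as an illustration of that layout, and not the driver's actual conversion code, splitting and reassembling such a counter looks like this:

#include <linux/kernel.h>
#include <linux/types.h>

/* Illustrative only: pack a 64-bit byte counter into two u32 words
 * (low word first), the way a hardware state area such as state_byte_cnt
 * might expect it, and recover the counter again on import.
 */
static void sketch_counter_to_words(u64 byte_cnt, u32 words[2])
{
	words[0] = lower_32_bits(byte_cnt);
	words[1] = upper_32_bits(byte_cnt);
}

static u64 sketch_words_to_counter(const u32 words[2])
{
	return ((u64)words[1] << 32) | words[0];
}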
207 struct eip93_device *eip93 = ctx->eip93; in eip93_send_hash_req() local
213 src_addr = dma_map_single(eip93->dev, data, len, DMA_TO_DEVICE); in eip93_send_hash_req()
214 ret = dma_mapping_error(eip93->dev, src_addr); in eip93_send_hash_req()
240 rctx->sa_record_hmac_base = dma_map_single(eip93->dev, in eip93_send_hash_req()
244 ret = dma_mapping_error(eip93->dev, rctx->sa_record_hmac_base); in eip93_send_hash_req()
254 scoped_guard(spinlock_bh, &eip93->ring->idr_lock) in eip93_send_hash_req()
255 crypto_async_idr = idr_alloc(&eip93->ring->crypto_async_idr, async, 0, in eip93_send_hash_req()
263 scoped_guard(spinlock_irqsave, &eip93->ring->write_lock) in eip93_send_hash_req()
264 ret = eip93_put_descriptor(eip93, &cdesc); in eip93_send_hash_req()
272 writel(1, eip93->base + EIP93_REG_PE_CD_COUNT); in eip93_send_hash_req()
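Lines 254-272 outline the submission path of eip93_send_hash_req(): the async request is stashed in an IDR under a BH-disabling spinlock guard, the command descriptor is written to the ring under an IRQ-safe guard, and the packet engine is then kicked by writing the number of new descriptors to EIP93_REG_PE_CD_COUNT. Below is a hedged reconstruction of that sequence; the structure layout, helper callback and register-offset parameter are assumptions beyond what the listing shows.

#include <linux/cleanup.h>
#include <linux/gfp.h>
#include <linux/idr.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/types.h>

/* Assumed ring bookkeeping; only the locking and the calls mirror the listing. */
struct ring_sketch {
	spinlock_t idr_lock;
	spinlock_t write_lock;
	struct idr crypto_async_idr;
};

static int sketch_submit_hash_req(struct ring_sketch *ring, void __iomem *base,
				  u32 cd_count_reg, void *async_req,
				  const void *cdesc,
				  int (*put_descriptor)(const void *cdesc))
{
	int id, ret;

	/* Remember the async request so the completion handler can find it. */
	scoped_guard(spinlock_bh, &ring->idr_lock)
		id = idr_alloc(&ring->crypto_async_idr, async_req, 0, 0,
			       GFP_ATOMIC);
	if (id < 0)
		return id;

	/* Queue the command descriptor on the hardware ring. */
	scoped_guard(spinlock_irqsave, &ring->write_lock)
		ret = put_descriptor(cdesc);
	if (ret) {
		scoped_guard(spinlock_bh, &ring->idr_lock)
			idr_remove(&ring->crypto_async_idr, id);
		return ret;
	}

	/* Kick the packet engine: one new command descriptor is ready.
	 * cd_count_reg stands in for EIP93_REG_PE_CD_COUNT from eip93-regs.h.
	 */
	writel(1, base + cd_count_reg);

	return 0;
}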
388 struct eip93_device *eip93 = ctx->eip93; in eip93_hash_update() local
394 rctx->sa_state_base = dma_map_single(eip93->dev, sa_state, in eip93_hash_update()
397 ret = dma_mapping_error(eip93->dev, rctx->sa_state_base); in eip93_hash_update()
401 rctx->sa_record_base = dma_map_single(eip93->dev, sa_record, in eip93_hash_update()
404 ret = dma_mapping_error(eip93->dev, rctx->sa_record_base); in eip93_hash_update()
415 dma_unmap_single(eip93->dev, rctx->sa_record_base, in eip93_hash_update()
419 dma_unmap_single(eip93->dev, rctx->sa_state_base, in eip93_hash_update()
437 struct eip93_device *eip93 = ctx->eip93; in __eip93_hash_final() local
440 /* EIP93 can't handle a zero-byte hash */ in __eip93_hash_final()
470 rctx->sa_state_base = dma_map_single(eip93->dev, sa_state, in __eip93_hash_final()
473 ret = dma_mapping_error(eip93->dev, rctx->sa_state_base); in __eip93_hash_final()
477 rctx->sa_record_base = dma_map_single(eip93->dev, sa_record, in __eip93_hash_final()
480 ret = dma_mapping_error(eip93->dev, rctx->sa_record_base); in __eip93_hash_final()
496 dma_unmap_single(eip93->dev, rctx->sa_record_base, in __eip93_hash_final()
500 dma_unmap_single(eip93->dev, rctx->sa_state_base, in __eip93_hash_final()
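The comment at line 440 notes that the engine cannot hash an empty message, so the final path has to special-case a byte count of zero. One common way drivers deal with this limitation, shown here only as a sketch and not as this driver's actual fallback, is to return the kernel's precomputed empty-message digests from software; this only covers the unkeyed hashes, since an empty HMAC still depends on the key.

#include <crypto/md5.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <linux/errno.h>
#include <linux/string.h>

/* Sketch: serve the well-known empty-message digest without touching the
 * hardware when no data was ever fed to the transform.
 */
static int sketch_zero_length_digest(unsigned int digest_size, u8 *out)
{
	switch (digest_size) {
	case MD5_DIGEST_SIZE:
		memcpy(out, md5_zero_message_hash, MD5_DIGEST_SIZE);
		return 0;
	case SHA1_DIGEST_SIZE:
		memcpy(out, sha1_zero_message_hash, SHA1_DIGEST_SIZE);
		return 0;
	case SHA224_DIGEST_SIZE:
		memcpy(out, sha224_zero_message_hash, SHA224_DIGEST_SIZE);
		return 0;
	case SHA256_DIGEST_SIZE:
		memcpy(out, sha256_zero_message_hash, SHA256_DIGEST_SIZE);
		return 0;
	default:
		return -EINVAL;
	}
}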
518 struct eip93_device *eip93 = ctx->eip93; in eip93_hash_finup() local
522 rctx->sa_state_base = dma_map_single(eip93->dev, sa_state, in eip93_hash_finup()
525 ret = dma_mapping_error(eip93->dev, rctx->sa_state_base); in eip93_hash_finup()
529 rctx->sa_record_base = dma_map_single(eip93->dev, sa_record, in eip93_hash_finup()
532 ret = dma_mapping_error(eip93->dev, rctx->sa_record_base); in eip93_hash_finup()
544 dma_unmap_single(eip93->dev, rctx->sa_record_base, in eip93_hash_finup()
547 dma_unmap_single(eip93->dev, rctx->sa_state_base, in eip93_hash_finup()
573 ctx->eip93 = tmpl->eip93; in eip93_hash_cra_init()
641 .cra_driver_name = "md5-eip93",
671 .cra_driver_name = "sha1-eip93",
701 .cra_driver_name = "sha224-eip93",
731 .cra_driver_name = "sha256-eip93",
762 .cra_driver_name = "hmac(md5-eip93)",
793 .cra_driver_name = "hmac(sha1-eip93)",
824 .cra_driver_name = "hmac(sha224-eip93)",
855 .cra_driver_name = "hmac(sha256-eip93)",
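The .cra_driver_name entries above ("md5-eip93" through "hmac(sha256-eip93)") are how the driver's ahash implementations are told apart from the generic software ones. Below is a minimal sketch of how one such algorithm could be declared for the crypto API; the real driver wraps its algorithms in its own template structures and supplies the eip93_hash_* callbacks, so every name in this sketch is illustrative.

#include <crypto/internal/hash.h>
#include <crypto/sha2.h>
#include <linux/module.h>

/* Placeholder callbacks; a real driver supplies its hardware-backed handlers. */
static int sketch_init(struct ahash_request *req)   { return 0; }
static int sketch_update(struct ahash_request *req) { return 0; }
static int sketch_final(struct ahash_request *req)  { return 0; }
static int sketch_digest(struct ahash_request *req) { return 0; }
static int sketch_export(struct ahash_request *req, void *out)      { return 0; }
static int sketch_import(struct ahash_request *req, const void *in) { return 0; }

static struct ahash_alg sketch_sha256_alg = {
	.init	= sketch_init,
	.update	= sketch_update,
	.final	= sketch_final,
	.digest	= sketch_digest,
	.export	= sketch_export,
	.import	= sketch_import,
	.halg = {
		.digestsize	= SHA256_DIGEST_SIZE,
		.statesize	= sizeof(struct sha256_state),
		.base = {
			.cra_name		= "sha256",
			.cra_driver_name	= "sha256-eip93",
			.cra_priority		= 300,
			.cra_flags		= CRYPTO_ALG_ASYNC,
			.cra_blocksize		= SHA256_BLOCK_SIZE,
			.cra_module		= THIS_MODULE,
		},
	},
};

/* crypto_register_ahash() / crypto_unregister_ahash() would be called from
 * the platform driver's probe and remove paths.
 */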