Lines matching refs: hace_dev
66 static int aspeed_hace_crypto_handle_queue(struct aspeed_hace_dev *hace_dev,
69 if (hace_dev->version == AST2500_VERSION &&
71 CIPHER_DBG(hace_dev, "SW fallback\n");
76 hace_dev->crypt_engine_crypto, req);
84 struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
88 crypto_engine = &hace_dev->crypto_engine;
92 rc = ctx->start(hace_dev);
100 static int aspeed_sk_complete(struct aspeed_hace_dev *hace_dev, int err)
102 struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
106 CIPHER_DBG(hace_dev, "\n");
122 crypto_finalize_skcipher_request(hace_dev->crypt_engine_crypto, req,
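
The matches at 66-122 trace one request through the kernel's crypto_engine: aspeed_hace_crypto_handle_queue() hands the skcipher request to the engine, the do-request callback kicks the hardware via ctx->start(hace_dev), and aspeed_sk_complete() returns the result with crypto_finalize_skcipher_request(). A minimal sketch of that round trip using the stock crypto_engine helpers; the my_* names are hypothetical, not the driver's own functions:

#include <crypto/engine.h>
#include <crypto/skcipher.h>

/* Queue side: defer the request to the crypto_engine work queue. */
static int my_handle_queue(struct crypto_engine *engine,
			   struct skcipher_request *req)
{
	return crypto_transfer_skcipher_request_to_engine(engine, req);
}

/* Completion side: hand the finished request back to the engine. */
static void my_complete(struct crypto_engine *engine,
			struct skcipher_request *req, int err)
{
	crypto_finalize_skcipher_request(engine, req, err);
}
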
128 static int aspeed_sk_transfer_sg(struct aspeed_hace_dev *hace_dev)
130 struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
131 struct device *dev = hace_dev->dev;
135 CIPHER_DBG(hace_dev, "\n");
147 return aspeed_sk_complete(hace_dev, 0);
150 static int aspeed_sk_transfer(struct aspeed_hace_dev *hace_dev)
152 struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
167 dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
172 CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
177 return aspeed_sk_complete(hace_dev, rc);
180 static int aspeed_sk_start(struct aspeed_hace_dev *hace_dev)
182 struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
195 CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
201 dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
209 ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
211 ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
213 ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
214 ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);
219 static int aspeed_sk_start_sg(struct aspeed_hace_dev *hace_dev)
221 struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
232 CIPHER_DBG(hace_dev, "\n");
242 src_sg_len = dma_map_sg(hace_dev->dev, req->src,
246 dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
251 src_sg_len = dma_map_sg(hace_dev->dev, req->src,
254 dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
258 dst_sg_len = dma_map_sg(hace_dev->dev, req->dst,
261 dev_warn(hace_dev->dev, "dma_map_sg() dst error\n");
335 ast_hace_write(hace_dev, src_dma_addr, ASPEED_HACE_SRC);
336 ast_hace_write(hace_dev, dst_dma_addr, ASPEED_HACE_DEST);
337 ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
338 ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);
344 dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
348 dma_unmap_sg(hace_dev->dev, req->dst, rctx->dst_nents,
350 dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
357 dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
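
The matches at 242-357 show the scatter-gather start path's map/program/unwind discipline: dma_map_sg() on req->src (and on req->dst when the operation is not in place), register programming, then dma_unmap_sg() on every error path. A condensed sketch of that discipline; the helper name is hypothetical and the register writes are elided:

#include <linux/dma-mapping.h>
#include <crypto/skcipher.h>

/*
 * Condensed sketch of the map/unwind flow seen in aspeed_sk_start_sg();
 * sketch_map_src_dst() and the omitted register programming are
 * illustrative, not the driver's exact code.
 */
static int sketch_map_src_dst(struct device *dev,
			      struct skcipher_request *req,
			      int src_nents, int dst_nents)
{
	if (!dma_map_sg(dev, req->src, src_nents, DMA_TO_DEVICE)) {
		dev_warn(dev, "dma_map_sg() src error\n");
		return -EINVAL;
	}

	if (!dma_map_sg(dev, req->dst, dst_nents, DMA_FROM_DEVICE)) {
		dev_warn(dev, "dma_map_sg() dst error\n");
		/* Unwind the source mapping before bailing out. */
		dma_unmap_sg(dev, req->src, src_nents, DMA_TO_DEVICE);
		return -EINVAL;
	}

	/* ... build descriptors, then write SRC/DEST/DATA_LEN/CMD ... */
	return 0;
}
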
362 static int aspeed_hace_skcipher_trigger(struct aspeed_hace_dev *hace_dev)
364 struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
370 CIPHER_DBG(hace_dev, "\n");
383 ast_hace_write(hace_dev, crypto_engine->cipher_ctx_dma,
395 if (hace_dev->version == AST2600_VERSION) {
398 return aspeed_sk_start_sg(hace_dev);
403 return aspeed_sk_start(hace_dev);
411 struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
414 CIPHER_DBG(hace_dev, "\n");
425 return aspeed_hace_crypto_handle_queue(hace_dev, req);
433 struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
436 CIPHER_DBG(hace_dev, "keylen: %d bits\n", keylen);
439 dev_warn(hace_dev->dev, "invalid keylen: %d bits\n", keylen);
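
The setkey matches at 436-439 only validate the key length before the context is programmed. A sketch of that check, assuming the standard AES key sizes from <crypto/aes.h>; note that keylen here is a byte count even though the debug string above labels it "bits":

#include <crypto/aes.h>
#include <linux/errno.h>

/* Sketch of the length check implied above; 16/24/32-byte keys only. */
static int sketch_aes_check_keylen(unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	return 0;
}
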
541 struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
549 CIPHER_DBG(hace_dev, "%s\n",
571 return aspeed_hace_crypto_handle_queue(hace_dev, req);
578 struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
581 CIPHER_DBG(hace_dev, "keylen: %d bits\n", (keylen * 8));
587 if (ctx->hace_dev->version == AST2500_VERSION) {
643 ctx->hace_dev = crypto_alg->hace_dev;
646 CIPHER_DBG(ctx->hace_dev, "%s\n", name);
651 dev_err(ctx->hace_dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
665 struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
667 CIPHER_DBG(hace_dev, "%s\n", crypto_tfm_alg_name(&tfm->base));
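
The init/exit matches at 643-667 show each tfm allocating a software fallback, which the AST2500 path at lines 69-71 uses when it punts a request to software. A sketch of that allocation with a hypothetical per-tfm context struct; the driver's real context holds more state:

#include <crypto/internal/skcipher.h>
#include <linux/err.h>

/* Hypothetical per-tfm context; illustrative only. */
struct sketch_cipher_ctx {
	struct crypto_skcipher *fallback_tfm;
};

static int sketch_init_tfm(struct crypto_skcipher *tfm, const char *name)
{
	struct sketch_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0,
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm))
		return PTR_ERR(ctx->fallback_tfm);

	return 0;
}

static void sketch_exit_tfm(struct crypto_skcipher *tfm)
{
	struct sketch_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback_tfm);
}
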
915 void aspeed_unregister_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
922 if (hace_dev->version != AST2600_VERSION)
929 void aspeed_register_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
933 CIPHER_DBG(hace_dev, "\n");
936 aspeed_crypto_algs[i].hace_dev = hace_dev;
939 CIPHER_DBG(hace_dev, "Failed to register %s\n",
944 if (hace_dev->version != AST2600_VERSION)
948 aspeed_crypto_algs_g6[i].hace_dev = hace_dev;
951 CIPHER_DBG(hace_dev, "Failed to register %s\n",
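
The register/unregister matches at 915-951 loop over a driver-owned algorithm table, stash the hace_dev pointer in each entry, and register the algorithms one by one, with a second AST2600-only table handled the same way. A simplified sketch of that loop; the sketch_hace_alg layout and the plain crypto_register_skcipher() call are assumptions, not the driver's exact definitions:

#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>

struct aspeed_hace_dev;	/* defined in the driver's aspeed-hace.h */

/* Hypothetical table entry mirroring the aspeed_crypto_algs[] usage above. */
struct sketch_hace_alg {
	struct aspeed_hace_dev *hace_dev;
	struct skcipher_alg alg;
};

static struct sketch_hace_alg sketch_algs[2];

static void sketch_register_algs(struct aspeed_hace_dev *hace_dev)
{
	int i, rc;

	for (i = 0; i < ARRAY_SIZE(sketch_algs); i++) {
		sketch_algs[i].hace_dev = hace_dev;
		rc = crypto_register_skcipher(&sketch_algs[i].alg);
		if (rc)
			pr_debug("Failed to register %s\n",
				 sketch_algs[i].alg.base.cra_name);
	}
}
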