Lines matching defs:req (occurrences of the symbol req in the qce AEAD driver, listed by source line number)
26 struct aead_request *req = aead_request_cast(async_req);
27 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
29 struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
40 diff_dst = (req->src != req->dst);
54 if (req->assoclen) {
71 totallen = req->cryptlen + req->assoclen;
73 scatterwalk_map_and_copy(rctx->ccmresult_buf, req->dst,
76 scatterwalk_map_and_copy(result_buf->auth_iv, req->dst,
80 totallen = req->cryptlen + req->assoclen - ctx->authsize;
81 scatterwalk_map_and_copy(tag, req->src, totallen, ctx->authsize, 0);
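The cluster above (file lines 26..81) is the completion callback: it recovers the AEAD request from the generic async request and moves the authentication tag with scatterwalk_map_and_copy(). Below is a minimal sketch of that pattern, assuming the usual kernel AEAD helpers; the function name example_aead_done and the fixed-size tag buffer are illustrative, not the driver's exact code.

#include <crypto/aead.h>
#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>

static void example_aead_done(struct crypto_async_request *async_req, int error)
{
	struct aead_request *req = aead_request_cast(async_req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(tfm);
	u8 tag[16];

	/*
	 * On decryption the tag sits after the associated data and the
	 * ciphertext in req->src, mirroring file lines 80..81 above.
	 * The comparison against the computed tag is omitted here.
	 */
	scatterwalk_map_and_copy(tag, req->src,
				 req->cryptlen + req->assoclen - authsize,
				 authsize, 0);

	aead_request_complete(req, error);
}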
93 qce_aead_prepare_result_buf(struct sg_table *tbl, struct aead_request *req)
95 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
96 struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
104 qce_aead_prepare_ccm_result_buf(struct sg_table *tbl, struct aead_request *req)
106 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
113 qce_aead_prepare_dst_buf(struct aead_request *req)
115 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
116 struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
120 unsigned int assoclen = req->assoclen;
125 rctx->dst_nents = sg_nents_for_len(req->dst, totallen);
135 gfp = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
143 msg_sg = scatterwalk_ffwd(__sg, req->dst, assoclen);
160 sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, totallen);
166 sg = qce_aead_prepare_ccm_result_buf(&rctx->dst_tbl, req);
168 sg = qce_aead_prepare_result_buf(&rctx->dst_tbl, req);
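File lines 113..168 prepare the destination sg_table: count the entries covering totallen, pick GFP flags from whether the request may sleep, fast-forward past the associated data, and append the driver's result buffer. A compressed sketch of those steps follows; the name example_prepare_dst and the result-buffer append (elided) are illustrative.

#include <crypto/aead.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static struct scatterlist *
example_prepare_dst(struct aead_request *req, struct sg_table *tbl,
		    struct scatterlist pre[2], unsigned int totallen)
{
	int nents, ret;
	gfp_t gfp;

	nents = sg_nents_for_len(req->dst, totallen);
	if (nents < 0)
		return ERR_PTR(nents);

	/* Sleepable allocation only when the submitter permits it. */
	gfp = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
	      GFP_KERNEL : GFP_ATOMIC;

	/* One spare entry for the result buffer appended later. */
	ret = sg_alloc_table(tbl, nents + 1, gfp);
	if (ret)
		return ERR_PTR(ret);

	/* Skip the associated data; what remains is the message proper. */
	return scatterwalk_ffwd(pre, req->dst, req->assoclen);
}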
185 qce_aead_ccm_prepare_buf_assoclen(struct aead_request *req)
188 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
189 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
201 totallen = cryptlen + req->assoclen;
204 msg_sg = scatterwalk_ffwd(__sg, req->src, req->assoclen);
228 if (sg_copy_to_buffer(req->src, sg_nents_for_len(req->src, assoclen),
236 diff_dst = (req->src != req->dst);
239 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 1;
241 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 2;
243 gfp = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
269 sg = qce_aead_prepare_ccm_result_buf(&rctx->src_tbl, req);
282 sg = qce_aead_prepare_dst_buf(req);
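File lines 185..282 handle CCM with associated data: the AAD is linearized into a bounce buffer with sg_copy_to_buffer() before the composite source table is built. A minimal sketch of that copy, assuming the caller sized adata appropriately; the CCM length-encoding prefix the driver adds is omitted.

#include <crypto/aead.h>
#include <linux/scatterlist.h>

static int example_copy_assoc(struct aead_request *req, u8 *adata,
			      unsigned int assoclen)
{
	int nents = sg_nents_for_len(req->src, assoclen);

	if (nents < 0)
		return nents;

	/* Linearize the AAD so CCM can prepend its encoded length. */
	if (sg_copy_to_buffer(req->src, nents, adata, assoclen) != assoclen)
		return -EINVAL;

	return 0;
}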
301 static int qce_aead_prepare_buf(struct aead_request *req)
303 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
304 struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
307 bool diff_dst = (req->src != req->dst);
312 sg = qce_aead_prepare_dst_buf(req);
316 rctx->src_nents = sg_nents_for_len(req->src, totallen);
321 rctx->src_sg = req->src;
329 static int qce_aead_ccm_prepare_buf(struct aead_request *req)
331 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
332 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
335 bool diff_dst = (req->src != req->dst);
339 return qce_aead_ccm_prepare_buf_assoclen(req);
342 return qce_aead_prepare_buf(req);
346 rctx->src_nents = sg_nents_for_len(req->src, cryptlen);
347 rctx->src_sg = req->src;
348 sg = qce_aead_prepare_dst_buf(req);
352 rctx->src_nents = sg_nents_for_len(req->src, cryptlen);
353 rctx->src_sg = req->src;
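File lines 329..353 show the CCM dispatch: with associated data present, the AAD-bouncing path sketched earlier is taken; otherwise the source scatterlist feeds the engine directly. A sketch of that split, with example_* names standing in for the driver's helpers:

#include <crypto/aead.h>
#include <linux/scatterlist.h>

/* Stand-in for the AAD-bouncing path sketched above. */
static int example_ccm_prepare_buf_assoclen(struct aead_request *req)
{
	return 0;
}

static int example_ccm_prepare_buf(struct aead_request *req,
				   unsigned int cryptlen)
{
	int nents;

	if (req->assoclen)
		return example_ccm_prepare_buf_assoclen(req);

	nents = sg_nents_for_len(req->src, cryptlen);
	if (nents < 0)
		return nents;

	/* No AAD: req->src can be handed to the DMA engine as-is. */
	return 0;
}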
410 struct aead_request *req = aead_request_cast(async_req);
411 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
412 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
414 struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
424 memcpy(&rctx->ccm_rfc4309_iv[4], req->iv, 8);
428 rctx->iv = req->iv;
432 rctx->assoclen = req->assoclen - 8;
434 rctx->assoclen = req->assoclen;
436 diff_dst = (req->src != req->dst);
446 ret = qce_aead_ccm_prepare_buf(req);
448 ret = qce_aead_prepare_buf(req);
488 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src);
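File lines 410..488 start a request. For rfc4309(ccm(aes)), the 8-byte explicit IV from the request is copied into the CCM nonce at offset 4 (file line 424), and those 8 bytes, which req->assoclen counts in the RFC 4309 layout, are subtracted from the AAD length (file line 432). A sketch of that nonce assembly, following the same layout the generic kernel CCM code uses; the salt location is an assumption here.

#include <linux/string.h>
#include <linux/types.h>

#define EXAMPLE_CCM_IV_SIZE	16

/* Build the 16-byte CCM counter block for rfc4309: flags, salt, IV. */
static void example_build_rfc4309_iv(u8 iv[EXAMPLE_CCM_IV_SIZE],
				     const u8 salt[3], const u8 req_iv[8])
{
	memset(iv, 0, EXAMPLE_CCM_IV_SIZE);
	iv[0] = 3;			/* L' = L - 1, with a 4-octet length field */
	memcpy(&iv[1], salt, 3);	/* implicit salt, stored at setkey() time */
	memcpy(&iv[4], req_iv, 8);	/* explicit per-request IV, as at file line 424 */
}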
502 static int qce_aead_crypt(struct aead_request *req, int encrypt)
504 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
505 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
514 rctx->cryptlen = req->cryptlen;
516 rctx->cryptlen = req->cryptlen - ctx->authsize;
530 aead_request_set_callback(&rctx->fallback_req, req->base.flags,
531 req->base.complete, req->base.data);
532 aead_request_set_crypt(&rctx->fallback_req, req->src,
533 req->dst, req->cryptlen, req->iv);
534 aead_request_set_ad(&rctx->fallback_req, req->assoclen);
549 if (crypto_ipsec_check_assoclen(req->assoclen))
552 return tmpl->qce->async_req_enqueue(tmpl->qce, &req->base);
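File lines 502..552 implement the common crypt path, including a software fallback: the original request's buffers, flags, and AAD length are mirrored onto a pre-allocated fallback request (file lines 530..534). A sketch of that hand-off; allocating the fallback tfm with CRYPTO_ALG_NEED_FALLBACK at init time is assumed and omitted.

#include <crypto/aead.h>

static int example_do_fallback(struct aead_request *req,
			       struct aead_request *fallback_req,
			       bool encrypt)
{
	/* Mirror the caller's request onto the software fallback. */
	aead_request_set_callback(fallback_req, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(fallback_req, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(fallback_req, req->assoclen);

	return encrypt ? crypto_aead_encrypt(fallback_req) :
			 crypto_aead_decrypt(fallback_req);
}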
555 static int qce_aead_encrypt(struct aead_request *req)
557 return qce_aead_crypt(req, 1);
560 static int qce_aead_decrypt(struct aead_request *req)
562 return qce_aead_crypt(req, 0);
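For context, these entry points are reached through the generic AEAD API rather than called directly. A minimal usage sketch follows, using gcm(aes), one of the modes a qce-backed tfm can provide, and a synchronous wait; the buffer sizing is deliberately simplistic.

#include <crypto/aead.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int example_one_shot_encrypt(const u8 *key, unsigned int keylen,
				    u8 *buf, unsigned int ptext_len,
				    u8 iv[12])
{
	DECLARE_CRYPTO_WAIT(wait);
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	int ret;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_aead_setkey(tfm, key, keylen);
	if (ret)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	/* buf must have room for ptext_len bytes plus the 16-byte tag. */
	sg_init_one(&sg, buf, ptext_len + crypto_aead_authsize(tfm));
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);
	aead_request_set_crypt(req, &sg, &sg, ptext_len, iv);
	aead_request_set_ad(req, 0);

	/* Lands in qce_aead_encrypt() when the qce driver backs the tfm. */
	ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return ret;
}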