Lines Matching +full:0 +full:x6c000000 (i.e. occurrences of the constant 0x6C000000)

76 0, 0, 16, 24, 40, 48, 64, 72, 88,
83 0, 32, 32, 48, 48, 64, 64, 80, 80,
90 0x01000000, 0x02000000, 0x04000000, 0x08000000,
91 0x10000000, 0x20000000, 0x40000000, 0x80000000,
92 0x1B000000, 0x36000000, 0x6C000000
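
The constants on lines 90-92 appear to be the AES key-schedule round constants (rcon values) stored in the most significant byte of each word; the searched value 0x6C000000 is the last entry. As a minimal user-space sketch (not driver code), the same sequence can be regenerated by repeated doubling in GF(2^8) with the AES reduction polynomial:

#include <stdint.h>
#include <stdio.h>

/* xtime(): multiply by x in GF(2^8), reducing with the AES polynomial 0x11B */
static uint8_t xtime(uint8_t b)
{
	return (uint8_t)((b << 1) ^ ((b & 0x80) ? 0x1B : 0x00));
}

int main(void)
{
	uint8_t rc = 0x01;
	int i;

	for (i = 0; i < 11; i++) {	/* prints 0x01000000 ... 0x6C000000 */
		printf("0x%02X000000,\n", rc);
		rc = xtime(rc);
	}
	return 0;
}
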
135 memset(&reqctx->hctx_wr, 0, sizeof(struct chcr_hctx_per_wr)); in chcr_init_hctx_per_wr()
142 int nents = 0; in sg_nents_xlen()
144 unsigned int skip_len = 0; in sg_nents_xlen()
149 skip_len = 0; in sg_nents_xlen()
153 skip = 0; in sg_nents_xlen()
161 skip_len = 0; in sg_nents_xlen()
181 int cmp = 0; in chcr_verify_tag()
197 *err = 0; in chcr_verify_tag()
205 return 0; in chcr_inc_wrcount()
257 for (i = 0; i < nk; i++) in get_aes_decrypt_key()
260 i = 0; in get_aes_decrypt_key()
268 } else if (nk == 8 && (i % 4 == 0)) { in get_aes_decrypt_key()
276 for (k = 0, j = i % nk; k < nk; k++) { in get_aes_decrypt_key()
279 if (j < 0) in get_aes_decrypt_key()
290 base_hash = crypto_alloc_shash("sha1", 0, 0); in chcr_alloc_shash()
293 base_hash = crypto_alloc_shash("sha224", 0, 0); in chcr_alloc_shash()
296 base_hash = crypto_alloc_shash("sha256", 0, 0); in chcr_alloc_shash()
299 base_hash = crypto_alloc_shash("sha384", 0, 0); in chcr_alloc_shash()
302 base_hash = crypto_alloc_shash("sha512", 0, 0); in chcr_alloc_shash()
358 for (i = 0; i < (ds / sizeof(u64)); i++) in chcr_change_order()
362 for (i = 0; i < (ds / sizeof(u32)); i++) in chcr_change_order()
376 return 0; in is_hmac()
383 walk->nents = 0; in dsgl_walk_init()
395 | CPL_RX_PHYS_DSGL_ISRDMA_V(0)); in dsgl_walk_end()
397 htonl(CPL_RX_PHYS_DSGL_PCIRLXORDER_V(0) | in dsgl_walk_end()
398 CPL_RX_PHYS_DSGL_PCINOSNOOP_V(0) | in dsgl_walk_end()
399 CPL_RX_PHYS_DSGL_PCITPHNTENB_V(0) | in dsgl_walk_end()
400 CPL_RX_PHYS_DSGL_PCITPHNT_V(0) | in dsgl_walk_end()
401 CPL_RX_PHYS_DSGL_DCAID_V(0) | in dsgl_walk_end()
405 phys_cpl->rss_hdr_int.hash_val = 0; in dsgl_walk_end()
421 if ((j % 8) == 0) in dsgl_walk_add_page()
431 int skip_len = 0; in dsgl_walk_add_sg()
432 unsigned int left_size = slen, len = 0; in dsgl_walk_add_sg()
441 skip_len = 0; in dsgl_walk_add_sg()
445 skip = 0; in dsgl_walk_add_sg()
451 offset = 0; in dsgl_walk_add_sg()
460 if ((j % 8) == 0) in dsgl_walk_add_sg()
467 skip_len = 0; in dsgl_walk_add_sg()
477 walk->nents = 0; in ulptx_walk_init()
478 walk->pair_idx = 0; in ulptx_walk_init()
481 walk->last_sg_len = 0; in ulptx_walk_init()
498 if (walk->nents == 0) { in ulptx_walk_add_page()
517 int skip_len = 0; in ulptx_walk_add_sg()
525 skip_len = 0; in ulptx_walk_add_sg()
529 skip = 0; in ulptx_walk_add_sg()
533 if (sg && (walk->nents == 0)) { in ulptx_walk_add_sg()
545 skip_len = 0; in ulptx_walk_add_sg()
565 skip_len = 0; in ulptx_walk_add_sg()
585 int ret = 0; in cxgb4_is_crypto_q_full()
609 return 0; in generate_copy_rrkey()
617 int srclen = 0; in chcr_hash_ent_in_wr()
619 int soffset = 0, sless; in chcr_hash_ent_in_wr()
623 srcskip = 0; in chcr_hash_ent_in_wr()
633 soffset = 0; in chcr_hash_ent_in_wr()
634 srcskip = 0; in chcr_hash_ent_in_wr()
647 int srclen = 0, dstlen = 0; in chcr_sg_ent_in_wr()
649 int offset = 0, soffset = 0, less, sless = 0; in chcr_sg_ent_in_wr()
653 srcskip = 0; in chcr_sg_ent_in_wr()
657 dstskip = 0; in chcr_sg_ent_in_wr()
666 offset = 0; in chcr_sg_ent_in_wr()
677 offset = 0; in chcr_sg_ent_in_wr()
680 dstskip = 0; in chcr_sg_ent_in_wr()
685 srcskip = 0; in chcr_sg_ent_in_wr()
686 soffset = 0; in chcr_sg_ent_in_wr()
718 int ret = 0; in get_qidxs()
770 unsigned int txqidx = 0, rxqidx = 0; in create_wreq()
775 fid = u_ctx->lldi.rxq_ids[0]; in create_wreq()
815 unsigned int temp = 0, transhdr_len, dst_size; in create_cipher_wr()
846 FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, IV + 1, 0); in create_cipher_wr()
849 FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0); in create_cipher_wr()
850 chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, 0, in create_cipher_wr()
852 0, 0, IV >> 1); in create_cipher_wr()
853 chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 0, in create_cipher_wr()
854 0, 1, dst_size); in create_cipher_wr()
885 + (reqctx->imm ? (wrparam->bytes) : 0); in create_cipher_wr()
886 create_wreq(c_ctx(tfm), chcr_req, &(wrparam->req->base), reqctx->imm, 0, in create_cipher_wr()
904 int ck_size = 0; in chcr_keyctx_ck_size()
913 ck_size = 0; in chcr_keyctx_ck_size()
936 u16 alignment = 0; in chcr_aes_cbc_setkey()
944 alignment = ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192 ? 8 : 0; in chcr_aes_cbc_setkey()
952 0, 0, context_size); in chcr_aes_cbc_setkey()
954 return 0; in chcr_aes_cbc_setkey()
956 ablkctx->enckey_len = 0; in chcr_aes_cbc_setkey()
967 u16 alignment = 0; in chcr_aes_ctr_setkey()
974 alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0; in chcr_aes_ctr_setkey()
981 0, 0, context_size); in chcr_aes_ctr_setkey()
984 return 0; in chcr_aes_ctr_setkey()
986 ablkctx->enckey_len = 0; in chcr_aes_ctr_setkey()
997 u16 alignment = 0; in chcr_aes_rfc3686_setkey()
1011 alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0; in chcr_aes_rfc3686_setkey()
1018 0, 0, context_size); in chcr_aes_rfc3686_setkey()
1021 return 0; in chcr_aes_rfc3686_setkey()
1023 ablkctx->enckey_len = 0; in chcr_aes_rfc3686_setkey()
1086 for (i = 0; i < round8; i++) in chcr_update_tweak()
1089 for (i = 0; i < (round % 8); i++) in chcr_update_tweak()
1096 return 0; in chcr_update_tweak()
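
The chcr_update_tweak() matches above step the XTS tweak forward by whole blocks. A minimal sketch of the underlying per-block operation, multiplying the 128-bit little-endian tweak by x in GF(2^128) (akin to what the kernel's gf128mul_x_ble() provides), assuming t[0] holds the low 64 bits:

#include <stdint.h>

static void xts_tweak_mul_x(uint64_t t[2])
{
	uint64_t carry = t[1] >> 63;	/* bit shifted out of the high half */

	t[1] = (t[1] << 1) | (t[0] >> 63);
	t[0] = (t[0] << 1) ^ (carry ? 0x87 : 0);	/* reduce mod x^128 + x^7 + x^2 + x + 1 */
}
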
1105 int ret = 0; in chcr_update_cipher_iv()
1115 ret = chcr_update_tweak(req, iv, 0); in chcr_update_cipher_iv()
1139 int ret = 0; in chcr_final_cipher_iv()
1185 bytes = chcr_sg_ent_in_wr(reqctx->srcsg, reqctx->dstsg, 0, in chcr_handle_cipher_resp()
1200 if (unlikely(bytes == 0)) { in chcr_handle_cipher_resp()
1222 skb->dev = u_ctx->lldi.ports[0]; in chcr_handle_cipher_resp()
1232 return 0; in chcr_handle_cipher_resp()
1260 reqctx->processed = 0; in process_cipher()
1261 reqctx->partial_req = 0; in process_cipher()
1265 if ((ablkctx->enckey_len == 0) || (ivsize > AES_BLOCK_SIZE) || in process_cipher()
1266 (req->cryptlen == 0) || in process_cipher()
1268 if (req->cryptlen == 0 && subtype != CRYPTO_ALG_SUB_TYPE_XTS) in process_cipher()
1287 unsigned int dnents = 0, transhdr_len, phys_dsgl, kctx_len; in process_cipher()
1290 CHCR_DST_SG_SIZE, 0); in process_cipher()
1299 reqctx->imm = 0; in process_cipher()
1303 bytes = chcr_sg_ent_in_wr(req->src, req->dst, 0, in process_cipher()
1305 0, 0); in process_cipher()
1331 if (unlikely(bytes == 0)) { in process_cipher()
1345 reqctx->src_ofst = 0; in process_cipher()
1346 reqctx->dst_ofst = 0; in process_cipher()
1359 return 0; in process_cipher()
1385 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_encrypt()
1396 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_encrypt()
1431 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_decrypt()
1439 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_decrypt()
1448 int err = 0, rxq_perchan; in chcr_device_init()
1476 ablkctx->sw_cipher = crypto_alloc_skcipher(alg->base.cra_name, 0, in chcr_init_tfm()
1498 ablkctx->sw_cipher = crypto_alloc_skcipher("ctr(aes)", 0, in chcr_rfc3686_init()
1551 return 0; in get_alg_config()
1574 unsigned int nents = 0, transhdr_len; in create_hash_wr()
1575 unsigned int temp = 0; in create_hash_wr()
1579 int error = 0; in create_hash_wr()
1587 nents += param->bfr_len ? 1 : 0; in create_hash_wr()
1598 FILL_SEC_CPL_OP_IVINSR(rx_channel_id, 2, 0); in create_hash_wr()
1603 FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, 0, 0); in create_hash_wr()
1605 FILL_SEC_CPL_AUTHINSERT(0, 1, 0, 0); in create_hash_wr()
1607 FILL_SEC_CPL_SCMD0_SEQNO(0, 0, 0, param->alg_prm.auth_mode, in create_hash_wr()
1608 param->opad_needed, 0); in create_hash_wr()
1611 FILL_SEC_CPL_IVGEN_HDRLEN(param->last, param->more, 0, 1, 0, 0); in create_hash_wr()
1623 param->alg_prm.mk_size, 0, in create_hash_wr()
1630 if (param->bfr_len != 0) { in create_hash_wr()
1641 req_ctx->hctx_wr.dma_addr = 0; in create_hash_wr()
1646 (param->sg_len + param->bfr_len) : 0); in create_hash_wr()
1650 temp, 0); in create_hash_wr()
1666 u8 remainder = 0, bs; in chcr_ahash_update()
1684 + req_ctx->reqlen, nbytes, 0); in chcr_ahash_update()
1686 return 0; in chcr_ahash_update()
1694 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_update()
1710 HASH_SPACE_LEFT(params.kctx_len), 0); in chcr_ahash_update()
1715 params.opad_needed = 0; in chcr_ahash_update()
1717 params.last = 0; in chcr_ahash_update()
1719 params.scmd1 = 0; in chcr_ahash_update()
1739 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_update()
1752 memset(bfr_ptr, 0, bs); in create_last_hash_block()
1753 *bfr_ptr = 0x80; in create_last_hash_block()
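
The two create_last_hash_block() lines above begin standard Merkle-Damgård padding. A hedged sketch of the complete final block, assuming the usual SHA padding rule (0x80 terminator, zero fill, big-endian bit length in the last eight bytes):

#include <stdint.h>
#include <string.h>

static void build_last_hash_block(uint8_t *blk, unsigned int bs, uint64_t msg_bytes)
{
	uint64_t bits = msg_bytes << 3;
	int i;

	memset(blk, 0, bs);
	blk[0] = 0x80;				/* the mandatory 1 bit after the message */
	for (i = 0; i < 8; i++)
		blk[bs - 1 - i] = (uint8_t)(bits >> (8 * i));	/* big-endian bit count */
}
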
1786 params.opad_needed = 0; in chcr_ahash_final()
1787 params.sg_len = 0; in chcr_ahash_final()
1795 params.opad_needed = 0; in chcr_ahash_final()
1802 if (req_ctx->reqlen == 0) { in chcr_ahash_final()
1804 params.last = 0; in chcr_ahash_final()
1806 params.scmd1 = 0; in chcr_ahash_final()
1812 params.more = 0; in chcr_ahash_final()
1820 req_ctx->reqlen = 0; in chcr_ahash_final()
1821 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_final()
1853 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_finup()
1872 params.opad_needed = 0; in chcr_ahash_finup()
1876 HASH_SPACE_LEFT(params.kctx_len), 0); in chcr_ahash_finup()
1880 params.opad_needed = 0; in chcr_ahash_finup()
1882 params.last = 0; in chcr_ahash_finup()
1887 params.scmd1 = 0; in chcr_ahash_finup()
1890 params.more = 0; in chcr_ahash_finup()
1900 if ((req_ctx->reqlen + req->nbytes) == 0) { in chcr_ahash_finup()
1902 params.last = 0; in chcr_ahash_finup()
1904 params.scmd1 = 0; in chcr_ahash_finup()
1912 req_ctx->reqlen = 0; in chcr_ahash_finup()
1914 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_finup()
1949 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_digest()
1969 params.opad_needed = 0; in chcr_ahash_digest()
1972 HASH_SPACE_LEFT(params.kctx_len), 0); in chcr_ahash_digest()
1976 params.opad_needed = 0; in chcr_ahash_digest()
1978 params.last = 0; in chcr_ahash_digest()
1980 params.scmd1 = 0; in chcr_ahash_digest()
1987 params.more = 0; in chcr_ahash_digest()
1991 params.bfr_len = 0; in chcr_ahash_digest()
1996 if (req->nbytes == 0) { in chcr_ahash_digest()
2008 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_digest()
2044 params.opad_needed = 0; in chcr_ahash_continue()
2046 params.sg_len = chcr_hash_ent_in_wr(hctx_wr->srcsg, 0, in chcr_ahash_continue()
2055 params.opad_needed = 0; in chcr_ahash_continue()
2057 params.last = 0; in chcr_ahash_continue()
2061 params.scmd1 = 0; in chcr_ahash_continue()
2064 params.more = 0; in chcr_ahash_continue()
2068 params.bfr_len = 0; in chcr_ahash_continue()
2076 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_continue()
2079 return 0; in chcr_ahash_continue()
2107 hctx_wr->dma_addr = 0; in chcr_handle_ahash_resp()
2112 hctx_wr->result = 0; in chcr_handle_ahash_resp()
2177 return 0; in chcr_ahash_export()
2193 return 0; in chcr_ahash_import()
2202 unsigned int i, err = 0, updated_digestsize; in chcr_ahash_setkey()
2220 memset(hmacctx->ipad + keylen, 0, bs - keylen); in chcr_ahash_setkey()
2223 for (i = 0; i < bs / sizeof(int); i++) { in chcr_ahash_setkey()
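
In chcr_ahash_setkey() the zero-padded key is then XORed into the ipad/opad blocks. A rough sketch of that HMAC pad preparation (0x36 and 0x5c are the standard HMAC constants; the helper name is illustrative, not the driver's):

#include <stdint.h>
#include <string.h>

static void hmac_make_pads(uint8_t *ipad, uint8_t *opad,
			   const uint8_t *key, unsigned int keylen,
			   unsigned int bs)
{
	unsigned int i;

	memcpy(ipad, key, keylen);
	memset(ipad + keylen, 0, bs - keylen);	/* zero-pad the key to the block size */
	memcpy(opad, ipad, bs);

	for (i = 0; i < bs; i++) {
		ipad[i] ^= 0x36;	/* inner pad */
		opad[i] ^= 0x5c;	/* outer pad */
	}
}
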
2252 unsigned short context_size = 0; in chcr_aes_xts_setkey()
2270 memset(ablkctx->key + 24, 0, 8); in chcr_aes_xts_setkey()
2271 memset(ablkctx->key + 56, 0, 8); in chcr_aes_xts_setkey()
2276 0, context_size); in chcr_aes_xts_setkey()
2283 0, context_size); in chcr_aes_xts_setkey()
2286 return 0; in chcr_aes_xts_setkey()
2288 ablkctx->enckey_len = 0; in chcr_aes_xts_setkey()
2299 req_ctx->data_len = 0; in chcr_sha_init()
2300 req_ctx->reqlen = 0; in chcr_sha_init()
2305 return 0; in chcr_sha_init()
2336 return 0; in chcr_hmac_init()
2383 if (aeadctx->enckey_len == 0) in chcr_aead_common_init()
2399 return 0; in chcr_aead_common_init()
2410 if (((req->cryptlen - (op_type ? authsize : 0)) == 0) || in chcr_aead_need_fallback()
2415 return 0; in chcr_aead_need_fallback()
2448 unsigned int dst_size = 0, temp, subtype = get_aead_subtype(tfm); in create_authenc_wr()
2449 unsigned int kctx_len = 0, dnents, snents; in create_authenc_wr()
2453 int null = 0; in create_authenc_wr()
2459 if (req->cryptlen == 0) in create_authenc_wr()
2462 reqctx->b0_len = 0; in create_authenc_wr()
2472 (reqctx->op ? -authsize : authsize), CHCR_DST_SG_SIZE, 0); in create_authenc_wr()
2475 CHCR_SRC_SG_SIZE, 0); in create_authenc_wr()
2501 temp = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize; in create_authenc_wr()
2512 null ? 0 : 1 + IV, in create_authenc_wr()
2513 null ? 0 : IV + req->assoclen, in create_authenc_wr()
2515 (temp & 0x1F0) >> 4); in create_authenc_wr()
2517 temp & 0xF, in create_authenc_wr()
2518 null ? 0 : req->assoclen + IV + 1, in create_authenc_wr()
2526 (reqctx->op == CHCR_ENCRYPT_OP) ? 1 : 0, in create_authenc_wr()
2530 chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, in create_authenc_wr()
2531 0, 0, dst_size); in create_authenc_wr()
2562 kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0); in create_authenc_wr()
2564 transhdr_len, temp, 0); in create_authenc_wr()
2589 0 : authsize); in chcr_aead_dma_map()
2598 return 0; in chcr_aead_dma_map()
2606 reqctx->b0_dma = 0; in chcr_aead_dma_map()
2630 return 0; in chcr_aead_dma_map()
2650 0 : authsize); in chcr_aead_dma_unmap()
2691 buf, req->cryptlen + req->assoclen, 0); in chcr_add_aead_src_ent()
2698 req->assoclen, 0); in chcr_add_aead_src_ent()
2719 dsgl_walk_add_sg(&dsgl_walk, req->dst, temp, 0); in chcr_add_aead_dst_ent()
2782 param->sg_len, 0); in chcr_add_hash_src_ent()
2800 int error = 0; in chcr_hash_dma_map()
2803 return 0; in chcr_hash_dma_map()
2809 return 0; in chcr_hash_dma_map()
2822 req_ctx->hctx_wr.is_sg_map = 0; in chcr_hash_dma_unmap()
2850 return 0; in chcr_cipher_dma_map()
2873 memset(block, 0, csize); in set_msg_len()
2884 return 0; in set_msg_len()
2900 lp = b0[0]; in generate_b0()
2919 if (iv[0] < 1 || iv[0] > 7) in crypto_ccm_check_iv()
2922 return 0; in crypto_ccm_check_iv()
2934 int rc = 0; in ccm_format_packet()
2937 ivptr[0] = 3; in ccm_format_packet()
2938 memcpy(ivptr + 1, &aeadctx->salt[0], 3); in ccm_format_packet()
2940 memset(ivptr + 12, 0, 4); in ccm_format_packet()
2949 memset(ivptr + 15 - ivptr[0], 0, ivptr[0] + 1); in ccm_format_packet()
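
The ccm_format_packet() lines above lay out the 16-byte counter block for rfc4309(ccm(aes)): flags byte 3 (L' = L - 1 with a 4-byte length field), the 3-byte salt carried with the key, the 8-byte per-request IV, and a zeroed block counter; line 2949 then zeroes the trailing ivptr[0] + 1 bytes so the block starts at counter 0. A hedged sketch of the rfc4309 layout only (helper name is hypothetical):

#include <stdint.h>
#include <string.h>

static void build_rfc4309_ctr0(uint8_t ctr0[16],
			       const uint8_t salt[3], const uint8_t iv[8])
{
	ctr0[0] = 3;			/* flags: L' = L - 1, 4-byte length field */
	memcpy(ctr0 + 1, salt, 3);	/* implicit salt from the key */
	memcpy(ctr0 + 4, iv, 8);	/* per-request IV */
	memset(ctr0 + 12, 0, 4);	/* 32-bit block counter starts at 0 */
}
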
2966 unsigned int tag_offset = 0, auth_offset = 0; in fill_sec_cpl_for_aead()
2974 ((assoclen) ? CCM_AAD_FIELD_SIZE : 0); in fill_sec_cpl_for_aead()
2977 (req->assoclen + IV + 1 + ccm_xtra) : 0; in fill_sec_cpl_for_aead()
2982 auth_offset = 0; in fill_sec_cpl_for_aead()
2991 req->assoclen + IV + 1 + ccm_xtra, 0); in fill_sec_cpl_for_aead()
2993 sec_cpl->cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(0, in fill_sec_cpl_for_aead()
2995 (op_type == CHCR_ENCRYPT_OP) ? 0 : in fill_sec_cpl_for_aead()
2998 (op_type == CHCR_ENCRYPT_OP) ? 0 : 1, in fill_sec_cpl_for_aead()
3002 sec_cpl->ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, 0, in fill_sec_cpl_for_aead()
3003 0, dst_size); in fill_sec_cpl_for_aead()
3023 return 0; in aead_ccm_validate_input()
3038 unsigned int dst_size = 0, kctx_len, dnents, temp, snents; in create_aead_ccm_wr()
3050 reqctx->b0_len = CCM_B0_SIZE + (assoclen ? CCM_AAD_FIELD_SIZE : 0); in create_aead_ccm_wr()
3060 CHCR_DST_SG_SIZE, 0); in create_aead_ccm_wr()
3064 CHCR_SRC_SG_SIZE, 0); in create_aead_ccm_wr()
3110 reqctx->b0_len) : 0); in create_aead_ccm_wr()
3111 create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, 0, in create_aead_ccm_wr()
3112 transhdr_len, temp, 0); in create_aead_ccm_wr()
3135 unsigned int transhdr_len, dnents = 0, snents; in create_gcm_wr()
3136 unsigned int dst_size = 0, temp = 0, kctx_len, assoclen = req->assoclen; in create_gcm_wr()
3148 reqctx->b0_len = 0; in create_gcm_wr()
3154 CHCR_DST_SG_SIZE, 0); in create_gcm_wr()
3156 CHCR_SRC_SG_SIZE, 0); in create_gcm_wr()
3183 temp = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize; in create_gcm_wr()
3189 assoclen ? 1 + IV : 0, in create_gcm_wr()
3190 assoclen ? IV + assoclen : 0, in create_gcm_wr()
3191 req->assoclen + IV + 1, 0); in create_gcm_wr()
3193 FILL_SEC_CPL_AUTHINSERT(0, req->assoclen + IV + 1, in create_gcm_wr()
3197 CHCR_ENCRYPT_OP) ? 1 : 0, in create_gcm_wr()
3201 chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, in create_gcm_wr()
3202 0, 0, dst_size); in create_gcm_wr()
3211 /* S A L T | IV | 0x00000001 */ in create_gcm_wr()
3219 put_unaligned_be32(0x01, &ivptr[12]); in create_gcm_wr()
3226 kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0); in create_gcm_wr()
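
The "S A L T | IV | 0x00000001" comment and the put_unaligned_be32(0x01, &ivptr[12]) match in create_gcm_wr() above describe the rfc4106(gcm(aes)) initial counter block. A minimal sketch of that layout, assuming a 4-byte salt kept with the key and an 8-byte per-request IV:

#include <stdint.h>
#include <string.h>

static void build_gcm_ctr(uint8_t ctr[16],
			  const uint8_t salt[4], const uint8_t iv[8])
{
	memcpy(ctr, salt, 4);		/* S A L T                       */
	memcpy(ctr + 4, iv, 8);		/* IV                            */
	ctr[12] = 0;
	ctr[13] = 0;
	ctr[14] = 0;
	ctr[15] = 1;			/* 32-bit big-endian counter = 1 */
}
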
3244 aeadctx->sw_cipher = crypto_alloc_aead(alg->base.cra_name, 0, in chcr_aead_cra_init()
3416 int key_ctx_size = 0; in chcr_ccm_common_setkey()
3429 aeadctx->enckey_len = 0; in chcr_ccm_common_setkey()
3432 aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, mk_size, 0, 0, in chcr_ccm_common_setkey()
3437 return 0; in chcr_ccm_common_setkey()
3463 aeadctx->enckey_len = 0; in chcr_aead_rfc4309_setkey()
3483 int ret = 0, key_ctx_size = 0; in chcr_gcm_setkey()
3486 aeadctx->enckey_len = 0; in chcr_gcm_setkey()
3517 0, 0, in chcr_gcm_setkey()
3519 /* Calculate the H = CIPH(K, 0 repeated 16 times). in chcr_gcm_setkey()
3524 aeadctx->enckey_len = 0; in chcr_gcm_setkey()
3527 memset(gctx->ghash_h, 0, AEAD_H_SIZE); in chcr_gcm_setkey()
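
chcr_gcm_setkey() derives the GHASH subkey described by the comment at line 3519: H = CIPH_K(0^128), the raw AES encryption of sixteen zero bytes, with H zeroed on error as line 3527 shows. A hedged sketch; aes_encrypt_block() is a stand-in for a single-block AES primitive, not a real driver or kernel helper:

#include <stdint.h>
#include <string.h>

/* assumed single-block AES primitive, declared only for illustration */
void aes_encrypt_block(const uint8_t *key, unsigned int keylen,
		       const uint8_t in[16], uint8_t out[16]);

static void derive_ghash_h(const uint8_t *key, unsigned int keylen, uint8_t h[16])
{
	uint8_t zeroes[16];

	memset(zeroes, 0, sizeof(zeroes));	/* "0 repeated 16 times" */
	aes_encrypt_block(key, keylen, zeroes, h);
}
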
3544 int err = 0, i, key_ctx_len = 0; in chcr_authenc_setkey()
3545 unsigned char ck_size = 0; in chcr_authenc_setkey()
3546 unsigned char pad[CHCR_HASH_MAX_BLOCK_SIZE_128] = { 0 }; in chcr_authenc_setkey()
3559 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) in chcr_authenc_setkey()
3624 memset(pad + keys.authkeylen, 0, bs - keys.authkeylen); in chcr_authenc_setkey()
3626 for (i = 0; i < bs >> 2; i++) in chcr_authenc_setkey()
3633 memset(pad + keys.authkeylen, 0, bs - keys.authkeylen); in chcr_authenc_setkey()
3635 for (i = 0; i < bs >> 2; i++) in chcr_authenc_setkey()
3648 0, 1, key_ctx_len >> 4); in chcr_authenc_setkey()
3653 return 0; in chcr_authenc_setkey()
3656 aeadctx->enckey_len = 0; in chcr_authenc_setkey()
3672 int key_ctx_len = 0; in chcr_aead_digest_null_setkey()
3673 unsigned char ck_size = 0; in chcr_aead_digest_null_setkey()
3682 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) in chcr_aead_digest_null_setkey()
3713 aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY, 0, in chcr_aead_digest_null_setkey()
3714 0, key_ctx_len >> 4); in chcr_aead_digest_null_setkey()
3717 return 0; in chcr_aead_digest_null_setkey()
3719 aeadctx->enckey_len = 0; in chcr_aead_digest_null_setkey()
3748 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aead_op()
3756 crypto_ipsec_check_assoclen(req->assoclen) != 0) { in chcr_aead_op()
3770 skb->dev = u_ctx->lldi.ports[0]; in chcr_aead_op()
3796 return chcr_aead_op(req, 0, create_authenc_wr); in chcr_aead_encrypt()
3799 return chcr_aead_op(req, 0, create_aead_ccm_wr); in chcr_aead_encrypt()
3801 return chcr_aead_op(req, 0, create_gcm_wr); in chcr_aead_encrypt()
3823 size = 0; in chcr_aead_decrypt()
3845 .is_registered = 0,
3863 .is_registered = 0,
3881 .is_registered = 0,
3900 .is_registered = 0,
3919 .is_registered = 0,
3931 .is_registered = 0,
3943 .is_registered = 0,
3955 .is_registered = 0,
3967 .is_registered = 0,
3980 .is_registered = 0,
3992 .is_registered = 0,
4004 .is_registered = 0,
4016 .is_registered = 0,
4028 .is_registered = 0,
4041 .is_registered = 0,
4060 .is_registered = 0,
4080 .is_registered = 0,
4099 .is_registered = 0,
4118 .is_registered = 0,
4139 .is_registered = 0,
4161 .is_registered = 0,
4181 .is_registered = 0,
4202 .is_registered = 0,
4223 .is_registered = 0,
4237 .maxauthsize = 0,
4244 .is_registered = 0,
4265 .is_registered = 0,
4287 .is_registered = 0,
4307 .is_registered = 0,
4328 .is_registered = 0,
4349 .is_registered = 0,
4363 .maxauthsize = 0,
4378 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { in chcr_unregister_alg()
4386 driver_algs[i].is_registered = 0; in chcr_unregister_alg()
4394 driver_algs[i].is_registered = 0; in chcr_unregister_alg()
4403 driver_algs[i].is_registered = 0; in chcr_unregister_alg()
4408 return 0; in chcr_unregister_alg()
4422 int err = 0, i; in chcr_register_alg()
4425 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { in chcr_register_alg()
4440 driver_algs[i].alg.skcipher.base.cra_alignmask = 0; in chcr_register_alg()
4470 a_hash->halg.base.cra_alignmask = 0; in chcr_register_alg()
4496 return 0; in chcr_register_alg()
4521 return 0; in stop_crypto()