Lines matching "+full:mem +full:- +full:type" in crypto/api.c (Linux kernel crypto API core)
1 // SPDX-License-Identifier: GPL-2.0-or-later
43 return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL; in crypto_mod_get()
49 struct module *module = alg->cra_module; in crypto_mod_put()
56 static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, in __crypto_alg_lookup()
60 int best = -2; in __crypto_alg_lookup()
68 if ((q->cra_flags ^ type) & mask) in __crypto_alg_lookup()
73 ((struct crypto_larval *)q)->mask != mask) in __crypto_alg_lookup()
76 exact = !strcmp(q->cra_driver_name, name); in __crypto_alg_lookup()
77 fuzzy = !strcmp(q->cra_name, name); in __crypto_alg_lookup()
78 if (!exact && !(fuzzy && q->cra_priority > best)) in __crypto_alg_lookup()
84 best = q->cra_priority; in __crypto_alg_lookup()
101 if (!IS_ERR_OR_NULL(larval->adult)) in crypto_larval_destroy()
102 crypto_mod_put(larval->adult); in crypto_larval_destroy()
106 struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask) in crypto_larval_alloc()
112 return ERR_PTR(-ENOMEM); in crypto_larval_alloc()
114 larval->mask = mask; in crypto_larval_alloc()
115 larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type; in crypto_larval_alloc()
116 larval->alg.cra_priority = -1; in crypto_larval_alloc()
117 larval->alg.cra_destroy = crypto_larval_destroy; in crypto_larval_alloc()
119 strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME); in crypto_larval_alloc()
120 init_completion(&larval->completion); in crypto_larval_alloc()
126 static struct crypto_alg *crypto_larval_add(const char *name, u32 type, in crypto_larval_add()
132 larval = crypto_larval_alloc(name, type, mask); in crypto_larval_add()
136 refcount_set(&larval->alg.cra_refcnt, 2); in crypto_larval_add()
139 alg = __crypto_alg_lookup(name, type, mask); in crypto_larval_add()
141 alg = &larval->alg; in crypto_larval_add()
142 list_add(&alg->cra_list, &crypto_alg_list); in crypto_larval_add()
146 if (alg != &larval->alg) { in crypto_larval_add()
160 list_del(&alg->cra_list); in crypto_larval_kill()
162 complete_all(&larval->completion); in crypto_larval_kill()
171 err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult); in crypto_wait_for_test()
175 err = wait_for_completion_killable(&larval->completion); in crypto_wait_for_test()
178 crypto_larval_kill(&larval->alg); in crypto_wait_for_test()
187 if (larval->test_started) in crypto_start_test()
191 if (larval->test_started) { in crypto_start_test()
196 larval->test_started = true; in crypto_start_test()
211 &larval->completion, 60 * HZ); in crypto_larval_wait()
213 alg = larval->adult; in crypto_larval_wait()
215 alg = ERR_PTR(-EINTR); in crypto_larval_wait()
217 alg = ERR_PTR(-ETIMEDOUT); in crypto_larval_wait()
219 alg = ERR_PTR(-ENOENT); in crypto_larval_wait()
223 !(alg->cra_flags & CRYPTO_ALG_TESTED)) in crypto_larval_wait()
224 alg = ERR_PTR(-EAGAIN); in crypto_larval_wait()
225 else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL) in crypto_larval_wait()
226 alg = ERR_PTR(-EAGAIN); in crypto_larval_wait()
228 alg = ERR_PTR(-EAGAIN); in crypto_larval_wait()
229 crypto_mod_put(&larval->alg); in crypto_larval_wait()
234 static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, in crypto_alg_lookup()
241 if (!((type | mask) & CRYPTO_ALG_TESTED)) in crypto_alg_lookup()
245 alg = __crypto_alg_lookup(name, (type | test) & ~fips, in crypto_alg_lookup()
248 if (((type | mask) ^ fips) & fips) in crypto_alg_lookup()
253 ((type ^ alg->cra_flags) & mask)) { in crypto_alg_lookup()
256 alg = ERR_PTR(-ENOENT); in crypto_alg_lookup()
259 alg = __crypto_alg_lookup(name, type, mask); in crypto_alg_lookup()
263 alg = ERR_PTR(-ELIBBAD); in crypto_alg_lookup()
271 static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, in crypto_larval_lookup()
277 return ERR_PTR(-ENOENT); in crypto_larval_lookup()
279 type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD); in crypto_larval_lookup()
282 alg = crypto_alg_lookup(name, type, mask); in crypto_larval_lookup()
284 request_module("crypto-%s", name); in crypto_larval_lookup()
286 if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask & in crypto_larval_lookup()
288 request_module("crypto-%s-all", name); in crypto_larval_lookup()
290 alg = crypto_alg_lookup(name, type, mask); in crypto_larval_lookup()
296 alg = crypto_larval_add(name, type, mask); in crypto_larval_lookup()
315 struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask) in crypto_alg_mod_lookup()
325 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and in crypto_alg_mod_lookup()
328 if (!((type | mask) & CRYPTO_ALG_INTERNAL)) in crypto_alg_mod_lookup()
331 larval = crypto_larval_lookup(name, type, mask); in crypto_alg_mod_lookup()
341 alg = ERR_PTR(-ENOENT); in crypto_alg_mod_lookup()
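
A short illustration of the CRYPTO_ALG_INTERNAL convention described in the comment fragment above (a sketch only; the calls are illustrative and not taken from this file): when neither type nor mask mentions the flag, the lookup masks internal-only implementations out; a caller that can accept either kind sets the flag in type but leaves it out of mask.

	struct crypto_alg *alg;

	/* Default callers never mention the flag, so internal-only implementations are filtered out. */
	alg = crypto_alg_mod_lookup("aes", 0, 0);

	/* Accept both internal and regular implementations: flag set in type, not in mask. */
	alg = crypto_alg_mod_lookup("aes", CRYPTO_ALG_INTERNAL, 0);
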
350 const struct crypto_type *type = tfm->__crt_alg->cra_type; in crypto_exit_ops()
352 if (type && tfm->exit) in crypto_exit_ops()
353 tfm->exit(tfm); in crypto_exit_ops()
356 static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask) in crypto_ctxsize()
358 const struct crypto_type *type_obj = alg->cra_type; in crypto_ctxsize()
361 len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1); in crypto_ctxsize()
363 return len + type_obj->ctxsize(alg, type, mask); in crypto_ctxsize()
365 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { in crypto_ctxsize()
384 alg->cra_flags |= CRYPTO_ALG_DYING; in crypto_shoot_alg()
389 struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type, in __crypto_alloc_tfmgfp()
394 int err = -ENOMEM; in __crypto_alloc_tfmgfp()
396 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask); in __crypto_alloc_tfmgfp()
401 tfm->__crt_alg = alg; in __crypto_alloc_tfmgfp()
402 refcount_set(&tfm->refcnt, 1); in __crypto_alloc_tfmgfp()
404 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm))) in __crypto_alloc_tfmgfp()
411 if (err == -EAGAIN) in __crypto_alloc_tfmgfp()
421 struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, in __crypto_alloc_tfm()
424 return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL); in __crypto_alloc_tfm()
429 * crypto_alloc_base - Locate algorithm and allocate transform
431 * @type: Type of algorithm
432 * @mask: Mask for type comparison
444 * The returned transform is of a non-determinate type. Most people
450 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask) in crypto_alloc_base()
458 alg = crypto_alg_mod_lookup(alg_name, type, mask); in crypto_alloc_base()
464 tfm = __crypto_alloc_tfm(alg, type, mask); in crypto_alloc_base()
472 if (err != -EAGAIN) in crypto_alloc_base()
475 err = -EINTR; in crypto_alloc_base()
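
A minimal usage sketch (not part of this file): allocate a bare transform by name and release it again. The algorithm name "sha256" is only an example, and, as the kernel-doc fragment above notes, most callers use a type-specific allocator such as crypto_alloc_shash() rather than crypto_alloc_base().

	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("sha256", 0, 0);	/* "sha256" is just an example name */
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... drive the transform through a type-specific API ... */

	crypto_free_tfm(tfm);	/* inline wrapper around crypto_destroy_tfm(), see below */
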
491 char *mem; in crypto_alloc_tfmmem()
493 tfmsize = frontend->tfmsize; in crypto_alloc_tfmmem()
494 total = tfmsize + sizeof(*tfm) + frontend->extsize(alg); in crypto_alloc_tfmmem()
496 mem = kzalloc_node(total, gfp, node); in crypto_alloc_tfmmem()
497 if (mem == NULL) in crypto_alloc_tfmmem()
498 return ERR_PTR(-ENOMEM); in crypto_alloc_tfmmem()
500 tfm = (struct crypto_tfm *)(mem + tfmsize); in crypto_alloc_tfmmem()
501 tfm->__crt_alg = alg; in crypto_alloc_tfmmem()
502 tfm->node = node; in crypto_alloc_tfmmem()
503 refcount_set(&tfm->refcnt, 1); in crypto_alloc_tfmmem()
505 return mem; in crypto_alloc_tfmmem()
513 char *mem; in crypto_create_tfm_node()
516 mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL); in crypto_create_tfm_node()
517 if (IS_ERR(mem)) in crypto_create_tfm_node()
520 tfm = (struct crypto_tfm *)(mem + frontend->tfmsize); in crypto_create_tfm_node()
522 err = frontend->init_tfm(tfm); in crypto_create_tfm_node()
526 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm))) in crypto_create_tfm_node()
534 if (err == -EAGAIN) in crypto_create_tfm_node()
536 kfree(mem); in crypto_create_tfm_node()
537 mem = ERR_PTR(err); in crypto_create_tfm_node()
539 return mem; in crypto_create_tfm_node()
546 struct crypto_alg *alg = otfm->__crt_alg; in crypto_clone_tfm()
548 char *mem; in crypto_clone_tfm()
550 mem = ERR_PTR(-ESTALE); in crypto_clone_tfm()
554 mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC); in crypto_clone_tfm()
555 if (IS_ERR(mem)) { in crypto_clone_tfm()
560 tfm = (struct crypto_tfm *)(mem + frontend->tfmsize); in crypto_clone_tfm()
561 tfm->crt_flags = otfm->crt_flags; in crypto_clone_tfm()
562 tfm->exit = otfm->exit; in crypto_clone_tfm()
565 return mem; in crypto_clone_tfm()
571 u32 type, u32 mask) in crypto_find_alg()
574 type &= frontend->maskclear; in crypto_find_alg()
575 mask &= frontend->maskclear; in crypto_find_alg()
576 type |= frontend->type; in crypto_find_alg()
577 mask |= frontend->maskset; in crypto_find_alg()
580 return crypto_alg_mod_lookup(alg_name, type, mask); in crypto_find_alg()
585 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
587 * @frontend: Frontend algorithm type
588 * @type: Type of algorithm
589 * @mask: Mask for type comparison
600 * The returned transform is of a non-determinate type. Most people
608 const struct crypto_type *frontend, u32 type, u32 mask, in crypto_alloc_tfm_node()
617 alg = crypto_find_alg(alg_name, frontend, type, mask); in crypto_alloc_tfm_node()
631 if (err != -EAGAIN) in crypto_alloc_tfm_node()
634 err = -EINTR; in crypto_alloc_tfm_node()
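
A sketch of how a type frontend reaches this path: the shash allocator in crypto/shash.c is approximately the following, where crypto_alloc_tfm() is a thin wrapper that forwards to crypto_alloc_tfm_node() with NUMA_NO_NODE.

	struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type, u32 mask)
	{
		/* crypto_shash_type supplies tfmsize/extsize/init_tfm for this frontend. */
		return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
	}
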
644 * crypto_destroy_tfm - Free crypto transform
645 * @mem: Start of tfm slab
651 void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm) in crypto_destroy_tfm()
655 if (IS_ERR_OR_NULL(mem)) in crypto_destroy_tfm()
658 if (!refcount_dec_and_test(&tfm->refcnt)) in crypto_destroy_tfm()
660 alg = tfm->__crt_alg; in crypto_destroy_tfm()
662 if (!tfm->exit && alg->cra_exit) in crypto_destroy_tfm()
663 alg->cra_exit(tfm); in crypto_destroy_tfm()
666 kfree_sensitive(mem); in crypto_destroy_tfm()
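
crypto_destroy_tfm() takes both pointers because a frontend allocation ("mem", laid out by crypto_alloc_tfmmem() above) starts before the embedded struct crypto_tfm. The free helpers pass both, roughly as in this sketch (mirroring include/linux/crypto.h and include/crypto/hash.h):

	/* Bare tfm from crypto_alloc_base(): the allocation starts at the tfm itself. */
	static inline void crypto_free_tfm(struct crypto_tfm *tfm)
	{
		return crypto_destroy_tfm(tfm, tfm);
	}

	/* Frontend tfm: pass the start of the allocation and the embedded tfm. */
	static inline void crypto_free_shash(struct crypto_shash *tfm)
	{
		crypto_destroy_tfm(tfm, crypto_shash_tfm(tfm));
	}
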
670 int crypto_has_alg(const char *name, u32 type, u32 mask) in crypto_has_alg()
673 struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask); in crypto_has_alg()
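
crypto_has_alg() is typically used as a boolean probe; an illustrative (hypothetical) check:

	if (!crypto_has_alg("gcm(aes)", 0, 0))
		pr_warn("gcm(aes) is not available\n");
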
688 if (err == -EINPROGRESS) in crypto_req_done()
691 wait->err = err; in crypto_req_done()
692 complete(&wait->completion); in crypto_req_done()
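
crypto_req_done() is the stock completion callback used with struct crypto_wait and crypto_wait_req(); a minimal sketch of the pattern, assuming "req" is an skcipher_request that has already been prepared:

	DECLARE_CRYPTO_WAIT(wait);
	int err;

	skcipher_request_set_callback(req,
				      CRYPTO_TFM_REQ_MAY_BACKLOG | CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);

	/* crypto_wait_req() sleeps on wait.completion if the request went asynchronous. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);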