/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);
static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
        atomic_inc(&alg->cra_refcnt);
        return alg;
}
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);
void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        /* Cache the module pointer first: dropping the algorithm
         * reference may free @alg. */
        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        /* Larvals created by the test manager carry a driver name;
         * larvals created for plain lookups do not. */
        return larval->alg.cra_driver_name[0];
}
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                /* A driver-name match is exact; a match on the generic
                 * algorithm name only wins if it improves on the best
                 * priority seen so far. */
                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}
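/*
 * Illustrative example (hypothetical driver names and priorities): if two
 * implementations of "aes" are registered, say "aes-generic" at priority
 * 100 and an optimized "aes-asm" at priority 200, then
 *
 *	__crypto_alg_lookup("aes", 0, 0)	  -> "aes-asm" (best priority)
 *	__crypto_alg_lookup("aes-generic", 0, 0)  -> "aes-generic" (exact)
 *
 * Callers must hold crypto_alg_sem; the returned algorithm carries a
 * module reference taken by crypto_mod_get().
 */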
static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);
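/*
 * Larval lifecycle, in brief: a larval is a placeholder crypto_alg that is
 * registered while the real algorithm is being loaded or constructed.
 * Concurrent lookups of the same name find the larval and sleep on its
 * completion in crypto_larval_wait(); once the real ("adult") algorithm
 * registers, larval->adult is set and all waiters are woken via
 * complete_all().
 */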
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        /* One reference for the list, one for the caller. */
        atomic_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg)
                kfree(larval);

        return alg;
}
void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_interruptible_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}
struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        up_read(&crypto_alg_sem);

        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);
struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        type &= mask;

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg) {
                char tmp[CRYPTO_MAX_ALG_NAME];

                request_module(name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask) &&
                    snprintf(tmp, sizeof(tmp), "%s-all", name) < sizeof(tmp))
                        request_module(tmp);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

        return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);
int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                /* Nobody handled it; try to load the crypto manager
                 * and ask again. */
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        if (!(mask & CRYPTO_ALG_TESTED)) {
                type |= CRYPTO_ALG_TESTED;
                mask |= CRYPTO_ALG_TESTED;
        }

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
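/*
 * Example flow (illustrative, assuming cryptomgr is available): a lookup
 * of "cbc(aes)" with no matching instance registered leaves a larval in
 * place and raises CRYPTO_MSG_ALG_REQUEST.  cryptomgr parses the name,
 * instantiates the "cbc" template around an "aes" implementation and
 * registers the result, at which point crypto_larval_wait() above resolves
 * to the newly constructed algorithm.
 */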
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_DIGEST:
                if ((mask & CRYPTO_ALG_TYPE_HASH_MASK) !=
                    CRYPTO_ALG_TYPE_HASH_MASK)
                        return crypto_init_digest_ops_async(tfm);
                else
                        return crypto_init_digest_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}
static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type) {
                if (tfm->exit)
                        tfm->exit(tfm);
                return;
        }

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                crypto_exit_cipher_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                crypto_exit_digest_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                crypto_exit_compress_ops(tfm);
                break;

        default:
                BUG();
        }
}
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        /* Reserve extra space so the context can be aligned to
         * cra_alignmask even though kzalloc only guarantees
         * crypto_tfm_ctx_alignment(). */
        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                len += crypto_digest_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}
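/*
 * Worked example (illustrative numbers): for cra_alignmask = 15 (the
 * context must be 16-byte aligned) on a machine where
 * crypto_tfm_ctx_alignment() is 8, the extra reservation is
 *
 *	len = 15 & ~(8 - 1) = 8
 *
 * i.e. 8 spare bytes, enough to round the context area up from an 8-byte
 * boundary to the next 16-byte boundary at runtime.
 */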
void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
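/*
 * Usage sketch (illustrative, not part of the original file):
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("sha1", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 *
 * Passing type == 0 and mask == 0 accepts any implementation of "sha1";
 * crypto_free_tfm() is the matching release helper from <linux/crypto.h>.
 */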
struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
                                     const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        /* The frontend's private data sits in front of the crypto_tfm. */
        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm, frontend);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return (struct crypto_tfm *)mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);
/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
                                    const struct crypto_type *frontend,
                                    u32 type, u32 mask)
{
        struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
        struct crypto_tfm *tfm;
        int err;

        type &= frontend->maskclear;
        mask &= frontend->maskclear;
        type |= frontend->type;
        mask |= frontend->maskset;

        lookup = frontend->lookup ?: crypto_alg_mod_lookup;

        for (;;) {
                struct crypto_alg *alg;

                alg = lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
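/*
 * Illustrative sketch (names hypothetical): a type-specific frontend wraps
 * this function with its own struct crypto_type, roughly:
 *
 *	tfm = crypto_alloc_tfm("sha1", &my_hash_type, 0, 0);
 *
 * where my_hash_type supplies tfmsize, extsize() and init_tfm(), and may
 * override the default crypto_alg_mod_lookup() via its lookup hook.
 */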
/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;
        int size;

        if (unlikely(!mem))
                return;

        size = ksize(mem);
        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        /* Wipe the whole allocation in case it held key material. */
        memset(mem, 0, size);
        kfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
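/*
 * Usage note: callers normally go through a type-specific helper rather
 * than calling this directly.  For a bare tfm allocated by
 * __crypto_alloc_tfm(), crypto_free_tfm(tfm) expands to
 * crypto_destroy_tfm(tfm, tfm), since the allocation starts at the tfm
 * itself; frontends pass the start of their enclosing slab instead.
 */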
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
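/*
 * Usage sketch (illustrative): probe for an algorithm, loading modules on
 * demand, without holding on to a reference:
 *
 *	if (!crypto_has_alg("sha256", 0, 0))
 *		return -ENOENT;
 */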
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");