crypto: api - Rebirth of crypto_alloc_tfm
This patch reintroduces a completely revamped crypto_alloc_tfm.  The
biggest change is that we now take two crypto_type objects when
allocating a tfm, a frontend and a backend.  In fact this simply
formalises what we've been doing behind the API's back.

For example, as it stands crypto_alloc_ahash may use an actual ahash
algorithm or a crypto_hash algorithm.  Putting this in the API allows
us to do this much more cleanly.

The existing types will be converted across gradually.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Parent: 4a7794860b
Commit: 7b0bac64cd
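As background for the diff below, here is a minimal sketch of how a specific allocator could sit on top of the new two-object scheme: the backend (struct crypto_alg) is still looked up by name, while the frontend (struct crypto_type) is now passed in by the caller. The crypto_myhash wrapper, crypto_myhash_type and crypto_alloc_myhash names are hypothetical and invented purely for illustration; this patch itself converts no existing type (the frontend object is filled in by a later sketch after the crypto_type hunk).

#include <linux/err.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>	/* struct crypto_type */

/* Hypothetical frontend handle -- not part of this patch. */
struct crypto_myhash {
	/* frontend-specific fields would live here */
	struct crypto_tfm base;
};

static const struct crypto_type crypto_myhash_type;	/* filled in later */

struct crypto_myhash *crypto_alloc_myhash(const char *alg_name,
					   u32 type, u32 mask)
{
	struct crypto_tfm *tfm;

	/* The backend algorithm is resolved by name inside
	 * crypto_alloc_tfm(); the frontend is handed over explicitly. */
	tfm = crypto_alloc_tfm(alg_name, &crypto_myhash_type, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	/* Walk from the shared crypto_tfm back out to the frontend handle. */
	return container_of(tfm, struct crypto_myhash, base);
}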
 crypto/api.c | 108

@@ -403,6 +403,9 @@ EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
  * @type: Type of algorithm
  * @mask: Mask for type comparison
  *
+ * This function should not be used by new algorithm types.
+ * Please use crypto_alloc_tfm instead.
+ *
  * crypto_alloc_base() will first attempt to locate an already loaded
  * algorithm. If that fails and the kernel supports dynamically loadable
  * modules, it will then attempt to load a module of the same name or
@@ -449,6 +452,111 @@ err:
 	return ERR_PTR(err);
 }
 EXPORT_SYMBOL_GPL(crypto_alloc_base);
 
+struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
+				     const struct crypto_type *frontend)
+{
+	char *mem;
+	struct crypto_tfm *tfm = NULL;
+	unsigned int tfmsize;
+	unsigned int total;
+	int err = -ENOMEM;
+
+	tfmsize = frontend->tfmsize;
+	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);
+
+	mem = kzalloc(total, GFP_KERNEL);
+	if (mem == NULL)
+		goto out_err;
+
+	tfm = (struct crypto_tfm *)(mem + tfmsize);
+	tfm->__crt_alg = alg;
+
+	err = frontend->init_tfm(tfm, frontend);
+	if (err)
+		goto out_free_tfm;
+
+	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
+		goto cra_init_failed;
+
+	goto out;
+
+cra_init_failed:
+	crypto_exit_ops(tfm);
+out_free_tfm:
+	if (err == -EAGAIN)
+		crypto_shoot_alg(alg);
+	kfree(mem);
+out_err:
+	tfm = ERR_PTR(err);
+out:
+	return tfm;
+}
+EXPORT_SYMBOL_GPL(crypto_create_tfm);
+
+/*
+ * crypto_alloc_tfm - Locate algorithm and allocate transform
+ * @alg_name: Name of algorithm
+ * @frontend: Frontend algorithm type
+ * @type: Type of algorithm
+ * @mask: Mask for type comparison
+ *
+ * crypto_alloc_tfm() will first attempt to locate an already loaded
+ * algorithm. If that fails and the kernel supports dynamically loadable
+ * modules, it will then attempt to load a module of the same name or
+ * alias. If that fails it will send a query to any loaded crypto manager
+ * to construct an algorithm on the fly. A refcount is grabbed on the
+ * algorithm which is then associated with the new transform.
+ *
+ * The returned transform is of a non-determinate type. Most people
+ * should use one of the more specific allocation functions such as
+ * crypto_alloc_blkcipher.
+ *
+ * In case of error the return value is an error pointer.
+ */
+struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
+				    const struct crypto_type *frontend,
+				    u32 type, u32 mask)
+{
+	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
+	struct crypto_tfm *tfm;
+	int err;
+
+	type &= frontend->maskclear;
+	mask &= frontend->maskclear;
+	type |= frontend->type;
+	mask |= frontend->maskset;
+
+	lookup = frontend->lookup ?: crypto_alg_mod_lookup;
+
+	for (;;) {
+		struct crypto_alg *alg;
+
+		alg = lookup(alg_name, type, mask);
+		if (IS_ERR(alg)) {
+			err = PTR_ERR(alg);
+			goto err;
+		}
+
+		tfm = crypto_create_tfm(alg, frontend);
+		if (!IS_ERR(tfm))
+			return tfm;
+
+		crypto_mod_put(alg);
+		err = PTR_ERR(tfm);
+
+err:
+		if (err != -EAGAIN)
+			break;
+		if (signal_pending(current)) {
+			err = -EINTR;
+			break;
+		}
+	}
+
+	return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
+
 /*
  * crypto_free_tfm - Free crypto transform
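To make the kzalloc() arithmetic in crypto_create_tfm() above easier to follow, here is a sketch of the single allocation it produces, again using the hypothetical myhash frontend from the earlier sketch and assuming its tfmsize is offsetof(struct crypto_myhash, base). This is illustrative only, not code from this patch.

/*
 * One buffer, three regions:
 *
 *   mem                       mem + tfmsize
 *   |                         |
 *   v                         v
 *   [ frontend fields ...... ][ struct crypto_tfm ][ frontend->extsize() bytes ]
 *
 * crypto_create_tfm() returns the inner struct crypto_tfm pointer, so a
 * frontend recovers its own handle by walking outwards with container_of():
 */
static inline struct crypto_myhash *crypto_myhash_cast(struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_myhash, base);
}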
@@ -109,6 +109,8 @@ void crypto_alg_tested(const char *name, int err);
 void crypto_shoot_alg(struct crypto_alg *alg);
 struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
 				      u32 mask);
+struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
+				     const struct crypto_type *frontend);
 
 int crypto_register_instance(struct crypto_template *tmpl,
 			     struct crypto_instance *inst);
@@ -22,8 +22,18 @@ struct seq_file;
 
 struct crypto_type {
 	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
+	unsigned int (*extsize)(struct crypto_alg *alg,
+				const struct crypto_type *frontend);
 	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
+	int (*init_tfm)(struct crypto_tfm *tfm,
+			const struct crypto_type *frontend);
 	void (*show)(struct seq_file *m, struct crypto_alg *alg);
+	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
+
+	unsigned int type;
+	unsigned int maskclear;
+	unsigned int maskset;
+	unsigned int tfmsize;
 };
 
 struct crypto_instance {
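The new crypto_type fields above are exactly what crypto_alloc_tfm() and crypto_create_tfm() consume: type/maskclear/maskset adjust the caller's lookup flags, tfmsize and extsize() size the allocation, and init_tfm() lets the frontend finish setting up the transform. A hedged sketch of how the hypothetical myhash frontend from the earlier sketches might fill these in; the flag values and helpers shown are illustrative and not taken from this patch.

static unsigned int crypto_myhash_extsize(struct crypto_alg *alg,
					  const struct crypto_type *frontend)
{
	/* Extra context bytes placed after the struct crypto_tfm. */
	return alg->cra_ctxsize;
}

static int crypto_myhash_init_tfm(struct crypto_tfm *tfm,
				  const struct crypto_type *frontend)
{
	/* Wire up frontend ops here; set tfm->exit if cleanup is needed. */
	return 0;
}

static const struct crypto_type crypto_myhash_type = {
	.extsize	= crypto_myhash_extsize,
	.init_tfm	= crypto_myhash_init_tfm,
	/* crypto_alloc_tfm() does: type &= maskclear; mask &= maskclear;
	 * type |= .type; mask |= maskset; so lookups match only this type. */
	.maskclear	= ~CRYPTO_ALG_TYPE_MASK,
	.maskset	= CRYPTO_ALG_TYPE_MASK,
	.type		= CRYPTO_ALG_TYPE_HASH,
	.tfmsize	= offsetof(struct crypto_myhash, base),
};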
@@ -546,7 +546,9 @@ struct crypto_attr_u32 {
  * Transform user interface.
  */
 
-struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
+struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
+				    const struct crypto_type *frontend,
+				    u32 type, u32 mask);
 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
 void crypto_free_tfm(struct crypto_tfm *tfm);
 