crypto: x86/aes-ni - remove special handling of AES in PCBC mode
For historical reasons, the AES-NI based implementation of the PCBC
chaining mode uses a special FPU chaining mode wrapper template to
amortize the FPU start/stop overhead over multiple blocks.

When this FPU wrapper was introduced, it supported widely used chaining
modes such as XTS and CTR (as well as LRW), but currently, PCBC is the
only remaining user.

Since there are no known users of pcbc(aes) in the kernel, let's remove
this special driver, and rely on the generic pcbc driver to encapsulate
the AES-NI core cipher.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Parent: 79517e8f8e
Commit: 944585a64f
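With the special pcbc-aes-aesni driver gone, pcbc(aes) is instantiated by the generic pcbc template wrapping the AES-NI core cipher. The following is a minimal, illustrative sketch (not part of this commit) of how a kernel caller could request that composition through the skcipher API; the key, buffer, and IV handling are assumptions for the example, and the buffer length must be a multiple of AES_BLOCK_SIZE.

/* Illustrative sketch only: allocate pcbc(aes); the crypto core will
 * instantiate the generic pcbc template around the AES-NI cipher when
 * CONFIG_CRYPTO_PCBC and CONFIG_CRYPTO_AES_NI_INTEL are available.
 */
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int pcbc_aes_demo(const u8 *key, unsigned int keylen,
                         u8 *buf, unsigned int len, u8 *iv)
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        int err;

        tfm = crypto_alloc_skcipher("pcbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_skcipher_setkey(tfm, key, keylen);
        if (err)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        /* In-place encryption of one contiguous buffer; len must be a
         * multiple of AES_BLOCK_SIZE for PCBC.
         */
        sg_init_one(&sg, buf, len);
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &sg, &sg, len, iv);

        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return err;
}

Note that without the fpu() template the generic pcbc code drives the AES cipher one block at a time, so the FPU save/restore overhead is no longer amortized across blocks; the commit accepts this because pcbc(aes) has no known in-kernel users.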
--- a/arch/x86/crypto/Makefile
+++ b/arch/x86/crypto/Makefile
@@ -103,7 +103,7 @@ ifeq ($(avx2_supported),yes)
 morus1280-avx2-y := morus1280-avx2-asm.o morus1280-avx2-glue.o
 endif
 
-aesni-intel-y := aesni-intel_asm.o aesni-intel_glue.o fpu.o
+aesni-intel-y := aesni-intel_asm.o aesni-intel_glue.o
 aesni-intel-$(CONFIG_64BIT) += aesni-intel_avx-x86_64.o aes_ctrby8_avx-x86_64.o
 ghash-clmulni-intel-y := ghash-clmulni-intel_asm.o ghash-clmulni-intel_glue.o
 sha1-ssse3-y := sha1_ssse3_asm.o sha1_ssse3_glue.o
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -1253,22 +1253,6 @@ static struct skcipher_alg aesni_skciphers[] = {
 static
 struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];
 
-static struct {
-        const char *algname;
-        const char *drvname;
-        const char *basename;
-        struct simd_skcipher_alg *simd;
-} aesni_simd_skciphers2[] = {
-#if (defined(MODULE) && IS_ENABLED(CONFIG_CRYPTO_PCBC)) || \
-    IS_BUILTIN(CONFIG_CRYPTO_PCBC)
-        {
-                .algname        = "pcbc(aes)",
-                .drvname        = "pcbc-aes-aesni",
-                .basename       = "fpu(pcbc(__aes-aesni))",
-        },
-#endif
-};
-
 #ifdef CONFIG_X86_64
 static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
                                   unsigned int key_len)

@@ -1422,10 +1406,6 @@ static void aesni_free_simds(void)
         for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
                     aesni_simd_skciphers[i]; i++)
                 simd_skcipher_free(aesni_simd_skciphers[i]);
-
-        for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++)
-                if (aesni_simd_skciphers2[i].simd)
-                        simd_skcipher_free(aesni_simd_skciphers2[i].simd);
 }
 
 static int __init aesni_init(void)

@@ -1499,18 +1479,6 @@ static int __init aesni_init(void)
                 aesni_simd_skciphers[i] = simd;
         }
 
-        for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++) {
-                algname = aesni_simd_skciphers2[i].algname;
-                drvname = aesni_simd_skciphers2[i].drvname;
-                basename = aesni_simd_skciphers2[i].basename;
-                simd = simd_skcipher_create_compat(algname, drvname, basename);
-                err = PTR_ERR(simd);
-                if (IS_ERR(simd))
-                        continue;
-
-                aesni_simd_skciphers2[i].simd = simd;
-        }
-
         return 0;
 
 unregister_simds:
--- a/arch/x86/crypto/fpu.c
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * FPU: Wrapper for blkcipher touching fpu
- *
- * Copyright (c) Intel Corp.
- *   Author: Huang Ying <ying.huang@intel.com>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
- */
-
-#include <crypto/internal/skcipher.h>
-#include <linux/err.h>
-#include <linux/init.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/slab.h>
-#include <asm/fpu/api.h>
-
-struct crypto_fpu_ctx {
-        struct crypto_sync_skcipher *child;
-};
-
-static int crypto_fpu_setkey(struct crypto_skcipher *parent, const u8 *key,
-                             unsigned int keylen)
-{
-        struct crypto_fpu_ctx *ctx = crypto_skcipher_ctx(parent);
-        struct crypto_sync_skcipher *child = ctx->child;
-        int err;
-
-        crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-        crypto_sync_skcipher_set_flags(child,
-                                       crypto_skcipher_get_flags(parent) &
-                                       CRYPTO_TFM_REQ_MASK);
-        err = crypto_sync_skcipher_setkey(child, key, keylen);
-        crypto_skcipher_set_flags(parent,
-                                  crypto_sync_skcipher_get_flags(child) &
-                                  CRYPTO_TFM_RES_MASK);
-        return err;
-}
-
-static int crypto_fpu_encrypt(struct skcipher_request *req)
-{
-        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-        struct crypto_fpu_ctx *ctx = crypto_skcipher_ctx(tfm);
-        struct crypto_sync_skcipher *child = ctx->child;
-        SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);
-        int err;
-
-        skcipher_request_set_sync_tfm(subreq, child);
-        skcipher_request_set_callback(subreq, 0, NULL, NULL);
-        skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
-                                   req->iv);
-
-        kernel_fpu_begin();
-        err = crypto_skcipher_encrypt(subreq);
-        kernel_fpu_end();
-
-        skcipher_request_zero(subreq);
-        return err;
-}
-
-static int crypto_fpu_decrypt(struct skcipher_request *req)
-{
-        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-        struct crypto_fpu_ctx *ctx = crypto_skcipher_ctx(tfm);
-        struct crypto_sync_skcipher *child = ctx->child;
-        SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);
-        int err;
-
-        skcipher_request_set_sync_tfm(subreq, child);
-        skcipher_request_set_callback(subreq, 0, NULL, NULL);
-        skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
-                                   req->iv);
-
-        kernel_fpu_begin();
-        err = crypto_skcipher_decrypt(subreq);
-        kernel_fpu_end();
-
-        skcipher_request_zero(subreq);
-        return err;
-}
-
-static int crypto_fpu_init_tfm(struct crypto_skcipher *tfm)
-{
-        struct skcipher_instance *inst = skcipher_alg_instance(tfm);
-        struct crypto_fpu_ctx *ctx = crypto_skcipher_ctx(tfm);
-        struct crypto_skcipher_spawn *spawn;
-        struct crypto_skcipher *cipher;
-
-        spawn = skcipher_instance_ctx(inst);
-        cipher = crypto_spawn_skcipher(spawn);
-        if (IS_ERR(cipher))
-                return PTR_ERR(cipher);
-
-        ctx->child = (struct crypto_sync_skcipher *)cipher;
-
-        return 0;
-}
-
-static void crypto_fpu_exit_tfm(struct crypto_skcipher *tfm)
-{
-        struct crypto_fpu_ctx *ctx = crypto_skcipher_ctx(tfm);
-
-        crypto_free_sync_skcipher(ctx->child);
-}
-
-static void crypto_fpu_free(struct skcipher_instance *inst)
-{
-        crypto_drop_skcipher(skcipher_instance_ctx(inst));
-        kfree(inst);
-}
-
-static int crypto_fpu_create(struct crypto_template *tmpl, struct rtattr **tb)
-{
-        struct crypto_skcipher_spawn *spawn;
-        struct skcipher_instance *inst;
-        struct crypto_attr_type *algt;
-        struct skcipher_alg *alg;
-        const char *cipher_name;
-        int err;
-
-        algt = crypto_get_attr_type(tb);
-        if (IS_ERR(algt))
-                return PTR_ERR(algt);
-
-        if ((algt->type ^ (CRYPTO_ALG_INTERNAL | CRYPTO_ALG_TYPE_SKCIPHER)) &
-            algt->mask)
-                return -EINVAL;
-
-        if (!(algt->mask & CRYPTO_ALG_INTERNAL))
-                return -EINVAL;
-
-        cipher_name = crypto_attr_alg_name(tb[1]);
-        if (IS_ERR(cipher_name))
-                return PTR_ERR(cipher_name);
-
-        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
-        if (!inst)
-                return -ENOMEM;
-
-        spawn = skcipher_instance_ctx(inst);
-
-        crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
-        err = crypto_grab_skcipher(spawn, cipher_name, CRYPTO_ALG_INTERNAL,
-                                   CRYPTO_ALG_INTERNAL | CRYPTO_ALG_ASYNC);
-        if (err)
-                goto out_free_inst;
-
-        alg = crypto_skcipher_spawn_alg(spawn);
-
-        err = crypto_inst_setname(skcipher_crypto_instance(inst), "fpu",
-                                  &alg->base);
-        if (err)
-                goto out_drop_skcipher;
-
-        inst->alg.base.cra_flags = CRYPTO_ALG_INTERNAL;
-        inst->alg.base.cra_priority = alg->base.cra_priority;
-        inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
-        inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
-
-        inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
-        inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
-        inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);
-
-        inst->alg.base.cra_ctxsize = sizeof(struct crypto_fpu_ctx);
-
-        inst->alg.init = crypto_fpu_init_tfm;
-        inst->alg.exit = crypto_fpu_exit_tfm;
-
-        inst->alg.setkey = crypto_fpu_setkey;
-        inst->alg.encrypt = crypto_fpu_encrypt;
-        inst->alg.decrypt = crypto_fpu_decrypt;
-
-        inst->free = crypto_fpu_free;
-
-        err = skcipher_register_instance(tmpl, inst);
-        if (err)
-                goto out_drop_skcipher;
-
-out:
-        return err;
-
-out_drop_skcipher:
-        crypto_drop_skcipher(spawn);
-out_free_inst:
-        kfree(inst);
-        goto out;
-}
-
-static struct crypto_template crypto_fpu_tmpl = {
-        .name = "fpu",
-        .create = crypto_fpu_create,
-        .module = THIS_MODULE,
-};
-
-int __init crypto_fpu_init(void)
-{
-        return crypto_register_template(&crypto_fpu_tmpl);
-}
-
-void crypto_fpu_exit(void)
-{
-        crypto_unregister_template(&crypto_fpu_tmpl);
-}
-
-MODULE_ALIAS_CRYPTO("fpu");
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -1083,7 +1083,7 @@ config CRYPTO_AES_NI_INTEL
 
           In addition to AES cipher algorithm support, the acceleration
           for some popular block cipher mode is supported too, including
-          ECB, CBC, LRW, PCBC, XTS. The 64 bit version has additional
+          ECB, CBC, LRW, XTS. The 64 bit version has additional
           acceleration for CTR.
 
 config CRYPTO_AES_SPARC64
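The Kconfig help text above lists the modes for which AES-NI registers accelerated skcipher drivers. A small, hedged sketch of how one might confirm at runtime which driver implementation the crypto core selected for a given mode follows; the algorithm names and the pr_info reporting are illustrative assumptions, not part of this commit.

#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/printk.h>

/* Illustrative: print the driver backing a mode. "xts(aes)" is expected
 * to resolve to an aesni-backed driver when AES-NI is available, while
 * "pcbc(aes)" now resolves to the generic pcbc template wrapping the
 * AES-NI cipher (assumption for illustration).
 */
static void report_aes_driver(const char *algname)
{
        struct crypto_skcipher *tfm = crypto_alloc_skcipher(algname, 0, 0);

        if (IS_ERR(tfm)) {
                pr_info("%s: not available (%ld)\n", algname, PTR_ERR(tfm));
                return;
        }

        pr_info("%s -> %s\n", algname,
                crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)));
        crypto_free_skcipher(tfm);
}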