author	Eric Biggers <ebiggers@kernel.org>	2026-01-12 11:20:01 -0800
committer	Eric Biggers <ebiggers@kernel.org>	2026-01-12 11:39:58 -0800
commit	a4e4e44649ba5f2d59f056ba59885fb080caa22e (patch)
tree	999ab0d4fa0023fa25812eadc85594d6660fdcd9
parent	a22fd0e3c495dd2d706c49c26663476e24d96e7d (diff)
crypto: arm/aes-neonbs - Use AES library for single blocks

aes-neonbs-glue.c calls __aes_arm_encrypt() and __aes_arm_decrypt() to
en/decrypt single blocks for CBC encryption, XTS tweak encryption, and
XTS ciphertext stealing. In preparation for making the AES library use
this same ARM-optimized single-block AES en/decryption code and making
it an internal implementation detail of the AES library, replace the
calls to these functions with calls to the AES library.

Note that this reduces the size of the aesbs_cbc_ctx and aesbs_xts_ctx
structs, since unnecessary decryption round keys are no longer included.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20260112192035.10427-4-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
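For context, a minimal sketch of the single-block pattern the glue code switches to. It assumes the aes_prepareenckey()/aes_encrypt() names, the struct aes_enckey type, and the (key, dst, src) argument order exactly as they appear in the hunks below; encrypt_one_block() itself is a hypothetical helper, not part of the patch.

#include <crypto/aes.h>	/* assumed home of struct aes_enckey and its helpers */

/*
 * Hypothetical helper: expand an AES key for encryption only, then
 * encrypt one 16-byte block in place through the AES library, mirroring
 * the calls made in the cbc_encrypt() hunk below.
 */
static int encrypt_one_block(const u8 *key, unsigned int key_len,
			     u8 block[AES_BLOCK_SIZE])
{
	struct aes_enckey enc;	/* encryption round keys only */
	int err;

	err = aes_prepareenckey(&enc, key, key_len);	/* validates key_len */
	if (err)
		return err;

	aes_encrypt(&enc, block, block);	/* (key, dst, src) order */
	return 0;
}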
-rw-r--r--	arch/arm/crypto/Kconfig	1
-rw-r--r--	arch/arm/crypto/aes-neonbs-glue.c	29
2 files changed, 16 insertions, 14 deletions
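The struct shrinkage mentioned in the commit message falls out of the fallback type change: the long-standing crypto_aes_ctx from include/crypto/aes.h carries both key schedules, while an encrypt-only context needs just one. For comparison, a sketch; the crypto_aes_ctx layout below matches the existing header, but the aes_enckey shape is only inferred from the k.rndkeys access in this patch, not from a documented header.

/* Generic context: both schedules, 2 * 60 u32 words of round keys. */
struct crypto_aes_ctx {
	u32 key_enc[AES_MAX_KEYLENGTH_U32];	/* 60 words */
	u32 key_dec[AES_MAX_KEYLENGTH_U32];	/* 60 words, dead weight for CBC-enc and the XTS tweak */
	u32 key_length;
};

/* Inferred shape of the new encrypt-only key, roughly half the size. */
struct aes_enckey {
	struct {
		u32 rndkeys[AES_MAX_KEYLENGTH_U32];	/* read as k.rndkeys in the setkey hunk */
	} k;
};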
diff --git a/arch/arm/crypto/Kconfig b/arch/arm/crypto/Kconfig
index 3eb5071bea14..167a648a9def 100644
--- a/arch/arm/crypto/Kconfig
+++ b/arch/arm/crypto/Kconfig
@@ -44,7 +44,6 @@ config CRYPTO_AES_ARM
 config CRYPTO_AES_ARM_BS
 	tristate "Ciphers: AES, modes: ECB/CBC/CTR/XTS (bit-sliced NEON)"
 	depends on KERNEL_MODE_NEON
-	select CRYPTO_AES_ARM
 	select CRYPTO_SKCIPHER
 	select CRYPTO_LIB_AES
 	help
diff --git a/arch/arm/crypto/aes-neonbs-glue.c b/arch/arm/crypto/aes-neonbs-glue.c
index df5afe601e4a..c49ddafc54f3 100644
--- a/arch/arm/crypto/aes-neonbs-glue.c
+++ b/arch/arm/crypto/aes-neonbs-glue.c
@@ -12,7 +12,6 @@
 #include <crypto/scatterwalk.h>
 #include <crypto/xts.h>
 #include <linux/module.h>
-#include "aes-cipher.h"
 
 MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
 MODULE_DESCRIPTION("Bit sliced AES using NEON instructions");
@@ -48,13 +47,13 @@ struct aesbs_ctx {
 struct aesbs_cbc_ctx {
 	struct aesbs_ctx key;
-	struct crypto_aes_ctx fallback;
+	struct aes_enckey fallback;
 };
 
 struct aesbs_xts_ctx {
 	struct aesbs_ctx key;
-	struct crypto_aes_ctx fallback;
-	struct crypto_aes_ctx tweak_key;
+	struct aes_key fallback;
+	struct aes_enckey tweak_key;
 };
 
 static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 			unsigned int key_len)
@@ -122,14 +121,19 @@ static int aesbs_cbc_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
 	int err;
 
-	err = aes_expandkey(&ctx->fallback, in_key, key_len);
+	err = aes_prepareenckey(&ctx->fallback, in_key, key_len);
 	if (err)
 		return err;
 
 	ctx->key.rounds = 6 + key_len / 4;
 
+	/*
+	 * Note: this assumes that the arm implementation of the AES library
+	 * stores the standard round keys in k.rndkeys.
+	 */
 	kernel_neon_begin();
-	aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
+	aesbs_convert_key(ctx->key.rk, ctx->fallback.k.rndkeys,
+			  ctx->key.rounds);
 	kernel_neon_end();
 
 	return 0;
@@ -152,8 +156,7 @@ static int cbc_encrypt(struct skcipher_request *req)
 	do {
 		crypto_xor_cpy(dst, src, prev, AES_BLOCK_SIZE);
-		__aes_arm_encrypt(ctx->fallback.key_enc,
-				  ctx->key.rounds, dst, dst);
+		aes_encrypt(&ctx->fallback, dst, dst);
 		prev = dst;
 		src += AES_BLOCK_SIZE;
 		dst += AES_BLOCK_SIZE;
@@ -239,10 +242,10 @@ static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 		return err;
 
 	key_len /= 2;
-	err = aes_expandkey(&ctx->fallback, in_key, key_len);
+	err = aes_preparekey(&ctx->fallback, in_key, key_len);
 	if (err)
 		return err;
-	err = aes_expandkey(&ctx->tweak_key, in_key + key_len, key_len);
+	err = aes_prepareenckey(&ctx->tweak_key, in_key + key_len, key_len);
 	if (err)
 		return err;
 
@@ -279,7 +282,7 @@ static int __xts_crypt(struct skcipher_request *req, bool encrypt,
 	if (err)
 		return err;
 
-	__aes_arm_encrypt(ctx->tweak_key.key_enc, rounds, walk.iv, walk.iv);
+	aes_encrypt(&ctx->tweak_key, walk.iv, walk.iv);
 
 	while (walk.nbytes >= AES_BLOCK_SIZE) {
 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
@@ -311,9 +314,9 @@ static int __xts_crypt(struct skcipher_request *req, bool encrypt,
 	crypto_xor(buf, req->iv, AES_BLOCK_SIZE);
 
 	if (encrypt)
-		__aes_arm_encrypt(ctx->fallback.key_enc, rounds, buf, buf);
+		aes_encrypt(&ctx->fallback, buf, buf);
 	else
-		__aes_arm_decrypt(ctx->fallback.key_dec, rounds, buf, buf);
+		aes_decrypt(&ctx->fallback, buf, buf);
 	crypto_xor(buf, req->iv, AES_BLOCK_SIZE);
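Taken together, the tail above is the standard XTS ciphertext-stealing step expressed through the library calls. As a standalone sketch (process_cts_tail() is a hypothetical name; crypto_xor() is the kernel's in-place XOR helper from crypto/algapi.h):

#include <crypto/algapi.h>	/* crypto_xor() */

/*
 * Hypothetical helper mirroring the final hunk: mask the merged final
 * block with the current tweak, run one single-block AES operation
 * through the library fallback, then unmask with the same tweak.
 */
static void process_cts_tail(struct aesbs_xts_ctx *ctx, bool encrypt,
			     u8 buf[AES_BLOCK_SIZE],
			     const u8 iv[AES_BLOCK_SIZE])
{
	crypto_xor(buf, iv, AES_BLOCK_SIZE);	/* apply tweak */
	if (encrypt)
		aes_encrypt(&ctx->fallback, buf, buf);
	else
		aes_decrypt(&ctx->fallback, buf, buf);
	crypto_xor(buf, iv, AES_BLOCK_SIZE);	/* remove tweak */
}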