Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux
1
fork

Configure Feed

Select the types of activity you want to include in your feed.

lib/crypto: aesgcm: Use new AES library API

Switch from the old AES library functions (which use struct
crypto_aes_ctx) to the new ones (which use struct aes_enckey). This
eliminates the unnecessary computation and caching of the decryption
round keys. The new AES en/decryption functions are also much faster
and use AES instructions when supported by the CPU.

Note that in addition to the explicit changes to the key preparation
function and the key struct type, changing the key struct type also
causes aes_encrypt() (which is temporarily a type-generic macro) to
dispatch to the new encryption function rather than the old one.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20260112192035.10427-34-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>

+7 -7
+1 -1
include/crypto/gcm.h
@@ -66,7 +66,7 @@

 struct aesgcm_ctx {
 	be128			ghash_key;
-	struct crypto_aes_ctx	aes_ctx;
+	struct aes_enckey	aes_key;
 	unsigned int		authsize;
 };
+6 -6
lib/crypto/aesgcm.c
@@ -12,7 +12,7 @@
 #include <linux/module.h>
 #include <asm/irqflags.h>

-static void aesgcm_encrypt_block(const struct crypto_aes_ctx *ctx, void *dst,
+static void aesgcm_encrypt_block(const struct aes_enckey *key, void *dst,
 				 const void *src)
 {
 	unsigned long flags;
@@ -26,7 +26,7 @@
 	 * effective when running with interrupts disabled.
 	 */
 	local_irq_save(flags);
-	aes_encrypt(ctx, dst, src);
+	aes_encrypt(key, dst, src);
 	local_irq_restore(flags);
 }
@@ -49,12 +49,12 @@
 	int ret;

 	ret = crypto_gcm_check_authsize(authsize) ?:
-	      aes_expandkey(&ctx->aes_ctx, key, keysize);
+	      aes_prepareenckey(&ctx->aes_key, key, keysize);
 	if (ret)
 		return ret;

 	ctx->authsize = authsize;
-	aesgcm_encrypt_block(&ctx->aes_ctx, &ctx->ghash_key, kin);
+	aesgcm_encrypt_block(&ctx->aes_key, &ctx->ghash_key, kin);

 	return 0;
 }
@@ -97,7 +97,7 @@
 	aesgcm_ghash(&ghash, &ctx->ghash_key, &tail, sizeof(tail));

 	ctr[3] = cpu_to_be32(1);
-	aesgcm_encrypt_block(&ctx->aes_ctx, buf, ctr);
+	aesgcm_encrypt_block(&ctx->aes_key, buf, ctr);
 	crypto_xor_cpy(authtag, buf, (u8 *)&ghash, ctx->authsize);

 	memzero_explicit(&ghash, sizeof(ghash));
@@ -119,7 +119,7 @@
 	 * len', this cannot happen, so no explicit test is necessary.
 	 */
 	ctr[3] = cpu_to_be32(n++);
-	aesgcm_encrypt_block(&ctx->aes_ctx, buf, ctr);
+	aesgcm_encrypt_block(&ctx->aes_key, buf, ctr);
 	crypto_xor_cpy(dst, src, buf, min(len, AES_BLOCK_SIZE));

 	dst += AES_BLOCK_SIZE;