Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux
1
fork

Configure Feed

Select the types of activity you want to include in your feed.

lib/crypto: aes: Remove old AES en/decryption functions

Now that all callers of the aes_encrypt() and aes_decrypt() type-generic
macros are using the new types, remove the old functions.

Then, replace the macros with direct calls to the new functions, dropping
the "_new" suffix from them.

This completes the change in the type of the key struct that is passed
to aes_encrypt() and aes_decrypt().

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20260112192035.10427-35-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>

+10 -132
+4 -20
include/crypto/aes.h
··· 308 308 * 309 309 * Context: Any context. 310 310 */ 311 - #define aes_encrypt(key, out, in) \ 312 - _Generic((key), \ 313 - struct crypto_aes_ctx *: aes_encrypt_old((const struct crypto_aes_ctx *)(key), (out), (in)), \ 314 - const struct crypto_aes_ctx *: aes_encrypt_old((const struct crypto_aes_ctx *)(key), (out), (in)), \ 315 - struct aes_enckey *: aes_encrypt_new((const struct aes_enckey *)(key), (out), (in)), \ 316 - const struct aes_enckey *: aes_encrypt_new((const struct aes_enckey *)(key), (out), (in)), \ 317 - struct aes_key *: aes_encrypt_new((const struct aes_key *)(key), (out), (in)), \ 318 - const struct aes_key *: aes_encrypt_new((const struct aes_key *)(key), (out), (in))) 319 - void aes_encrypt_old(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in); 320 - void aes_encrypt_new(aes_encrypt_arg key, u8 out[at_least AES_BLOCK_SIZE], 321 - const u8 in[at_least AES_BLOCK_SIZE]); 311 + void aes_encrypt(aes_encrypt_arg key, u8 out[at_least AES_BLOCK_SIZE], 312 + const u8 in[at_least AES_BLOCK_SIZE]); 322 313 323 314 /** 324 315 * aes_decrypt() - Decrypt a single AES block ··· 319 328 * 320 329 * Context: Any context. 
321 330 */ 322 - #define aes_decrypt(key, out, in) \ 323 - _Generic((key), \ 324 - struct crypto_aes_ctx *: aes_decrypt_old((const struct crypto_aes_ctx *)(key), (out), (in)), \ 325 - const struct crypto_aes_ctx *: aes_decrypt_old((const struct crypto_aes_ctx *)(key), (out), (in)), \ 326 - struct aes_key *: aes_decrypt_new((const struct aes_key *)(key), (out), (in)), \ 327 - const struct aes_key *: aes_decrypt_new((const struct aes_key *)(key), (out), (in))) 328 - void aes_decrypt_old(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in); 329 - void aes_decrypt_new(const struct aes_key *key, u8 out[at_least AES_BLOCK_SIZE], 330 - const u8 in[at_least AES_BLOCK_SIZE]); 331 + void aes_decrypt(const struct aes_key *key, u8 out[at_least AES_BLOCK_SIZE], 332 + const u8 in[at_least AES_BLOCK_SIZE]); 331 333 332 334 extern const u8 crypto_aes_sbox[]; 333 335 extern const u8 crypto_aes_inv_sbox[];
+6 -112
lib/crypto/aes.c
··· 251 251 return mix_columns(x ^ y ^ ror32(y, 16)); 252 252 } 253 253 254 - static __always_inline u32 subshift(u32 in[], int pos) 255 - { 256 - return (aes_sbox[in[pos] & 0xff]) ^ 257 - (aes_sbox[(in[(pos + 1) % 4] >> 8) & 0xff] << 8) ^ 258 - (aes_sbox[(in[(pos + 2) % 4] >> 16) & 0xff] << 16) ^ 259 - (aes_sbox[(in[(pos + 3) % 4] >> 24) & 0xff] << 24); 260 - } 261 - 262 - static __always_inline u32 inv_subshift(u32 in[], int pos) 263 - { 264 - return (aes_inv_sbox[in[pos] & 0xff]) ^ 265 - (aes_inv_sbox[(in[(pos + 3) % 4] >> 8) & 0xff] << 8) ^ 266 - (aes_inv_sbox[(in[(pos + 2) % 4] >> 16) & 0xff] << 16) ^ 267 - (aes_inv_sbox[(in[(pos + 1) % 4] >> 24) & 0xff] << 24); 268 - } 269 - 270 254 static u32 subw(u32 in) 271 255 { 272 256 return (aes_sbox[in & 0xff]) ^ ··· 328 344 return 0; 329 345 } 330 346 EXPORT_SYMBOL(aes_expandkey); 331 - 332 - void aes_encrypt_old(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in) 333 - { 334 - const u32 *rkp = ctx->key_enc + 4; 335 - int rounds = 6 + ctx->key_length / 4; 336 - u32 st0[4], st1[4]; 337 - int round; 338 - 339 - st0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in); 340 - st0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4); 341 - st0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8); 342 - st0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12); 343 - 344 - /* 345 - * Force the compiler to emit data independent Sbox references, 346 - * by xoring the input with Sbox values that are known to add up 347 - * to zero. This pulls the entire Sbox into the D-cache before any 348 - * data dependent lookups are done. 
349 - */ 350 - st0[0] ^= aes_sbox[ 0] ^ aes_sbox[ 64] ^ aes_sbox[134] ^ aes_sbox[195]; 351 - st0[1] ^= aes_sbox[16] ^ aes_sbox[ 82] ^ aes_sbox[158] ^ aes_sbox[221]; 352 - st0[2] ^= aes_sbox[32] ^ aes_sbox[ 96] ^ aes_sbox[160] ^ aes_sbox[234]; 353 - st0[3] ^= aes_sbox[48] ^ aes_sbox[112] ^ aes_sbox[186] ^ aes_sbox[241]; 354 - 355 - for (round = 0;; round += 2, rkp += 8) { 356 - st1[0] = mix_columns(subshift(st0, 0)) ^ rkp[0]; 357 - st1[1] = mix_columns(subshift(st0, 1)) ^ rkp[1]; 358 - st1[2] = mix_columns(subshift(st0, 2)) ^ rkp[2]; 359 - st1[3] = mix_columns(subshift(st0, 3)) ^ rkp[3]; 360 - 361 - if (round == rounds - 2) 362 - break; 363 - 364 - st0[0] = mix_columns(subshift(st1, 0)) ^ rkp[4]; 365 - st0[1] = mix_columns(subshift(st1, 1)) ^ rkp[5]; 366 - st0[2] = mix_columns(subshift(st1, 2)) ^ rkp[6]; 367 - st0[3] = mix_columns(subshift(st1, 3)) ^ rkp[7]; 368 - } 369 - 370 - put_unaligned_le32(subshift(st1, 0) ^ rkp[4], out); 371 - put_unaligned_le32(subshift(st1, 1) ^ rkp[5], out + 4); 372 - put_unaligned_le32(subshift(st1, 2) ^ rkp[6], out + 8); 373 - put_unaligned_le32(subshift(st1, 3) ^ rkp[7], out + 12); 374 - } 375 - EXPORT_SYMBOL(aes_encrypt_old); 376 347 377 348 static __always_inline u32 enc_quarterround(const u32 w[4], int i, u32 rk) 378 349 { ··· 437 498 put_unaligned_le32(declast_quarterround(w, 3, *rkp++), &out[12]); 438 499 } 439 500 440 - void aes_decrypt_old(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in) 441 - { 442 - const u32 *rkp = ctx->key_dec + 4; 443 - int rounds = 6 + ctx->key_length / 4; 444 - u32 st0[4], st1[4]; 445 - int round; 446 - 447 - st0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in); 448 - st0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4); 449 - st0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8); 450 - st0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12); 451 - 452 - /* 453 - * Force the compiler to emit data independent Sbox references, 454 - * by xoring the input with Sbox values that are known to add up 455 - 
* to zero. This pulls the entire Sbox into the D-cache before any 456 - * data dependent lookups are done. 457 - */ 458 - st0[0] ^= aes_inv_sbox[ 0] ^ aes_inv_sbox[ 64] ^ aes_inv_sbox[129] ^ aes_inv_sbox[200]; 459 - st0[1] ^= aes_inv_sbox[16] ^ aes_inv_sbox[ 83] ^ aes_inv_sbox[150] ^ aes_inv_sbox[212]; 460 - st0[2] ^= aes_inv_sbox[32] ^ aes_inv_sbox[ 96] ^ aes_inv_sbox[160] ^ aes_inv_sbox[236]; 461 - st0[3] ^= aes_inv_sbox[48] ^ aes_inv_sbox[112] ^ aes_inv_sbox[187] ^ aes_inv_sbox[247]; 462 - 463 - for (round = 0;; round += 2, rkp += 8) { 464 - st1[0] = inv_mix_columns(inv_subshift(st0, 0)) ^ rkp[0]; 465 - st1[1] = inv_mix_columns(inv_subshift(st0, 1)) ^ rkp[1]; 466 - st1[2] = inv_mix_columns(inv_subshift(st0, 2)) ^ rkp[2]; 467 - st1[3] = inv_mix_columns(inv_subshift(st0, 3)) ^ rkp[3]; 468 - 469 - if (round == rounds - 2) 470 - break; 471 - 472 - st0[0] = inv_mix_columns(inv_subshift(st1, 0)) ^ rkp[4]; 473 - st0[1] = inv_mix_columns(inv_subshift(st1, 1)) ^ rkp[5]; 474 - st0[2] = inv_mix_columns(inv_subshift(st1, 2)) ^ rkp[6]; 475 - st0[3] = inv_mix_columns(inv_subshift(st1, 3)) ^ rkp[7]; 476 - } 477 - 478 - put_unaligned_le32(inv_subshift(st1, 0) ^ rkp[4], out); 479 - put_unaligned_le32(inv_subshift(st1, 1) ^ rkp[5], out + 4); 480 - put_unaligned_le32(inv_subshift(st1, 2) ^ rkp[6], out + 8); 481 - put_unaligned_le32(inv_subshift(st1, 3) ^ rkp[7], out + 12); 482 - } 483 - EXPORT_SYMBOL(aes_decrypt_old); 484 - 485 501 /* 486 502 * Note: the aes_prepare*key_* names reflect the fact that the implementation 487 503 * might not actually expand the key. (The s390 code for example doesn't.) 
··· 502 608 } 503 609 EXPORT_SYMBOL(aes_prepareenckey); 504 610 505 - void aes_encrypt_new(aes_encrypt_arg key, u8 out[AES_BLOCK_SIZE], 506 - const u8 in[AES_BLOCK_SIZE]) 611 + void aes_encrypt(aes_encrypt_arg key, u8 out[AES_BLOCK_SIZE], 612 + const u8 in[AES_BLOCK_SIZE]) 507 613 { 508 614 aes_encrypt_arch(key.enc_key, out, in); 509 615 } 510 - EXPORT_SYMBOL(aes_encrypt_new); 616 + EXPORT_SYMBOL(aes_encrypt); 511 617 512 - void aes_decrypt_new(const struct aes_key *key, u8 out[AES_BLOCK_SIZE], 513 - const u8 in[AES_BLOCK_SIZE]) 618 + void aes_decrypt(const struct aes_key *key, u8 out[AES_BLOCK_SIZE], 619 + const u8 in[AES_BLOCK_SIZE]) 514 620 { 515 621 aes_decrypt_arch(key, out, in); 516 622 } 517 - EXPORT_SYMBOL(aes_decrypt_new); 623 + EXPORT_SYMBOL(aes_decrypt); 518 624 519 625 #ifdef aes_mod_init_arch 520 626 static int __init aes_mod_init(void)