Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux
1
fork

Configure Feed

Select the types of activity you want to include in your feed.

lib/crypto: sparc/aes: Migrate optimized code into library

Move the SPARC64 AES assembly code into lib/crypto/, wire the key
expansion and single-block en/decryption functions up to the AES library
API, and remove the "aes-sparc64" crypto_cipher algorithm.

The result is that both the AES library and crypto_cipher APIs use the
SPARC64 AES opcodes, whereas previously only crypto_cipher did (and it
wasn't enabled by default, which this commit fixes as well).

Note that some of the functions in the SPARC64 AES assembly code are
still used by the AES mode implementations in
arch/sparc/crypto/aes_glue.c. For now, just export these functions.
These exports will go away once the AES mode implementations are
migrated to the library as well. (Trying to split up the assembly file
seemed like much more trouble than it would be worth.)

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20260112192035.10427-17-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>

+200 -137
+1 -1
arch/sparc/crypto/Kconfig
··· 19 19 config CRYPTO_AES_SPARC64 20 20 tristate "Ciphers: AES, modes: ECB, CBC, CTR" 21 21 depends on SPARC64 22 + select CRYPTO_LIB_AES 22 23 select CRYPTO_SKCIPHER 23 24 help 24 - Block ciphers: AES cipher algorithms (FIPS-197) 25 25 Length-preserving ciphers: AES with ECB, CBC, and CTR modes 26 26 27 27 Architecture: sparc64 using crypto instructions
+1 -1
arch/sparc/crypto/Makefile
··· 7 7 obj-$(CONFIG_CRYPTO_DES_SPARC64) += des-sparc64.o 8 8 obj-$(CONFIG_CRYPTO_CAMELLIA_SPARC64) += camellia-sparc64.o 9 9 10 - aes-sparc64-y := aes_asm.o aes_glue.o 10 + aes-sparc64-y := aes_glue.o 11 11 des-sparc64-y := des_asm.o des_glue.o 12 12 camellia-sparc64-y := camellia_asm.o camellia_glue.o
arch/sparc/crypto/aes_asm.S lib/crypto/sparc/aes_asm.S
+5 -135
arch/sparc/crypto/aes_glue.c
··· 32 32 #include <asm/elf.h> 33 33 34 34 struct aes_ops { 35 - void (*encrypt)(const u64 *key, const u32 *input, u32 *output); 36 - void (*decrypt)(const u64 *key, const u32 *input, u32 *output); 37 35 void (*load_encrypt_keys)(const u64 *key); 38 36 void (*load_decrypt_keys)(const u64 *key); 39 37 void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output, ··· 53 55 u32 expanded_key_length; 54 56 }; 55 57 56 - extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input, 57 - u32 *output); 58 - extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input, 59 - u32 *output); 60 - extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input, 61 - u32 *output); 62 - 63 - extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input, 64 - u32 *output); 65 - extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input, 66 - u32 *output); 67 - extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input, 68 - u32 *output); 69 - 70 - extern void aes_sparc64_load_encrypt_keys_128(const u64 *key); 71 - extern void aes_sparc64_load_encrypt_keys_192(const u64 *key); 72 - extern void aes_sparc64_load_encrypt_keys_256(const u64 *key); 73 - 74 - extern void aes_sparc64_load_decrypt_keys_128(const u64 *key); 75 - extern void aes_sparc64_load_decrypt_keys_192(const u64 *key); 76 - extern void aes_sparc64_load_decrypt_keys_256(const u64 *key); 77 - 78 - extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input, 79 - u64 *output, unsigned int len); 80 - extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input, 81 - u64 *output, unsigned int len); 82 - extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input, 83 - u64 *output, unsigned int len); 84 - 85 - extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input, 86 - u64 *output, unsigned int len); 87 - extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input, 88 - u64 *output, unsigned int len); 89 
- extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input, 90 - u64 *output, unsigned int len); 91 - 92 - extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input, 93 - u64 *output, unsigned int len, 94 - u64 *iv); 95 - 96 - extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input, 97 - u64 *output, unsigned int len, 98 - u64 *iv); 99 - 100 - extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input, 101 - u64 *output, unsigned int len, 102 - u64 *iv); 103 - 104 - extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input, 105 - u64 *output, unsigned int len, 106 - u64 *iv); 107 - 108 - extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input, 109 - u64 *output, unsigned int len, 110 - u64 *iv); 111 - 112 - extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input, 113 - u64 *output, unsigned int len, 114 - u64 *iv); 115 - 116 - extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input, 117 - u64 *output, unsigned int len, 118 - u64 *iv); 119 - extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input, 120 - u64 *output, unsigned int len, 121 - u64 *iv); 122 - extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input, 123 - u64 *output, unsigned int len, 124 - u64 *iv); 125 - 126 58 static struct aes_ops aes128_ops = { 127 - .encrypt = aes_sparc64_encrypt_128, 128 - .decrypt = aes_sparc64_decrypt_128, 129 59 .load_encrypt_keys = aes_sparc64_load_encrypt_keys_128, 130 60 .load_decrypt_keys = aes_sparc64_load_decrypt_keys_128, 131 61 .ecb_encrypt = aes_sparc64_ecb_encrypt_128, ··· 64 138 }; 65 139 66 140 static struct aes_ops aes192_ops = { 67 - .encrypt = aes_sparc64_encrypt_192, 68 - .decrypt = aes_sparc64_decrypt_192, 69 141 .load_encrypt_keys = aes_sparc64_load_encrypt_keys_192, 70 142 .load_decrypt_keys = aes_sparc64_load_decrypt_keys_192, 71 143 .ecb_encrypt = aes_sparc64_ecb_encrypt_192, ··· 74 150 }; 75 151 76 152 
static struct aes_ops aes256_ops = { 77 - .encrypt = aes_sparc64_encrypt_256, 78 - .decrypt = aes_sparc64_decrypt_256, 79 153 .load_encrypt_keys = aes_sparc64_load_encrypt_keys_256, 80 154 .load_decrypt_keys = aes_sparc64_load_decrypt_keys_256, 81 155 .ecb_encrypt = aes_sparc64_ecb_encrypt_256, ··· 83 161 .ctr_crypt = aes_sparc64_ctr_crypt_256, 84 162 }; 85 163 86 - extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key, 87 - unsigned int key_len); 88 - 89 - static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 90 - unsigned int key_len) 164 + static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key, 165 + unsigned int key_len) 91 166 { 92 - struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm); 167 + struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm); 93 168 94 169 switch (key_len) { 95 170 case AES_KEYSIZE_128: ··· 112 193 ctx->key_length = key_len; 113 194 114 195 return 0; 115 - } 116 - 117 - static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key, 118 - unsigned int key_len) 119 - { 120 - return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len); 121 - } 122 - 123 - static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) 124 - { 125 - struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm); 126 - 127 - ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst); 128 - } 129 - 130 - static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) 131 - { 132 - struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm); 133 - 134 - ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst); 135 196 } 136 197 137 198 static int ecb_encrypt(struct skcipher_request *req) ··· 257 358 return err; 258 359 } 259 360 260 - static struct crypto_alg cipher_alg = { 261 - .cra_name = "aes", 262 - .cra_driver_name = "aes-sparc64", 263 - .cra_priority = SPARC_CR_OPCODE_PRIORITY, 264 - .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 265 - .cra_blocksize = 
AES_BLOCK_SIZE, 266 - .cra_ctxsize = sizeof(struct crypto_sparc64_aes_ctx), 267 - .cra_alignmask = 3, 268 - .cra_module = THIS_MODULE, 269 - .cra_u = { 270 - .cipher = { 271 - .cia_min_keysize = AES_MIN_KEY_SIZE, 272 - .cia_max_keysize = AES_MAX_KEY_SIZE, 273 - .cia_setkey = aes_set_key, 274 - .cia_encrypt = crypto_aes_encrypt, 275 - .cia_decrypt = crypto_aes_decrypt 276 - } 277 - } 278 - }; 279 - 280 361 static struct skcipher_alg skcipher_algs[] = { 281 362 { 282 363 .base.cra_name = "ecb(aes)", ··· 319 440 320 441 static int __init aes_sparc64_mod_init(void) 321 442 { 322 - int err; 323 - 324 443 if (!sparc64_has_aes_opcode()) { 325 444 pr_info("sparc64 aes opcodes not available.\n"); 326 445 return -ENODEV; 327 446 } 328 447 pr_info("Using sparc64 aes opcodes optimized AES implementation\n"); 329 - err = crypto_register_alg(&cipher_alg); 330 - if (err) 331 - return err; 332 - err = crypto_register_skciphers(skcipher_algs, 333 - ARRAY_SIZE(skcipher_algs)); 334 - if (err) 335 - crypto_unregister_alg(&cipher_alg); 336 - return err; 448 + return crypto_register_skciphers(skcipher_algs, 449 + ARRAY_SIZE(skcipher_algs)); 337 450 } 338 451 339 452 static void __exit aes_sparc64_mod_fini(void) 340 453 { 341 - crypto_unregister_alg(&cipher_alg); 342 454 crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs)); 343 455 } 344 456
+42
include/crypto/aes.h
··· 49 49 #elif defined(CONFIG_S390) 50 50 /* Used when the CPU supports CPACF AES for this key's length */ 51 51 u8 raw_key[AES_MAX_KEY_SIZE]; 52 + #elif defined(CONFIG_SPARC64) 53 + /* Used when the CPU supports the SPARC64 AES opcodes */ 54 + u64 sparc_rndkeys[AES_MAX_KEYLENGTH / sizeof(u64)]; 52 55 #endif 53 56 #endif /* CONFIG_CRYPTO_LIB_AES_ARCH */ 54 57 }; ··· 202 199 void aes_p8_xts_decrypt(const u8 *in, u8 *out, size_t len, 203 200 const struct p8_aes_key *key1, 204 201 const struct p8_aes_key *key2, u8 *iv); 202 + #elif defined(CONFIG_SPARC64) 203 + void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key, 204 + unsigned int key_len); 205 + void aes_sparc64_load_encrypt_keys_128(const u64 *key); 206 + void aes_sparc64_load_encrypt_keys_192(const u64 *key); 207 + void aes_sparc64_load_encrypt_keys_256(const u64 *key); 208 + void aes_sparc64_load_decrypt_keys_128(const u64 *key); 209 + void aes_sparc64_load_decrypt_keys_192(const u64 *key); 210 + void aes_sparc64_load_decrypt_keys_256(const u64 *key); 211 + void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input, u64 *output, 212 + unsigned int len); 213 + void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input, u64 *output, 214 + unsigned int len); 215 + void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input, u64 *output, 216 + unsigned int len); 217 + void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input, u64 *output, 218 + unsigned int len); 219 + void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input, u64 *output, 220 + unsigned int len); 221 + void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input, u64 *output, 222 + unsigned int len); 223 + void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input, u64 *output, 224 + unsigned int len, u64 *iv); 225 + void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input, u64 *output, 226 + unsigned int len, u64 *iv); 227 + void aes_sparc64_cbc_encrypt_256(const u64 *key, const 
u64 *input, u64 *output, 228 + unsigned int len, u64 *iv); 229 + void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input, u64 *output, 230 + unsigned int len, u64 *iv); 231 + void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input, u64 *output, 232 + unsigned int len, u64 *iv); 233 + void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input, u64 *output, 234 + unsigned int len, u64 *iv); 235 + void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input, u64 *output, 236 + unsigned int len, u64 *iv); 237 + void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input, u64 *output, 238 + unsigned int len, u64 *iv); 239 + void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input, u64 *output, 240 + unsigned int len, u64 *iv); 205 241 #endif 206 242 207 243 /**
+1
lib/crypto/Kconfig
··· 20 20 default y if RISCV && 64BIT && TOOLCHAIN_HAS_VECTOR_CRYPTO && \ 21 21 RISCV_EFFICIENT_VECTOR_UNALIGNED_ACCESS 22 22 default y if S390 23 + default y if SPARC64 23 24 24 25 config CRYPTO_LIB_AESCFB 25 26 tristate
+1
lib/crypto/Makefile
··· 51 51 endif # CONFIG_PPC 52 52 53 53 libaes-$(CONFIG_RISCV) += riscv/aes-riscv64-zvkned.o 54 + libaes-$(CONFIG_SPARC) += sparc/aes_asm.o 54 55 endif # CONFIG_CRYPTO_LIB_AES_ARCH 55 56 56 57 ################################################################################
+149
lib/crypto/sparc/aes.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0-only */ 2 + /* 3 + * AES accelerated using the sparc64 aes opcodes 4 + * 5 + * Copyright (C) 2008, Intel Corp. 6 + * Copyright (c) 2010, Intel Corporation. 7 + * Copyright 2026 Google LLC 8 + */ 9 + 10 + #include <asm/fpumacro.h> 11 + #include <asm/opcodes.h> 12 + #include <asm/pstate.h> 13 + #include <asm/elf.h> 14 + 15 + static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_aes_opcodes); 16 + 17 + EXPORT_SYMBOL_GPL(aes_sparc64_key_expand); 18 + EXPORT_SYMBOL_GPL(aes_sparc64_load_encrypt_keys_128); 19 + EXPORT_SYMBOL_GPL(aes_sparc64_load_encrypt_keys_192); 20 + EXPORT_SYMBOL_GPL(aes_sparc64_load_encrypt_keys_256); 21 + EXPORT_SYMBOL_GPL(aes_sparc64_load_decrypt_keys_128); 22 + EXPORT_SYMBOL_GPL(aes_sparc64_load_decrypt_keys_192); 23 + EXPORT_SYMBOL_GPL(aes_sparc64_load_decrypt_keys_256); 24 + EXPORT_SYMBOL_GPL(aes_sparc64_ecb_encrypt_128); 25 + EXPORT_SYMBOL_GPL(aes_sparc64_ecb_encrypt_192); 26 + EXPORT_SYMBOL_GPL(aes_sparc64_ecb_encrypt_256); 27 + EXPORT_SYMBOL_GPL(aes_sparc64_ecb_decrypt_128); 28 + EXPORT_SYMBOL_GPL(aes_sparc64_ecb_decrypt_192); 29 + EXPORT_SYMBOL_GPL(aes_sparc64_ecb_decrypt_256); 30 + EXPORT_SYMBOL_GPL(aes_sparc64_cbc_encrypt_128); 31 + EXPORT_SYMBOL_GPL(aes_sparc64_cbc_encrypt_192); 32 + EXPORT_SYMBOL_GPL(aes_sparc64_cbc_encrypt_256); 33 + EXPORT_SYMBOL_GPL(aes_sparc64_cbc_decrypt_128); 34 + EXPORT_SYMBOL_GPL(aes_sparc64_cbc_decrypt_192); 35 + EXPORT_SYMBOL_GPL(aes_sparc64_cbc_decrypt_256); 36 + EXPORT_SYMBOL_GPL(aes_sparc64_ctr_crypt_128); 37 + EXPORT_SYMBOL_GPL(aes_sparc64_ctr_crypt_192); 38 + EXPORT_SYMBOL_GPL(aes_sparc64_ctr_crypt_256); 39 + 40 + void aes_sparc64_encrypt_128(const u64 *key, const u32 *input, u32 *output); 41 + void aes_sparc64_encrypt_192(const u64 *key, const u32 *input, u32 *output); 42 + void aes_sparc64_encrypt_256(const u64 *key, const u32 *input, u32 *output); 43 + void aes_sparc64_decrypt_128(const u64 *key, const u32 *input, u32 *output); 44 + void 
aes_sparc64_decrypt_192(const u64 *key, const u32 *input, u32 *output); 45 + void aes_sparc64_decrypt_256(const u64 *key, const u32 *input, u32 *output); 46 + 47 + static void aes_preparekey_arch(union aes_enckey_arch *k, 48 + union aes_invkey_arch *inv_k, 49 + const u8 *in_key, int key_len, int nrounds) 50 + { 51 + if (static_branch_likely(&have_aes_opcodes)) { 52 + u32 aligned_key[AES_MAX_KEY_SIZE / 4]; 53 + 54 + if (IS_ALIGNED((uintptr_t)in_key, 4)) { 55 + aes_sparc64_key_expand((const u32 *)in_key, 56 + k->sparc_rndkeys, key_len); 57 + } else { 58 + memcpy(aligned_key, in_key, key_len); 59 + aes_sparc64_key_expand(aligned_key, 60 + k->sparc_rndkeys, key_len); 61 + memzero_explicit(aligned_key, key_len); 62 + } 63 + /* 64 + * Note that nothing needs to be written to inv_k (if it's 65 + * non-NULL) here, since the SPARC64 assembly code uses 66 + * k->sparc_rndkeys for both encryption and decryption. 67 + */ 68 + } else { 69 + aes_expandkey_generic(k->rndkeys, 70 + inv_k ? inv_k->inv_rndkeys : NULL, 71 + in_key, key_len); 72 + } 73 + } 74 + 75 + static void aes_sparc64_encrypt(const struct aes_enckey *key, 76 + const u32 *input, u32 *output) 77 + { 78 + if (key->len == AES_KEYSIZE_128) 79 + aes_sparc64_encrypt_128(key->k.sparc_rndkeys, input, output); 80 + else if (key->len == AES_KEYSIZE_192) 81 + aes_sparc64_encrypt_192(key->k.sparc_rndkeys, input, output); 82 + else 83 + aes_sparc64_encrypt_256(key->k.sparc_rndkeys, input, output); 84 + } 85 + 86 + static void aes_encrypt_arch(const struct aes_enckey *key, 87 + u8 out[AES_BLOCK_SIZE], 88 + const u8 in[AES_BLOCK_SIZE]) 89 + { 90 + u32 bounce_buf[AES_BLOCK_SIZE / 4]; 91 + 92 + if (static_branch_likely(&have_aes_opcodes)) { 93 + if (IS_ALIGNED((uintptr_t)in | (uintptr_t)out, 4)) { 94 + aes_sparc64_encrypt(key, (const u32 *)in, (u32 *)out); 95 + } else { 96 + memcpy(bounce_buf, in, AES_BLOCK_SIZE); 97 + aes_sparc64_encrypt(key, bounce_buf, bounce_buf); 98 + memcpy(out, bounce_buf, AES_BLOCK_SIZE); 99 + } 100 + } 
else { 101 + aes_encrypt_generic(key->k.rndkeys, key->nrounds, out, in); 102 + } 103 + } 104 + 105 + static void aes_sparc64_decrypt(const struct aes_key *key, 106 + const u32 *input, u32 *output) 107 + { 108 + if (key->len == AES_KEYSIZE_128) 109 + aes_sparc64_decrypt_128(key->k.sparc_rndkeys, input, output); 110 + else if (key->len == AES_KEYSIZE_192) 111 + aes_sparc64_decrypt_192(key->k.sparc_rndkeys, input, output); 112 + else 113 + aes_sparc64_decrypt_256(key->k.sparc_rndkeys, input, output); 114 + } 115 + 116 + static void aes_decrypt_arch(const struct aes_key *key, 117 + u8 out[AES_BLOCK_SIZE], 118 + const u8 in[AES_BLOCK_SIZE]) 119 + { 120 + u32 bounce_buf[AES_BLOCK_SIZE / 4]; 121 + 122 + if (static_branch_likely(&have_aes_opcodes)) { 123 + if (IS_ALIGNED((uintptr_t)in | (uintptr_t)out, 4)) { 124 + aes_sparc64_decrypt(key, (const u32 *)in, (u32 *)out); 125 + } else { 126 + memcpy(bounce_buf, in, AES_BLOCK_SIZE); 127 + aes_sparc64_decrypt(key, bounce_buf, bounce_buf); 128 + memcpy(out, bounce_buf, AES_BLOCK_SIZE); 129 + } 130 + } else { 131 + aes_decrypt_generic(key->inv_k.inv_rndkeys, key->nrounds, 132 + out, in); 133 + } 134 + } 135 + 136 + #define aes_mod_init_arch aes_mod_init_arch 137 + static void aes_mod_init_arch(void) 138 + { 139 + unsigned long cfr; 140 + 141 + if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO)) 142 + return; 143 + 144 + __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr)); 145 + if (!(cfr & CFR_AES)) 146 + return; 147 + 148 + static_branch_enable(&have_aes_opcodes); 149 + }