Linux kernel mirror (for testing)
git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel
os
linux
1// SPDX-License-Identifier: GPL-2.0
2/*
3 * Copyright (C) 2017 Marvell
4 *
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
6 */
7
8#include <linux/unaligned.h>
9#include <linux/device.h>
10#include <linux/dma-mapping.h>
11#include <linux/dmapool.h>
12#include <crypto/aead.h>
13#include <crypto/aes.h>
14#include <crypto/authenc.h>
15#include <crypto/chacha.h>
16#include <crypto/ctr.h>
17#include <crypto/internal/des.h>
18#include <crypto/gcm.h>
19#include <crypto/ghash.h>
20#include <crypto/md5.h>
21#include <crypto/poly1305.h>
22#include <crypto/sha1.h>
23#include <crypto/sha2.h>
24#include <crypto/sm3.h>
25#include <crypto/sm4.h>
26#include <crypto/xts.h>
27#include <crypto/skcipher.h>
28#include <crypto/internal/aead.h>
29#include <crypto/internal/skcipher.h>
30
31#include "safexcel.h"
32
/* Direction of a cipher operation, as encoded into the context control words */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
37
/* Cipher algorithms handled by this driver's skcipher/AEAD implementations */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
45
/* Per-transform (tfm) context shared by all skcipher and AEAD algorithms */
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;			/* CONTEXT_CONTROL_CRYPTO_MODE_* selector */
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;			/* AAD bytes to subtract from assoclen (token build) */
	u8 blocksz;			/* cipher block size; also IV length for CBC path */
	u32 ivmask;			/* EIP197_OPTION_* IV option bits for the cdesc */
	u32 ctrinit;			/* initial counter value (0 or 1, big endian in token) */

	__le32 key[16];			/* cipher key words (sized for largest supported key) */
	u32 nonce;			/* RFC3686/ESP salt, taken from the key tail at setkey */
	unsigned int key_len, xts;	/* key length in bytes; xts shifts keysize check */

	/* All the below is AEAD specific */
	u32 hash_alg;			/* CONTEXT_CONTROL_CRYPTO_ALG_* hash selector */
	u32 state_sz;			/* hash state (ipad/opad digest) size in bytes */

	/* Fallback AEAD tfm; not used in this part of the file — see users elsewhere */
	struct crypto_aead *fback;
};
69
/* Per-request state kept in the crypto request context */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;		/* request carries a context-record invalidation */
	int nr_src, nr_dst;	/* SG entry counts actually DMA-mapped for src/dst */
};
77
/*
 * Program the IV/nonce words of a command descriptor for a plain skcipher
 * request. Returns the number of 32-bit token words occupied by the IV.
 */
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return 4;
	}
	if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		/* NOTE(review): direct u32 load assumes iv is 4-byte aligned — confirm */
		cdesc->control_data.token[3] = *(u32 *)iv;
		return 4;
	}

	/* All other modes (e.g. CBC/ECB): copy one block of IV verbatim */
	cdesc->control_data.options |= ctx->ivmask;
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
	return ctx->blocksz / sizeof(u32);
}
105
/*
 * Build the instruction token for a plain skcipher operation: program the
 * IV, then append one DIRECTION instruction covering the whole payload,
 * routed through the crypto engine and out to the result buffer.
 */
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    struct safexcel_token *atoken,
				    u32 length)
{
	struct safexcel_token *token;
	int ivlen;

	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
	if (ivlen == 4) {
		/* No space in cdesc, instruction moves to atoken */
		cdesc->additional_cdata_size = 1;
		token = atoken;
	} else {
		/* Everything fits in cdesc */
		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
		/* Need to pad with NOP */
		eip197_noop_token(&token[1]);
	}

	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token->packet_length = length;
	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
		      EIP197_TOKEN_STAT_LAST_HASH;
	token->instructions = EIP197_TOKEN_INS_LAST |
			      EIP197_TOKEN_INS_TYPE_CRYPTO |
			      EIP197_TOKEN_INS_TYPE_OUTPUT;
}
134
/*
 * Program the embedded IV words of a command descriptor for an AEAD
 * request. AEAD modes always use the 4-word token IV area; the layout
 * depends on the cipher mode (CTR/ESP, GCM/ChaCha, or CBC).
 */
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}
160
/*
 * Build the full instruction token stream for an AEAD operation.
 *
 * Emits, in order: the embedded IV (and for CCM the synthesized B0 block
 * plus encoded AAD length), a DIRECTION instruction hashing the AAD,
 * optional alignment/skip instructions, the crypto DIRECTION instruction
 * for the payload, and finally the ICV insert (encrypt) or
 * retrieve+verify (decrypt) instructions. The total token size is
 * written back into the command descriptor.
 *
 * @direction:  SAFEXCEL_ENCRYPT or SAFEXCEL_DECRYPT
 * @cryptlen:   payload length incl. ICV on decrypt (ICV subtracted here)
 * @assoclen:   AAD length as seen by the caller (ctx->aadskip subtracted)
 * @digestsize: ICV length in bytes
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		/* Feed B0 (and optional AAD length) from the token to the hash */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			/* AAD length, 16 bit big endian, swapped into place */
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			/* No payload: AAD padding is the final hash input */
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}
371
372static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
373 const u8 *key, unsigned int len)
374{
375 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
376 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
377 struct safexcel_crypto_priv *priv = ctx->base.priv;
378 struct crypto_aes_ctx aes;
379 int ret, i;
380
381 ret = aes_expandkey(&aes, key, len);
382 if (ret)
383 return ret;
384
385 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
386 for (i = 0; i < len / sizeof(u32); i++) {
387 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
388 ctx->base.needs_inv = true;
389 break;
390 }
391 }
392 }
393
394 for (i = 0; i < len / sizeof(u32); i++)
395 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
396
397 ctx->key_len = len;
398
399 memzero_explicit(&aes, sizeof(aes));
400 return 0;
401}
402
403static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
404 unsigned int len)
405{
406 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
407 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
408 struct safexcel_crypto_priv *priv = ctx->base.priv;
409 struct crypto_authenc_keys keys;
410 struct crypto_aes_ctx aes;
411 int err = -EINVAL, i;
412 const char *alg;
413
414 if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
415 goto badkey;
416
417 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
418 /* Must have at least space for the nonce here */
419 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
420 goto badkey;
421 /* last 4 bytes of key are the nonce! */
422 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
423 CTR_RFC3686_NONCE_SIZE);
424 /* exclude the nonce here */
425 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
426 }
427
428 /* Encryption key */
429 switch (ctx->alg) {
430 case SAFEXCEL_DES:
431 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
432 if (unlikely(err))
433 goto badkey;
434 break;
435 case SAFEXCEL_3DES:
436 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
437 if (unlikely(err))
438 goto badkey;
439 break;
440 case SAFEXCEL_AES:
441 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
442 if (unlikely(err))
443 goto badkey;
444 break;
445 case SAFEXCEL_SM4:
446 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
447 goto badkey;
448 break;
449 default:
450 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
451 goto badkey;
452 }
453
454 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
455 for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
456 if (le32_to_cpu(ctx->key[i]) !=
457 ((u32 *)keys.enckey)[i]) {
458 ctx->base.needs_inv = true;
459 break;
460 }
461 }
462 }
463
464 /* Auth key */
465 switch (ctx->hash_alg) {
466 case CONTEXT_CONTROL_CRYPTO_ALG_MD5:
467 alg = "safexcel-md5";
468 break;
469 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
470 alg = "safexcel-sha1";
471 break;
472 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
473 alg = "safexcel-sha224";
474 break;
475 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
476 alg = "safexcel-sha256";
477 break;
478 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
479 alg = "safexcel-sha384";
480 break;
481 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
482 alg = "safexcel-sha512";
483 break;
484 case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
485 alg = "safexcel-sm3";
486 break;
487 default:
488 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
489 goto badkey;
490 }
491
492 if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
493 alg, ctx->state_sz))
494 goto badkey;
495
496 /* Now copy the keys into the context */
497 for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
498 ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
499 ctx->key_len = keys.enckeylen;
500
501 memzero_explicit(&keys, sizeof(keys));
502 return 0;
503
504badkey:
505 memzero_explicit(&keys, sizeof(keys));
506 return err;
507}
508
/*
 * Fill in the context control words (control0/control1) of the first
 * command descriptor: operation type (encrypt/decrypt, hash-in/out
 * ordering), context record size, cipher algorithm and key size.
 * Returns 0, or -EINVAL for an unsupported AES key size.
 */
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	/* Context record size starts at the key, in 32-bit words */
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			/* GCM/CCM: one combined XCM digest state */
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			/* Chacha-poly is fully programmed here, done */
			return 0;
		} else {
			/* HMAC authenc: both ipad and opad states follow the key */
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		/* Select hash-before/after-crypt ordering per mode+direction */
		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		/* Plain skcipher: crypto only, no hash */
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		/* For XTS, key_len holds both halves, so shift by ctx->xts */
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}
610
/*
 * Handle completion of a normal (non-invalidation) cipher request:
 * consume all result descriptors, collect the first error, unmap the
 * DMA scatterlists and, for CBC encrypt, copy the last ciphertext block
 * back into the request IV (chaining). Returns the number of result
 * descriptors consumed; the request error goes through *ret.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Keep only the first error seen across all descriptors */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
676
/*
 * Map the request buffers for DMA and build the command and result
 * descriptor chains for one cipher/AEAD request on @ring.
 *
 * Handles: AEAD digest-size adjustment of the output length, saving the
 * CBC-decrypt IV before an in-place operation overwrites it, the EIP97
 * zero-length-input workaround, skipping the AAD area in the output
 * scatterlist, and the AAD-only AEAD decrypt case that still needs a
 * dummy result descriptor. On success returns 0 with *commands/*results
 * set; on failure rolls back all ring entries and unmaps DMA.
 */
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	struct safexcel_token *atoken;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		/* Copy ipad (and for authenc also opad) into the context record */
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       &ctx->base.ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember actual input length, source buffer length may be
	 * updated in case of inline operation below.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		/* In-place: one bidirectional mapping covering the larger side */
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
			     (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		if (sreq->nr_src > 0 &&
		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
			return -EIO;
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}

		if (sreq->nr_src > 0 &&
		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
			return -EIO;

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			ret = -EINVAL;
			goto unmap;
		}

		if (sreq->nr_dst > 0 &&
		    !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
			ret = -EIO;
			goto unmap;
		}
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (!totlen) {
		/*
		 * The EIP97 cannot deal with zero length input packets!
		 * So stuff a dummy command descriptor indicating a 1 byte
		 * (dummy) input packet, using the context record as source.
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring,
						 1, 1, ctx->base.ctxr_dma,
						 1, 1, ctx->base.ctxr_dma,
						 &atoken);
		if (IS_ERR(first_cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(first_cdesc);
			goto cdesc_rollback;
		}
		n_cdesc = 1;
		goto skip_cdesc;
	}

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued < len)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma, &atoken);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;

		n_cdesc++;
		queued -= len;
		if (!queued)
			break;
	}
skip_cdesc:
	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap:
	if (src == dst) {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	return ret;
}
920
/*
 * Handle completion of a context-invalidation request: consume the
 * result descriptors, then either free the context record (tfm is being
 * torn down, base.exit_inv set) or re-queue the original request on a
 * freshly selected ring so it runs with a clean context. Returns the
 * number of result descriptors consumed.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Keep only the first error seen */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* Tfm teardown: release the context record and finish */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/* Otherwise resubmit the original request on a (possibly new) ring */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Request is requeued, not finished — do not complete it yet */
	*should_complete = false;

	return ndesc;
}
979
980static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
981 int ring,
982 struct crypto_async_request *async,
983 bool *should_complete, int *ret)
984{
985 struct skcipher_request *req = skcipher_request_cast(async);
986 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
987 int err;
988
989 if (sreq->needs_inv) {
990 sreq->needs_inv = false;
991 err = safexcel_handle_inv_result(priv, ring, async, sreq,
992 should_complete, ret);
993 } else {
994 err = safexcel_handle_req_result(priv, ring, async, req->src,
995 req->dst, req->cryptlen, sreq,
996 should_complete, ret);
997 }
998
999 return err;
1000}
1001
1002static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
1003 int ring,
1004 struct crypto_async_request *async,
1005 bool *should_complete, int *ret)
1006{
1007 struct aead_request *req = aead_request_cast(async);
1008 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1009 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1010 int err;
1011
1012 if (sreq->needs_inv) {
1013 sreq->needs_inv = false;
1014 err = safexcel_handle_inv_result(priv, ring, async, sreq,
1015 should_complete, ret);
1016 } else {
1017 err = safexcel_handle_req_result(priv, ring, async, req->src,
1018 req->dst,
1019 req->cryptlen + crypto_aead_authsize(tfm),
1020 sreq, should_complete, ret);
1021 }
1022
1023 return err;
1024}
1025
1026static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1027 int ring, int *commands, int *results)
1028{
1029 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1030 struct safexcel_crypto_priv *priv = ctx->base.priv;
1031 int ret;
1032
1033 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1034 if (unlikely(ret))
1035 return ret;
1036
1037 *commands = 1;
1038 *results = 1;
1039
1040 return 0;
1041}
1042
1043static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1044 int *commands, int *results)
1045{
1046 struct skcipher_request *req = skcipher_request_cast(async);
1047 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1048 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1049 struct safexcel_crypto_priv *priv = ctx->base.priv;
1050 int ret;
1051
1052 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1053
1054 if (sreq->needs_inv) {
1055 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1056 } else {
1057 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1058 u8 input_iv[AES_BLOCK_SIZE];
1059
1060 /*
1061 * Save input IV in case of CBC decrypt mode
1062 * Will be overwritten with output IV prior to use!
1063 */
1064 memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1065
1066 ret = safexcel_send_req(async, ring, sreq, req->src,
1067 req->dst, req->cryptlen, 0, 0, input_iv,
1068 commands, results);
1069 }
1070
1071 sreq->rdescs = *results;
1072 return ret;
1073}
1074
/*
 * Ring worker send callback for AEAD requests: emit either the cipher+auth
 * descriptors or, if flagged, a context cache invalidation instead.
 */
static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* Invalidations only exist when the engine has a record cache */
	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	/* Record how many result descriptors completion must reap */
	sreq->rdescs = *results;
	return ret;
}
1097
/*
 * Synchronously invalidate the engine's cached context record for this
 * transform: enqueue an invalidation request on the context's ring, kick
 * the ring worker, and block until the invalidation completes.
 *
 * Returns 0 on success or the completion error code.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct crypto_wait *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ring = ctx->base.ring;
	int err;

	/* NOTE(review): re-fetches the context via base->tfm; both callers
	 * pass the request built on the same tfm, so this is the same ctx.
	 */
	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	/* Kick the ring worker so it picks up the queued invalidation */
	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Wait synchronously for the invalidation to complete */
	err = crypto_wait_req(-EINPROGRESS, result);

	if (err) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 err);
		return err;
	}

	return 0;
}
1130
1131static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1132{
1133 EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1134 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1135 DECLARE_CRYPTO_WAIT(result);
1136
1137 memset(req, 0, sizeof(struct skcipher_request));
1138
1139 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1140 crypto_req_done, &result);
1141 skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1142
1143 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1144}
1145
1146static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1147{
1148 EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1149 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1150 DECLARE_CRYPTO_WAIT(result);
1151
1152 memset(req, 0, sizeof(struct aead_request));
1153
1154 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1155 crypto_req_done, &result);
1156 aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1157
1158 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1159}
1160
/*
 * Common entry point for all cipher and AEAD requests: allocate the DMA
 * context record on first use, flag a cache invalidation when the key was
 * changed behind a cached record, then enqueue the request on the
 * context's assigned ring and kick its worker.
 *
 * Returns the crypto_enqueue_request() status (-EINPROGRESS/-EBUSY on
 * success) or -ENOMEM if the context record could not be allocated.
 */
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		/* Existing record: push any pending invalidation into this
		 * request and clear the context-level flag.
		 */
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		/* First request on this tfm: pick a ring and allocate the
		 * zeroed DMA context record.
		 */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
1197
1198static int safexcel_encrypt(struct skcipher_request *req)
1199{
1200 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1201 SAFEXCEL_ENCRYPT);
1202}
1203
1204static int safexcel_decrypt(struct skcipher_request *req)
1205{
1206 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1207 SAFEXCEL_DECRYPT);
1208}
1209
1210static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1211{
1212 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1213 struct safexcel_alg_template *tmpl =
1214 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1215 alg.skcipher.base);
1216
1217 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1218 sizeof(struct safexcel_cipher_req));
1219
1220 ctx->base.priv = tmpl->priv;
1221
1222 ctx->base.send = safexcel_skcipher_send;
1223 ctx->base.handle_result = safexcel_skcipher_handle_result;
1224 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1225 ctx->ctrinit = 1;
1226 return 0;
1227}
1228
/*
 * Common transform teardown: wipe the key material and, if a context
 * record exists, its data as well.
 *
 * Returns nonzero (-ENOMEM, used purely as a flag) when no context record
 * was ever allocated, telling callers to skip engine invalidation.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}
1242
/*
 * skcipher transform destructor: wipe the context, then either invalidate
 * the engine's cached record (EIP197 with record cache) or just free the
 * DMA context record (no cache, nothing to invalidate).
 */
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* Nonzero means no context record was allocated - nothing to do */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
1262
/*
 * AEAD transform destructor: wipe the context, then either invalidate
 * the engine's cached record (EIP197 with record cache) or just free the
 * DMA context record (no cache, nothing to invalidate).
 */
static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* Nonzero means no context record was allocated - nothing to do */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
1282
1283static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1284{
1285 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1286
1287 safexcel_skcipher_cra_init(tfm);
1288 ctx->alg = SAFEXCEL_AES;
1289 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1290 ctx->blocksz = 0;
1291 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1292 return 0;
1293}
1294
/* ecb(aes) skcipher offloaded to the EIP197/EIP97 engine */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1320
1321static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1322{
1323 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1324
1325 safexcel_skcipher_cra_init(tfm);
1326 ctx->alg = SAFEXCEL_AES;
1327 ctx->blocksz = AES_BLOCK_SIZE;
1328 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1329 return 0;
1330}
1331
/* cbc(aes) skcipher offloaded to the EIP197/EIP97 engine */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1358
1359static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1360 const u8 *key, unsigned int len)
1361{
1362 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1363 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1364 struct safexcel_crypto_priv *priv = ctx->base.priv;
1365 struct crypto_aes_ctx aes;
1366 int ret, i;
1367 unsigned int keylen;
1368
1369 /* last 4 bytes of key are the nonce! */
1370 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1371 /* exclude the nonce here */
1372 keylen = len - CTR_RFC3686_NONCE_SIZE;
1373 ret = aes_expandkey(&aes, key, keylen);
1374 if (ret)
1375 return ret;
1376
1377 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1378 for (i = 0; i < keylen / sizeof(u32); i++) {
1379 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1380 ctx->base.needs_inv = true;
1381 break;
1382 }
1383 }
1384 }
1385
1386 for (i = 0; i < keylen / sizeof(u32); i++)
1387 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1388
1389 ctx->key_len = keylen;
1390
1391 memzero_explicit(&aes, sizeof(aes));
1392 return 0;
1393}
1394
1395static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1396{
1397 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1398
1399 safexcel_skcipher_cra_init(tfm);
1400 ctx->alg = SAFEXCEL_AES;
1401 ctx->blocksz = AES_BLOCK_SIZE;
1402 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1403 return 0;
1404}
1405
/* rfc3686(ctr(aes)) skcipher; keysize includes the 4-byte RFC3686 nonce */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1433
/*
 * Set the DES key after running the kernel's DES key verification.
 * Schedules a context invalidation when a cached record would otherwise
 * keep using the old key.
 */
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1455
1456static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1457{
1458 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1459
1460 safexcel_skcipher_cra_init(tfm);
1461 ctx->alg = SAFEXCEL_DES;
1462 ctx->blocksz = DES_BLOCK_SIZE;
1463 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1464 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1465 return 0;
1466}
1467
/* cbc(des) skcipher offloaded to the EIP197/EIP97 engine */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1494
1495static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1496{
1497 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1498
1499 safexcel_skcipher_cra_init(tfm);
1500 ctx->alg = SAFEXCEL_DES;
1501 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1502 ctx->blocksz = 0;
1503 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1504 return 0;
1505}
1506
/* ecb(des) skcipher offloaded to the EIP197/EIP97 engine */
struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1532
/*
 * Set the 3DES (EDE) key after running the kernel's 3DES key verification.
 * Schedules a context invalidation when a cached record would otherwise
 * keep using the old key.
 */
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1554
1555static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1556{
1557 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1558
1559 safexcel_skcipher_cra_init(tfm);
1560 ctx->alg = SAFEXCEL_3DES;
1561 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1562 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1563 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1564 return 0;
1565}
1566
/* cbc(des3_ede) skcipher offloaded to the EIP197/EIP97 engine */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1593
1594static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1595{
1596 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1597
1598 safexcel_skcipher_cra_init(tfm);
1599 ctx->alg = SAFEXCEL_3DES;
1600 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1601 ctx->blocksz = 0;
1602 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1603 return 0;
1604}
1605
/* ecb(des3_ede) skcipher offloaded to the EIP197/EIP97 engine */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1631
1632static int safexcel_aead_encrypt(struct aead_request *req)
1633{
1634 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1635
1636 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1637}
1638
1639static int safexcel_aead_decrypt(struct aead_request *req)
1640{
1641 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1642
1643 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1644}
1645
/*
 * Common AEAD transform init: wires the context to the device instance
 * and sets AES-CBC defaults that the algorithm-specific init functions
 * then override as needed (cipher alg, block size, IV handling).
 */
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->base.priv = tmpl->priv;

	ctx->alg = SAFEXCEL_AES; /* default */
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}
1668
1669static int safexcel_aead_md5_cra_init(struct crypto_tfm *tfm)
1670{
1671 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1672
1673 safexcel_aead_cra_init(tfm);
1674 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1675 ctx->state_sz = MD5_DIGEST_SIZE;
1676 return 0;
1677}
1678
/* authenc(hmac(md5),cbc(aes)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_md5_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_MD5,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = MD5_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(md5),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-md5-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_md5_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1704
1705static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1706{
1707 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1708
1709 safexcel_aead_cra_init(tfm);
1710 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1711 ctx->state_sz = SHA1_DIGEST_SIZE;
1712 return 0;
1713}
1714
/* authenc(hmac(sha1),cbc(aes)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1740
1741static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1742{
1743 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1744
1745 safexcel_aead_cra_init(tfm);
1746 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1747 ctx->state_sz = SHA256_DIGEST_SIZE;
1748 return 0;
1749}
1750
/* authenc(hmac(sha256),cbc(aes)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1776
/* AEAD init selecting HMAC-SHA224 authentication. */
static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	/* SHA-224 carries a full SHA-256 sized internal state */
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}
1786
/* authenc(hmac(sha224),cbc(aes)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1812
1813static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1814{
1815 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1816
1817 safexcel_aead_cra_init(tfm);
1818 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1819 ctx->state_sz = SHA512_DIGEST_SIZE;
1820 return 0;
1821}
1822
/* authenc(hmac(sha512),cbc(aes)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1848
/* AEAD init selecting HMAC-SHA384 authentication. */
static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	/* SHA-384 carries a full SHA-512 sized internal state */
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}
1858
/* authenc(hmac(sha384),cbc(aes)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1884
1885static int safexcel_aead_md5_des3_cra_init(struct crypto_tfm *tfm)
1886{
1887 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1888
1889 safexcel_aead_md5_cra_init(tfm);
1890 ctx->alg = SAFEXCEL_3DES; /* override default */
1891 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1892 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1893 return 0;
1894}
1895
/* authenc(hmac(md5),cbc(des3_ede)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_md5_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_MD5,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = MD5_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-md5-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_md5_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1921
1922static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1923{
1924 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1925
1926 safexcel_aead_sha1_cra_init(tfm);
1927 ctx->alg = SAFEXCEL_3DES; /* override default */
1928 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1929 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1930 return 0;
1931}
1932
/* authenc(hmac(sha1),cbc(des3_ede)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1958
1959static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1960{
1961 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1962
1963 safexcel_aead_sha256_cra_init(tfm);
1964 ctx->alg = SAFEXCEL_3DES; /* override default */
1965 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1966 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1967 return 0;
1968}
1969
/* authenc(hmac(sha256),cbc(des3_ede)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1995
1996static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1997{
1998 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1999
2000 safexcel_aead_sha224_cra_init(tfm);
2001 ctx->alg = SAFEXCEL_3DES; /* override default */
2002 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2003 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2004 return 0;
2005}
2006
/* authenc(hmac(sha224),cbc(des3_ede)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2032
2033static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2034{
2035 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2036
2037 safexcel_aead_sha512_cra_init(tfm);
2038 ctx->alg = SAFEXCEL_3DES; /* override default */
2039 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2040 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2041 return 0;
2042}
2043
/* authenc(hmac(sha512),cbc(des3_ede)) AEAD offloaded to the engine */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2069
2070static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2071{
2072 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2073
2074 safexcel_aead_sha384_cra_init(tfm);
2075 ctx->alg = SAFEXCEL_3DES; /* override default */
2076 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2077 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2078 return 0;
2079}
2080
/* authenc(hmac(sha384),cbc(des3_ede)) AEAD; SHA-384 flagged via the SHA2_512 mask bit */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2106
2107static int safexcel_aead_md5_des_cra_init(struct crypto_tfm *tfm)
2108{
2109 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2110
2111 safexcel_aead_md5_cra_init(tfm);
2112 ctx->alg = SAFEXCEL_DES; /* override default */
2113 ctx->blocksz = DES_BLOCK_SIZE;
2114 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2115 return 0;
2116}
2117
/* authenc(hmac(md5),cbc(des)) AEAD registration template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_md5_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_MD5,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = MD5_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(md5),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-md5-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_md5_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2143
2144static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2145{
2146 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2147
2148 safexcel_aead_sha1_cra_init(tfm);
2149 ctx->alg = SAFEXCEL_DES; /* override default */
2150 ctx->blocksz = DES_BLOCK_SIZE;
2151 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2152 return 0;
2153}
2154
/* authenc(hmac(sha1),cbc(des)) AEAD registration template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2180
2181static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2182{
2183 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2184
2185 safexcel_aead_sha256_cra_init(tfm);
2186 ctx->alg = SAFEXCEL_DES; /* override default */
2187 ctx->blocksz = DES_BLOCK_SIZE;
2188 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2189 return 0;
2190}
2191
/* authenc(hmac(sha256),cbc(des)) AEAD registration template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2217
2218static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2219{
2220 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2221
2222 safexcel_aead_sha224_cra_init(tfm);
2223 ctx->alg = SAFEXCEL_DES; /* override default */
2224 ctx->blocksz = DES_BLOCK_SIZE;
2225 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2226 return 0;
2227}
2228
/* authenc(hmac(sha224),cbc(des)) AEAD; SHA-224 flagged via the SHA2_256 mask bit */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2254
2255static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2256{
2257 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2258
2259 safexcel_aead_sha512_cra_init(tfm);
2260 ctx->alg = SAFEXCEL_DES; /* override default */
2261 ctx->blocksz = DES_BLOCK_SIZE;
2262 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2263 return 0;
2264}
2265
/* authenc(hmac(sha512),cbc(des)) AEAD registration template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2291
2292static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2293{
2294 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2295
2296 safexcel_aead_sha384_cra_init(tfm);
2297 ctx->alg = SAFEXCEL_DES; /* override default */
2298 ctx->blocksz = DES_BLOCK_SIZE;
2299 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2300 return 0;
2301}
2302
/* authenc(hmac(sha384),cbc(des)) AEAD; SHA-384 flagged via the SHA2_512 mask bit */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2328
2329static int safexcel_aead_md5_ctr_cra_init(struct crypto_tfm *tfm)
2330{
2331 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2332
2333 safexcel_aead_md5_cra_init(tfm);
2334 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2335 return 0;
2336}
2337
/* authenc(hmac(md5),rfc3686(ctr(aes))) AEAD; stream mode, so blocksize 1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_md5_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_MD5,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = MD5_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(md5),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-md5-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_md5_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2363
2364static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2365{
2366 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2367
2368 safexcel_aead_sha1_cra_init(tfm);
2369 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2370 return 0;
2371}
2372
/* authenc(hmac(sha1),rfc3686(ctr(aes))) AEAD; stream mode, so blocksize 1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2398
2399static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2400{
2401 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2402
2403 safexcel_aead_sha256_cra_init(tfm);
2404 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2405 return 0;
2406}
2407
/* authenc(hmac(sha256),rfc3686(ctr(aes))) AEAD; stream mode, so blocksize 1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2433
2434static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2435{
2436 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2437
2438 safexcel_aead_sha224_cra_init(tfm);
2439 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2440 return 0;
2441}
2442
/* authenc(hmac(sha224),rfc3686(ctr(aes))) AEAD; stream mode, so blocksize 1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2468
2469static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2470{
2471 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2472
2473 safexcel_aead_sha512_cra_init(tfm);
2474 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2475 return 0;
2476}
2477
/* authenc(hmac(sha512),rfc3686(ctr(aes))) AEAD; stream mode, so blocksize 1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2503
2504static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2505{
2506 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2507
2508 safexcel_aead_sha384_cra_init(tfm);
2509 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2510 return 0;
2511}
2512
/* authenc(hmac(sha384),rfc3686(ctr(aes))) AEAD; stream mode, so blocksize 1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2538
2539static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2540 const u8 *key, unsigned int len)
2541{
2542 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2543 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2544 struct safexcel_crypto_priv *priv = ctx->base.priv;
2545 struct crypto_aes_ctx aes;
2546 int ret, i;
2547 unsigned int keylen;
2548
2549 /* Check for illegal XTS keys */
2550 ret = xts_verify_key(ctfm, key, len);
2551 if (ret)
2552 return ret;
2553
2554 /* Only half of the key data is cipher key */
2555 keylen = (len >> 1);
2556 ret = aes_expandkey(&aes, key, keylen);
2557 if (ret)
2558 return ret;
2559
2560 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2561 for (i = 0; i < keylen / sizeof(u32); i++) {
2562 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2563 ctx->base.needs_inv = true;
2564 break;
2565 }
2566 }
2567 }
2568
2569 for (i = 0; i < keylen / sizeof(u32); i++)
2570 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2571
2572 /* The other half is the tweak key */
2573 ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2574 if (ret)
2575 return ret;
2576
2577 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2578 for (i = 0; i < keylen / sizeof(u32); i++) {
2579 if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2580 aes.key_enc[i]) {
2581 ctx->base.needs_inv = true;
2582 break;
2583 }
2584 }
2585 }
2586
2587 for (i = 0; i < keylen / sizeof(u32); i++)
2588 ctx->key[i + keylen / sizeof(u32)] =
2589 cpu_to_le32(aes.key_enc[i]);
2590
2591 ctx->key_len = keylen << 1;
2592
2593 memzero_explicit(&aes, sizeof(aes));
2594 return 0;
2595}
2596
2597static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2598{
2599 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2600
2601 safexcel_skcipher_cra_init(tfm);
2602 ctx->alg = SAFEXCEL_AES;
2603 ctx->blocksz = AES_BLOCK_SIZE;
2604 ctx->xts = 1;
2605 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2606 return 0;
2607}
2608
2609static int safexcel_encrypt_xts(struct skcipher_request *req)
2610{
2611 if (req->cryptlen < XTS_BLOCK_SIZE)
2612 return -EINVAL;
2613 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2614 SAFEXCEL_ENCRYPT);
2615}
2616
2617static int safexcel_decrypt_xts(struct skcipher_request *req)
2618{
2619 if (req->cryptlen < XTS_BLOCK_SIZE)
2620 return -EINVAL;
2621 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2622 SAFEXCEL_DECRYPT);
2623}
2624
/* xts(aes) skcipher registration template */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2652
/*
 * safexcel_aead_gcm_setkey - program an AES-GCM key
 *
 * Stores the raw AES key in the context and derives the GHASH hash key
 * by encrypting an all-zeroes block with it (hash key lives in
 * ctx->base.ipad, big-endian).  If the engine's record cache may still
 * hold old key material, a context invalidation is flagged.
 */
static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct aes_enckey aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	/* Validates the key length as a side effect */
	ret = aes_prepareenckey(&aes, key, len);
	if (ret)
		return ret;

	/* Raw key changed while a cached context exists -> invalidate */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != get_unaligned((__le32 *)key + i)) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = get_unaligned((__le32 *)key + i);

	ctx->key_len = len;

	/* Compute hash key by encrypting zeroes with cipher key */
	memset(hashkey, 0, AES_BLOCK_SIZE);
	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);

	/* Same check for the derived GHASH key */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);

	/* Wipe key material from the stack */
	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2701
2702static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2703{
2704 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2705
2706 safexcel_aead_cra_init(tfm);
2707 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2708 ctx->state_sz = GHASH_BLOCK_SIZE;
2709 ctx->xcm = EIP197_XCM_MODE_GCM;
2710 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2711
2712 return 0;
2713}
2714
/* No GCM-specific state to tear down; just run the common AEAD exit */
static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	safexcel_aead_cra_exit(tfm);
}
2719
/* Delegate GCM tag-length validation to the generic helper */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
2725
/* gcm(aes) AEAD registration template */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2752
/*
 * safexcel_aead_ccm_setkey - program an AES-CCM key
 *
 * Stores the key twice: little-endian in ctx->key for the cipher, and
 * big-endian in ctx->base.ipad at an offset of 2 AES blocks for the
 * CBC-MAC (XCBC engine) state.  The XCBC variant is selected from the
 * key length.  Flags a context invalidation if the engine's record
 * cache may still hold the old key.
 */
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		/* BE copy for the CBC-MAC part, after 2 blocks of state */
		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	/* Select the XCBC flavour matching the AES key size */
	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2796
2797static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2798{
2799 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2800
2801 safexcel_aead_cra_init(tfm);
2802 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2803 ctx->state_sz = 3 * AES_BLOCK_SIZE;
2804 ctx->xcm = EIP197_XCM_MODE_CCM;
2805 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2806 ctx->ctrinit = 0;
2807 return 0;
2808}
2809
2810static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2811 unsigned int authsize)
2812{
2813 /* Borrowed from crypto/ccm.c */
2814 switch (authsize) {
2815 case 4:
2816 case 6:
2817 case 8:
2818 case 10:
2819 case 12:
2820 case 14:
2821 case 16:
2822 break;
2823 default:
2824 return -EINVAL;
2825 }
2826
2827 return 0;
2828}
2829
2830static int safexcel_ccm_encrypt(struct aead_request *req)
2831{
2832 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2833
2834 if (req->iv[0] < 1 || req->iv[0] > 7)
2835 return -EINVAL;
2836
2837 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2838}
2839
2840static int safexcel_ccm_decrypt(struct aead_request *req)
2841{
2842 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2843
2844 if (req->iv[0] < 1 || req->iv[0] > 7)
2845 return -EINVAL;
2846
2847 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2848}
2849
/* ccm(aes) AEAD registration template */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2876
2877static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2878 const u8 *key)
2879{
2880 struct safexcel_crypto_priv *priv = ctx->base.priv;
2881
2882 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2883 if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2884 ctx->base.needs_inv = true;
2885
2886 memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2887 ctx->key_len = CHACHA_KEY_SIZE;
2888}
2889
2890static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2891 const u8 *key, unsigned int len)
2892{
2893 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2894
2895 if (len != CHACHA_KEY_SIZE)
2896 return -EINVAL;
2897
2898 safexcel_chacha20_setkey(ctx, key);
2899
2900 return 0;
2901}
2902
2903static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2904{
2905 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2906
2907 safexcel_skcipher_cra_init(tfm);
2908 ctx->alg = SAFEXCEL_CHACHA20;
2909 ctx->ctrinit = 0;
2910 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2911 return 0;
2912}
2913
/* chacha20 skcipher registration template */
struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2940
2941static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2942 const u8 *key, unsigned int len)
2943{
2944 struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2945
2946 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
2947 len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2948 /* ESP variant has nonce appended to key */
2949 len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2950 ctx->nonce = *(u32 *)(key + len);
2951 }
2952 if (len != CHACHA_KEY_SIZE)
2953 return -EINVAL;
2954
2955 safexcel_chacha20_setkey(ctx, key);
2956
2957 return 0;
2958}
2959
2960static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2961 unsigned int authsize)
2962{
2963 if (authsize != POLY1305_DIGEST_SIZE)
2964 return -EINVAL;
2965 return 0;
2966}
2967
/*
 * Common ChaCha20-Poly1305 en/decrypt path.
 *
 * Requests large enough for the engine go straight to the HW queue; the
 * remaining corner cases (tiny or zero-length payloads, ESP requests
 * with too little assoc data) are redirected to the software fallback.
 *
 * NOTE(review): creq and subreq both alias the same request context
 * memory; only one of them is used on any given path.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	/* One extra word to hold the ESP nonce behind the ChaCha key */
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* Propagate the fallback's request flags to our tfm */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Hand the whole request over to the software fallback */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
3020
/* AEAD encrypt entry point: thin wrapper around the common crypt path */
static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}
3025
/* AEAD decrypt entry point: thin wrapper around the common crypt path */
static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}
3030
/*
 * Common init for AEADs that need a SW fallback cipher for request shapes
 * the EIP197 HW cannot process (e.g. zero-length AAD+payload).
 * Allocates the fallback under the same generic algorithm name and sizes
 * the request context to fit either the HW or the fallback request.
 */
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Reqsize must cover both the HW request and a nested fallback req */
	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}
3052
3053static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
3054{
3055 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3056
3057 safexcel_aead_fallback_cra_init(tfm);
3058 ctx->alg = SAFEXCEL_CHACHA20;
3059 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
3060 CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
3061 ctx->ctrinit = 0;
3062 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
3063 ctx->state_sz = 0; /* Precomputed by HW */
3064 return 0;
3065}
3066
/* Release the SW fallback transform, then run the common AEAD teardown */
static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}
3074
/* rfc7539 chacha20-poly1305 AEAD template (HW with SW fallback for
 * zero-length AAD+payload requests)
 */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3103
/*
 * ESP variant of chacha20-poly1305: same base init, but mark the context
 * as IPsec ESP so the nonce handling and AAD skipping differ.
 * The ESP fields are set unconditionally; on error the tfm is torn down
 * anyway, so this is harmless.
 */
static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_chachapoly_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
3114
/* rfc7539esp chacha20-poly1305 AEAD template: IV excludes the implicit
 * per-SA nonce, which is supplied via setkey instead
 */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3143
3144static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3145 const u8 *key, unsigned int len)
3146{
3147 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3148 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3149 struct safexcel_crypto_priv *priv = ctx->base.priv;
3150
3151 if (len != SM4_KEY_SIZE)
3152 return -EINVAL;
3153
3154 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3155 if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3156 ctx->base.needs_inv = true;
3157
3158 memcpy(ctx->key, key, SM4_KEY_SIZE);
3159 ctx->key_len = SM4_KEY_SIZE;
3160
3161 return 0;
3162}
3163
3164static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3165{
3166 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3167 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3168 return -EINVAL;
3169 else
3170 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3171 SAFEXCEL_ENCRYPT);
3172}
3173
3174static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3175{
3176 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3177 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3178 return -EINVAL;
3179 else
3180 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3181 SAFEXCEL_DECRYPT);
3182}
3183
3184static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3185{
3186 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3187
3188 safexcel_skcipher_cra_init(tfm);
3189 ctx->alg = SAFEXCEL_SM4;
3190 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3191 ctx->blocksz = 0;
3192 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3193 return 0;
3194}
3195
/* ecb(sm4) skcipher template */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3221
3222static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3223{
3224 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3225
3226 safexcel_skcipher_cra_init(tfm);
3227 ctx->alg = SAFEXCEL_SM4;
3228 ctx->blocksz = SM4_BLOCK_SIZE;
3229 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3230 return 0;
3231}
3232
/* cbc(sm4) skcipher template */
struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3259
3260static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3261 const u8 *key, unsigned int len)
3262{
3263 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3264 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3265
3266 /* last 4 bytes of key are the nonce! */
3267 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3268 /* exclude the nonce here */
3269 len -= CTR_RFC3686_NONCE_SIZE;
3270
3271 return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3272}
3273
3274static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3275{
3276 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3277
3278 safexcel_skcipher_cra_init(tfm);
3279 ctx->alg = SAFEXCEL_SM4;
3280 ctx->blocksz = SM4_BLOCK_SIZE;
3281 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3282 return 0;
3283}
3284
/* rfc3686(ctr(sm4)) skcipher template: keysize includes the 4-byte nonce */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3312
3313static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3314{
3315 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3316 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3317 return -EINVAL;
3318
3319 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3320 SAFEXCEL_ENCRYPT);
3321}
3322
3323static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3324{
3325 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3326
3327 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3328 if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3329 return -EINVAL;
3330
3331 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3332 SAFEXCEL_DECRYPT);
3333}
3334
3335static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3336{
3337 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3338
3339 safexcel_aead_cra_init(tfm);
3340 ctx->alg = SAFEXCEL_SM4;
3341 ctx->blocksz = SM4_BLOCK_SIZE;
3342 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3343 ctx->state_sz = SHA1_DIGEST_SIZE;
3344 return 0;
3345}
3346
/* authenc(hmac(sha1),cbc(sm4)) AEAD template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3372
3373static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3374 const u8 *key, unsigned int len)
3375{
3376 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3377 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3378
3379 /* Keep fallback cipher synchronized */
3380 return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3381 safexcel_aead_setkey(ctfm, key, len);
3382}
3383
/*
 * Propagate the auth tag size to the SW fallback so both paths produce
 * the same tag length.
 */
static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}
3393
3394static int safexcel_aead_fallback_crypt(struct aead_request *req,
3395 enum safexcel_cipher_direction dir)
3396{
3397 struct crypto_aead *aead = crypto_aead_reqtfm(req);
3398 struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3399 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3400 struct aead_request *subreq = aead_request_ctx(req);
3401
3402 aead_request_set_tfm(subreq, ctx->fback);
3403 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3404 req->base.data);
3405 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3406 req->iv);
3407 aead_request_set_ad(subreq, req->assoclen);
3408
3409 return (dir == SAFEXCEL_ENCRYPT) ?
3410 crypto_aead_encrypt(subreq) :
3411 crypto_aead_decrypt(subreq);
3412}
3413
3414static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3415{
3416 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3417
3418 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3419 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3420 return -EINVAL;
3421 else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3422 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3423
3424 /* HW cannot do full (AAD+payload) zero length, use fallback */
3425 return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3426}
3427
3428static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3429{
3430 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3431 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3432
3433 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3434 if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3435 return -EINVAL;
3436 else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3437 /* If input length > 0 only */
3438 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3439
3440 /* HW cannot do full (AAD+payload) zero length, use fallback */
3441 return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3442}
3443
3444static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3445{
3446 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3447
3448 safexcel_aead_fallback_cra_init(tfm);
3449 ctx->alg = SAFEXCEL_SM4;
3450 ctx->blocksz = SM4_BLOCK_SIZE;
3451 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3452 ctx->state_sz = SM3_DIGEST_SIZE;
3453 return 0;
3454}
3455
/* authenc(hmac(sm3),cbc(sm4)) AEAD template (needs SW fallback for
 * zero-length input)
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3483
/* Same as the CBC/SHA1 variant, but switched to counter mode */
static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
3492
/* authenc(hmac(sha1),rfc3686(ctr(sm4))) AEAD template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3518
3519static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3520{
3521 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3522
3523 safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3524 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3525 return 0;
3526}
3527
/* authenc(hmac(sm3),rfc3686(ctr(sm4))) AEAD template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3553
3554static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3555 unsigned int len)
3556{
3557 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3558 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3559
3560 /* last 4 bytes of key are the nonce! */
3561 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3562
3563 len -= CTR_RFC3686_NONCE_SIZE;
3564 return safexcel_aead_gcm_setkey(ctfm, key, len);
3565}
3566
/* Delegate RFC4106 tag-size validation to the generic helper */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3572
/* Validate the IPsec-style assoclen before queueing the encrypt */
static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}
3578
/* Validate the IPsec-style assoclen before queueing the decrypt */
static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}
3584
/*
 * GCM base init plus IPsec ESP marking; ESP fields are set regardless of
 * ret since the tfm is torn down on error anyway.
 */
static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
3595
3596struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3597 .type = SAFEXCEL_ALG_TYPE_AEAD,
3598 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3599 .alg.aead = {
3600 .setkey = safexcel_rfc4106_gcm_setkey,
3601 .setauthsize = safexcel_rfc4106_gcm_setauthsize,
3602 .encrypt = safexcel_rfc4106_encrypt,
3603 .decrypt = safexcel_rfc4106_decrypt,
3604 .ivsize = GCM_RFC4106_IV_SIZE,
3605 .maxauthsize = GHASH_DIGEST_SIZE,
3606 .base = {
3607 .cra_name = "rfc4106(gcm(aes))",
3608 .cra_driver_name = "safexcel-rfc4106-gcm-aes",
3609 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3610 .cra_flags = CRYPTO_ALG_ASYNC |
3611 CRYPTO_ALG_ALLOCATES_MEMORY |
3612 CRYPTO_ALG_KERN_DRIVER_ONLY,
3613 .cra_blocksize = 1,
3614 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3615 .cra_alignmask = 0,
3616 .cra_init = safexcel_rfc4106_gcm_cra_init,
3617 .cra_exit = safexcel_aead_gcm_cra_exit,
3618 },
3619 },
3620};
3621
3622static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3623 unsigned int authsize)
3624{
3625 if (authsize != GHASH_DIGEST_SIZE)
3626 return -EINVAL;
3627
3628 return 0;
3629}
3630
/* GCM base init, then mark the context as ESP GMAC (auth-only GCM) */
static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}
3640
3641struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3642 .type = SAFEXCEL_ALG_TYPE_AEAD,
3643 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3644 .alg.aead = {
3645 .setkey = safexcel_rfc4106_gcm_setkey,
3646 .setauthsize = safexcel_rfc4543_gcm_setauthsize,
3647 .encrypt = safexcel_rfc4106_encrypt,
3648 .decrypt = safexcel_rfc4106_decrypt,
3649 .ivsize = GCM_RFC4543_IV_SIZE,
3650 .maxauthsize = GHASH_DIGEST_SIZE,
3651 .base = {
3652 .cra_name = "rfc4543(gcm(aes))",
3653 .cra_driver_name = "safexcel-rfc4543-gcm-aes",
3654 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3655 .cra_flags = CRYPTO_ALG_ASYNC |
3656 CRYPTO_ALG_ALLOCATES_MEMORY |
3657 CRYPTO_ALG_KERN_DRIVER_ONLY,
3658 .cra_blocksize = 1,
3659 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3660 .cra_alignmask = 0,
3661 .cra_init = safexcel_rfc4543_gcm_cra_init,
3662 .cra_exit = safexcel_aead_gcm_cra_exit,
3663 },
3664 },
3665};
3666
/*
 * rfc4309(ccm(aes)) setkey: pack the CCM L value and the 3-byte salt from
 * the end of the key into ctx->nonce byte-by-byte (ctx->nonce itself is
 * naturally aligned, so the u8 stores are safe), then hand the remaining
 * bytes to the plain CCM setkey.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
3683
3684static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3685 unsigned int authsize)
3686{
3687 /* Borrowed from crypto/ccm.c */
3688 switch (authsize) {
3689 case 8:
3690 case 12:
3691 case 16:
3692 break;
3693 default:
3694 return -EINVAL;
3695 }
3696
3697 return 0;
3698}
3699
3700static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3701{
3702 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3703
3704 /* Borrowed from crypto/ccm.c */
3705 if (req->assoclen != 16 && req->assoclen != 20)
3706 return -EINVAL;
3707
3708 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3709}
3710
3711static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3712{
3713 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3714
3715 /* Borrowed from crypto/ccm.c */
3716 if (req->assoclen != 16 && req->assoclen != 20)
3717 return -EINVAL;
3718
3719 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3720}
3721
/*
 * CCM base init plus IPsec ESP marking; ESP fields are set regardless of
 * ret since the tfm is torn down on error anyway.
 */
static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
3732
/* rfc4309(ccm(aes)) AEAD template */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};