// SPDX-License-Identifier: GPL-2.0-only
/*
 * K3 DTHE V2 crypto accelerator driver
 *
 * Copyright (C) Texas Instruments 2025 - https://www.ti.com
 * Author: T Pratham <t-pratham@ti.com>
 */

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/engine.h>
#include <crypto/gcm.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "dthev2-common.h"

#include <linux/bitfield.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/scatterlist.h>

/* Registers */

// AES Engine
#define DTHE_P_AES_BASE		0x7000

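/*
 * Note: the key-word registers below are not at sequential offsets; each
 * pair of words (KEY1_1:KEY1_0, KEY1_3:KEY1_2, ...) sits at a descending
 * address, following the DTHE v2 register map.
 */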
#define DTHE_P_AES_KEY1_0	0x0038
#define DTHE_P_AES_KEY1_1	0x003C
#define DTHE_P_AES_KEY1_2	0x0030
#define DTHE_P_AES_KEY1_3	0x0034
#define DTHE_P_AES_KEY1_4	0x0028
#define DTHE_P_AES_KEY1_5	0x002C
#define DTHE_P_AES_KEY1_6	0x0020
#define DTHE_P_AES_KEY1_7	0x0024

#define DTHE_P_AES_KEY2_0	0x0018
#define DTHE_P_AES_KEY2_1	0x001C
#define DTHE_P_AES_KEY2_2	0x0010
#define DTHE_P_AES_KEY2_3	0x0014
#define DTHE_P_AES_KEY2_4	0x0008
#define DTHE_P_AES_KEY2_5	0x000C
#define DTHE_P_AES_KEY2_6	0x0000
#define DTHE_P_AES_KEY2_7	0x0004

#define DTHE_P_AES_IV_IN_0	0x0040
#define DTHE_P_AES_IV_IN_1	0x0044
#define DTHE_P_AES_IV_IN_2	0x0048
#define DTHE_P_AES_IV_IN_3	0x004C
#define DTHE_P_AES_CTRL		0x0050
#define DTHE_P_AES_C_LENGTH_0	0x0054
#define DTHE_P_AES_C_LENGTH_1	0x0058
#define DTHE_P_AES_AUTH_LENGTH	0x005C
#define DTHE_P_AES_DATA_IN_OUT	0x0060
#define DTHE_P_AES_TAG_OUT	0x0070

#define DTHE_P_AES_SYSCONFIG	0x0084
#define DTHE_P_AES_IRQSTATUS	0x008C
#define DTHE_P_AES_IRQENABLE	0x0090

/* Register write values and macros */

enum aes_ctrl_mode_masks {
	AES_CTRL_ECB_MASK = 0x00,
	AES_CTRL_CBC_MASK = BIT(5),
	AES_CTRL_CTR_MASK = BIT(6),
	AES_CTRL_XTS_MASK = BIT(12) | BIT(11),
	AES_CTRL_GCM_MASK = BIT(17) | BIT(16) | BIT(6),
	AES_CTRL_CCM_MASK = BIT(18) | BIT(6),
};

#define DTHE_AES_CTRL_MODE_CLEAR_MASK	~GENMASK(28, 5)

#define DTHE_AES_CTRL_DIR_ENC		BIT(2)

#define DTHE_AES_CTRL_KEYSIZE_16B	BIT(3)
#define DTHE_AES_CTRL_KEYSIZE_24B	BIT(4)
#define DTHE_AES_CTRL_KEYSIZE_32B	(BIT(3) | BIT(4))

#define DTHE_AES_CTRL_CTR_WIDTH_128B	(BIT(7) | BIT(8))

#define DTHE_AES_CCM_L_FROM_IV_MASK	GENMASK(2, 0)
#define DTHE_AES_CCM_M_BITS		GENMASK(2, 0)
#define DTHE_AES_CTRL_CCM_L_FIELD_MASK	GENMASK(21, 19)
#define DTHE_AES_CTRL_CCM_M_FIELD_MASK	GENMASK(24, 22)
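/*
 * CCM encodes L as (L - 1) in CTRL[21:19], taken from byte 0 of the IV,
 * and M as (authsize - 2) / 2 in CTRL[24:22], per the CCM flags-byte
 * encoding (see dthe_aes_set_ctrl_key()).
 */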

#define DTHE_AES_CTRL_SAVE_CTX_SET	BIT(29)

#define DTHE_AES_CTRL_OUTPUT_READY	BIT_MASK(0)
#define DTHE_AES_CTRL_INPUT_READY	BIT_MASK(1)
#define DTHE_AES_CTRL_SAVED_CTX_READY	BIT_MASK(30)
#define DTHE_AES_CTRL_CTX_READY		BIT_MASK(31)

#define DTHE_AES_SYSCONFIG_DMA_DATA_IN_OUT_EN	GENMASK(6, 5)
#define DTHE_AES_IRQENABLE_EN_ALL	GENMASK(3, 0)

/* Misc */
#define AES_IV_SIZE			AES_BLOCK_SIZE
#define AES_BLOCK_WORDS			(AES_BLOCK_SIZE / sizeof(u32))
#define AES_IV_WORDS			AES_BLOCK_WORDS
#define DTHE_AES_GCM_AAD_MAXLEN		(BIT_ULL(32) - 1)
#define DTHE_AES_CCM_AAD_MAXLEN		(BIT(16) - BIT(8))
#define DTHE_AES_CCM_CRYPT_MAXLEN	(BIT_ULL(61) - 1)
#define POLL_TIMEOUT_INTERVAL		HZ

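/* tfm init for modes handled fully in hardware (ECB, CBC); no fallback needed */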
static int dthe_cipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct dthe_data *dev_data = dthe_get_dev(ctx);

	ctx->dev_data = dev_data;
	ctx->keylen = 0;

	return 0;
}

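/* tfm init for modes that keep a sync-skcipher software fallback (CTR, XTS) */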
static int dthe_cipher_init_tfm_fallback(struct crypto_skcipher *tfm)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	const char *alg_name = crypto_tfm_alg_name(crypto_skcipher_tfm(tfm));

	ctx->dev_data = dev_data;
	ctx->keylen = 0;

	ctx->skcipher_fb = crypto_alloc_sync_skcipher(alg_name, 0,
						      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->skcipher_fb)) {
		dev_err(dev_data->dev, "fallback driver %s couldn't be loaded\n",
			alg_name);
		return PTR_ERR(ctx->skcipher_fb);
	}

	return 0;
}

static void dthe_cipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->skcipher_fb);
}

static int dthe_aes_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return 0;
}

static int dthe_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->aes_mode = DTHE_AES_ECB;

	return dthe_aes_setkey(tfm, key, keylen);
}

static int dthe_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->aes_mode = DTHE_AES_CBC;

	return dthe_aes_setkey(tfm, key, keylen);
}

static int dthe_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret = dthe_aes_setkey(tfm, key, keylen);

	if (ret)
		return ret;

	ctx->aes_mode = DTHE_AES_CTR;

	crypto_sync_skcipher_clear_flags(ctx->skcipher_fb, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->skcipher_fb,
				       crypto_skcipher_get_flags(tfm) &
				       CRYPTO_TFM_REQ_MASK);

	return crypto_sync_skcipher_setkey(ctx->skcipher_fb, key, keylen);
}

static int dthe_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (keylen != 2 * AES_KEYSIZE_128 &&
	    keylen != 2 * AES_KEYSIZE_192 &&
	    keylen != 2 * AES_KEYSIZE_256)
		return -EINVAL;

	ctx->aes_mode = DTHE_AES_XTS;
	ctx->keylen = keylen / 2;
	memcpy(ctx->key, key, keylen);

	crypto_sync_skcipher_clear_flags(ctx->skcipher_fb, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->skcipher_fb,
				       crypto_skcipher_get_flags(tfm) &
				       CRYPTO_TFM_REQ_MASK);

	return crypto_sync_skcipher_setkey(ctx->skcipher_fb, key, keylen);
}

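/*
 * Program KEY1 (and KEY2 for XTS), the IV when one is supplied, and the
 * mode, direction and key-size fields of the AES CTRL register.
 */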
static void dthe_aes_set_ctrl_key(struct dthe_tfm_ctx *ctx,
				  struct dthe_aes_req_ctx *rctx,
				  u32 *iv_in)
{
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE;
	u32 ctrl_val = 0;

	writel_relaxed(ctx->key[0], aes_base_reg + DTHE_P_AES_KEY1_0);
	writel_relaxed(ctx->key[1], aes_base_reg + DTHE_P_AES_KEY1_1);
	writel_relaxed(ctx->key[2], aes_base_reg + DTHE_P_AES_KEY1_2);
	writel_relaxed(ctx->key[3], aes_base_reg + DTHE_P_AES_KEY1_3);

	if (ctx->keylen > AES_KEYSIZE_128) {
		writel_relaxed(ctx->key[4], aes_base_reg + DTHE_P_AES_KEY1_4);
		writel_relaxed(ctx->key[5], aes_base_reg + DTHE_P_AES_KEY1_5);
	}
	if (ctx->keylen == AES_KEYSIZE_256) {
		writel_relaxed(ctx->key[6], aes_base_reg + DTHE_P_AES_KEY1_6);
		writel_relaxed(ctx->key[7], aes_base_reg + DTHE_P_AES_KEY1_7);
	}

	if (ctx->aes_mode == DTHE_AES_XTS) {
		size_t key2_offset = ctx->keylen / sizeof(u32);

		writel_relaxed(ctx->key[key2_offset + 0], aes_base_reg + DTHE_P_AES_KEY2_0);
		writel_relaxed(ctx->key[key2_offset + 1], aes_base_reg + DTHE_P_AES_KEY2_1);
		writel_relaxed(ctx->key[key2_offset + 2], aes_base_reg + DTHE_P_AES_KEY2_2);
		writel_relaxed(ctx->key[key2_offset + 3], aes_base_reg + DTHE_P_AES_KEY2_3);

		if (ctx->keylen > AES_KEYSIZE_128) {
			writel_relaxed(ctx->key[key2_offset + 4], aes_base_reg + DTHE_P_AES_KEY2_4);
			writel_relaxed(ctx->key[key2_offset + 5], aes_base_reg + DTHE_P_AES_KEY2_5);
		}
		if (ctx->keylen == AES_KEYSIZE_256) {
			writel_relaxed(ctx->key[key2_offset + 6], aes_base_reg + DTHE_P_AES_KEY2_6);
			writel_relaxed(ctx->key[key2_offset + 7], aes_base_reg + DTHE_P_AES_KEY2_7);
		}
	}

	if (rctx->enc)
		ctrl_val |= DTHE_AES_CTRL_DIR_ENC;

	if (ctx->keylen == AES_KEYSIZE_128)
		ctrl_val |= DTHE_AES_CTRL_KEYSIZE_16B;
	else if (ctx->keylen == AES_KEYSIZE_192)
		ctrl_val |= DTHE_AES_CTRL_KEYSIZE_24B;
	else
		ctrl_val |= DTHE_AES_CTRL_KEYSIZE_32B;

	// Write AES mode
	ctrl_val &= DTHE_AES_CTRL_MODE_CLEAR_MASK;
	switch (ctx->aes_mode) {
	case DTHE_AES_ECB:
		ctrl_val |= AES_CTRL_ECB_MASK;
		break;
	case DTHE_AES_CBC:
		ctrl_val |= AES_CTRL_CBC_MASK;
		break;
	case DTHE_AES_CTR:
		ctrl_val |= AES_CTRL_CTR_MASK;
		ctrl_val |= DTHE_AES_CTRL_CTR_WIDTH_128B;
		break;
	case DTHE_AES_XTS:
		ctrl_val |= AES_CTRL_XTS_MASK;
		break;
	case DTHE_AES_GCM:
		ctrl_val |= AES_CTRL_GCM_MASK;
		break;
	case DTHE_AES_CCM:
		ctrl_val |= AES_CTRL_CCM_MASK;
		ctrl_val |= FIELD_PREP(DTHE_AES_CTRL_CCM_L_FIELD_MASK,
				       (iv_in[0] & DTHE_AES_CCM_L_FROM_IV_MASK));
		ctrl_val |= FIELD_PREP(DTHE_AES_CTRL_CCM_M_FIELD_MASK,
				       ((ctx->authsize - 2) >> 1) & DTHE_AES_CCM_M_BITS);
		break;
	}

	if (iv_in) {
		ctrl_val |= DTHE_AES_CTRL_SAVE_CTX_SET;
		for (int i = 0; i < AES_IV_WORDS; ++i)
			writel_relaxed(iv_in[i],
				       aes_base_reg + DTHE_P_AES_IV_IN_0 + (DTHE_REG_SIZE * i));
	}

	writel_relaxed(ctrl_val, aes_base_reg + DTHE_P_AES_CTRL);
}

static int dthe_aes_do_fallback(struct skcipher_request *req)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->skcipher_fb);

	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return rctx->enc ? crypto_skcipher_encrypt(subreq) :
			   crypto_skcipher_decrypt(subreq);
}

static void dthe_aes_dma_in_callback(void *data)
{
	struct skcipher_request *req = (struct skcipher_request *)data;
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	complete(&rctx->aes_compl);
}

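/*
 * do_one_request() handler for skcipher requests: enables DMA in SYSCONFIG,
 * maps the src/dst scatterlists (padding CTR input to a block multiple),
 * programs key, IV and length, then waits for the RX DMA completion
 * callback with a timeout.
 */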
static int dthe_aes_run(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq, struct skcipher_request, base);
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	unsigned int len = req->cryptlen;
	struct scatterlist *src = req->src;
	struct scatterlist *dst = req->dst;

	int src_nents = sg_nents_for_len(src, len);
	int dst_nents = sg_nents_for_len(dst, len);

	int src_mapped_nents;
	int dst_mapped_nents;

	bool diff_dst;
	enum dma_data_direction src_dir, dst_dir;

	struct device *tx_dev, *rx_dev;
	struct dma_async_tx_descriptor *desc_in, *desc_out;

	int ret;

	void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE;

	u32 aes_irqenable_val = readl_relaxed(aes_base_reg + DTHE_P_AES_IRQENABLE);
	u32 aes_sysconfig_val = readl_relaxed(aes_base_reg + DTHE_P_AES_SYSCONFIG);

	aes_sysconfig_val |= DTHE_AES_SYSCONFIG_DMA_DATA_IN_OUT_EN;
	writel_relaxed(aes_sysconfig_val, aes_base_reg + DTHE_P_AES_SYSCONFIG);

	aes_irqenable_val |= DTHE_AES_IRQENABLE_EN_ALL;
	writel_relaxed(aes_irqenable_val, aes_base_reg + DTHE_P_AES_IRQENABLE);

	if (src == dst) {
		diff_dst = false;
		src_dir = DMA_BIDIRECTIONAL;
		dst_dir = DMA_BIDIRECTIONAL;
	} else {
		diff_dst = true;
		src_dir = DMA_TO_DEVICE;
		dst_dir = DMA_FROM_DEVICE;
	}

	/*
	 * CTR mode can operate on any input length, but the hardware
	 * requires the input length to be a multiple of the block size,
	 * so pad out the trailing partial block in the driver.
	 */
	if (ctx->aes_mode == DTHE_AES_CTR && req->cryptlen % AES_BLOCK_SIZE) {
		unsigned int pad_size = AES_BLOCK_SIZE - (req->cryptlen % AES_BLOCK_SIZE);
		u8 *pad_buf = rctx->padding;
		struct scatterlist *sg;

		len += pad_size;
		src_nents++;
		dst_nents++;

		src = kmalloc_array(src_nents, sizeof(*src), GFP_ATOMIC);
		if (!src) {
			ret = -ENOMEM;
			goto aes_ctr_src_alloc_err;
		}

		sg_init_table(src, src_nents);
		sg = dthe_copy_sg(src, req->src, req->cryptlen);
		memzero_explicit(pad_buf, AES_BLOCK_SIZE);
		sg_set_buf(sg, pad_buf, pad_size);

		if (diff_dst) {
			dst = kmalloc_array(dst_nents, sizeof(*dst), GFP_ATOMIC);
			if (!dst) {
				ret = -ENOMEM;
				goto aes_ctr_dst_alloc_err;
			}

			sg_init_table(dst, dst_nents);
			sg = dthe_copy_sg(dst, req->dst, req->cryptlen);
			sg_set_buf(sg, pad_buf, pad_size);
		} else {
			dst = src;
		}
	}

	tx_dev = dmaengine_get_dma_device(dev_data->dma_aes_tx);
	rx_dev = dmaengine_get_dma_device(dev_data->dma_aes_rx);

	src_mapped_nents = dma_map_sg(tx_dev, src, src_nents, src_dir);
	if (src_mapped_nents == 0) {
		ret = -EINVAL;
		goto aes_map_src_err;
	}

	if (!diff_dst) {
		dst_mapped_nents = src_mapped_nents;
	} else {
		dst_mapped_nents = dma_map_sg(rx_dev, dst, dst_nents, dst_dir);
		if (dst_mapped_nents == 0) {
			ret = -EINVAL;
			goto aes_map_dst_err;
		}
	}

	desc_in = dmaengine_prep_slave_sg(dev_data->dma_aes_rx, dst, dst_mapped_nents,
					  DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc_in) {
		dev_err(dev_data->dev, "IN prep_slave_sg() failed\n");
		ret = -EINVAL;
		goto aes_prep_err;
	}

	desc_out = dmaengine_prep_slave_sg(dev_data->dma_aes_tx, src, src_mapped_nents,
					   DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc_out) {
		dev_err(dev_data->dev, "OUT prep_slave_sg() failed\n");
		ret = -EINVAL;
		goto aes_prep_err;
	}

	desc_in->callback = dthe_aes_dma_in_callback;
	desc_in->callback_param = req;

	init_completion(&rctx->aes_compl);

	if (ctx->aes_mode == DTHE_AES_ECB)
		dthe_aes_set_ctrl_key(ctx, rctx, NULL);
	else
		dthe_aes_set_ctrl_key(ctx, rctx, (u32 *)req->iv);

	writel_relaxed(lower_32_bits(len), aes_base_reg + DTHE_P_AES_C_LENGTH_0);
	writel_relaxed(upper_32_bits(len), aes_base_reg + DTHE_P_AES_C_LENGTH_1);

	dmaengine_submit(desc_in);
	dmaengine_submit(desc_out);

	dma_async_issue_pending(dev_data->dma_aes_rx);
	dma_async_issue_pending(dev_data->dma_aes_tx);

	// Need to do a timeout to ensure finalise gets called if DMA callback fails for any reason
	ret = wait_for_completion_timeout(&rctx->aes_compl, msecs_to_jiffies(DTHE_DMA_TIMEOUT_MS));
	if (!ret) {
		ret = -ETIMEDOUT;
		dmaengine_terminate_sync(dev_data->dma_aes_rx);
		dmaengine_terminate_sync(dev_data->dma_aes_tx);

		for (int i = 0; i < AES_BLOCK_WORDS; ++i)
			readl_relaxed(aes_base_reg + DTHE_P_AES_DATA_IN_OUT + (DTHE_REG_SIZE * i));
	} else {
		ret = 0;
	}

	// For modes other than ECB, read IV_OUT
	if (ctx->aes_mode != DTHE_AES_ECB) {
		u32 *iv_out = (u32 *)req->iv;

		for (int i = 0; i < AES_IV_WORDS; ++i)
			iv_out[i] = readl_relaxed(aes_base_reg +
						  DTHE_P_AES_IV_IN_0 +
						  (DTHE_REG_SIZE * i));
	}

aes_prep_err:
	if (dst_dir != DMA_BIDIRECTIONAL)
		dma_unmap_sg(rx_dev, dst, dst_nents, dst_dir);
aes_map_dst_err:
	dma_unmap_sg(tx_dev, src, src_nents, src_dir);

aes_map_src_err:
	if (ctx->aes_mode == DTHE_AES_CTR && req->cryptlen % AES_BLOCK_SIZE) {
		memzero_explicit(rctx->padding, AES_BLOCK_SIZE);
		if (diff_dst)
			kfree(dst);
aes_ctr_dst_alloc_err:
		kfree(src);
aes_ctr_src_alloc_err:
		/* Fall back to software if -ENOMEM */
		if (ret == -ENOMEM)
			ret = dthe_aes_do_fallback(req);
	}

	local_bh_disable();
	crypto_finalize_skcipher_request(dev_data->engine, req, ret);
	local_bh_enable();
	return 0;
}

static int dthe_aes_crypt(struct skcipher_request *req)
{
	struct dthe_tfm_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	struct crypto_engine *engine;

	/*
	 * If the data length is not a multiple of AES_BLOCK_SIZE:
	 * - return -EINVAL for ECB and CBC, as they are block ciphers
	 * - fall back to software for XTS, as the hardware does not support
	 *   ciphertext stealing
	 * - do nothing for CTR
	 */
	if (req->cryptlen % AES_BLOCK_SIZE) {
		if (ctx->aes_mode == DTHE_AES_XTS)
			return dthe_aes_do_fallback(req);

		if (ctx->aes_mode != DTHE_AES_CTR)
			return -EINVAL;
	}

	/*
	 * If the input length is zero there is nothing to do, except in XTS
	 * mode, where the data length must be non-zero.
	 */
	if (req->cryptlen == 0) {
		if (ctx->aes_mode == DTHE_AES_XTS)
			return -EINVAL;
		return 0;
	}

	engine = dev_data->engine;
	return crypto_transfer_skcipher_request_to_engine(engine, req);
}

static int dthe_aes_encrypt(struct skcipher_request *req)
{
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	rctx->enc = 1;
	return dthe_aes_crypt(req);
}

static int dthe_aes_decrypt(struct skcipher_request *req)
{
	struct dthe_aes_req_ctx *rctx = skcipher_request_ctx(req);

	rctx->enc = 0;
	return dthe_aes_crypt(req);
}

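/*
 * AEAD tfm init: always allocate a sync AEAD fallback, since several
 * request shapes must be handed to software (see dthe_aead_crypt()).
 */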
static int dthe_aead_init_tfm(struct crypto_aead *tfm)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(tfm);
	struct dthe_data *dev_data = dthe_get_dev(ctx);

	ctx->dev_data = dev_data;

	const char *alg_name = crypto_tfm_alg_name(crypto_aead_tfm(tfm));

	ctx->aead_fb = crypto_alloc_sync_aead(alg_name, 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->aead_fb)) {
		dev_err(dev_data->dev, "fallback driver %s couldn't be loaded\n",
			alg_name);
		return PTR_ERR(ctx->aead_fb);
	}

	return 0;
}

static void dthe_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_sync_aead(ctx->aead_fb);
}

/**
 * dthe_aead_prep_aad - Prepare AAD scatterlist from input request
 * @sg: Input scatterlist containing AAD
 * @assoclen: Length of AAD
 * @pad_buf: Buffer to hold AAD padding if needed
 *
 * Description:
 * Creates a scatterlist containing only the AAD portion, padded to align
 * to AES_BLOCK_SIZE. This simplifies DMA handling by allowing the AAD to
 * be sent separately via TX-only DMA.
 *
 * Return:
 * Pointer to the AAD scatterlist, or ERR_PTR(error) on failure.
 * The caller must free the returned scatterlist when done.
 **/
static struct scatterlist *dthe_aead_prep_aad(struct scatterlist *sg,
					      unsigned int assoclen,
					      u8 *pad_buf)
{
	struct scatterlist *aad_sg;
	struct scatterlist *to_sg;
	int aad_nents;

	if (assoclen == 0)
		return NULL;

	aad_nents = sg_nents_for_len(sg, assoclen);
	if (assoclen % AES_BLOCK_SIZE)
		aad_nents++;

	aad_sg = kmalloc_array(aad_nents, sizeof(struct scatterlist), GFP_ATOMIC);
	if (!aad_sg)
		return ERR_PTR(-ENOMEM);

	sg_init_table(aad_sg, aad_nents);
	to_sg = dthe_copy_sg(aad_sg, sg, assoclen);
	if (assoclen % AES_BLOCK_SIZE) {
		unsigned int pad_len = AES_BLOCK_SIZE - (assoclen % AES_BLOCK_SIZE);

		memset(pad_buf, 0, pad_len);
		sg_set_buf(to_sg, pad_buf, pad_len);
	}

	return aad_sg;
}

/**
 * dthe_aead_prep_crypt - Prepare crypt scatterlist from req->src/req->dst
 * @sg: Input req->src/req->dst scatterlist
 * @assoclen: Length of AAD (to skip)
 * @cryptlen: Length of ciphertext/plaintext (minus the tag size in decryption)
 * @pad_buf: Zeroed buffer to hold crypt padding if needed
 *
 * Description:
 * Creates a scatterlist containing only the ciphertext/plaintext portion
 * (skipping the AAD), padded to align to AES_BLOCK_SIZE.
 *
 * Return:
 * Pointer to the ciphertext scatterlist, or ERR_PTR(error) on failure.
 * The caller must free the returned scatterlist when done.
 **/
static struct scatterlist *dthe_aead_prep_crypt(struct scatterlist *sg,
						unsigned int assoclen,
						unsigned int cryptlen,
						u8 *pad_buf)
{
	struct scatterlist *out_sg[1];
	struct scatterlist *crypt_sg;
	struct scatterlist *to_sg;
	size_t split_sizes[1] = {cryptlen};
	int out_mapped_nents[1];
	int crypt_nents;
	int err;

	if (cryptlen == 0)
		return NULL;

	/* Skip AAD, extract ciphertext portion */
	err = sg_split(sg, 0, assoclen, 1, split_sizes, out_sg, out_mapped_nents, GFP_ATOMIC);
	if (err)
		goto dthe_aead_prep_crypt_split_err;

	crypt_nents = sg_nents_for_len(out_sg[0], cryptlen);
	if (cryptlen % AES_BLOCK_SIZE)
		crypt_nents++;

	crypt_sg = kmalloc_array(crypt_nents, sizeof(struct scatterlist), GFP_ATOMIC);
	if (!crypt_sg) {
		err = -ENOMEM;
		goto dthe_aead_prep_crypt_mem_err;
	}

	sg_init_table(crypt_sg, crypt_nents);
	to_sg = dthe_copy_sg(crypt_sg, out_sg[0], cryptlen);
	if (cryptlen % AES_BLOCK_SIZE) {
		unsigned int pad_len = AES_BLOCK_SIZE - (cryptlen % AES_BLOCK_SIZE);

		sg_set_buf(to_sg, pad_buf, pad_len);
	}

dthe_aead_prep_crypt_mem_err:
	kfree(out_sg[0]);

dthe_aead_prep_crypt_split_err:
	if (err)
		return ERR_PTR(err);
	return crypt_sg;
}

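/* Poll for SAVED_CTX_READY, then read the computed tag from TAG_OUT */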
static int dthe_aead_read_tag(struct dthe_tfm_ctx *ctx, u32 *tag)
{
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE;
	u32 val;
	int ret;

	ret = readl_relaxed_poll_timeout(aes_base_reg + DTHE_P_AES_CTRL, val,
					 (val & DTHE_AES_CTRL_SAVED_CTX_READY),
					 0, POLL_TIMEOUT_INTERVAL);
	if (ret)
		return ret;

	for (int i = 0; i < AES_BLOCK_WORDS; ++i)
		tag[i] = readl_relaxed(aes_base_reg +
				       DTHE_P_AES_TAG_OUT +
				       DTHE_REG_SIZE * i);
	return 0;
}

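/* On encryption, append the computed tag to dst after the AAD and ciphertext */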
static int dthe_aead_enc_get_tag(struct aead_request *req)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	u32 tag[AES_BLOCK_WORDS];
	int nents;
	int ret;

	ret = dthe_aead_read_tag(ctx, tag);
	if (ret)
		return ret;

	nents = sg_nents_for_len(req->dst, req->cryptlen + req->assoclen + ctx->authsize);

	sg_pcopy_from_buffer(req->dst, nents, tag, ctx->authsize,
			     req->assoclen + req->cryptlen);

	return 0;
}

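/* On decryption, compare the computed tag against the tag at the end of src */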
static int dthe_aead_dec_verify_tag(struct aead_request *req)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	u32 tag_out[AES_BLOCK_WORDS];
	u32 tag_in[AES_BLOCK_WORDS];
	int nents;
	int ret;

	ret = dthe_aead_read_tag(ctx, tag_out);
	if (ret)
		return ret;

	nents = sg_nents_for_len(req->src, req->assoclen + req->cryptlen);

	sg_pcopy_to_buffer(req->src, nents, tag_in, ctx->authsize,
			   req->assoclen + req->cryptlen - ctx->authsize);

	if (crypto_memneq(tag_in, tag_out, ctx->authsize))
		return -EBADMSG;
	else
		return 0;
}

static int dthe_aead_setkey(struct crypto_aead *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_256)
		return -EINVAL;

	crypto_sync_aead_clear_flags(ctx->aead_fb, CRYPTO_TFM_REQ_MASK);
	crypto_sync_aead_set_flags(ctx->aead_fb,
				   crypto_aead_get_flags(tfm) &
				   CRYPTO_TFM_REQ_MASK);

	return crypto_sync_aead_setkey(ctx->aead_fb, key, keylen);
}

static int dthe_gcm_aes_setkey(struct crypto_aead *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = dthe_aead_setkey(tfm, key, keylen);
	if (ret)
		return ret;

	ctx->aes_mode = DTHE_AES_GCM;
	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return ret;
}

static int dthe_ccm_aes_setkey(struct crypto_aead *tfm, const u8 *key, unsigned int keylen)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = dthe_aead_setkey(tfm, key, keylen);
	if (ret)
		return ret;

	ctx->aes_mode = DTHE_AES_CCM;
	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return ret;
}

static int dthe_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(tfm);

	/* Invalid auth size will be handled by crypto_aead_setauthsize() */
	ctx->authsize = authsize;

	return crypto_sync_aead_setauthsize(ctx->aead_fb, authsize);
}

static int dthe_aead_do_fallback(struct aead_request *req)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct dthe_aes_req_ctx *rctx = aead_request_ctx(req);

	SYNC_AEAD_REQUEST_ON_STACK(subreq, ctx->aead_fb);

	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return rctx->enc ? crypto_aead_encrypt(subreq) :
			   crypto_aead_decrypt(subreq);
}

static void dthe_aead_dma_in_callback(void *data)
{
	struct aead_request *req = (struct aead_request *)data;
	struct dthe_aes_req_ctx *rctx = aead_request_ctx(req);

	complete(&rctx->aes_compl);
}

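/*
 * do_one_request() handler for AEAD requests. The AAD is padded and sent
 * on the TX DMA channel only; the plaintext/ciphertext is padded and sent
 * TX + RX. The lengths programmed into the engine are the unpadded ones.
 */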
static int dthe_aead_run(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request, base);
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct dthe_aes_req_ctx *rctx = aead_request_ctx(req);
	struct dthe_data *dev_data = dthe_get_dev(ctx);

	unsigned int cryptlen = req->cryptlen;
	unsigned int assoclen = req->assoclen;
	unsigned int authsize = ctx->authsize;
	unsigned int unpadded_cryptlen;
	struct scatterlist *src = NULL;
	struct scatterlist *dst = NULL;
	struct scatterlist *aad_sg = NULL;
	u32 iv_in[AES_IV_WORDS];

	int aad_nents = 0;
	int src_nents = 0;
	int dst_nents = 0;
	int aad_mapped_nents = 0;
	int src_mapped_nents = 0;
	int dst_mapped_nents = 0;

	u8 *src_assoc_padbuf = rctx->padding;
	u8 *src_crypt_padbuf = rctx->padding + AES_BLOCK_SIZE;
	u8 *dst_crypt_padbuf = rctx->padding + AES_BLOCK_SIZE;

	bool diff_dst;
	enum dma_data_direction aad_dir, src_dir, dst_dir;

	struct device *tx_dev, *rx_dev;
	struct dma_async_tx_descriptor *desc_in, *desc_out, *desc_aad_out;

	int ret;
	int err;

	void __iomem *aes_base_reg = dev_data->regs + DTHE_P_AES_BASE;

	u32 aes_irqenable_val = readl_relaxed(aes_base_reg + DTHE_P_AES_IRQENABLE);
	u32 aes_sysconfig_val = readl_relaxed(aes_base_reg + DTHE_P_AES_SYSCONFIG);

	aes_sysconfig_val |= DTHE_AES_SYSCONFIG_DMA_DATA_IN_OUT_EN;
	writel_relaxed(aes_sysconfig_val, aes_base_reg + DTHE_P_AES_SYSCONFIG);

	aes_irqenable_val |= DTHE_AES_IRQENABLE_EN_ALL;
	writel_relaxed(aes_irqenable_val, aes_base_reg + DTHE_P_AES_IRQENABLE);

	/* In decryption, the last authsize bytes are the TAG */
	if (!rctx->enc)
		cryptlen -= authsize;
	unpadded_cryptlen = cryptlen;

	memset(src_assoc_padbuf, 0, AES_BLOCK_SIZE);
	memset(src_crypt_padbuf, 0, AES_BLOCK_SIZE);
	memset(dst_crypt_padbuf, 0, AES_BLOCK_SIZE);

	tx_dev = dmaengine_get_dma_device(dev_data->dma_aes_tx);
	rx_dev = dmaengine_get_dma_device(dev_data->dma_aes_rx);

	if (req->src == req->dst) {
		diff_dst = false;
		src_dir = DMA_BIDIRECTIONAL;
		dst_dir = DMA_BIDIRECTIONAL;
	} else {
		diff_dst = true;
		src_dir = DMA_TO_DEVICE;
		dst_dir = DMA_FROM_DEVICE;
	}
	aad_dir = DMA_TO_DEVICE;

	/* Prep AAD scatterlist (always from req->src) */
	aad_sg = dthe_aead_prep_aad(req->src, req->assoclen, src_assoc_padbuf);
	if (IS_ERR(aad_sg)) {
		ret = PTR_ERR(aad_sg);
		goto aead_prep_aad_err;
	}

	/* Prep ciphertext src scatterlist */
	src = dthe_aead_prep_crypt(req->src, req->assoclen, cryptlen, src_crypt_padbuf);
	if (IS_ERR(src)) {
		ret = PTR_ERR(src);
		goto aead_prep_src_err;
	}

	/* Prep ciphertext dst scatterlist (only if separate dst) */
	if (diff_dst) {
		dst = dthe_aead_prep_crypt(req->dst, req->assoclen, unpadded_cryptlen,
					   dst_crypt_padbuf);
		if (IS_ERR(dst)) {
			ret = PTR_ERR(dst);
			goto aead_prep_dst_err;
		}
	} else {
		dst = src;
	}

	/* Calculate padded lengths for nents calculations */
	if (req->assoclen % AES_BLOCK_SIZE)
		assoclen += AES_BLOCK_SIZE - (req->assoclen % AES_BLOCK_SIZE);
	if (cryptlen % AES_BLOCK_SIZE)
		cryptlen += AES_BLOCK_SIZE - (cryptlen % AES_BLOCK_SIZE);

	if (assoclen != 0) {
		/* Map AAD for TX only */
		aad_nents = sg_nents_for_len(aad_sg, assoclen);
		aad_mapped_nents = dma_map_sg(tx_dev, aad_sg, aad_nents, aad_dir);
		if (aad_mapped_nents == 0) {
			dev_err(dev_data->dev, "Failed to map AAD for TX\n");
			ret = -EINVAL;
			goto aead_dma_map_aad_err;
		}

		/* Prepare DMA descriptors for AAD TX */
		desc_aad_out = dmaengine_prep_slave_sg(dev_data->dma_aes_tx, aad_sg,
						       aad_mapped_nents, DMA_MEM_TO_DEV,
						       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc_aad_out) {
			dev_err(dev_data->dev, "AAD TX prep_slave_sg() failed\n");
			ret = -EINVAL;
			goto aead_dma_prep_aad_err;
		}
	}

	if (cryptlen != 0) {
		/* Map ciphertext src for TX (BIDIRECTIONAL if in-place) */
		src_nents = sg_nents_for_len(src, cryptlen);
		src_mapped_nents = dma_map_sg(tx_dev, src, src_nents, src_dir);
		if (src_mapped_nents == 0) {
			dev_err(dev_data->dev, "Failed to map ciphertext src for TX\n");
			ret = -EINVAL;
			goto aead_dma_prep_aad_err;
		}

		/* Prepare DMA descriptors for ciphertext TX */
		desc_out = dmaengine_prep_slave_sg(dev_data->dma_aes_tx, src,
						   src_mapped_nents, DMA_MEM_TO_DEV,
						   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc_out) {
			dev_err(dev_data->dev, "Ciphertext TX prep_slave_sg() failed\n");
			ret = -EINVAL;
			goto aead_dma_prep_src_err;
		}

		/* Map ciphertext dst for RX (only if separate dst) */
		if (diff_dst) {
			dst_nents = sg_nents_for_len(dst, cryptlen);
			dst_mapped_nents = dma_map_sg(rx_dev, dst, dst_nents, dst_dir);
			if (dst_mapped_nents == 0) {
				dev_err(dev_data->dev, "Failed to map ciphertext dst for RX\n");
				ret = -EINVAL;
				goto aead_dma_prep_src_err;
			}
		} else {
			dst_nents = src_nents;
			dst_mapped_nents = src_mapped_nents;
		}

		/* Prepare DMA descriptor for ciphertext RX */
		desc_in = dmaengine_prep_slave_sg(dev_data->dma_aes_rx, dst,
						  dst_mapped_nents, DMA_DEV_TO_MEM,
						  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
		if (!desc_in) {
			dev_err(dev_data->dev, "Ciphertext RX prep_slave_sg() failed\n");
			ret = -EINVAL;
			goto aead_dma_prep_dst_err;
		}

		desc_in->callback = dthe_aead_dma_in_callback;
		desc_in->callback_param = req;
	} else if (assoclen != 0) {
		/* AAD-only operation */
		desc_aad_out->callback = dthe_aead_dma_in_callback;
		desc_aad_out->callback_param = req;
	}

	init_completion(&rctx->aes_compl);

	/*
	 * HACK: There is an unknown hardware issue where, if the previous
	 * operation had alen == 0 and plen != 0, the tag calculation of a
	 * following operation with plen == 0 and alen != 0 is incorrect.
	 * As a workaround, which somehow works, reset the context by writing
	 * a 1 to the C_LENGTH_0 and AUTH_LENGTH registers.
	 */
	if (cryptlen == 0) {
		writel_relaxed(1, aes_base_reg + DTHE_P_AES_C_LENGTH_0);
		writel_relaxed(1, aes_base_reg + DTHE_P_AES_AUTH_LENGTH);
	}

	if (ctx->aes_mode == DTHE_AES_GCM) {
		if (req->iv) {
			memcpy(iv_in, req->iv, GCM_AES_IV_SIZE);
		} else {
			iv_in[0] = 0;
			iv_in[1] = 0;
			iv_in[2] = 0;
		}
		iv_in[3] = 0x01000000;
	} else {
		memcpy(iv_in, req->iv, AES_IV_SIZE);
	}

	/* Clear key2 to reset previous GHASH intermediate data */
	for (int i = 0; i < AES_KEYSIZE_256 / sizeof(u32); ++i)
		writel_relaxed(0, aes_base_reg + DTHE_P_AES_KEY2_6 + DTHE_REG_SIZE * i);

	dthe_aes_set_ctrl_key(ctx, rctx, iv_in);

	writel_relaxed(lower_32_bits(unpadded_cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_0);
	writel_relaxed(upper_32_bits(unpadded_cryptlen), aes_base_reg + DTHE_P_AES_C_LENGTH_1);
	writel_relaxed(req->assoclen, aes_base_reg + DTHE_P_AES_AUTH_LENGTH);

	/* Submit DMA descriptors: AAD TX, ciphertext TX, ciphertext RX */
	if (assoclen != 0)
		dmaengine_submit(desc_aad_out);
	if (cryptlen != 0) {
		dmaengine_submit(desc_out);
		dmaengine_submit(desc_in);
	}

	if (cryptlen != 0)
		dma_async_issue_pending(dev_data->dma_aes_rx);
	dma_async_issue_pending(dev_data->dma_aes_tx);

	/* Need to do timeout to ensure finalise gets called if DMA callback fails for any reason */
	ret = wait_for_completion_timeout(&rctx->aes_compl, msecs_to_jiffies(DTHE_DMA_TIMEOUT_MS));
	if (!ret) {
		ret = -ETIMEDOUT;
		if (cryptlen != 0)
			dmaengine_terminate_sync(dev_data->dma_aes_rx);
		dmaengine_terminate_sync(dev_data->dma_aes_tx);

		for (int i = 0; i < AES_BLOCK_WORDS; ++i)
			readl_relaxed(aes_base_reg + DTHE_P_AES_DATA_IN_OUT + DTHE_REG_SIZE * i);
	} else {
		ret = 0;
	}

	if (cryptlen != 0)
		dma_sync_sg_for_cpu(rx_dev, dst, dst_nents, dst_dir);

	if (rctx->enc)
		err = dthe_aead_enc_get_tag(req);
	else
		err = dthe_aead_dec_verify_tag(req);

	ret = (ret) ? ret : err;

aead_dma_prep_dst_err:
	if (diff_dst && cryptlen != 0)
		dma_unmap_sg(rx_dev, dst, dst_nents, dst_dir);
aead_dma_prep_src_err:
	if (cryptlen != 0)
		dma_unmap_sg(tx_dev, src, src_nents, src_dir);
aead_dma_prep_aad_err:
	if (assoclen != 0)
		dma_unmap_sg(tx_dev, aad_sg, aad_nents, aad_dir);

aead_dma_map_aad_err:
	if (diff_dst && cryptlen != 0)
		kfree(dst);
aead_prep_dst_err:
	if (cryptlen != 0)
		kfree(src);
aead_prep_src_err:
	if (assoclen != 0)
		kfree(aad_sg);

aead_prep_aad_err:
	memzero_explicit(rctx->padding, 2 * AES_BLOCK_SIZE);

	if (ret)
		ret = dthe_aead_do_fallback(req);

	local_bh_disable();
	crypto_finalize_aead_request(engine, req, ret);
	local_bh_enable();
	return 0;
}

static int dthe_aead_crypt(struct aead_request *req)
{
	struct dthe_tfm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct dthe_aes_req_ctx *rctx = aead_request_ctx(req);
	struct dthe_data *dev_data = dthe_get_dev(ctx);
	struct crypto_engine *engine;
	unsigned int cryptlen = req->cryptlen;
	bool is_zero_ctr = true;

	/* In decryption, the last authsize bytes are the TAG */
	if (!rctx->enc)
		cryptlen -= ctx->authsize;

	if (ctx->aes_mode == DTHE_AES_CCM) {
		/*
		 * For CCM mode, the 128-bit IV has the following layout:
		 * | 0 .. 2 | 3 .. 7 | 8 .. (127-8*L) | (128-8*L) .. 127 |
		 * |  L-1   |  Zero  |     Nonce      |     Counter      |
		 * L must be between 2 and 8 (inclusive), i.e. 1 <= (L-1) <= 7,
		 * and the next 5 bits must be zero; otherwise return -EINVAL.
		 */
		u8 *iv = req->iv;
		u8 L = iv[0];

		/* the variable L stores L-1 here */
		if (L < 1 || L > 7)
			return -EINVAL;
		/*
		 * DTHEv2 hardware can only work with a zero initial counter in
		 * CCM mode; check whether the initial counter value is zero.
		 */
		for (int i = 0; i < L + 1; ++i) {
			if (iv[AES_IV_SIZE - 1 - i] != 0) {
				is_zero_ctr = false;
				break;
			}
		}
	}

	/*
	 * Need to fall back to software in the following cases due to hardware
	 * restrictions:
	 * - Both AAD and plaintext/ciphertext are zero length
	 * - For AES-GCM, AAD length is more than 2^32 - 1 bytes
	 * - For AES-CCM, AAD length is more than 2^16 - 2^8 bytes
	 * - For AES-CCM, plaintext/ciphertext length is more than 2^61 - 1 bytes
	 * - For AES-CCM, AAD length is non-zero but plaintext/ciphertext length is zero
	 * - For AES-CCM, the initial counter (last L+1 bytes of IV) is not all zeroes
	 *
	 * PS: req->cryptlen is currently an unsigned int, which makes the second
	 * and fourth cases above tautologically false. If req->cryptlen is ever
	 * changed to a 64-bit type, checks for these cases would need to be
	 * added below.
	 */
	if ((req->assoclen == 0 && cryptlen == 0) ||
	    (ctx->aes_mode == DTHE_AES_CCM && req->assoclen > DTHE_AES_CCM_AAD_MAXLEN) ||
	    (ctx->aes_mode == DTHE_AES_CCM && cryptlen == 0) ||
	    (ctx->aes_mode == DTHE_AES_CCM && !is_zero_ctr))
		return dthe_aead_do_fallback(req);

	engine = dev_data->engine;
	return crypto_transfer_aead_request_to_engine(engine, req);
}

static int dthe_aead_encrypt(struct aead_request *req)
{
	struct dthe_aes_req_ctx *rctx = aead_request_ctx(req);

	rctx->enc = 1;
	return dthe_aead_crypt(req);
}

static int dthe_aead_decrypt(struct aead_request *req)
{
	struct dthe_aes_req_ctx *rctx = aead_request_ctx(req);

	rctx->enc = 0;
	return dthe_aead_crypt(req);
}

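/* Hardware-backed skcipher algorithms exposed through the crypto engine */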
static struct skcipher_engine_alg cipher_algs[] = {
	{
		.base.init = dthe_cipher_init_tfm,
		.base.setkey = dthe_aes_ecb_setkey,
		.base.encrypt = dthe_aes_encrypt,
		.base.decrypt = dthe_aes_decrypt,
		.base.min_keysize = AES_MIN_KEY_SIZE,
		.base.max_keysize = AES_MAX_KEY_SIZE,
		.base.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-dthev2",
			.cra_priority = 299,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_alignmask = AES_BLOCK_SIZE - 1,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct dthe_tfm_ctx),
			.cra_reqsize = sizeof(struct dthe_aes_req_ctx),
			.cra_module = THIS_MODULE,
		},
		.op.do_one_request = dthe_aes_run,
	}, /* ECB AES */
	{
		.base.init = dthe_cipher_init_tfm,
		.base.setkey = dthe_aes_cbc_setkey,
		.base.encrypt = dthe_aes_encrypt,
		.base.decrypt = dthe_aes_decrypt,
		.base.min_keysize = AES_MIN_KEY_SIZE,
		.base.max_keysize = AES_MAX_KEY_SIZE,
		.base.ivsize = AES_IV_SIZE,
		.base.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-dthev2",
			.cra_priority = 299,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_alignmask = AES_BLOCK_SIZE - 1,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct dthe_tfm_ctx),
			.cra_reqsize = sizeof(struct dthe_aes_req_ctx),
			.cra_module = THIS_MODULE,
		},
		.op.do_one_request = dthe_aes_run,
	}, /* CBC AES */
	{
		.base.init = dthe_cipher_init_tfm_fallback,
		.base.exit = dthe_cipher_exit_tfm,
		.base.setkey = dthe_aes_ctr_setkey,
		.base.encrypt = dthe_aes_encrypt,
		.base.decrypt = dthe_aes_decrypt,
		.base.min_keysize = AES_MIN_KEY_SIZE,
		.base.max_keysize = AES_MAX_KEY_SIZE,
		.base.ivsize = AES_IV_SIZE,
		.base.chunksize = AES_BLOCK_SIZE,
		.base.base = {
			.cra_name = "ctr(aes)",
			.cra_driver_name = "ctr-aes-dthev2",
			.cra_priority = 299,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct dthe_tfm_ctx),
			.cra_reqsize = sizeof(struct dthe_aes_req_ctx),
			.cra_module = THIS_MODULE,
		},
		.op.do_one_request = dthe_aes_run,
	}, /* CTR AES */
	{
		.base.init = dthe_cipher_init_tfm_fallback,
		.base.exit = dthe_cipher_exit_tfm,
		.base.setkey = dthe_aes_xts_setkey,
		.base.encrypt = dthe_aes_encrypt,
		.base.decrypt = dthe_aes_decrypt,
		.base.min_keysize = AES_MIN_KEY_SIZE * 2,
		.base.max_keysize = AES_MAX_KEY_SIZE * 2,
		.base.ivsize = AES_IV_SIZE,
		.base.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "xts-aes-dthev2",
			.cra_priority = 299,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_alignmask = AES_BLOCK_SIZE - 1,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct dthe_tfm_ctx),
			.cra_reqsize = sizeof(struct dthe_aes_req_ctx),
			.cra_module = THIS_MODULE,
		},
		.op.do_one_request = dthe_aes_run,
	}, /* XTS AES */
};

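/* AEAD algorithms (GCM and CCM); both keep a software fallback */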
static struct aead_engine_alg aead_algs[] = {
	{
		.base.init = dthe_aead_init_tfm,
		.base.exit = dthe_aead_exit_tfm,
		.base.setkey = dthe_gcm_aes_setkey,
		.base.setauthsize = dthe_aead_setauthsize,
		.base.maxauthsize = AES_BLOCK_SIZE,
		.base.encrypt = dthe_aead_encrypt,
		.base.decrypt = dthe_aead_decrypt,
		.base.chunksize = AES_BLOCK_SIZE,
		.base.ivsize = GCM_AES_IV_SIZE,
		.base.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "gcm-aes-dthev2",
			.cra_priority = 299,
			.cra_flags = CRYPTO_ALG_TYPE_AEAD |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct dthe_tfm_ctx),
			.cra_reqsize = sizeof(struct dthe_aes_req_ctx),
			.cra_module = THIS_MODULE,
		},
		.op.do_one_request = dthe_aead_run,
	}, /* GCM AES */
	{
		.base.init = dthe_aead_init_tfm,
		.base.exit = dthe_aead_exit_tfm,
		.base.setkey = dthe_ccm_aes_setkey,
		.base.setauthsize = dthe_aead_setauthsize,
		.base.maxauthsize = AES_BLOCK_SIZE,
		.base.encrypt = dthe_aead_encrypt,
		.base.decrypt = dthe_aead_decrypt,
		.base.chunksize = AES_BLOCK_SIZE,
		.base.ivsize = AES_IV_SIZE,
		.base.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "ccm-aes-dthev2",
			.cra_priority = 299,
			.cra_flags = CRYPTO_ALG_TYPE_AEAD |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct dthe_tfm_ctx),
			.cra_reqsize = sizeof(struct dthe_aes_req_ctx),
			.cra_module = THIS_MODULE,
		},
		.op.do_one_request = dthe_aead_run,
	}, /* CCM AES */
};

int dthe_register_aes_algs(void)
{
	int ret = 0;

	ret = crypto_engine_register_skciphers(cipher_algs, ARRAY_SIZE(cipher_algs));
	if (ret)
		return ret;
	ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		crypto_engine_unregister_skciphers(cipher_algs, ARRAY_SIZE(cipher_algs));

	return ret;
}

void dthe_unregister_aes_algs(void)
{
	crypto_engine_unregister_skciphers(cipher_algs, ARRAY_SIZE(cipher_algs));
	crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
}