Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux
1
fork

Configure Feed

Select the types of activity you want to include in your feed.

crypto: xilinx - Change coherent DMA to streaming DMA API

Update the driver to use the streaming DMA API.

Signed-off-by: Harsh Jain <h.jain@amd.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Harsh Jain and committed by Herbert Xu
c315cb00 56934489

+58 -36
+58 -36
drivers/crypto/xilinx/zynqmp-aes-gcm.c
··· 69 69 70 70 struct xilinx_aead_tfm_ctx { 71 71 struct device *dev; 72 - u8 key[AES_KEYSIZE_256]; 73 - u8 *iv; 72 + dma_addr_t key_dma_addr; 73 + u8 *key; 74 74 u32 keylen; 75 75 u32 authsize; 76 76 u8 keysrc; ··· 88 88 struct crypto_aead *aead = crypto_aead_reqtfm(req); 89 89 struct xilinx_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead); 90 90 struct xilinx_aead_req_ctx *rq_ctx = aead_request_ctx(req); 91 + dma_addr_t dma_addr_data, dma_addr_hw_req; 91 92 struct device *dev = tfm_ctx->dev; 92 93 struct zynqmp_aead_hw_req *hwreq; 93 - dma_addr_t dma_addr_data, dma_addr_hw_req; 94 94 unsigned int data_size; 95 95 unsigned int status; 96 96 int ret; 97 97 size_t dma_size; 98 + void *dmabuf; 98 99 char *kbuf; 99 100 100 - if (tfm_ctx->keysrc == ZYNQMP_AES_KUP_KEY) 101 - dma_size = req->cryptlen + AES_KEYSIZE_256 102 - + GCM_AES_IV_SIZE; 103 - else 104 - dma_size = req->cryptlen + GCM_AES_IV_SIZE; 105 - 106 - kbuf = dma_alloc_coherent(dev, dma_size, &dma_addr_data, GFP_KERNEL); 101 + dma_size = req->cryptlen + XILINX_AES_AUTH_SIZE; 102 + kbuf = kmalloc(dma_size, GFP_KERNEL); 107 103 if (!kbuf) 108 104 return -ENOMEM; 109 105 110 - hwreq = dma_alloc_coherent(dev, sizeof(struct zynqmp_aead_hw_req), 111 - &dma_addr_hw_req, GFP_KERNEL); 112 - if (!hwreq) { 113 - dma_free_coherent(dev, dma_size, kbuf, dma_addr_data); 106 + dmabuf = kmalloc(sizeof(*hwreq) + GCM_AES_IV_SIZE, GFP_KERNEL); 107 + if (!dmabuf) { 108 + kfree(kbuf); 114 109 return -ENOMEM; 115 110 } 116 - 111 + hwreq = dmabuf; 117 112 data_size = req->cryptlen; 118 113 scatterwalk_map_and_copy(kbuf, req->src, 0, req->cryptlen, 0); 119 - memcpy(kbuf + data_size, req->iv, GCM_AES_IV_SIZE); 114 + memcpy(dmabuf + sizeof(struct zynqmp_aead_hw_req), req->iv, GCM_AES_IV_SIZE); 115 + dma_addr_data = dma_map_single(dev, kbuf, dma_size, DMA_BIDIRECTIONAL); 116 + if (unlikely(dma_mapping_error(dev, dma_addr_data))) { 117 + ret = -ENOMEM; 118 + goto freemem; 119 + } 120 120 121 121 hwreq->src = dma_addr_data; 122 122 
hwreq->dst = dma_addr_data; 123 - hwreq->iv = hwreq->src + data_size; 124 123 hwreq->keysrc = tfm_ctx->keysrc; 125 124 hwreq->op = rq_ctx->op; 126 125 ··· 128 129 else 129 130 hwreq->size = data_size - XILINX_AES_AUTH_SIZE; 130 131 131 - if (hwreq->keysrc == ZYNQMP_AES_KUP_KEY) { 132 - memcpy(kbuf + data_size + GCM_AES_IV_SIZE, 133 - tfm_ctx->key, AES_KEYSIZE_256); 134 - 135 - hwreq->key = hwreq->src + data_size + GCM_AES_IV_SIZE; 136 - } else { 132 + if (hwreq->keysrc == ZYNQMP_AES_KUP_KEY) 133 + hwreq->key = tfm_ctx->key_dma_addr; 134 + else 137 135 hwreq->key = 0; 136 + 137 + dma_addr_hw_req = dma_map_single(dev, dmabuf, sizeof(struct zynqmp_aead_hw_req) + 138 + GCM_AES_IV_SIZE, 139 + DMA_TO_DEVICE); 140 + if (unlikely(dma_mapping_error(dev, dma_addr_hw_req))) { 141 + ret = -ENOMEM; 142 + dma_unmap_single(dev, dma_addr_data, dma_size, DMA_BIDIRECTIONAL); 143 + goto freemem; 138 144 } 139 - 145 + hwreq->iv = dma_addr_hw_req + sizeof(struct zynqmp_aead_hw_req); 146 + dma_sync_single_for_device(dev, dma_addr_hw_req, sizeof(struct zynqmp_aead_hw_req) + 147 + GCM_AES_IV_SIZE, DMA_TO_DEVICE); 140 148 ret = zynqmp_pm_aes_engine(dma_addr_hw_req, &status); 141 - 149 + dma_unmap_single(dev, dma_addr_hw_req, sizeof(struct zynqmp_aead_hw_req) + GCM_AES_IV_SIZE, 150 + DMA_TO_DEVICE); 151 + dma_unmap_single(dev, dma_addr_data, dma_size, DMA_BIDIRECTIONAL); 142 152 if (ret) { 143 153 dev_err(dev, "ERROR: AES PM API failed\n"); 144 154 } else if (status) { ··· 178 170 ret = 0; 179 171 } 180 172 181 - if (kbuf) { 182 - memzero_explicit(kbuf, dma_size); 183 - dma_free_coherent(dev, dma_size, kbuf, dma_addr_data); 184 - } 185 - if (hwreq) { 186 - memzero_explicit(hwreq, sizeof(struct zynqmp_aead_hw_req)); 187 - dma_free_coherent(dev, sizeof(struct zynqmp_aead_hw_req), 188 - hwreq, dma_addr_hw_req); 189 - } 173 + freemem: 174 + memzero_explicit(kbuf, dma_size); 175 + kfree(kbuf); 176 + memzero_explicit(dmabuf, sizeof(struct zynqmp_aead_hw_req) + GCM_AES_IV_SIZE); 177 + 
kfree(dmabuf); 190 178 191 179 return ret; 192 180 } ··· 235 231 236 232 if (keylen == AES_KEYSIZE_256) { 237 233 memcpy(tfm_ctx->key, key, keylen); 234 + dma_sync_single_for_device(tfm_ctx->dev, tfm_ctx->key_dma_addr, 235 + AES_KEYSIZE_256, 236 + DMA_TO_DEVICE); 238 237 } 239 238 240 239 tfm_ctx->fbk_cipher->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK; ··· 362 355 drv_alg = container_of(alg, struct xilinx_aead_alg, aead.base); 363 356 tfm_ctx->dev = drv_alg->aead_dev->dev; 364 357 tfm_ctx->keylen = 0; 365 - 358 + tfm_ctx->key = NULL; 366 359 tfm_ctx->fbk_cipher = NULL; 367 360 crypto_aead_set_reqsize(aead, sizeof(struct xilinx_aead_req_ctx)); 368 361 ··· 390 383 __func__, drv_ctx->aead.base.base.cra_name); 391 384 return PTR_ERR(tfm_ctx->fbk_cipher); 392 385 } 393 - 386 + tfm_ctx->key = kmalloc(AES_KEYSIZE_256, GFP_KERNEL); 387 + if (!tfm_ctx->key) { 388 + crypto_free_aead(tfm_ctx->fbk_cipher); 389 + return -ENOMEM; 390 + } 391 + tfm_ctx->key_dma_addr = dma_map_single(tfm_ctx->dev, tfm_ctx->key, 392 + AES_KEYSIZE_256, 393 + DMA_TO_DEVICE); 394 + if (unlikely(dma_mapping_error(tfm_ctx->dev, tfm_ctx->key_dma_addr))) { 395 + kfree(tfm_ctx->key); 396 + crypto_free_aead(tfm_ctx->fbk_cipher); 397 + tfm_ctx->fbk_cipher = NULL; 398 + return -ENOMEM; 399 + } 394 400 crypto_aead_set_reqsize(aead, 395 401 max(sizeof(struct xilinx_aead_req_ctx), 396 402 sizeof(struct aead_request) + ··· 425 405 struct xilinx_aead_tfm_ctx *tfm_ctx = 426 406 (struct xilinx_aead_tfm_ctx *)crypto_tfm_ctx(tfm); 427 407 408 + dma_unmap_single(tfm_ctx->dev, tfm_ctx->key_dma_addr, AES_KEYSIZE_256, DMA_TO_DEVICE); 409 + kfree(tfm_ctx->key); 428 410 if (tfm_ctx->fbk_cipher) { 429 411 crypto_free_aead(tfm_ctx->fbk_cipher); 430 412 tfm_ctx->fbk_cipher = NULL;