Lines Matching refs:edesc
889 struct aead_edesc *edesc, in aead_unmap() argument
895 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
896 edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma, in aead_unmap()
897 edesc->qm_sg_bytes); in aead_unmap()
898 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_unmap()
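
(The functions below appear to come from the CAAM queue-interface driver, presumably drivers/crypto/caam/caamalg_qi.c, given the caam_qi_enqueue() and qi_cache_alloc()/qi_cache_free() calls.) Pieced together, lines 889-898 describe a small unmap helper that undoes everything aead_edesc_alloc() mapped. A minimal sketch of how it likely reads in full; deriving ivsize through crypto_aead_reqtfm()/crypto_aead_ivsize() is an assumption, the two unmap calls are taken from the fragments:

static void aead_unmap(struct device *dev, struct aead_edesc *edesc,
                       struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);  /* assumed source of ivsize */
        int ivsize = crypto_aead_ivsize(aead);

        /* Undo the src/dst/IV/QM S/G mappings set up by aead_edesc_alloc() */
        caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
                   edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,
                   edesc->qm_sg_bytes);
        /* assoclen was mapped separately as a 4-byte DMA_TO_DEVICE buffer */
        dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
}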
901 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
907 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
908 edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma, in skcipher_unmap()
909 edesc->qm_sg_bytes); in skcipher_unmap()
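
The skcipher variant on lines 901-909 follows the same pattern, with two differences visible in the fragments: the IV is mapped DMA_BIDIRECTIONAL (it is written back after the job completes) and there is no assoclen mapping to undo. A sketch under the same assumptions:

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
                           struct skcipher_request *req)
{
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);  /* assumed */
        int ivsize = crypto_skcipher_ivsize(skcipher);

        caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
                   edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,
                   edesc->qm_sg_bytes);
}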
915 struct aead_edesc *edesc; in aead_done() local
926 edesc = container_of(drv_req, typeof(*edesc), drv_req); in aead_done()
927 aead_unmap(qidev, edesc, aead_req); in aead_done()
930 qi_cache_free(edesc); in aead_done()
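
Lines 915-930 give the completion callback: the edesc is recovered from the embedded caam_drv_req via container_of(), unmapped, and returned to the QI cache. A sketch of that flow; the callback signature, the caam_ctx/qidev derivation, the status decoding and the aead_request_complete() call are assumptions filled in around the listed fragments:

static void aead_done(struct caam_drv_req *drv_req, u32 status)
{
        struct aead_request *aead_req = drv_req->app_ctx;  /* set in aead_edesc_alloc() */
        struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
        struct caam_ctx *caam_ctx = crypto_aead_ctx(aead);  /* assumed ctx type */
        struct device *qidev = caam_ctx->qidev;
        struct aead_edesc *edesc;
        int ecode = 0;

        if (unlikely(status))
                ecode = -EIO;  /* placeholder for the driver's real status decoding */

        /* drv_req is embedded in the edesc, so recover the container */
        edesc = container_of(drv_req, typeof(*edesc), drv_req);
        aead_unmap(qidev, edesc, aead_req);

        aead_request_complete(aead_req, ecode);
        qi_cache_free(edesc);
}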
948 struct aead_edesc *edesc; in aead_edesc_alloc() local
962 edesc = qi_cache_alloc(GFP_DMA | flags); in aead_edesc_alloc()
963 if (unlikely(!edesc)) { in aead_edesc_alloc()
976 qi_cache_free(edesc); in aead_edesc_alloc()
984 qi_cache_free(edesc); in aead_edesc_alloc()
995 qi_cache_free(edesc); in aead_edesc_alloc()
1003 qi_cache_free(edesc); in aead_edesc_alloc()
1012 qi_cache_free(edesc); in aead_edesc_alloc()
1027 qi_cache_free(edesc); in aead_edesc_alloc()
1059 sg_table = &edesc->sgt[0]; in aead_edesc_alloc()
1067 qi_cache_free(edesc); in aead_edesc_alloc()
1082 qi_cache_free(edesc); in aead_edesc_alloc()
1087 edesc->src_nents = src_nents; in aead_edesc_alloc()
1088 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
1089 edesc->iv_dma = iv_dma; in aead_edesc_alloc()
1090 edesc->drv_req.app_ctx = req; in aead_edesc_alloc()
1091 edesc->drv_req.cbk = aead_done; in aead_edesc_alloc()
1092 edesc->drv_req.drv_ctx = drv_ctx; in aead_edesc_alloc()
1094 edesc->assoclen = cpu_to_caam32(req->assoclen); in aead_edesc_alloc()
1095 edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4, in aead_edesc_alloc()
1097 if (dma_mapping_error(qidev, edesc->assoclen_dma)) { in aead_edesc_alloc()
1101 qi_cache_free(edesc); in aead_edesc_alloc()
1105 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0); in aead_edesc_alloc()
1120 dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_edesc_alloc()
1123 qi_cache_free(edesc); in aead_edesc_alloc()
1127 edesc->qm_sg_dma = qm_sg_dma; in aead_edesc_alloc()
1128 edesc->qm_sg_bytes = qm_sg_bytes; in aead_edesc_alloc()
1134 fd_sgt = &edesc->drv_req.fd_sgt[0]; in aead_edesc_alloc()
1153 return edesc; in aead_edesc_alloc()
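
The aead_edesc_alloc() fragments (lines 948-1153) show which fields the software-extended descriptor carries and the order they are populated: source/destination nent counts, the IV DMA address, the CAAM-endian assoclen and its 4-byte DMA mapping, the drv_req callback wiring, the QM S/G table mapping, and finally the frame-descriptor S/G entries. Every failure between the qi_cache_alloc() at line 962 and the S/G-table mapping at line 1123 unwinds through qi_cache_free(edesc), which is why the free appears so many times. A sketch of the structure those assignments imply; the field names come from the listing, while ordering, types and comments are assumptions:

struct aead_edesc {
        int src_nents;                  /* segments mapped in req->src */
        int dst_nents;                  /* segments mapped in req->dst */
        dma_addr_t iv_dma;              /* DMA address of the IV */
        int qm_sg_bytes;                /* size of the hardware S/G table */
        dma_addr_t qm_sg_dma;           /* DMA address of the S/G table */
        unsigned int assoclen;          /* req->assoclen, in CAAM endianness */
        dma_addr_t assoclen_dma;        /* 4-byte DMA mapping of @assoclen */
        struct caam_drv_req drv_req;    /* QI request: cbk, app_ctx, drv_ctx, fd_sgt[] */
        struct qm_sg_entry sgt[];       /* S/G table built at the tail of the edesc */
};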
1158 struct aead_edesc *edesc; in aead_crypt() local
1167 edesc = aead_edesc_alloc(req, encrypt); in aead_crypt()
1168 if (IS_ERR_OR_NULL(edesc)) in aead_crypt()
1169 return PTR_ERR(edesc); in aead_crypt()
1172 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req); in aead_crypt()
1176 aead_unmap(ctx->qidev, edesc, req); in aead_crypt()
1177 qi_cache_free(edesc); in aead_crypt()
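
aead_crypt() (lines 1158-1177) is the submission path: allocate the edesc, enqueue its drv_req on the queue interface, and on enqueue failure unmap and free immediately. A sketch, assuming the usual -EINPROGRESS return on a successful enqueue:

static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct caam_ctx *ctx = crypto_aead_ctx(aead);  /* assumed ctx type */
        struct aead_edesc *edesc;
        int ret;

        /* Build the extended descriptor (S/G table, IV and assoclen mappings) */
        edesc = aead_edesc_alloc(req, encrypt);
        if (IS_ERR_OR_NULL(edesc))
                return PTR_ERR(edesc);

        /* Hand the frame descriptor to the QI backend */
        ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
        if (!ret) {
                ret = -EINPROGRESS;  /* aead_done() will complete the request */
        } else {
                aead_unmap(ctx->qidev, edesc, req);
                qi_cache_free(edesc);
        }

        return ret;
}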
1207 struct skcipher_edesc *edesc; in skcipher_done() local
1217 edesc = container_of(drv_req, typeof(*edesc), drv_req); in skcipher_done()
1224 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_done()
1227 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_done()
1229 skcipher_unmap(qidev, edesc, req); in skcipher_done()
1237 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_done()
1240 qi_cache_free(edesc); in skcipher_done()
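
skcipher_done() (lines 1207-1240) mirrors aead_done(), with one extra step visible at line 1237: the output IV, stored immediately after the S/G table inside the edesc, is copied back into req->iv so chaining modes can continue from it. A sketch; the status handling and the skcipher_request_complete() call are assumptions, the debug hex dumps from lines 1224-1227 are omitted, and the unmap/memcpy/free sequence follows the fragments:

static void skcipher_done(struct caam_drv_req *drv_req, u32 status)
{
        struct skcipher_request *req = drv_req->app_ctx;
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
        struct caam_ctx *caam_ctx = crypto_skcipher_ctx(skcipher);  /* assumed */
        struct device *qidev = caam_ctx->qidev;
        int ivsize = crypto_skcipher_ivsize(skcipher);
        struct skcipher_edesc *edesc;
        int ecode = 0;

        if (unlikely(status))
                ecode = -EIO;  /* placeholder for the driver's real status decoding */

        edesc = container_of(drv_req, typeof(*edesc), drv_req);
        skcipher_unmap(qidev, edesc, req);

        /*
         * The output IV lives right after the S/G table inside the edesc;
         * copy it back so chaining modes (e.g. CBC) can continue from it.
         */
        memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, ivsize);

        qi_cache_free(edesc);
        skcipher_request_complete(req, ecode);
}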
1253 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1330 edesc = qi_cache_alloc(GFP_DMA | flags); in skcipher_edesc_alloc()
1331 if (unlikely(!edesc)) { in skcipher_edesc_alloc()
1339 sg_table = &edesc->sgt[0]; in skcipher_edesc_alloc()
1348 qi_cache_free(edesc); in skcipher_edesc_alloc()
1352 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1353 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1354 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1355 edesc->qm_sg_bytes = qm_sg_bytes; in skcipher_edesc_alloc()
1356 edesc->drv_req.app_ctx = req; in skcipher_edesc_alloc()
1357 edesc->drv_req.cbk = skcipher_done; in skcipher_edesc_alloc()
1358 edesc->drv_req.drv_ctx = drv_ctx; in skcipher_edesc_alloc()
1369 edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1371 if (dma_mapping_error(qidev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1375 qi_cache_free(edesc); in skcipher_edesc_alloc()
1379 fd_sgt = &edesc->drv_req.fd_sgt[0]; in skcipher_edesc_alloc()
1381 dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma, in skcipher_edesc_alloc()
1385 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + in skcipher_edesc_alloc()
1389 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx * in skcipher_edesc_alloc()
1393 return edesc; in skcipher_edesc_alloc()
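
The skcipher_edesc_alloc() fragments (lines 1253-1393) imply a slimmer descriptor than the AEAD one (no assoclen fields), with the IV carried after the S/G table so skcipher_done() can copy it back. The fd_sgt wiring at lines 1379-1390 points fd_sgt[1] at the whole table for the input side (dma_to_qm_sg_one_last_ext) and fd_sgt[0] dst_sg_idx entries further into the same table for the output side (dma_to_qm_sg_one_ext). A sketch of the structure; field names come from the listing, ordering, types and comments are assumptions:

struct skcipher_edesc {
        int src_nents;                  /* segments mapped in req->src */
        int dst_nents;                  /* segments mapped in req->dst */
        dma_addr_t iv_dma;              /* DMA address of the IV */
        int qm_sg_bytes;                /* size of the hardware S/G table */
        dma_addr_t qm_sg_dma;           /* DMA address of the S/G table */
        struct caam_drv_req drv_req;    /* QI request: cbk, app_ctx, drv_ctx, fd_sgt[] */
        struct qm_sg_entry sgt[];       /* S/G table, with the output IV stored after it */
};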
1406 struct skcipher_edesc *edesc; in skcipher_crypt() local
1440 edesc = skcipher_edesc_alloc(req, encrypt); in skcipher_crypt()
1441 if (IS_ERR(edesc)) in skcipher_crypt()
1442 return PTR_ERR(edesc); in skcipher_crypt()
1444 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req); in skcipher_crypt()
1448 skcipher_unmap(ctx->qidev, edesc, req); in skcipher_crypt()
1449 qi_cache_free(edesc); in skcipher_crypt()
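
skcipher_crypt() (lines 1406-1449) completes the picture and parallels aead_crypt(), except that the allocator's return value is checked with IS_ERR() rather than IS_ERR_OR_NULL(). A sketch under the same -EINPROGRESS assumption:

static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
{
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
        struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);  /* assumed ctx type */
        struct skcipher_edesc *edesc;
        int ret;

        /* Build the extended descriptor (S/G table plus trailing IV) */
        edesc = skcipher_edesc_alloc(req, encrypt);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
        if (!ret) {
                ret = -EINPROGRESS;  /* skcipher_done() will complete the request */
        } else {
                skcipher_unmap(ctx->qidev, edesc, req);
                qi_cache_free(edesc);
        }

        return ret;
}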