Lines matching refs: edesc (cross-reference of every use of the edesc extended-descriptor pointer in the DPAA2 CAAM driver)
361 struct aead_edesc *edesc; in aead_edesc_alloc() local
370 edesc = qi_cache_zalloc(GFP_DMA | flags); in aead_edesc_alloc()
371 if (unlikely(!edesc)) { in aead_edesc_alloc()
384 qi_cache_free(edesc); in aead_edesc_alloc()
392 qi_cache_free(edesc); in aead_edesc_alloc()
401 qi_cache_free(edesc); in aead_edesc_alloc()
415 qi_cache_free(edesc); in aead_edesc_alloc()
429 qi_cache_free(edesc); in aead_edesc_alloc()
437 qi_cache_free(edesc); in aead_edesc_alloc()
467 sg_table = &edesc->sgt[0]; in aead_edesc_alloc()
475 qi_cache_free(edesc); in aead_edesc_alloc()
490 qi_cache_free(edesc); in aead_edesc_alloc()
495 edesc->src_nents = src_nents; in aead_edesc_alloc()
496 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
497 edesc->iv_dma = iv_dma; in aead_edesc_alloc()
505 edesc->assoclen = cpu_to_caam32(req->assoclen - ivsize); in aead_edesc_alloc()
507 edesc->assoclen = cpu_to_caam32(req->assoclen); in aead_edesc_alloc()
508 edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4, in aead_edesc_alloc()
510 if (dma_mapping_error(dev, edesc->assoclen_dma)) { in aead_edesc_alloc()
514 qi_cache_free(edesc); in aead_edesc_alloc()
518 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0); in aead_edesc_alloc()
533 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_edesc_alloc()
536 qi_cache_free(edesc); in aead_edesc_alloc()
540 edesc->qm_sg_dma = qm_sg_dma; in aead_edesc_alloc()
541 edesc->qm_sg_bytes = qm_sg_bytes; in aead_edesc_alloc()
582 return edesc; in aead_edesc_alloc()
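
The aead_edesc_alloc() lines above follow one pattern: the extended descriptor is taken from the QI per-CPU cache with qi_cache_zalloc(GFP_DMA | flags), the request geometry (src_nents, dst_nents, iv_dma) is recorded in it, the assoclen word stored inside the edesc is DMA-mapped, and every failure path hands the descriptor back with qi_cache_free(). A condensed sketch of that pattern is below; it reuses only helpers named in the listing (qi_cache_zalloc, cpu_to_caam32, dma_to_qm_sg_one), while the iv_in_aad flag and the elided S/G handling are placeholders, not the driver's exact code.

/* Condensed sketch of the aead_edesc_alloc() pattern above, not the exact
 * driver code: S/G table construction and src/dst mapping are elided. */
static struct aead_edesc *aead_edesc_alloc_sketch(struct device *dev,
						  struct aead_request *req,
						  gfp_t flags, int src_nents,
						  int dst_nents,
						  dma_addr_t iv_dma,
						  int ivsize, bool iv_in_aad)
{
	struct aead_edesc *edesc;
	struct dpaa2_sg_entry *sg_table;

	edesc = qi_cache_zalloc(GFP_DMA | flags);
	if (unlikely(!edesc))
		return ERR_PTR(-ENOMEM);

	sg_table = &edesc->sgt[0];
	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;

	/* assoclen lives inside the edesc and is DMA-mapped as a 4-byte word;
	 * iv_in_aad is a placeholder for the driver's actual condition */
	edesc->assoclen = cpu_to_caam32(iv_in_aad ? req->assoclen - ivsize :
						    req->assoclen);
	edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4,
					     DMA_TO_DEVICE);
	if (dma_mapping_error(dev, edesc->assoclen_dma)) {
		/* unwind the earlier src/dst/IV mappings here, then... */
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	/* first S/G entry carries the DMA-mapped assoclen word */
	dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);

	return edesc;
}
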
1117 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1189 edesc = qi_cache_zalloc(GFP_DMA | flags); in skcipher_edesc_alloc()
1190 if (unlikely(!edesc)) { in skcipher_edesc_alloc()
1198 sg_table = &edesc->sgt[0]; in skcipher_edesc_alloc()
1207 qi_cache_free(edesc); in skcipher_edesc_alloc()
1211 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1212 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1213 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1214 edesc->qm_sg_bytes = qm_sg_bytes; in skcipher_edesc_alloc()
1225 edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1227 if (dma_mapping_error(dev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1231 qi_cache_free(edesc); in skcipher_edesc_alloc()
1241 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in skcipher_edesc_alloc()
1246 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + in skcipher_edesc_alloc()
1249 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + dst_sg_idx * in skcipher_edesc_alloc()
1252 return edesc; in skcipher_edesc_alloc()
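
skcipher_edesc_alloc() records the same geometry and then maps the S/G table that was built inside the edesc, pointing both frame-list entries at it. A minimal sketch of that tail, assuming in_fle/out_fle are the request's input and output frame-list entries and dst_sg_idx is the index of the first destination entry (names taken from the listing); the error unwind is abbreviated to a comment.

/* Sketch of the S/G table mapping and frame-list setup at the end of
 * skcipher_edesc_alloc(); the out_fle offset shown is the case visible
 * above (dst_sg_idx * sizeof(struct dpaa2_sg_entry)). */
edesc->src_nents = src_nents;
edesc->dst_nents = dst_nents;
edesc->iv_dma = iv_dma;
edesc->qm_sg_bytes = qm_sg_bytes;

edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes,
				  DMA_TO_DEVICE);
if (dma_mapping_error(dev, edesc->qm_sg_dma)) {
	/* undo the src/dst/IV mappings (caam_unmap()), then free the edesc */
	qi_cache_free(edesc);
	return ERR_PTR(-ENOMEM);
}

/* both frame-list entries point into the one DMA-mapped S/G table */
dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
dpaa2_fl_set_addr(out_fle,
		  edesc->qm_sg_dma + dst_sg_idx * sizeof(struct dpaa2_sg_entry));

return edesc;
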
1255 static void aead_unmap(struct device *dev, struct aead_edesc *edesc, in aead_unmap() argument
1261 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
1262 edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma, in aead_unmap()
1263 edesc->qm_sg_bytes); in aead_unmap()
1264 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_unmap()
1267 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
1273 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1274 edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma, in skcipher_unmap()
1275 edesc->qm_sg_bytes); in skcipher_unmap()
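
The two unmap helpers undo exactly what the allocators mapped. Reassembled from the lines above; the ivsize lookups are the obvious crypto API calls and are an assumption about the elided lines.

static void aead_unmap(struct device *dev, struct aead_edesc *edesc,
		       struct aead_request *req)
{
	int ivsize = crypto_aead_ivsize(crypto_aead_reqtfm(req));

	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,
		   edesc->qm_sg_bytes);
	/* the assoclen word was mapped separately in aead_edesc_alloc() */
	dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	int ivsize = crypto_skcipher_ivsize(crypto_skcipher_reqtfm(req));

	/* the skcipher IV is mapped bidirectionally so the output IV can be
	 * read back after the job completes */
	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,
		   edesc->qm_sg_bytes);
}
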
1284 struct aead_edesc *edesc = req_ctx->edesc; in aead_encrypt_done() local
1294 aead_unmap(ctx->dev, edesc, req); in aead_encrypt_done()
1295 qi_cache_free(edesc); in aead_encrypt_done()
1305 struct aead_edesc *edesc = req_ctx->edesc; in aead_decrypt_done() local
1315 aead_unmap(ctx->dev, edesc, req); in aead_decrypt_done()
1316 qi_cache_free(edesc); in aead_decrypt_done()
1322 struct aead_edesc *edesc; in aead_encrypt() local
1329 edesc = aead_edesc_alloc(req, true); in aead_encrypt()
1330 if (IS_ERR(edesc)) in aead_encrypt()
1331 return PTR_ERR(edesc); in aead_encrypt()
1337 caam_req->edesc = edesc; in aead_encrypt()
1341 aead_unmap(ctx->dev, edesc, req); in aead_encrypt()
1342 qi_cache_free(edesc); in aead_encrypt()
1350 struct aead_edesc *edesc; in aead_decrypt() local
1357 edesc = aead_edesc_alloc(req, false); in aead_decrypt()
1358 if (IS_ERR(edesc)) in aead_decrypt()
1359 return PTR_ERR(edesc); in aead_decrypt()
1365 caam_req->edesc = edesc; in aead_decrypt()
1369 aead_unmap(ctx->dev, edesc, req); in aead_decrypt()
1370 qi_cache_free(edesc); in aead_decrypt()
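
aead_encrypt() and aead_decrypt() differ only in the encrypt flag passed to the allocator; both attach the edesc to the per-request caam_request and free it themselves only if the enqueue is refused. A sketch of that pattern, in which the dpaa2_caam_enqueue() name and the -EINPROGRESS/-EBUSY backlog handling are assumptions about lines not shown in the listing.

/* Sketch of the aead_encrypt()/aead_decrypt() submit pattern. */
static int aead_crypt_sketch(struct aead_request *req, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_request *caam_req = aead_request_ctx(req);
	struct aead_edesc *edesc;
	int ret;

	/* allocate the extended descriptor (maps src/dst/IV/assoclen) */
	edesc = aead_edesc_alloc(req, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	caam_req->edesc = edesc;
	ret = dpaa2_caam_enqueue(ctx->dev, caam_req);	/* assumed submit helper */
	if (ret != -EINPROGRESS &&
	    !(ret == -EBUSY && req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
		/* the request was not queued: undo the mappings and free */
		aead_unmap(ctx->dev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}
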
1393 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_encrypt_done() local
1404 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_encrypt_done()
1407 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_encrypt_done()
1409 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt_done()
1417 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_encrypt_done()
1420 qi_cache_free(edesc); in skcipher_encrypt_done()
1431 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_decrypt_done() local
1442 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_decrypt_done()
1445 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_decrypt_done()
1447 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt_done()
1455 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_decrypt_done()
1458 qi_cache_free(edesc); in skcipher_decrypt_done()
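
In the skcipher completion callbacks the edesc is needed one last time after unmapping: the updated IV that the crypto API expects back in req->iv was written by hardware into the area just behind the S/G table inside the edesc, which is what the &edesc->sgt[0] + edesc->qm_sg_bytes offset above addresses. A minimal sketch of that tail; the final completion call is an assumption.

/* Tail of skcipher_{en,de}crypt_done() as reflected in the lines above:
 * copy the output IV out of the edesc before returning it to the cache. */
skcipher_unmap(ctx->dev, edesc, req);
memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, ivsize);
qi_cache_free(edesc);
skcipher_request_complete(req, ecode);	/* completion call is assumed */
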
1472 struct skcipher_edesc *edesc; in skcipher_encrypt() local
1501 edesc = skcipher_edesc_alloc(req); in skcipher_encrypt()
1502 if (IS_ERR(edesc)) in skcipher_encrypt()
1503 return PTR_ERR(edesc); in skcipher_encrypt()
1509 caam_req->edesc = edesc; in skcipher_encrypt()
1513 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt()
1514 qi_cache_free(edesc); in skcipher_encrypt()
1522 struct skcipher_edesc *edesc; in skcipher_decrypt() local
1551 edesc = skcipher_edesc_alloc(req); in skcipher_decrypt()
1552 if (IS_ERR(edesc)) in skcipher_decrypt()
1553 return PTR_ERR(edesc); in skcipher_decrypt()
1559 caam_req->edesc = edesc; in skcipher_decrypt()
1563 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt()
1564 qi_cache_free(edesc); in skcipher_decrypt()
3353 static inline void ahash_unmap(struct device *dev, struct ahash_edesc *edesc, in ahash_unmap() argument
3358 if (edesc->src_nents) in ahash_unmap()
3359 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
3361 if (edesc->qm_sg_bytes) in ahash_unmap()
3362 dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes, in ahash_unmap()
3373 struct ahash_edesc *edesc, in ahash_unmap_ctx() argument
3382 ahash_unmap(dev, edesc, req); in ahash_unmap_ctx()
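
ahash_unmap() is conditional because not every ahash path maps a source scatterlist or an S/G table. Reassembled from the lines above:

static inline void ahash_unmap(struct device *dev, struct ahash_edesc *edesc,
			       struct ahash_request *req)
{
	/* only unmap what the submit path actually mapped */
	if (edesc->src_nents)
		dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);

	if (edesc->qm_sg_bytes)
		dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes,
				 DMA_TO_DEVICE);
}
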
3391 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done() local
3401 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done()
3403 qi_cache_free(edesc); in ahash_done()
3418 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_bi() local
3427 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_bi()
3428 qi_cache_free(edesc); in ahash_done_bi()
3456 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_src() local
3466 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_ctx_src()
3468 qi_cache_free(edesc); in ahash_done_ctx_src()
3483 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_dst() local
3492 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done_ctx_dst()
3493 qi_cache_free(edesc); in ahash_done_ctx_dst()
3530 struct ahash_edesc *edesc; in ahash_update_ctx() local
3558 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_ctx()
3559 if (!edesc) { in ahash_update_ctx()
3565 edesc->src_nents = src_nents; in ahash_update_ctx()
3569 sg_table = &edesc->sgt[0]; in ahash_update_ctx()
3588 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_ctx()
3590 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_ctx()
3595 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_ctx()
3600 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_ctx()
3610 req_ctx->edesc = edesc; in ahash_update_ctx()
3629 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_update_ctx()
3630 qi_cache_free(edesc); in ahash_update_ctx()
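
ahash_update_ctx() shows the ahash flavour of the same lifecycle: allocate from the QI cache, record src_nents, build the S/G table in edesc->sgt[], DMA-map it, point the input frame-list entry at it, attach the edesc to the request context, and on any error unmap bidirectionally and free. A condensed sketch, with the S/G table population and the enqueue step reduced to comments:

/* Condensed sketch of the edesc handling in ahash_update_ctx(). */
edesc = qi_cache_zalloc(GFP_DMA | flags);
if (!edesc) {
	/* release the already-mapped source scatterlist, then bail out */
	return -ENOMEM;
}

edesc->src_nents = src_nents;
sg_table = &edesc->sgt[0];
/* ... fill sg_table with the running context, buffer and req->src ... */

edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
				  DMA_TO_DEVICE);
if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
	ret = -ENOMEM;
	goto unmap_ctx;
}
edesc->qm_sg_bytes = qm_sg_bytes;

dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
req_ctx->edesc = edesc;
/* ... enqueue; a successful submit returns -EINPROGRESS to the caller ... */
return ret;

unmap_ctx:
	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
	qi_cache_free(edesc);
	return ret;
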
3647 struct ahash_edesc *edesc; in ahash_final_ctx() local
3652 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_final_ctx()
3653 if (!edesc) in ahash_final_ctx()
3657 sg_table = &edesc->sgt[0]; in ahash_final_ctx()
3670 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_final_ctx()
3672 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_final_ctx()
3677 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_final_ctx()
3682 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_final_ctx()
3692 req_ctx->edesc = edesc; in ahash_final_ctx()
3700 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_final_ctx()
3701 qi_cache_free(edesc); in ahash_final_ctx()
3719 struct ahash_edesc *edesc; in ahash_finup_ctx() local
3741 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_finup_ctx()
3742 if (!edesc) { in ahash_finup_ctx()
3747 edesc->src_nents = src_nents; in ahash_finup_ctx()
3751 sg_table = &edesc->sgt[0]; in ahash_finup_ctx()
3764 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_ctx()
3766 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_ctx()
3771 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_ctx()
3776 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_ctx()
3786 req_ctx->edesc = edesc; in ahash_finup_ctx()
3794 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_finup_ctx()
3795 qi_cache_free(edesc); in ahash_finup_ctx()
3811 struct ahash_edesc *edesc; in ahash_digest() local
3834 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_digest()
3835 if (!edesc) { in ahash_digest()
3840 edesc->src_nents = src_nents; in ahash_digest()
3845 struct dpaa2_sg_entry *sg_table = &edesc->sgt[0]; in ahash_digest()
3849 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_digest()
3851 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_digest()
3855 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_digest()
3857 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_digest()
3882 req_ctx->edesc = edesc; in ahash_digest()
3889 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_digest()
3890 qi_cache_free(edesc); in ahash_digest()
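
ahash_digest() is the one place where the S/G table inside the edesc is optional: it is built and mapped only when the mapped source has more than one entry, otherwise the input frame-list entry points straight at the single DMA-mapped segment. A sketch of that branch; dpaa2_fl_set_format() and the dpaa2_fl_sg/dpaa2_fl_single formats come from the dpaa2 frame-descriptor API and do not appear in the listing, so treat them as assumptions.

/* Sketch of ahash_digest()'s input setup: an S/G table is only needed
 * when the mapped source has more than one entry. */
edesc->src_nents = src_nents;

if (src_nents > 1) {
	struct dpaa2_sg_entry *sg_table = &edesc->sgt[0];

	/* ... convert req->src into sg_table and compute qm_sg_bytes ... */
	edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma))
		goto unmap;	/* ahash_unmap_ctx() + qi_cache_free() */
	edesc->qm_sg_bytes = qm_sg_bytes;

	dpaa2_fl_set_format(in_fle, dpaa2_fl_sg);	/* assumed format call */
	dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
} else {
	dpaa2_fl_set_format(in_fle, dpaa2_fl_single);	/* assumed format call */
	dpaa2_fl_set_addr(in_fle, sg_dma_address(req->src));
}
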
3907 struct ahash_edesc *edesc; in ahash_final_no_ctx() local
3911 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_final_no_ctx()
3912 if (!edesc) in ahash_final_no_ctx()
3954 req_ctx->edesc = edesc; in ahash_final_no_ctx()
3962 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_final_no_ctx()
3963 qi_cache_free(edesc); in ahash_final_no_ctx()
3982 struct ahash_edesc *edesc; in ahash_update_no_ctx() local
4010 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_no_ctx()
4011 if (!edesc) { in ahash_update_no_ctx()
4017 edesc->src_nents = src_nents; in ahash_update_no_ctx()
4020 sg_table = &edesc->sgt[0]; in ahash_update_no_ctx()
4028 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_no_ctx()
4030 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_no_ctx()
4035 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_no_ctx()
4050 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_no_ctx()
4060 req_ctx->edesc = edesc; in ahash_update_no_ctx()
4083 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_no_ctx()
4084 qi_cache_free(edesc); in ahash_update_no_ctx()
4101 struct ahash_edesc *edesc; in ahash_finup_no_ctx() local
4123 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_finup_no_ctx()
4124 if (!edesc) { in ahash_finup_no_ctx()
4129 edesc->src_nents = src_nents; in ahash_finup_no_ctx()
4131 sg_table = &edesc->sgt[0]; in ahash_finup_no_ctx()
4139 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_no_ctx()
4141 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_no_ctx()
4146 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_no_ctx()
4161 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_no_ctx()
4171 req_ctx->edesc = edesc; in ahash_finup_no_ctx()
4179 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_finup_no_ctx()
4180 qi_cache_free(edesc); in ahash_finup_no_ctx()
4199 struct ahash_edesc *edesc; in ahash_update_first() local
4228 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_first()
4229 if (!edesc) { in ahash_update_first()
4235 edesc->src_nents = src_nents; in ahash_update_first()
4236 sg_table = &edesc->sgt[0]; in ahash_update_first()
4248 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_first()
4251 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_first()
4256 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_first()
4258 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_first()
4282 req_ctx->edesc = edesc; in ahash_update_first()
4308 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_first()
4309 qi_cache_free(edesc); in ahash_update_first()
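
Across all the ahash paths above the error unwind is the same pair of calls, and only the DMA direction handed to ahash_unmap_ctx() differs: DMA_TO_DEVICE for the pure update paths, DMA_FROM_DEVICE where only the result buffer was mapped, and DMA_BIDIRECTIONAL where the running context is both read and written. Below is a sketch of what ahash_unmap_ctx() presumably adds on top of ahash_unmap(); the struct caam_hash_state type and its ctx_dma/ctx_dma_len fields are assumptions, since only the trailing ahash_unmap() call is visible in the listing.

/* Sketch of ahash_unmap_ctx(); the ctx_dma handling is an assumption. */
static void ahash_unmap_ctx(struct device *dev, struct ahash_edesc *edesc,
			    struct ahash_request *req, u32 flag)
{
	struct caam_hash_state *state = ahash_request_ctx(req);

	/* drop the mapping of the running hash context, if any */
	if (state->ctx_dma) {
		dma_unmap_single(dev, state->ctx_dma, state->ctx_dma_len, flag);
		state->ctx_dma = 0;
	}

	ahash_unmap(dev, edesc, req);
}
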