
Searched refs:src_nents (Results 1 – 25 of 34) sorted by relevance

/OK3568_Linux_fs/kernel/drivers/crypto/rockchip/
rk_crypto_utils.c 89 bool rk_crypto_check_align(struct scatterlist *src_sg, size_t src_nents, in rk_crypto_check_align() argument
97 if (dst_sg && src_nents != dst_nents) in rk_crypto_check_align()
103 for (i = 0; i < src_nents; i++) { in rk_crypto_check_align()
247 u32 src_nents, dst_nents; in rk_crypto_hw_desc_init() local
256 src_nents = sg_nents_for_len(src_sg, len); in rk_crypto_hw_desc_init()
257 dst_nents = dst_sg ? sg_nents_for_len(dst_sg, len) : src_nents; in rk_crypto_hw_desc_init()
259 if (src_nents != dst_nents) in rk_crypto_hw_desc_init()
262 CRYPTO_TRACE("src_nents = %u, total = %u, len = %llu", src_nents, hw_desc->total, len); in rk_crypto_hw_desc_init()
264 if (src_nents > hw_desc->total) { in rk_crypto_hw_desc_init()
265 pr_err("crypto: nents overflow, %u > %u", src_nents, hw_desc->total); in rk_crypto_hw_desc_init()
[all …]
rk_crypto_core.c 67 u32 src_nents, dst_nents; in rk_load_data() local
77 src_nents = alg_ctx->src_nents; in rk_load_data()
89 if (!sg_pcopy_to_buffer(alg_ctx->req_src, alg_ctx->src_nents, in rk_load_data()
117 src_nents = sg_nents_for_len(sg_src, alg_ctx->total); in rk_load_data()
120 CRYPTO_TRACE("src_nents = %u, dst_nents = %u", src_nents, dst_nents); in rk_load_data()
124 alg_ctx->aligned = rk_crypto_check_align(sg_src, src_nents, sg_dst, dst_nents, in rk_load_data()
126 alg_ctx->is_dma = rk_crypto_check_dmafd(sg_src, src_nents) && in rk_load_data()
173 if (!sg_pcopy_to_buffer(alg_ctx->req_src, alg_ctx->src_nents, in rk_load_data()
rk_crypto_utils.h 43 bool rk_crypto_check_align(struct scatterlist *src_sg, size_t src_nents,
rk_crypto_skcipher_utils.c 340 alg_ctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in rk_ablk_start()
454 alg_ctx->src_nents = sg_nents_for_len(req->src, total); in rk_aead_start()
460 CRYPTO_TRACE("src_nents = %zu, dst_nents = %zu", alg_ctx->src_nents, alg_ctx->dst_nents); in rk_aead_start()
rk_crypto_v1_skcipher.c 257 sg_pcopy_to_buffer(alg_ctx->req_src, alg_ctx->src_nents, in rk_set_data_start()
278 alg_ctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in rk_ablk_start()
rk_crypto_ahash_utils.c 31 alg_ctx->src_nents = 0; in rk_alg_ctx_clear()
374 alg_ctx->src_nents = sg_nents_for_len(src_sg, nbytes); in rk_ahash_start()
rk3288_crypto.h 212 size_t src_nents; member
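
The rockchip hits above share one pattern: src_nents is derived from the request's source scatterlist with sg_nents_for_len(), the destination count falls back to the source count for in-place operations, and a mismatch is treated as an error before anything is mapped. A minimal sketch of that counting step, using a hypothetical helper name (not code from the driver itself):

    #include <linux/err.h>
    #include <linux/scatterlist.h>

    /* Count matching src/dst scatterlist entries covering len bytes. */
    static int example_count_nents(struct scatterlist *src_sg, struct scatterlist *dst_sg,
                                   u64 len, u32 *src_nents, u32 *dst_nents)
    {
            int src_cnt, dst_cnt;

            src_cnt = sg_nents_for_len(src_sg, len);
            if (src_cnt < 0)
                    return src_cnt;         /* scatterlist shorter than len */

            /* In-place operation: reuse the source count. */
            dst_cnt = dst_sg ? sg_nents_for_len(dst_sg, len) : src_cnt;
            if (dst_cnt < 0)
                    return dst_cnt;

            if (src_cnt != dst_cnt)
                    return -EINVAL;         /* src/dst layouts must match */

            *src_nents = src_cnt;
            *dst_nents = dst_cnt;
            return 0;
    }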
/OK3568_Linux_fs/kernel/drivers/crypto/caam/
caamhash.c 535 int src_nents; member
548 if (edesc->src_nents) in ahash_unmap()
549 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
824 int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index; in ahash_update_ctx() local
846 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_ctx()
847 if (src_nents < 0) { in ahash_update_ctx()
849 return src_nents; in ahash_update_ctx()
852 if (src_nents) { in ahash_update_ctx()
853 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_ctx()
874 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_ctx()
[all …]
caamalg_qi2.c 146 struct scatterlist *dst, int src_nents, in caam_unmap() argument
152 if (src_nents) in caam_unmap()
153 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
157 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
359 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
380 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
381 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
385 return ERR_PTR(src_nents); in aead_edesc_alloc()
396 if (src_nents) { in aead_edesc_alloc()
397 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
[all …]
caamalg_qi.c 798 int src_nents; member
820 int src_nents; member
868 struct scatterlist *dst, int src_nents, in caam_unmap() argument
874 if (src_nents) in caam_unmap()
875 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
879 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
895 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
907 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
946 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
972 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
[all …]
caamalg_qi2.h 111 int src_nents; member
131 int src_nents; member
148 int src_nents; member
caamalg.c 888 int src_nents; member
914 int src_nents; member
927 struct scatterlist *dst, int src_nents, in caam_unmap() argument
933 if (src_nents) in caam_unmap()
934 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
938 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
953 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
964 edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1249 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); in init_skcipher_job()
1253 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); in init_skcipher_job()
[all …]
caampkc.c 49 dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE); in rsa_io_unmap()
253 int src_nents, dst_nents; in rsa_edesc_alloc() local
281 src_nents = sg_nents_for_len(req_ctx->fixup_src, in rsa_edesc_alloc()
285 mapped_src_nents = dma_map_sg(dev, req_ctx->fixup_src, src_nents, in rsa_edesc_alloc()
331 edesc->src_nents = src_nents; in rsa_edesc_alloc()
362 dma_unmap_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE); in rsa_edesc_alloc()
caampkc.h 135 int src_nents; member
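
In the caam hits, the same caam_unmap() shape repeats across caamalg.c, caamalg_qi.c and caamalg_qi2.c: when src and dst are the same scatterlist the buffer was mapped DMA_BIDIRECTIONAL, otherwise src was mapped DMA_TO_DEVICE (and dst DMA_FROM_DEVICE), and an unmap only happens when the stored nents count is non-zero. A hedged, simplified sketch of that cleanup logic (not the full caam_unmap()):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Undo the sg mappings made for one request; mirrors how it was mapped. */
    static void example_unmap(struct device *dev, struct scatterlist *src,
                              struct scatterlist *dst, int src_nents, int dst_nents)
    {
            if (dst != src) {
                    if (src_nents)
                            dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
                    if (dst_nents)
                            dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
            } else {
                    if (src_nents)
                            dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
            }
    }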
/OK3568_Linux_fs/kernel/drivers/crypto/qce/
skcipher.c 49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_skcipher_done()
74 int dst_nents, src_nents, ret; in qce_skcipher_async_req_handle() local
84 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
88 rctx->dst_nents = rctx->src_nents; in qce_skcipher_async_req_handle()
89 if (rctx->src_nents < 0) { in qce_skcipher_async_req_handle()
91 return rctx->src_nents; in qce_skcipher_async_req_handle()
132 src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_skcipher_async_req_handle()
133 if (src_nents < 0) { in qce_skcipher_async_req_handle()
134 ret = src_nents; in qce_skcipher_async_req_handle()
140 src_nents = dst_nents - 1; in qce_skcipher_async_req_handle()
[all …]
sha.c 47 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_done()
87 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); in qce_ahash_async_req_handle()
88 if (rctx->src_nents < 0) { in qce_ahash_async_req_handle()
90 return rctx->src_nents; in qce_ahash_async_req_handle()
93 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
103 ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents, in qce_ahash_async_req_handle()
121 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
cipher.h 38 int src_nents; member
sha.h 49 int src_nents; member
/OK3568_Linux_fs/kernel/drivers/crypto/virtio/
virtio_crypto_algs.c 349 int src_nents, dst_nents; in __virtio_crypto_skcipher_do_req() local
359 src_nents = sg_nents_for_len(req->src, req->cryptlen); in __virtio_crypto_skcipher_do_req()
360 if (src_nents < 0) { in __virtio_crypto_skcipher_do_req()
362 return src_nents; in __virtio_crypto_skcipher_do_req()
368 src_nents, dst_nents); in __virtio_crypto_skcipher_do_req()
371 sg_total = src_nents + dst_nents + 3; in __virtio_crypto_skcipher_do_req()
451 for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--) in __virtio_crypto_skcipher_do_req()
/OK3568_Linux_fs/kernel/drivers/crypto/marvell/cesa/
cipher.c 66 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
69 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
94 len = sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_skcipher_std_step()
317 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
329 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
390 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
423 creq->src_nents = sg_nents_for_len(req->src, req->cryptlen); in mv_cesa_skcipher_req_init()
424 if (creq->src_nents < 0) { in mv_cesa_skcipher_req_init()
426 return creq->src_nents; in mv_cesa_skcipher_req_init()
hash.c 104 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_cleanup()
192 sreq->offset += sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_std_step()
403 sg_pcopy_to_buffer(ahashreq->src, creq->src_nents, in mv_cesa_ahash_req_cleanup()
457 sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_cache_req()
622 if (creq->src_nents) { in mv_cesa_ahash_dma_req_init()
623 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_ahash_dma_req_init()
729 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_req_init()
741 creq->src_nents = sg_nents_for_len(req->src, req->nbytes); in mv_cesa_ahash_req_init()
742 if (creq->src_nents < 0) { in mv_cesa_ahash_req_init()
744 return creq->src_nents; in mv_cesa_ahash_req_init()
cesa.h 569 int src_nents; member
615 int src_nents; member
/OK3568_Linux_fs/kernel/drivers/crypto/
talitos.c 966 unsigned int src_nents = edesc->src_nents ? : 1; in talitos_sg_unmap() local
976 if (src_nents == 1 || !is_sec1) in talitos_sg_unmap()
977 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in talitos_sg_unmap()
981 } else if (src_nents == 1 || !is_sec1) { in talitos_sg_unmap()
982 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in talitos_sg_unmap()
1219 sg_count = edesc->src_nents ?: 1; in ipsec_esp()
1331 int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len; in talitos_edesc_alloc() local
1346 src_nents = sg_nents_for_len(src, src_len); in talitos_edesc_alloc()
1347 if (src_nents < 0) { in talitos_edesc_alloc()
1351 src_nents = (src_nents == 1) ? 0 : src_nents; in talitos_edesc_alloc()
[all …]
picoxcell_crypto.c 318 int src_nents, dst_nents; in spacc_aead_make_ddts() local
326 src_nents = sg_nents_for_len(areq->src, total); in spacc_aead_make_ddts()
327 if (src_nents < 0) { in spacc_aead_make_ddts()
329 return src_nents; in spacc_aead_make_ddts()
331 if (src_nents + 1 > MAX_DDT_LEN) in spacc_aead_make_ddts()
341 if (src_nents + 1 > MAX_DDT_LEN) in spacc_aead_make_ddts()
357 src_ents = dma_map_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
366 dma_unmap_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
371 src_ents = dma_map_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
/OK3568_Linux_fs/kernel/drivers/crypto/ccp/
ccp-dmaengine.c 357 unsigned int src_nents, in ccp_create_desc() argument
376 if (!dst_nents || !src_nents) in ccp_create_desc()
393 src_nents--; in ccp_create_desc()
394 if (!src_nents) in ccp_create_desc()
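
Across the qce, cesa, talitos and ccp hits, src_nents follows the same life cycle: counted with sg_nents_for_len(), passed to dma_map_sg() before the hardware descriptor is built, and handed back to dma_unmap_sg() in the completion path. A minimal sketch of that round trip, with hypothetical function names and assuming a source scatterlist mapped for transmit only:

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    static int example_map_src(struct device *dev, struct scatterlist *src,
                               unsigned int nbytes, int *src_nents)
    {
            int nents, mapped;

            nents = sg_nents_for_len(src, nbytes);
            if (nents < 0)
                    return nents;             /* scatterlist too short for nbytes */

            mapped = dma_map_sg(dev, src, nents, DMA_TO_DEVICE);
            if (!mapped)
                    return -ENOMEM;           /* dma_map_sg() returns 0 on failure */

            *src_nents = nents;               /* keep the original count for unmap */
            return mapped;                    /* entries may have been coalesced */
    }

    static void example_unmap_src(struct device *dev, struct scatterlist *src, int src_nents)
    {
            dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
    }

One detail these drivers rely on: dma_unmap_sg() must be called with the nents value originally passed to dma_map_sg(), not the possibly smaller mapped count it returned.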
