/OK3568_Linux_fs/kernel/drivers/crypto/rockchip/
rk3288_crypto.c
     69  struct scatterlist *sg_dst,                                in check_alignment() argument
     76  if (!sg_dst)                                               in check_alignment()
     78  out = IS_ALIGNED((uint32_t)sg_dst->offset, 4) &&           in check_alignment()
     79  IS_ALIGNED((uint32_t)sg_dst->length, align_mask);          in check_alignment()
     82  return (align && (sg_src->length == sg_dst->length));      in check_alignment()
     87  struct scatterlist *sg_dst)                                in rk_load_data() argument
     92  check_alignment(sg_src, sg_dst, dev->align_size) :         in rk_load_data()
    105  if (sg_dst) {                                              in rk_load_data()
    106  if (!dma_map_sg(dev->dev, sg_dst, 1, DMA_FROM_DEVICE)) {   in rk_load_data()
    114  dev->addr_out = sg_dma_address(sg_dst);                    in rk_load_data()
    [all …]
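
A minimal sketch of the alignment test this driver applies before choosing the
zero-copy DMA path, reconstructed from the fragments above (the helper name and
exact surrounding code are assumptions):

    #include <linux/kernel.h>
    #include <linux/scatterlist.h>

    /* Both lists must start 4-byte aligned, have lengths that satisfy the
     * engine's align_mask, and describe the same number of bytes. */
    static bool rk_sg_aligned(struct scatterlist *sg_src,
                              struct scatterlist *sg_dst,
                              int align_mask)
    {
            bool in, out;

            in = IS_ALIGNED((u32)sg_src->offset, 4) &&
                 IS_ALIGNED((u32)sg_src->length, align_mask);
            if (!sg_dst)
                    return in;      /* hash mode: no destination list */

            out = IS_ALIGNED((u32)sg_dst->offset, 4) &&
                  IS_ALIGNED((u32)sg_dst->length, align_mask);
            return in && out && (sg_src->length == sg_dst->length);
    }

When the test fails, the snippets suggest rk_load_data() falls back to a
driver-owned bounce buffer instead of mapping the caller's pages directly.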
|
rk_crypto_v1_skcipher.c
    261  err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, alg_ctx->sg_dst);  in rk_set_data_start()
    279  alg_ctx->sg_dst = req->dst;                                         in rk_ablk_start()
    301  memcpy(req->iv, sg_virt(alg_ctx->sg_dst) +                          in rk_iv_copyback()
    302  alg_ctx->sg_dst->length - ivsize, ivsize);                          in rk_iv_copyback()
    323  new_iv = page_address(sg_page(alg_ctx->sg_dst)) +                   in rk_update_iv()
    324  alg_ctx->sg_dst->offset +                                           in rk_update_iv()
    325  alg_ctx->sg_dst->length - ivsize;                                   in rk_update_iv()
    359  alg_ctx->sg_dst = sg_next(alg_ctx->sg_dst);                         in rk_ablk_rx()
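
The rk_iv_copyback() fragments show the usual CBC chaining convention: after
encryption the next IV is the last ciphertext block, copied back into req->iv
from the tail of the destination scatterlist. A hedged sketch of that step
(the wrapper function is illustrative, and it assumes the destination page is
kernel-mapped so sg_virt() is valid):

    #include <linux/scatterlist.h>
    #include <linux/string.h>
    #include <crypto/skcipher.h>

    static void iv_copyback(struct skcipher_request *req,
                            struct scatterlist *sg_dst,
                            unsigned int ivsize)
    {
            /* the final ivsize bytes of ciphertext become the chained IV */
            memcpy(req->iv,
                   sg_virt(sg_dst) + sg_dst->length - ivsize, ivsize);
    }

rk_update_iv() reaches the same bytes via page_address(sg_page()) plus the
segment offset, which is equivalent for lowmem pages.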
|
rk_crypto_skcipher_utils.c
    115  struct scatterlist *sg_dst;                                         in rk_get_new_iv() local
    122  sg_dst = alg_ctx->aligned ? alg_ctx->sg_dst : &alg_ctx->sg_tmp;     in rk_get_new_iv()
    134  sg_pcopy_to_buffer(sg_dst, alg_ctx->map_nents,                      in rk_get_new_iv()
    140  sg_pcopy_to_buffer(sg_dst, alg_ctx->map_nents,                      in rk_get_new_iv()
    185  err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, alg_ctx->sg_dst);  in rk_set_data_start()
    276  alg_ctx->sg_dst = sg_next(alg_ctx->sg_dst);                         in rk_ablk_rx()
    341  alg_ctx->sg_dst = req->dst;                                         in rk_ablk_start()
    455  alg_ctx->sg_dst = req->dst;                                         in rk_aead_start()
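
rk_get_new_iv() uses sg_pcopy_to_buffer() instead, which walks a multi-segment
list without requiring a kernel mapping of every page. A sketch under the same
assumptions (total is the number of bytes the request has produced so far):

    #include <linux/scatterlist.h>

    static void get_new_iv(struct scatterlist *sg_dst, unsigned int nents,
                           u8 *iv, unsigned int ivsize, unsigned int total)
    {
            /* copy the trailing ivsize bytes: skip total - ivsize first */
            sg_pcopy_to_buffer(sg_dst, nents, iv, ivsize, total - ivsize);
    }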
|
rk_crypto_core.c
     63  struct scatterlist *sg_dst)                                                   in rk_load_data() argument
    111  sg_dst = (alg_ctx->req_src == alg_ctx->req_dst) ? sg_src :                    in rk_load_data()
    116  alg_ctx->sg_dst = sg_dst;                                                     in rk_load_data()
    118  dst_nents = sg_nents_for_len(sg_dst, alg_ctx->total);                         in rk_load_data()
    124  alg_ctx->aligned = rk_crypto_check_align(sg_src, src_nents, sg_dst, dst_nents,  in rk_load_data()
    127  rk_crypto_check_dmafd(sg_dst, dst_nents);                                     in rk_load_data()
    155  if (sg_dst) {                                                                 in rk_load_data()
    156  if (!alg_ctx->is_dma && !dma_map_sg(dev, sg_dst, nents, DMA_FROM_DEVICE)) {   in rk_load_data()
    165  alg_ctx->addr_out = sg_dma_address(sg_dst);                                   in rk_load_data()
    191  if (sg_dst) {                                                                 in rk_load_data()
    [all …]
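
The destination half of rk_load_data() maps the list for device-to-memory DMA
and programs the engine with the bus address of the first segment. A hedged
sketch of just that step (helper name illustrative, error unwinding trimmed):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int map_dst_for_dma(struct device *dev,
                               struct scatterlist *sg_dst,
                               int nents, dma_addr_t *addr_out)
    {
            /* dma_map_sg() returns the number of mapped segments, 0 on error */
            if (!dma_map_sg(dev, sg_dst, nents, DMA_FROM_DEVICE))
                    return -ENOMEM;
            *addr_out = sg_dma_address(sg_dst);
            return 0;
    }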
|
rk_crypto_utils.c
     70  struct scatterlist *sg_dst,                               in check_scatter_align() argument
     78  if (!sg_dst)                                              in check_scatter_align()
     81  out = IS_ALIGNED((u32)sg_dst->offset, 4) &&               in check_scatter_align()
     82  IS_ALIGNED((u32)sg_dst->length, align_mask) &&            in check_scatter_align()
     83  (sg_phys(sg_dst) < SZ_4G);                                in check_scatter_align()
     86  return (align && (sg_src->length == sg_dst->length));     in check_scatter_align()
|
rk3288_crypto_skcipher.c
    263  err = dev->load_data(dev, dev->sg_src, dev->sg_dst);      in rk_set_data_start()
    281  dev->sg_dst = req->dst;                                   in rk_ablk_start()
    303  memcpy(req->iv, sg_virt(dev->sg_dst) +                    in rk_iv_copyback()
    304  dev->sg_dst->length - ivsize, ivsize);                    in rk_iv_copyback()
    324  new_iv = page_address(sg_page(dev->sg_dst)) +             in rk_update_iv()
    325  dev->sg_dst->offset + dev->sg_dst->length - ivsize;       in rk_update_iv()
    364  dev->sg_dst = sg_next(dev->sg_dst);                       in rk_ablk_rx()
|
rk_crypto_v2_skcipher.c
    350  alg_ctx->sg_src, alg_ctx->sg_dst, alg_ctx->count);        in crypto_dma_start()
    543  struct scatterlist *sg_src, *sg_dst;                      in rk_aead_crypt() local
    563  sg_dst = (req->src == req->dst) ? sg_src : scatterwalk_ffwd(dst, req->dst, req->assoclen);  in rk_aead_crypt()
    566  sg_dst, sg_nents_for_len(sg_dst, data_len),               in rk_aead_crypt()
    570  sg_nents_for_len(sg_dst, data_len) > RK_DEFAULT_LLI_CNT)  in rk_aead_crypt()
|
rk_crypto_v3_skcipher.c
    349  alg_ctx->sg_src, alg_ctx->sg_dst, alg_ctx->count);        in crypto_dma_start()
    542  struct scatterlist *sg_src, *sg_dst;                      in rk_aead_crypt() local
    562  sg_dst = (req->src == req->dst) ? sg_src : scatterwalk_ffwd(dst, req->dst, req->assoclen);  in rk_aead_crypt()
    565  sg_dst, sg_nents_for_len(sg_dst, data_len),               in rk_aead_crypt()
    569  sg_nents_for_len(sg_dst, data_len) > RK_DEFAULT_LLI_CNT)  in rk_aead_crypt()
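
Both the v2 and v3 rk_aead_crypt() routines skip the associated data with
scatterwalk_ffwd() so the engine's descriptors cover only the payload; the
sg_nents_for_len() comparison against RK_DEFAULT_LLI_CNT then bounds how many
segments the LLI chain may hold. A sketch of the forwarding step (the wrapper
is an assumption):

    #include <crypto/aead.h>
    #include <crypto/scatterwalk.h>

    static void aead_payload_sgs(struct aead_request *req,
                                 struct scatterlist src[2],
                                 struct scatterlist dst[2],
                                 struct scatterlist **sg_src,
                                 struct scatterlist **sg_dst)
    {
            *sg_src = scatterwalk_ffwd(src, req->src, req->assoclen);
            /* in-place requests reuse the forwarded source list */
            *sg_dst = (req->src == req->dst) ? *sg_src :
                      scatterwalk_ffwd(dst, req->dst, req->assoclen);
    }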
|
rk3288_crypto.h
    205  struct scatterlist *sg_dst;                               member
    226  struct scatterlist *sg_dst);
|
rk_crypto_core.h
    101  struct scatterlist *sg_dst);
    140  struct scatterlist *sg_dst;                               member
|
rk3288_crypto_ahash.c
    204  dev->sg_dst = NULL;                                       in rk_ahash_start()
|
rk_crypto_ahash_utils.c
     73  err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, alg_ctx->sg_dst);  in rk_ahash_set_data_start()
|
/OK3568_Linux_fs/kernel/arch/arm/crypto/
aes-ce-glue.c
    273  struct scatterlist sg_src[2], sg_dst[2];                  in cts_cbc_encrypt() local
    303  dst = scatterwalk_ffwd(sg_dst, req->dst,                  in cts_cbc_encrypt()
    331  struct scatterlist sg_src[2], sg_dst[2];                  in cts_cbc_decrypt() local
    361  dst = scatterwalk_ffwd(sg_dst, req->dst,                  in cts_cbc_decrypt()
    450  struct scatterlist sg_src[2], sg_dst[2];                  in xts_encrypt() local
    498  dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);  in xts_encrypt()
    522  struct scatterlist sg_src[2], sg_dst[2];                  in xts_decrypt() local
    570  dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);  in xts_decrypt()
|
/OK3568_Linux_fs/kernel/arch/arm64/crypto/
aes-glue.c
    278  struct scatterlist sg_src[2], sg_dst[2];                  in cts_cbc_encrypt() local
    307  dst = scatterwalk_ffwd(sg_dst, req->dst,                  in cts_cbc_encrypt()
    335  struct scatterlist sg_src[2], sg_dst[2];                  in cts_cbc_decrypt() local
    364  dst = scatterwalk_ffwd(sg_dst, req->dst,                  in cts_cbc_decrypt()
    490  struct scatterlist sg_src[2], sg_dst[2];                  in xts_encrypt() local
    538  dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);  in xts_encrypt()
    562  struct scatterlist sg_src[2], sg_dst[2];                  in xts_decrypt() local
    610  dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);  in xts_decrypt()
|
aes-neonbs-glue.c
    278  struct scatterlist sg_src[2], sg_dst[2];                  in __xts_crypt() local
    350  dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);  in __xts_crypt()
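
In all three AES glue files the two-element on-stack arrays exist so that
scatterwalk_ffwd() can describe the tail of a request without modifying the
caller's chain: it either returns a pointer into the original list or fills
the local array with a mapping of the remainder. A sketch (tail_offset is an
assumed parameter):

    #include <crypto/scatterwalk.h>
    #include <crypto/skcipher.h>

    static void get_tail_sgs(struct skcipher_request *req,
                             unsigned int tail_offset,
                             struct scatterlist sg_src[2],
                             struct scatterlist sg_dst[2],
                             struct scatterlist **src,
                             struct scatterlist **dst)
    {
            /* fast-forward both walks to the CTS/XTS tail */
            *src = scatterwalk_ffwd(sg_src, req->src, tail_offset);
            *dst = scatterwalk_ffwd(sg_dst, req->dst, tail_offset);
    }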
|
/OK3568_Linux_fs/kernel/drivers/video/rockchip/rga/
rga.h
    304  struct sg_table *sg_dst;                                  member
    386  struct sg_table *sg_dst;                                  member
|
rga_drv.c
     763  reg->sg_dst = req->sg_dst;                                 in rga_reg_init()
     900  sgt = (!reg) ? req->sg_dst : reg->sg_dst;                  in rga_put_dma_buf()
    1008  req->sg_dst = NULL;                                        in rga_convert_dma_buf()
    1080  req->sg_dst = ion_sg_table(rga_drvdata->ion_client, hdl);  in rga_convert_dma_buf()
    1193  req->sg_dst = NULL;                                        in rga_get_dma_buf()
    1205  ret = rga_get_img_info(&req->dst, mmu_flag, &req->sg_dst,  in rga_get_dma_buf()
    1283  reg1->sg_dst = req1->sg_dst;                               in rga_reg_init_2()
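
Here req->sg_dst is an sg_table for an imported buffer: the legacy path gets
it from ion_sg_table(), while rga_get_img_info() appears to resolve it from a
dma-buf fd. A hedged sketch of the standard dma-buf import sequence that
yields such a table (fd and dev are assumptions, error unwinding trimmed):

    #include <linux/dma-buf.h>
    #include <linux/err.h>

    static struct sg_table *import_dst_buffer(int fd, struct device *dev,
                                              struct dma_buf_attachment **att)
    {
            struct dma_buf *dmabuf = dma_buf_get(fd);

            if (IS_ERR(dmabuf))
                    return ERR_CAST(dmabuf);
            *att = dma_buf_attach(dmabuf, dev);
            if (IS_ERR(*att))
                    return ERR_CAST(*att);
            /* the returned sg_table is what a driver would keep in sg_dst */
            return dma_buf_map_attachment(*att, DMA_BIDIRECTIONAL);
    }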
|
rga_mmu_info.c
     683  if (req->sg_dst) {                                        in rga_mmu_info_BitBlt_mode()
     684  … ret = rga_MapION(req->sg_dst, &MMU_Base[SrcMemSize], DstMemSize, req->line_draw_info.line_width);  in rga_mmu_info_BitBlt_mode()
     900  if (req->sg_dst) {                                        in rga_mmu_info_color_fill_mode()
     901  … ret = rga_MapION(req->sg_dst, &MMU_Base[0], DstMemSize, req->line_draw_info.line_width);  in rga_mmu_info_color_fill_mode()
    1027  if (req->sg_dst) {                                        in rga_mmu_info_pre_scale_mode()
    1028  … ret = rga_MapION(req->sg_dst, &MMU_Base[SrcMemSize], DstMemSize, req->line_draw_info.line_width);  in rga_mmu_info_pre_scale_mode()
|
/OK3568_Linux_fs/kernel/drivers/crypto/
s5p-sss.c
     310  struct scatterlist *sg_dst;                               member
     509  dma_unmap_sg(dev->dev, dev->sg_dst, 1, DMA_FROM_DEVICE);  in s5p_unset_outdata()
     551  dev->sg_dst = sg;                                         in s5p_set_outdata()
     582  if (!sg_is_last(dev->sg_dst)) {                           in s5p_aes_tx()
     583  ret = s5p_set_outdata(dev, sg_next(dev->sg_dst));         in s5p_aes_tx()
     691  if (sg_is_last(dev->sg_dst))                              in s5p_aes_interrupt()
     747  s5p_set_dma_outdata(dev, dev->sg_dst);                    in s5p_aes_interrupt()
    1958  s5p_set_dma_outdata(dev, dev->sg_dst);                    in s5p_aes_crypt_start()
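
The s5p driver streams one output segment at a time: when a segment completes
it is unmapped, and the walk advances with sg_next() until sg_is_last() marks
the request done. A sketch of that advance (helper name illustrative):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static struct scatterlist *advance_out_segment(struct device *dev,
                                                   struct scatterlist *sg_dst)
    {
            dma_unmap_sg(dev, sg_dst, 1, DMA_FROM_DEVICE);  /* segment done */
            if (sg_is_last(sg_dst))
                    return NULL;            /* whole request received */
            return sg_next(sg_dst);         /* caller maps this one next */
    }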
|
/OK3568_Linux_fs/kernel/drivers/crypto/ux500/cryp/
cryp.h
    216  struct scatterlist *sg_dst;                               member
|
cryp_core.c
    557  ctx->device->dma.sg_dst = sg;                             in cryp_set_dma_transfer()
    559  ctx->device->dma.sg_dst,                                  in cryp_set_dma_transfer()
    574  ctx->device->dma.sg_dst,                                  in cryp_set_dma_transfer()
    615  dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,  in cryp_dma_done()
|
/OK3568_Linux_fs/kernel/drivers/dma/
ste_dma40.c
    2111  struct scatterlist *sg_src, struct scatterlist *sg_dst,  in d40_prep_sg_log() argument
    2127  ret = d40_log_sg_to_lli(sg_dst, sg_len,                  in d40_prep_sg_log()
    2139  struct scatterlist *sg_src, struct scatterlist *sg_dst,  in d40_prep_sg_phy() argument
    2158  ret = d40_phy_sg_to_lli(sg_dst, sg_len, dst_dev_addr,    in d40_prep_sg_phy()
    2210  struct scatterlist *sg_dst, unsigned int sg_len,         in d40_prep_sg() argument
    2244  ret = d40_prep_sg_log(chan, desc, sg_src, sg_dst,        in d40_prep_sg()
    2247  ret = d40_prep_sg_phy(chan, desc, sg_src, sg_dst,        in d40_prep_sg()
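
d40_prep_sg() is the driver-side half of the dmaengine contract, turning the
client's scatterlists into logical or physical LLIs depending on the channel
type. The matching client-side call, sketched with assumed chan/sgl/nents
(this is the generic dmaengine API, not ste_dma40-specific code):

    #include <linux/dmaengine.h>

    static int submit_rx(struct dma_chan *chan, struct scatterlist *sgl,
                         unsigned int nents)
    {
            struct dma_async_tx_descriptor *desc;

            desc = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_DEV_TO_MEM,
                                           DMA_PREP_INTERRUPT);
            if (!desc)
                    return -EINVAL;
            dmaengine_submit(desc);         /* queue the descriptor */
            dma_async_issue_pending(chan);  /* start the transfer */
            return 0;
    }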