Lines matching refs:jrdev

102 struct device *jrdev; member
120 void (*ahash_op_done)(struct device *jrdev, u32 *desc, u32 err,
141 static inline int map_seq_out_ptr_ctx(u32 *desc, struct device *jrdev, in map_seq_out_ptr_ctx() argument
146 state->ctx_dma = dma_map_single(jrdev, state->caam_ctx, in map_seq_out_ptr_ctx()
148 if (dma_mapping_error(jrdev, state->ctx_dma)) { in map_seq_out_ptr_ctx()
149 dev_err(jrdev, "unable to map ctx\n"); in map_seq_out_ptr_ctx()
160 static inline int buf_map_to_sec4_sg(struct device *jrdev, in buf_map_to_sec4_sg() argument
169 state->buf_dma = dma_map_single(jrdev, state->buf, buflen, in buf_map_to_sec4_sg()
171 if (dma_mapping_error(jrdev, state->buf_dma)) { in buf_map_to_sec4_sg()
172 dev_err(jrdev, "unable to map buf\n"); in buf_map_to_sec4_sg()
183 static inline int ctx_map_to_sec4_sg(struct device *jrdev, in ctx_map_to_sec4_sg() argument
188 state->ctx_dma = dma_map_single(jrdev, state->caam_ctx, ctx_len, flag); in ctx_map_to_sec4_sg()
189 if (dma_mapping_error(jrdev, state->ctx_dma)) { in ctx_map_to_sec4_sg()
190 dev_err(jrdev, "unable to map ctx\n"); in ctx_map_to_sec4_sg()
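
The three helpers above (map_seq_out_ptr_ctx(), buf_map_to_sec4_sg(), ctx_map_to_sec4_sg()) all follow the same single-buffer idiom: map with dma_map_single(), test the handle with dma_mapping_error(), log against the job-ring device and fail. A minimal sketch of that pattern; the buffer, length and direction parameters here are placeholders rather than the driver's state fields:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>

    /*
     * Map one CPU buffer for the device; a failed mapping is reported
     * against the job-ring device and turned into -ENOMEM, and the
     * handle is cleared so later unmap paths can skip it.
     */
    static int map_one_buf(struct device *jrdev, void *buf, size_t len,
                           enum dma_data_direction dir, dma_addr_t *dma)
    {
            *dma = dma_map_single(jrdev, buf, len, dir);
            if (dma_mapping_error(jrdev, *dma)) {
                    dev_err(jrdev, "unable to map buf\n");
                    *dma = 0;
                    return -ENOMEM;
            }
            return 0;
    }
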
204 struct device *jrdev = ctx->jrdev; in ahash_set_sh_desc() local
205 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in ahash_set_sh_desc()
214 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma, in ahash_set_sh_desc()
225 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma, in ahash_set_sh_desc()
235 dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma, in ahash_set_sh_desc()
246 dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma, in ahash_set_sh_desc()
260 struct device *jrdev = ctx->jrdev; in axcbc_set_sh_desc() local
267 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma, in axcbc_set_sh_desc()
277 dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma, in axcbc_set_sh_desc()
290 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma, in axcbc_set_sh_desc()
300 dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma, in axcbc_set_sh_desc()
312 struct device *jrdev = ctx->jrdev; in acmac_set_sh_desc() local
319 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma, in acmac_set_sh_desc()
329 dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma, in acmac_set_sh_desc()
339 dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma, in acmac_set_sh_desc()
349 dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma, in acmac_set_sh_desc()
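
The twelve dma_sync_single_for_device() hits across ahash_set_sh_desc(), axcbc_set_sh_desc() and acmac_set_sh_desc() are all the same step: after the CPU rewrites a shared descriptor in place, ownership of the bytes is handed back to the device. A sketch, assuming the descriptor was mapped DMA_TO_DEVICE at init time (the truncated lines above do not show the direction the driver actually passes) and with desc_len_bytes standing in for the CAAM desc_bytes() helper:

    #include <linux/dma-mapping.h>

    /* Push freshly rewritten shared-descriptor words to the device. */
    static void flush_sh_desc(struct device *jrdev, dma_addr_t desc_dma,
                              size_t desc_len_bytes)
    {
            dma_sync_single_for_device(jrdev, desc_dma, desc_len_bytes,
                                       DMA_TO_DEVICE);
    }
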
362 struct device *jrdev = ctx->jrdev; in hash_digest_key() local
370 dev_err(jrdev, "unable to allocate key input memory\n"); in hash_digest_key()
376 key_dma = dma_map_single(jrdev, key, *keylen, DMA_BIDIRECTIONAL); in hash_digest_key()
377 if (dma_mapping_error(jrdev, key_dma)) { in hash_digest_key()
378 dev_err(jrdev, "unable to map key memory\n"); in hash_digest_key()
402 ret = caam_jr_enqueue(jrdev, desc, split_key_done, &result); in hash_digest_key()
412 dma_unmap_single(jrdev, key_dma, *keylen, DMA_BIDIRECTIONAL); in hash_digest_key()
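
hash_digest_key() runs one job synchronously: map the key DMA_BIDIRECTIONAL (the job both reads it and overwrites it with the digest), enqueue with a completion callback, block, then unmap. A sketch of that flow; sync_done() and struct sync_result are hypothetical stand-ins for the driver's split_key_done() callback and its result struct:

    #include <linux/completion.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    #include "jr.h"         /* caam_jr_enqueue() */
    #include "error.h"      /* caam_jr_strstatus() */

    struct sync_result {
            struct completion done;
            int err;
    };

    /* Hypothetical callback in the caam_jr_enqueue() callback
     * signature: decode the hardware status, wake the submitter. */
    static void sync_done(struct device *jrdev, u32 *desc, u32 err,
                          void *arg)
    {
            struct sync_result *res = arg;

            res->err = err ? caam_jr_strstatus(jrdev, err) : 0;
            complete(&res->done);
    }

    /* Enqueue returns -EINPROGRESS when the job is accepted; block
     * until sync_done() fires, then tear down the key mapping. */
    static int submit_and_wait(struct device *jrdev, u32 *desc,
                               dma_addr_t key_dma, size_t keylen)
    {
            struct sync_result res = { .err = 0 };
            int ret;

            init_completion(&res.done);
            ret = caam_jr_enqueue(jrdev, desc, sync_done, &res);
            if (ret == -EINPROGRESS) {
                    wait_for_completion(&res.done);
                    ret = res.err;
            }
            dma_unmap_single(jrdev, key_dma, keylen, DMA_BIDIRECTIONAL);
            return ret;
    }
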
425 struct device *jrdev = ctx->jrdev; in ahash_setkey() local
428 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); in ahash_setkey()
432 dev_dbg(jrdev, "keylen %d\n", keylen); in ahash_setkey()
466 dma_sync_single_for_device(ctx->jrdev, in ahash_setkey()
471 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, key, in ahash_setkey()
488 struct device *jrdev = ctx->jrdev; in axcbc_setkey() local
494 dma_sync_single_for_device(jrdev, ctx->adata.key_dma, keylen, in axcbc_setkey()
575 static inline void ahash_done_cpy(struct device *jrdev, u32 *desc, u32 err, in ahash_done_cpy() argument
579 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); in ahash_done_cpy()
588 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in ahash_done_cpy()
594 ecode = caam_jr_strstatus(jrdev, err); in ahash_done_cpy()
596 ahash_unmap_ctx(jrdev, edesc, req, digestsize, dir); in ahash_done_cpy()
614 static void ahash_done(struct device *jrdev, u32 *desc, u32 err, in ahash_done() argument
617 ahash_done_cpy(jrdev, desc, err, context, DMA_FROM_DEVICE); in ahash_done()
620 static void ahash_done_ctx_src(struct device *jrdev, u32 *desc, u32 err, in ahash_done_ctx_src() argument
623 ahash_done_cpy(jrdev, desc, err, context, DMA_BIDIRECTIONAL); in ahash_done_ctx_src()
626 static inline void ahash_done_switch(struct device *jrdev, u32 *desc, u32 err, in ahash_done_switch() argument
630 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); in ahash_done_switch()
639 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in ahash_done_switch()
644 ecode = caam_jr_strstatus(jrdev, err); in ahash_done_switch()
646 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, dir); in ahash_done_switch()
677 static void ahash_done_bi(struct device *jrdev, u32 *desc, u32 err, in ahash_done_bi() argument
680 ahash_done_switch(jrdev, desc, err, context, DMA_BIDIRECTIONAL); in ahash_done_bi()
683 static void ahash_done_ctx_dst(struct device *jrdev, u32 *desc, u32 err, in ahash_done_ctx_dst() argument
686 ahash_done_switch(jrdev, desc, err, context, DMA_FROM_DEVICE); in ahash_done_ctx_dst()
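
ahash_done()/ahash_done_ctx_src() and ahash_done_bi()/ahash_done_ctx_dst() are thin wrappers: each forwards to a shared helper (ahash_done_cpy() or ahash_done_switch()) and differs only in the direction used to unmap the hash context. A sketch of that shape, with do_done() as a placeholder for the shared helper and its elided body summarized in comments:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/types.h>

    #include "error.h"      /* caam_jr_strstatus() */

    /* Placeholder for ahash_done_cpy()/ahash_done_switch(): decode the
     * status, then (not shown) unmap with 'dir', copy the digest out
     * and complete the ahash request. */
    static void do_done(struct device *jrdev, u32 *desc, u32 err,
                        void *context, enum dma_data_direction dir)
    {
            int ecode = 0;

            dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
            if (err)
                    ecode = caam_jr_strstatus(jrdev, err);
            (void)ecode;    /* would be handed to the crypto completion */
    }

    static void done_from_device(struct device *jrdev, u32 *desc,
                                 u32 err, void *context)
    {
            do_done(jrdev, desc, err, context, DMA_FROM_DEVICE);
    }

    static void done_bidirectional(struct device *jrdev, u32 *desc,
                                   u32 err, void *context)
    {
            do_done(jrdev, desc, err, context, DMA_BIDIRECTIONAL);
    }
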
707 dev_err(ctx->jrdev, "could not allocate extended descriptor\n"); in ahash_edesc_alloc()
735 src_dma = dma_map_single(ctx->jrdev, sg, sgsize, DMA_TO_DEVICE); in ahash_edesc_add_src()
736 if (dma_mapping_error(ctx->jrdev, src_dma)) { in ahash_edesc_add_src()
737 dev_err(ctx->jrdev, "unable to map S/G table\n"); in ahash_edesc_add_src()
760 struct device *jrdev = ctx->jrdev; in ahash_do_one_req() local
766 ret = caam_jr_enqueue(jrdev, desc, state->ahash_op_done, req); in ahash_do_one_req()
769 ahash_unmap(jrdev, state->edesc, req, 0); in ahash_do_one_req()
778 static int ahash_enqueue_req(struct device *jrdev, in ahash_enqueue_req() argument
779 void (*cbk)(struct device *jrdev, u32 *desc, in ahash_enqueue_req() argument
784 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev); in ahash_enqueue_req()
801 ret = caam_jr_enqueue(jrdev, desc, cbk, req); in ahash_enqueue_req()
804 ahash_unmap_ctx(jrdev, edesc, req, dst_len, dir); in ahash_enqueue_req()
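
ahash_do_one_req() and ahash_enqueue_req() show the asynchronous path: the per-request callback is stashed in the state (the ahash_op_done member at line 120), the descriptor is enqueued, and any return other than -EINPROGRESS obliges the caller to unmap immediately (the driver also special-cases backlogged requests, not shown here). A minimal sketch of that submit-or-cleanup shape, with unmap_req() as a hypothetical stand-in for ahash_unmap_ctx():

    #include <linux/errno.h>
    #include <linux/types.h>

    #include "jr.h"         /* caam_jr_enqueue() */

    /* Hypothetical cleanup helper standing in for ahash_unmap_ctx(). */
    static void unmap_req(struct device *jrdev, void *req) { }

    /* On acceptance the job ring later invokes 'cbk'; on any other
     * return the mappings are torn down here and the error propagated. */
    static int submit_async(struct device *jrdev, u32 *desc,
                            void (*cbk)(struct device *jrdev, u32 *desc,
                                        u32 err, void *context),
                            void *req)
    {
            int ret = caam_jr_enqueue(jrdev, desc, cbk, req);

            if (ret != -EINPROGRESS)
                    unmap_req(jrdev, req);
            return ret;
    }
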
817 struct device *jrdev = ctx->jrdev; in ahash_update_ctx() local
848 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_update_ctx()
853 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_ctx()
856 dev_err(jrdev, "unable to DMA map source\n"); in ahash_update_ctx()
874 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_ctx()
881 ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len, in ahash_update_ctx()
886 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state); in ahash_update_ctx()
900 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in ahash_update_ctx()
903 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_update_ctx()
904 dev_err(jrdev, "unable to map S/G table\n"); in ahash_update_ctx()
918 ret = ahash_enqueue_req(jrdev, ahash_done_bi, req, in ahash_update_ctx()
932 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_BIDIRECTIONAL); in ahash_update_ctx()
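
Every source-data path (ahash_update_ctx() here, and the finup/digest/update_no_ctx/update_first paths below) maps req->src the same way: dma_map_sg() returns the number of mapped entries, possibly fewer than src_nents after IOMMU coalescing, and 0 on failure; the later dma_unmap_sg() must be passed the original src_nents, not the mapped count. A sketch of that pair:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    static int map_src(struct device *jrdev, struct scatterlist *src,
                       int src_nents)
    {
            int mapped_nents = dma_map_sg(jrdev, src, src_nents,
                                          DMA_TO_DEVICE);

            if (!mapped_nents) {
                    dev_err(jrdev, "unable to DMA map source\n");
                    return -ENOMEM;
            }
            return mapped_nents;
    }

    /* Unmap with the original entry count, not the mapped count. */
    static void unmap_src(struct device *jrdev, struct scatterlist *src,
                          int src_nents)
    {
            dma_unmap_sg(jrdev, src, src_nents, DMA_TO_DEVICE);
    }
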
942 struct device *jrdev = ctx->jrdev; in ahash_final_ctx() local
963 ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len, in ahash_final_ctx()
968 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state); in ahash_final_ctx()
974 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in ahash_final_ctx()
976 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_final_ctx()
977 dev_err(jrdev, "unable to map S/G table\n"); in ahash_final_ctx()
990 return ahash_enqueue_req(jrdev, ahash_done_ctx_src, req, in ahash_final_ctx()
993 ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL); in ahash_final_ctx()
1003 struct device *jrdev = ctx->jrdev; in ahash_finup_ctx() local
1014 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_finup_ctx()
1019 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_finup_ctx()
1022 dev_err(jrdev, "unable to DMA map source\n"); in ahash_finup_ctx()
1035 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_ctx()
1043 ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len, in ahash_finup_ctx()
1048 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg + 1, state); in ahash_finup_ctx()
1064 return ahash_enqueue_req(jrdev, ahash_done_ctx_src, req, in ahash_finup_ctx()
1067 ahash_unmap_ctx(jrdev, edesc, req, digestsize, DMA_BIDIRECTIONAL); in ahash_finup_ctx()
1077 struct device *jrdev = ctx->jrdev; in ahash_digest() local
1088 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_digest()
1093 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_digest()
1096 dev_err(jrdev, "unable to map source for DMA\n"); in ahash_digest()
1107 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_digest()
1116 ahash_unmap(jrdev, edesc, req, digestsize); in ahash_digest()
1123 ret = map_seq_out_ptr_ctx(desc, jrdev, state, digestsize); in ahash_digest()
1125 ahash_unmap(jrdev, edesc, req, digestsize); in ahash_digest()
1134 return ahash_enqueue_req(jrdev, ahash_done, req, digestsize, in ahash_digest()
1144 struct device *jrdev = ctx->jrdev; in ahash_final_no_ctx() local
1161 state->buf_dma = dma_map_single(jrdev, buf, buflen, in ahash_final_no_ctx()
1163 if (dma_mapping_error(jrdev, state->buf_dma)) { in ahash_final_no_ctx()
1164 dev_err(jrdev, "unable to map src\n"); in ahash_final_no_ctx()
1171 ret = map_seq_out_ptr_ctx(desc, jrdev, state, digestsize); in ahash_final_no_ctx()
1179 return ahash_enqueue_req(jrdev, ahash_done, req, in ahash_final_no_ctx()
1182 ahash_unmap(jrdev, edesc, req, digestsize); in ahash_final_no_ctx()
1193 struct device *jrdev = ctx->jrdev; in ahash_update_no_ctx() local
1224 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_update_no_ctx()
1229 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_no_ctx()
1232 dev_err(jrdev, "unable to DMA map source\n"); in ahash_update_no_ctx()
1250 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_no_ctx()
1257 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg, state); in ahash_update_no_ctx()
1265 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in ahash_update_no_ctx()
1268 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in ahash_update_no_ctx()
1269 dev_err(jrdev, "unable to map S/G table\n"); in ahash_update_no_ctx()
1276 ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len); in ahash_update_no_ctx()
1284 ret = ahash_enqueue_req(jrdev, ahash_done_ctx_dst, req, in ahash_update_no_ctx()
1303 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE); in ahash_update_no_ctx()
1314 struct device *jrdev = ctx->jrdev; in ahash_finup_no_ctx() local
1324 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_finup_no_ctx()
1329 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_finup_no_ctx()
1332 dev_err(jrdev, "unable to DMA map source\n"); in ahash_finup_no_ctx()
1347 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_no_ctx()
1356 ret = buf_map_to_sec4_sg(jrdev, edesc->sec4_sg, state); in ahash_finup_no_ctx()
1363 dev_err(jrdev, "unable to map S/G table\n"); in ahash_finup_no_ctx()
1367 ret = map_seq_out_ptr_ctx(desc, jrdev, state, digestsize); in ahash_finup_no_ctx()
1375 return ahash_enqueue_req(jrdev, ahash_done, req, in ahash_finup_no_ctx()
1378 ahash_unmap(jrdev, edesc, req, digestsize); in ahash_finup_no_ctx()
1390 struct device *jrdev = ctx->jrdev; in ahash_update_first() local
1419 dev_err(jrdev, "Invalid number of src SG.\n"); in ahash_update_first()
1424 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_first()
1427 dev_err(jrdev, "unable to map source for DMA\n"); in ahash_update_first()
1443 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_first()
1456 ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len); in ahash_update_first()
1464 ret = ahash_enqueue_req(jrdev, ahash_done_ctx_dst, req, in ahash_update_first()
1486 ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE); in ahash_update_first()
1779 ctx->jrdev = caam_jr_alloc(); in caam_hash_cra_init()
1780 if (IS_ERR(ctx->jrdev)) { in caam_hash_cra_init()
1782 return PTR_ERR(ctx->jrdev); in caam_hash_cra_init()
1785 priv = dev_get_drvdata(ctx->jrdev->parent); in caam_hash_cra_init()
1812 ctx->adata.key_dma = dma_map_single_attrs(ctx->jrdev, ctx->key, in caam_hash_cra_init()
1816 if (dma_mapping_error(ctx->jrdev, ctx->adata.key_dma)) { in caam_hash_cra_init()
1817 dev_err(ctx->jrdev, "unable to map key\n"); in caam_hash_cra_init()
1818 caam_jr_free(ctx->jrdev); in caam_hash_cra_init()
1823 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_update, in caam_hash_cra_init()
1827 if (dma_mapping_error(ctx->jrdev, dma_addr)) { in caam_hash_cra_init()
1828 dev_err(ctx->jrdev, "unable to map shared descriptors\n"); in caam_hash_cra_init()
1831 dma_unmap_single_attrs(ctx->jrdev, ctx->adata.key_dma, in caam_hash_cra_init()
1836 caam_jr_free(ctx->jrdev); in caam_hash_cra_init()
1868 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_update_dma, in caam_hash_cra_exit()
1873 dma_unmap_single_attrs(ctx->jrdev, ctx->adata.key_dma, in caam_hash_cra_exit()
1876 caam_jr_free(ctx->jrdev); in caam_hash_cra_exit()
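
caam_hash_cra_init() and caam_hash_cra_exit() bracket the jrdev lifetime: acquire a job ring per transform with caam_jr_alloc(), map long-lived buffers against that ring's device, and unwind in reverse order both on the init error path and at exit. A sketch under a hypothetical context struct; my_ctx, the key size, and the plain dma_map_single() in place of the driver's dma_map_single_attrs() are all assumptions:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/err.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    #include "jr.h"         /* caam_jr_alloc(), caam_jr_free() */

    struct my_ctx {                 /* stand-in for caam_hash_ctx */
            struct device *jrdev;
            dma_addr_t key_dma;
            u8 key[64];
    };

    static int cra_init_sketch(struct my_ctx *ctx)
    {
            ctx->jrdev = caam_jr_alloc();
            if (IS_ERR(ctx->jrdev))
                    return PTR_ERR(ctx->jrdev);

            ctx->key_dma = dma_map_single(ctx->jrdev, ctx->key,
                                          sizeof(ctx->key),
                                          DMA_TO_DEVICE);
            if (dma_mapping_error(ctx->jrdev, ctx->key_dma)) {
                    dev_err(ctx->jrdev, "unable to map key\n");
                    caam_jr_free(ctx->jrdev);  /* unwind in reverse */
                    return -ENOMEM;
            }
            return 0;
    }

    static void cra_exit_sketch(struct my_ctx *ctx)
    {
            dma_unmap_single(ctx->jrdev, ctx->key_dma, sizeof(ctx->key),
                             DMA_TO_DEVICE);
            caam_jr_free(ctx->jrdev);
    }
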