Lines matching refs: rctx (drivers/crypto/qce/skcipher.c)
30 struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req); in qce_skcipher_done() local
49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_skcipher_done()
50 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); in qce_skcipher_done()
52 sg_free_table(&rctx->dst_tbl); in qce_skcipher_done()
58 memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize); in qce_skcipher_done()
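Read together, the qce_skcipher_done() hits above are the completion path: unmap both scatterlists, free the driver-built destination table, then copy the updated counter/IV out of the hardware result dump so chaining modes carry state across requests. A minimal sketch of that shape, assuming the driver's private headers (cipher.h, common.h, dma.h) and a struct qce_result_dump with an encr_cntr_iv field; everything outside the listed lines is an assumption, not the verbatim driver function:

        #include <linux/dma-mapping.h>
        #include <linux/scatterlist.h>
        #include <crypto/internal/skcipher.h>

        /* Sketch only: mirrors the unmap/free/copy order of the hits above. */
        static void qce_skcipher_done_sketch(struct qce_device *qce,
                                             struct skcipher_request *req,
                                             enum dma_data_direction dir_src,
                                             enum dma_data_direction dir_dst)
        {
                struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
                struct qce_result_dump *result_buf = qce->dma.result_buf;

                /* Undo the DMA mappings taken when the request was queued. */
                dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
                dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);

                /* The dst table is driver-allocated, so it is freed here. */
                sg_free_table(&rctx->dst_tbl);

                /* Hand the updated counter/IV back for chaining modes. */
                memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize);
        }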
66 struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req); in qce_skcipher_async_req_handle() local
76 rctx->iv = req->iv; in qce_skcipher_async_req_handle()
77 rctx->ivsize = crypto_skcipher_ivsize(skcipher); in qce_skcipher_async_req_handle()
78 rctx->cryptlen = req->cryptlen; in qce_skcipher_async_req_handle()
84 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
86 rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen); in qce_skcipher_async_req_handle()
88 rctx->dst_nents = rctx->src_nents; in qce_skcipher_async_req_handle()
89 if (rctx->src_nents < 0) { in qce_skcipher_async_req_handle()
91 return rctx->src_nents; in qce_skcipher_async_req_handle()
93 if (rctx->dst_nents < 0) { in qce_skcipher_async_req_handle()
95 return -rctx->dst_nents; in qce_skcipher_async_req_handle()
98 rctx->dst_nents += 1; in qce_skcipher_async_req_handle()
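The hits at 84-98 do the scatterlist bookkeeping before any allocation: count entries for source and destination (reusing the source count when the operation is in place), validate both, and reserve one extra destination entry for the result buffer appended later. Note the hit at 95 returns -rctx->dst_nents, which flips the negative errno from sg_nents_for_len() into a positive value; the sketch below, which assumes a diff_dst flag meaning req->src != req->dst, returns the errno unchanged, as callers conventionally expect:

        rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);
        if (diff_dst)
                rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
        else
                rctx->dst_nents = rctx->src_nents;

        /* sg_nents_for_len() returns a negative errno on a short list. */
        if (rctx->src_nents < 0)
                return rctx->src_nents;
        if (rctx->dst_nents < 0)
                return rctx->dst_nents;

        /* Reserve one extra dst entry for the hardware result buffer. */
        rctx->dst_nents += 1;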
103 ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp); in qce_skcipher_async_req_handle()
107 sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ); in qce_skcipher_async_req_handle()
109 sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, req->cryptlen); in qce_skcipher_async_req_handle()
115 sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg, in qce_skcipher_async_req_handle()
123 rctx->dst_sg = rctx->dst_tbl.sgl; in qce_skcipher_async_req_handle()
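Lines 103-123 then assemble the private destination table: allocate it, wrap the per-device result buffer in a one-entry scatterlist, and splice the caller's dst entries plus that result entry into a single list for the hardware to walk. A sketch under the same names; qce_sgtable_add() is the driver's own helper, and the sg_mark_end() call and error label are assumptions not visible in the rctx hits:

        ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp);
        if (ret)
                return ret;

        /* One fixed entry pointing at the shared result dump buffer. */
        sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);

        /* Caller's dst entries first, then the result buffer appended. */
        sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, req->cryptlen);
        if (IS_ERR(sg)) {
                ret = PTR_ERR(sg);
                goto error_free;
        }

        sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg,
                             QCE_RESULT_BUF_SZ);
        if (IS_ERR(sg)) {
                ret = PTR_ERR(sg);
                goto error_free;
        }
        sg_mark_end(sg);

        rctx->dst_sg = rctx->dst_tbl.sgl;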
125 dst_nents = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); in qce_skcipher_async_req_handle()
132 src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_skcipher_async_req_handle()
137 rctx->src_sg = req->src; in qce_skcipher_async_req_handle()
139 rctx->src_sg = rctx->dst_sg; in qce_skcipher_async_req_handle()
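With the table built, the hits at 125-139 map everything for DMA. Only the out-of-place case maps req->src as a second list; in place, rctx->src_sg simply aliases the already-mapped destination list. Sketched with the same diff_dst assumption and guessed error labels:

        dst_nents = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
        if (!dst_nents) {
                ret = -EIO;     /* assumed errno on mapping failure */
                goto error_free;
        }

        if (diff_dst) {
                src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents,
                                       dir_src);
                if (!src_nents) {
                        ret = -EIO;
                        goto error_unmap_dst;
                }
                rctx->src_sg = req->src;
        } else {
                /* In place: source and destination walk one mapped list. */
                rctx->src_sg = rctx->dst_sg;
                src_nents = dst_nents - 1; /* minus the appended result entry */
        }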
143 ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, src_nents, in qce_skcipher_async_req_handle()
144 rctx->dst_sg, dst_nents, in qce_skcipher_async_req_handle()
161 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_skcipher_async_req_handle()
163 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst); in qce_skcipher_async_req_handle()
165 sg_free_table(&rctx->dst_tbl); in qce_skcipher_async_req_handle()
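The hits at 161-165 are the matching unwind, releasing resources in the reverse order of the setup above; a goto ladder with assumed label names:

        error_unmap_src:
                if (diff_dst)
                        dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src);
        error_unmap_dst:
                dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
        error_free:
                sg_free_table(&rctx->dst_tbl);
                return ret;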
227 struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req); in qce_skcipher_crypt() local
232 rctx->flags = tmpl->alg_flags; in qce_skcipher_crypt()
233 rctx->flags |= encrypt ? QCE_ENCRYPT : QCE_DECRYPT; in qce_skcipher_crypt()
234 keylen = IS_XTS(rctx->flags) ? ctx->enc_keylen >> 1 : ctx->enc_keylen; in qce_skcipher_crypt()
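In qce_skcipher_crypt(), rctx->flags combines the template's algorithm flags with the direction bit, and the keylen line accounts for XTS supplying two keys back to back, so the per-key length is half of enc_keylen. A short restatement with a worked size, purely illustrative:

        rctx->flags = tmpl->alg_flags;
        rctx->flags |= encrypt ? QCE_ENCRYPT : QCE_DECRYPT;
        /* XTS keys are two concatenated cipher keys: a 64-byte
         * AES-256-XTS key gives keylen == 32 for each half. */
        keylen = IS_XTS(rctx->flags) ? ctx->enc_keylen >> 1 : ctx->enc_keylen;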
239 if (IS_AES(rctx->flags) && in qce_skcipher_crypt()
242 (IS_XTS(rctx->flags) && req->cryptlen > QCE_SECTOR_SIZE && in qce_skcipher_crypt()
244 skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback); in qce_skcipher_crypt()
245 skcipher_request_set_callback(&rctx->fallback_req, in qce_skcipher_crypt()
249 skcipher_request_set_crypt(&rctx->fallback_req, req->src, in qce_skcipher_crypt()
251 ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) : in qce_skcipher_crypt()
252 crypto_skcipher_decrypt(&rctx->fallback_req); in qce_skcipher_crypt()
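Finally, the hits at 244-252 are the software-fallback dispatch for the AES cases the engine should not handle (the key-size and XTS-length conditions at 239-242): the original request's parameters are replayed onto a fallback skcipher kept in the tfm context. A sketch of that hand-off, with the callback arguments assumed to be forwarded from the original request:

        skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
        skcipher_request_set_callback(&rctx->fallback_req, req->base.flags,
                                      req->base.complete, req->base.data);
        skcipher_request_set_crypt(&rctx->fallback_req, req->src, req->dst,
                                   req->cryptlen, req->iv);
        ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
                        crypto_skcipher_decrypt(&rctx->fallback_req);
        return ret;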