Lines matching refs: areq
24 static int sun8i_ce_cipher_need_fallback(struct skcipher_request *areq) in sun8i_ce_cipher_need_fallback() argument
26 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq); in sun8i_ce_cipher_need_fallback()
29 if (sg_nents(areq->src) > MAX_SG || sg_nents(areq->dst) > MAX_SG) in sun8i_ce_cipher_need_fallback()
32 if (areq->cryptlen < crypto_skcipher_ivsize(tfm)) in sun8i_ce_cipher_need_fallback()
35 if (areq->cryptlen == 0 || areq->cryptlen % 16) in sun8i_ce_cipher_need_fallback()
38 sg = areq->src; in sun8i_ce_cipher_need_fallback()
44 sg = areq->dst; in sun8i_ce_cipher_need_fallback()
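The hits above cover the whole fallback predicate: the CE hardware is bypassed when a request has more scatterlist entries than the descriptor table can hold, is shorter than one IV, or is not a whole number of 16-byte blocks. A minimal reassembly of the checks visible in the listing (the per-segment loops over sg at source lines 38 and 44 are truncated there, so their bodies are only marked here):

	#include <crypto/skcipher.h>
	#include <linux/scatterlist.h>

	static int sun8i_ce_cipher_need_fallback(struct skcipher_request *areq)
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);

		/* The hardware task descriptor holds at most MAX_SG entries. */
		if (sg_nents(areq->src) > MAX_SG || sg_nents(areq->dst) > MAX_SG)
			return true;

		/* Too short to carry chaining state for the next request. */
		if (areq->cryptlen < crypto_skcipher_ivsize(tfm))
			return true;

		/* The engine only processes whole 16-byte cipher blocks. */
		if (areq->cryptlen == 0 || areq->cryptlen % 16)
			return true;

		/* Per-segment length/alignment checks over areq->src and
		 * areq->dst go here (elided in the listing). */

		return false;
	}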
53 static int sun8i_ce_cipher_fallback(struct skcipher_request *areq) in sun8i_ce_cipher_fallback() argument
55 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq); in sun8i_ce_cipher_fallback()
57 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sun8i_ce_cipher_fallback()
68 skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags, in sun8i_ce_cipher_fallback()
69 areq->base.complete, areq->base.data); in sun8i_ce_cipher_fallback()
70 skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst, in sun8i_ce_cipher_fallback()
71 areq->cryptlen, areq->iv); in sun8i_ce_cipher_fallback()
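sun8i_ce_cipher_fallback() re-targets the request at a software skcipher while keeping the caller's buffers and completion callback. A minimal sketch, assuming the tfm context holds a fallback_tfm allocated at init and that rctx->op_dir carries a CE_DECRYPTION flag (both follow the driver's conventions but are not visible in these hits):

	#include <crypto/skcipher.h>

	static int sun8i_ce_cipher_fallback(struct skcipher_request *areq)
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
		struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
		struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);

		/* Point the sub-request at the software implementation while
		 * preserving the original callback, flags and buffers. */
		skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
		skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
					      areq->base.complete, areq->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
					   areq->cryptlen, areq->iv);

		if (rctx->op_dir & CE_DECRYPTION)	/* assumed flag name */
			return crypto_skcipher_decrypt(&rctx->fallback_req);
		return crypto_skcipher_encrypt(&rctx->fallback_req);
	}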
81 struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base); in sun8i_ce_cipher_prepare() local
82 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq); in sun8i_ce_cipher_prepare()
85 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sun8i_ce_cipher_prepare()
101 crypto_tfm_alg_name(areq->base.tfm), in sun8i_ce_cipher_prepare()
102 areq->cryptlen, in sun8i_ce_cipher_prepare()
103 rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm), in sun8i_ce_cipher_prepare()
123 cet->t_dlen = cpu_to_le32(areq->cryptlen); in sun8i_ce_cipher_prepare()
125 cet->t_dlen = cpu_to_le32(areq->cryptlen / 4); in sun8i_ce_cipher_prepare()
153 if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) { in sun8i_ce_cipher_prepare()
166 offset = areq->cryptlen - ivsize; in sun8i_ce_cipher_prepare()
167 scatterwalk_map_and_copy(rctx->backup_iv, areq->src, in sun8i_ce_cipher_prepare()
170 memcpy(rctx->bounce_iv, areq->iv, ivsize); in sun8i_ce_cipher_prepare()
181 if (areq->src == areq->dst) { in sun8i_ce_cipher_prepare()
182 nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src), in sun8i_ce_cipher_prepare()
191 nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src), in sun8i_ce_cipher_prepare()
198 nr_sgd = dma_map_sg(ce->dev, areq->dst, sg_nents(areq->dst), in sun8i_ce_cipher_prepare()
207 len = areq->cryptlen; in sun8i_ce_cipher_prepare()
208 for_each_sg(areq->src, sg, nr_sgs, i) { in sun8i_ce_cipher_prepare()
213 areq->cryptlen, i, cet->t_src[i].len, sg->offset, todo); in sun8i_ce_cipher_prepare()
222 len = areq->cryptlen; in sun8i_ce_cipher_prepare()
223 for_each_sg(areq->dst, sg, nr_sgd, i) { in sun8i_ce_cipher_prepare()
228 areq->cryptlen, i, cet->t_dst[i].len, sg->offset, todo); in sun8i_ce_cipher_prepare()
237 chan->timeout = areq->cryptlen; in sun8i_ce_cipher_prepare()
243 if (areq->src == areq->dst) { in sun8i_ce_cipher_prepare()
244 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL); in sun8i_ce_cipher_prepare()
247 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE); in sun8i_ce_cipher_prepare()
248 dma_unmap_sg(ce->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE); in sun8i_ce_cipher_prepare()
252 if (areq->iv && ivsize > 0) { in sun8i_ce_cipher_prepare()
255 offset = areq->cryptlen - ivsize; in sun8i_ce_cipher_prepare()
257 memcpy(areq->iv, rctx->backup_iv, ivsize); in sun8i_ce_cipher_prepare()
260 scatterwalk_map_and_copy(areq->iv, areq->dst, offset, in sun8i_ce_cipher_prepare()
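Two patterns dominate the prepare hits. First, t_dlen is programmed in bytes on some CE variants and in 32-bit words on others, hence the cryptlen / 4 at source line 125. Second, in-place requests (areq->src == areq->dst) are DMA-mapped once bidirectionally, while out-of-place requests get separate to-device and from-device mappings that must also be unmapped separately on error, as the cleanup at source lines 243-248 shows. A condensed sketch of that mapping logic under a hypothetical helper name, with error handling reduced to -EINVAL:

	#include <crypto/skcipher.h>
	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int demo_map_buffers(struct device *dev,
				    struct skcipher_request *areq,
				    int *nr_sgs, int *nr_sgd)
	{
		if (areq->src == areq->dst) {
			/* In place: one bidirectional mapping serves both sides. */
			*nr_sgs = dma_map_sg(dev, areq->src, sg_nents(areq->src),
					     DMA_BIDIRECTIONAL);
			if (!*nr_sgs)
				return -EINVAL;
			*nr_sgd = *nr_sgs;
			return 0;
		}

		*nr_sgs = dma_map_sg(dev, areq->src, sg_nents(areq->src),
				     DMA_TO_DEVICE);
		if (!*nr_sgs)
			return -EINVAL;

		*nr_sgd = dma_map_sg(dev, areq->dst, sg_nents(areq->dst),
				     DMA_FROM_DEVICE);
		if (!*nr_sgd) {
			/* Out-of-place error path must undo the src mapping. */
			dma_unmap_sg(dev, areq->src, sg_nents(areq->src),
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		return 0;
	}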
273 static int sun8i_ce_cipher_run(struct crypto_engine *engine, void *areq) in sun8i_ce_cipher_run() argument
275 struct skcipher_request *breq = container_of(areq, struct skcipher_request, base); in sun8i_ce_cipher_run()
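Only the container_of conversion of the engine callback is visible here; the rest of sun8i_ce_cipher_run() is where the request actually reaches the hardware. A sketch of that shape, assuming a sun8i_ce_run_task() helper plus ce and flow fields per the driver's naming (none of which appear in these hits):

	#include <crypto/engine.h>
	#include <crypto/skcipher.h>

	static int sun8i_ce_cipher_run(struct crypto_engine *engine, void *areq)
	{
		struct skcipher_request *breq = container_of(areq,
						struct skcipher_request, base);
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(breq);
		struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
		struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(breq);
		int err;

		/* Kick the chosen flow and wait for its completion interrupt;
		 * sun8i_ce_run_task and rctx->flow are assumptions here. */
		err = sun8i_ce_run_task(op->ce, rctx->flow,
					crypto_tfm_alg_name(breq->base.tfm));

		/* Return the finished request to crypto_engine. */
		crypto_finalize_skcipher_request(engine, breq, err);
		return 0;
	}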
292 struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base); in sun8i_ce_cipher_unprepare() local
293 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq); in sun8i_ce_cipher_unprepare()
296 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sun8i_ce_cipher_unprepare()
309 if (areq->src == areq->dst) { in sun8i_ce_cipher_unprepare()
310 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL); in sun8i_ce_cipher_unprepare()
313 dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE); in sun8i_ce_cipher_unprepare()
314 dma_unmap_sg(ce->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE); in sun8i_ce_cipher_unprepare()
317 if (areq->iv && ivsize > 0) { in sun8i_ce_cipher_unprepare()
320 offset = areq->cryptlen - ivsize; in sun8i_ce_cipher_unprepare()
322 memcpy(areq->iv, rctx->backup_iv, ivsize); in sun8i_ce_cipher_unprepare()
325 scatterwalk_map_and_copy(areq->iv, areq->dst, offset, in sun8i_ce_cipher_unprepare()
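The unprepare hits mirror the error path of prepare: the same unmap logic, then the same IV bookkeeping. For CBC-style decryption the next IV is the last ciphertext block, which prepare stashed in rctx->backup_iv (source line 167) before an in-place operation could overwrite it; for encryption it is simply read back from the tail of the destination. A sketch of that final step under a hypothetical helper name, reusing the CE_DECRYPTION assumption from above:

	#include <crypto/scatterwalk.h>
	#include <linux/string.h>

	static void demo_update_iv(struct skcipher_request *areq,
				   struct sun8i_cipher_req_ctx *rctx,
				   unsigned int ivsize)
	{
		unsigned int offset = areq->cryptlen - ivsize;

		if (!areq->iv || !ivsize)
			return;

		if (rctx->op_dir & CE_DECRYPTION)
			/* Restore the pre-saved last ciphertext block. */
			memcpy(areq->iv, rctx->backup_iv, ivsize);
		else
			/* Copy the last output block out of the scatterlist
			 * (final argument 0 means sg -> buffer). */
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
	}

In the driver this logic sits inline at the end of both functions rather than in a helper.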
336 int sun8i_ce_skdecrypt(struct skcipher_request *areq) in sun8i_ce_skdecrypt() argument
338 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq); in sun8i_ce_skdecrypt()
340 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sun8i_ce_skdecrypt()
345 if (sun8i_ce_cipher_need_fallback(areq)) in sun8i_ce_skdecrypt()
346 return sun8i_ce_cipher_fallback(areq); in sun8i_ce_skdecrypt()
352 return crypto_transfer_skcipher_request_to_engine(engine, areq); in sun8i_ce_skdecrypt()
355 int sun8i_ce_skencrypt(struct skcipher_request *areq) in sun8i_ce_skencrypt() argument
357 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq); in sun8i_ce_skencrypt()
359 struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq); in sun8i_ce_skencrypt()
364 if (sun8i_ce_cipher_need_fallback(areq)) in sun8i_ce_skencrypt()
365 return sun8i_ce_cipher_fallback(areq); in sun8i_ce_skencrypt()
371 return crypto_transfer_skcipher_request_to_engine(engine, areq); in sun8i_ce_skencrypt()
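Both entry points follow the same three steps: record the direction in the request context, divert anything the hardware cannot handle to the fallback, and queue the rest on crypto_engine, which later drives the prepare/run/unprepare callbacks above and completes the request asynchronously. A sketch of the decrypt side, with the engine lookup (op->ce, chanlist, rctx->flow) assumed from the driver's naming rather than taken from these hits:

	#include <crypto/engine.h>
	#include <crypto/skcipher.h>

	int sun8i_ce_skdecrypt(struct skcipher_request *areq)
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
		struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
		struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
		struct crypto_engine *engine;

		rctx->op_dir = CE_DECRYPTION;	/* consumed by prepare/run */

		if (sun8i_ce_cipher_need_fallback(areq))
			return sun8i_ce_cipher_fallback(areq);

		/* Queue asynchronously; completion arrives later via
		 * areq->base.complete. */
		engine = op->ce->chanlist[rctx->flow].engine;
		return crypto_transfer_skcipher_request_to_engine(engine, areq);
	}

sun8i_ce_skencrypt() is identical except that op_dir is set to the encryption direction.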