Lines matching refs: sreq (drivers/crypto/inside-secure/safexcel_cipher.c)

Each entry below gives the source line number, the matching line of code, and the enclosing function. sreq is the per-request state (struct safexcel_cipher_req) that carries the cipher direction, the scatterlist entry counts (nr_src/nr_dst), the pending result-descriptor count (rdescs), and the needs_inv flag between the send and result paths.
507 struct safexcel_cipher_req *sreq, in safexcel_context_control() argument
529 (sreq->direction == SAFEXCEL_ENCRYPT ? in safexcel_context_control()
544 if (sreq->direction == SAFEXCEL_ENCRYPT && in safexcel_context_control()
549 else if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
559 if (sreq->direction == SAFEXCEL_ENCRYPT) in safexcel_context_control()
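In this first group, safexcel_context_control() reads sreq->direction to choose between the encrypt and decrypt variants of the context-control words written into the command descriptor (for AEAD modes the direction also flips the hash/crypt ordering, per lines 544-549). As a hedged illustration only, here is a standalone C sketch of that kind of selection; the two control values are hypothetical stand-ins, not the driver's real definitions from safexcel.h:

#include <stdio.h>

enum safexcel_cipher_direction { SAFEXCEL_ENCRYPT, SAFEXCEL_DECRYPT };

struct safexcel_cipher_req {            /* reduced mock of the real struct */
	enum safexcel_cipher_direction direction;
};

/* Hypothetical control-word values, for illustration only. */
#define CTRL_TYPE_ENCRYPT_OUT	0x1	/* plaintext in, ciphertext out */
#define CTRL_TYPE_DECRYPT_IN	0x2	/* ciphertext in, plaintext out */

static unsigned int pick_control_type(const struct safexcel_cipher_req *sreq)
{
	return sreq->direction == SAFEXCEL_ENCRYPT ?
	       CTRL_TYPE_ENCRYPT_OUT : CTRL_TYPE_DECRYPT_IN;
}

int main(void)
{
	struct safexcel_cipher_req sreq = { .direction = SAFEXCEL_DECRYPT };

	printf("control type: 0x%x\n", pick_control_type(&sreq));
	return 0;
}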
612 struct safexcel_cipher_req *sreq, in safexcel_handle_req_result() argument
623 if (unlikely(!sreq->rdescs)) in safexcel_handle_req_result()
626 while (sreq->rdescs--) { in safexcel_handle_req_result()
644 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL); in safexcel_handle_req_result()
646 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE); in safexcel_handle_req_result()
647 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE); in safexcel_handle_req_result()
654 (sreq->direction == SAFEXCEL_ENCRYPT)) { in safexcel_handle_req_result()
656 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv, in safexcel_handle_req_result()
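safexcel_handle_req_result() is the completion side: it bails out if no result descriptors were recorded, acknowledges each of the sreq->rdescs descriptors the engine produced, unmaps the scatterlists (a single bidirectional mapping when the request ran in place, separate to-device/from-device mappings otherwise), and on encryption copies the final output block back into areq->iv so a chained CBC request can pick it up. A minimal userspace model of that control flow, assuming mock types throughout (this is not the driver's code):

#include <stdbool.h>
#include <stdio.h>

enum safexcel_cipher_direction { SAFEXCEL_ENCRYPT, SAFEXCEL_DECRYPT };

struct safexcel_cipher_req {            /* reduced mock of the real struct */
	enum safexcel_cipher_direction direction;
	unsigned int rdescs;            /* result descriptors to acknowledge */
	int nr_src, nr_dst;
};

static int handle_req_result(struct safexcel_cipher_req *sreq, bool in_place)
{
	int handled = 0;

	if (!sreq->rdescs)              /* nothing completed: spurious call */
		return 0;

	while (sreq->rdescs--)          /* ack every result descriptor */
		handled++;              /* kernel also checks per-desc errors */

	if (in_place)
		printf("unmap src, %d entries, bidirectional\n", sreq->nr_src);
	else
		printf("unmap src (%d, to dev) and dst (%d, from dev)\n",
		       sreq->nr_src, sreq->nr_dst);

	if (sreq->direction == SAFEXCEL_ENCRYPT)    /* IV for chained CBC */
		printf("copy last output block into areq->iv\n");

	return handled;
}

int main(void)
{
	struct safexcel_cipher_req sreq = { SAFEXCEL_ENCRYPT, 2, 1, 1 };

	printf("handled %d descriptors\n", handle_req_result(&sreq, true));
	return 0;
}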
668 struct safexcel_cipher_req *sreq, in safexcel_send_req() argument
690 sreq->nr_src = sg_nents_for_len(src, totlen_src); in safexcel_send_req()
697 if (sreq->direction == SAFEXCEL_DECRYPT) in safexcel_send_req()
709 (sreq->direction == SAFEXCEL_DECRYPT)) { in safexcel_send_req()
715 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv, in safexcel_send_req()
721 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst); in safexcel_send_req()
731 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst); in safexcel_send_req()
732 sreq->nr_dst = sreq->nr_src; in safexcel_send_req()
734 (sreq->nr_src <= 0))) { in safexcel_send_req()
739 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL); in safexcel_send_req()
741 if (unlikely(totlen_src && (sreq->nr_src <= 0))) { in safexcel_send_req()
746 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE); in safexcel_send_req()
748 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) { in safexcel_send_req()
751 dma_unmap_sg(priv->dev, src, sreq->nr_src, in safexcel_send_req()
755 dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE); in safexcel_send_req()
780 for_each_sg(src, sg, sreq->nr_src, i) { in safexcel_send_req()
807 safexcel_context_control(ctx, base, sreq, first_cdesc); in safexcel_send_req()
810 sreq->direction, cryptlen, in safexcel_send_req()
817 for_each_sg(dst, sg, sreq->nr_dst, i) { in safexcel_send_req()
818 bool last = (i == sreq->nr_dst - 1); in safexcel_send_req()
887 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL); in safexcel_send_req()
889 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE); in safexcel_send_req()
890 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE); in safexcel_send_req()
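The send path does the heavy lifting. safexcel_send_req() sizes the scatterlists with sg_nents_for_len(), and for decryption saves the last ciphertext block out of the source into areq->iv (lines 709-715) before the engine can overwrite it in place. The in-place case (src == dst) is then mapped once, bidirectionally, with the entry count forced to cover whichever side is larger; the out-of-place case maps source and destination separately, unwinding the source mapping if the destination mapping fails (lines 748-751), and the same unmapping reappears in the error path at lines 887-890. A hedged sketch of just that mapping decision, with a fake dma_map_sg() standing in for the real API:

#include <stdbool.h>
#include <stdio.h>

struct safexcel_cipher_req {            /* reduced mock of the real struct */
	int nr_src, nr_dst;
};

/* Stand-in for dma_map_sg(): returns mapped entries, 0 on failure. */
static int fake_dma_map_sg(const char *what, int nents)
{
	printf("map %-16s %d entries\n", what, nents);
	return nents;
}

static int map_for_send(struct safexcel_cipher_req *sreq, bool in_place,
			int totlen_src, int totlen_dst)
{
	if (in_place) {
		/*
		 * One bidirectional mapping must cover whichever side is
		 * larger (an AEAD encrypt output grows by the tag), so both
		 * counters are forced up to the maximum before mapping.
		 */
		if (sreq->nr_dst > sreq->nr_src)
			sreq->nr_src = sreq->nr_dst;
		sreq->nr_dst = sreq->nr_src;
		if (totlen_src && sreq->nr_src <= 0)
			return -22;	/* -EINVAL */
		sreq->nr_src = fake_dma_map_sg("src (bidir):", sreq->nr_src);
		return 0;
	}

	if (totlen_src && sreq->nr_src <= 0)
		return -22;
	sreq->nr_src = fake_dma_map_sg("src (to dev):", sreq->nr_src);

	if (totlen_dst && sreq->nr_dst <= 0)
		return -22;	/* kernel unmaps src before erroring out */
	sreq->nr_dst = fake_dma_map_sg("dst (from dev):", sreq->nr_dst);
	return 0;
}

int main(void)
{
	struct safexcel_cipher_req sreq = { .nr_src = 2, .nr_dst = 3 };

	return map_for_send(&sreq, true, 64, 80) ? 1 : 0;
}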
899 struct safexcel_cipher_req *sreq, in safexcel_handle_inv_result() argument
908 if (unlikely(!sreq->rdescs)) in safexcel_handle_inv_result()
911 while (sreq->rdescs--) { in safexcel_handle_inv_result()
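safexcel_handle_inv_result() mirrors the normal result handler for context-invalidation requests: it too refuses to run with sreq->rdescs == 0 and then drains every recorded result descriptor. As I read the driver, what happens afterwards depends on why the invalidation was issued: a transform teardown just signals the waiter, while a key change resubmits the original request. A hedged sketch of that split (the exit_inv flag and the printed actions here are illustrative, not the kernel's names):

#include <stdbool.h>
#include <stdio.h>

struct safexcel_cipher_req {            /* reduced mock of the real struct */
	unsigned int rdescs;
};

static void handle_inv_result(struct safexcel_cipher_req *sreq, bool exit_inv)
{
	if (!sreq->rdescs)              /* nothing completed: spurious call */
		return;

	while (sreq->rdescs--)          /* ack the invalidation's descriptors */
		;

	if (exit_inv)                   /* tfm teardown: wake the waiter */
		printf("complete(&result->completion)\n");
	else                            /* key change: run the request now */
		printf("requeue original request, kick ring worker\n");
}

int main(void)
{
	struct safexcel_cipher_req sreq = { .rdescs = 1 };

	handle_inv_result(&sreq, false);
	return 0;
}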
961 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_handle_result() local
964 if (sreq->needs_inv) { in safexcel_skcipher_handle_result()
965 sreq->needs_inv = false; in safexcel_skcipher_handle_result()
966 err = safexcel_handle_inv_result(priv, ring, async, sreq, in safexcel_skcipher_handle_result()
970 req->dst, req->cryptlen, sreq, in safexcel_skcipher_handle_result()
984 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_handle_result() local
987 if (sreq->needs_inv) { in safexcel_aead_handle_result()
988 sreq->needs_inv = false; in safexcel_aead_handle_result()
989 err = safexcel_handle_inv_result(priv, ring, async, sreq, in safexcel_aead_handle_result()
995 sreq, should_complete, ret); in safexcel_aead_handle_result()
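The skcipher and AEAD result handlers share one shape: fetch sreq from the request context, and if needs_inv is set, clear it and hand the descriptors to the invalidation handler, otherwise run the normal result path. Clearing the flag before dispatch makes it one-shot, so the request that follows the invalidation is processed normally. A compact sketch of that dispatch:

#include <stdbool.h>
#include <stdio.h>

struct safexcel_cipher_req {            /* reduced mock of the real struct */
	bool needs_inv;
	unsigned int rdescs;
};

/* Shared shape of safexcel_skcipher/aead_handle_result(). */
static void dispatch_result(struct safexcel_cipher_req *sreq)
{
	if (sreq->needs_inv) {
		sreq->needs_inv = false;        /* consume the one-shot flag */
		printf("invalidation result: %u descs\n", sreq->rdescs);
	} else {
		printf("cipher result: %u descs\n", sreq->rdescs);
	}
}

int main(void)
{
	struct safexcel_cipher_req sreq = { .needs_inv = true, .rdescs = 1 };

	dispatch_result(&sreq);
	return 0;
}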
1023 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_send() local
1027 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv); in safexcel_skcipher_send()
1029 if (sreq->needs_inv) { in safexcel_skcipher_send()
1041 ret = safexcel_send_req(async, ring, sreq, req->src, in safexcel_skcipher_send()
1046 sreq->rdescs = *results; in safexcel_skcipher_send()
1056 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_send() local
1060 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv); in safexcel_aead_send()
1062 if (sreq->needs_inv) in safexcel_aead_send()
1065 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst, in safexcel_aead_send()
1069 sreq->rdescs = *results; in safexcel_aead_send()
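The two send callbacks are just as symmetric. Both assert via BUG_ON that an invalidation request can only exist when the engine has a transform record cache to invalidate (EIP197_TRC_CACHE), pick the invalidation or the normal send path on sreq->needs_inv, and record how many result descriptors the send produced in sreq->rdescs for the result handler to drain. A sketch with hypothetical send helpers standing in for the driver's:

#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

struct safexcel_cipher_req {            /* reduced mock of the real struct */
	bool needs_inv;
	unsigned int rdescs;
};

/* Hypothetical stand-ins returning the result-descriptor count. */
static int send_inv(void)  { return 1; }
static int send_data(void) { return 2; }

static int dispatch_send(struct safexcel_cipher_req *sreq, bool trc_cache,
			 int *results)
{
	/* Invalidation only makes sense with a record cache to flush. */
	assert(trc_cache || !sreq->needs_inv);

	*results = sreq->needs_inv ? send_inv() : send_data();

	sreq->rdescs = *results;        /* result handler drains this many */
	return 0;
}

int main(void)
{
	struct safexcel_cipher_req sreq = { .needs_inv = false };
	int results;

	dispatch_send(&sreq, true, &results);
	printf("queued, %d result descs expected\n", results);
	return 0;
}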
1075 struct safexcel_cipher_req *sreq, in safexcel_cipher_exit_inv() argument
1086 sreq->needs_inv = true; in safexcel_cipher_exit_inv()
1110 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req); in safexcel_skcipher_exit_inv() local
1119 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result); in safexcel_skcipher_exit_inv()
1125 struct safexcel_cipher_req *sreq = aead_request_ctx(req); in safexcel_aead_exit_inv() local
1134 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result); in safexcel_aead_exit_inv()
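safexcel_cipher_exit_inv() and its two wrappers tear a transform down by crafting a dummy request, marking it sreq->needs_inv = true, queueing it on a ring, and sleeping until the invalidation result handler fires the completion. A sketch of that lifecycle, with the waiter modeled as a plain flag (the kernel uses struct completion and wait_for_completion()):

#include <stdbool.h>
#include <stdio.h>

struct safexcel_cipher_req {            /* reduced mock of the real struct */
	bool needs_inv;
};

struct inv_result {                     /* mock of the driver's waiter */
	bool completed;
	int error;
};

static void ring_fires(struct inv_result *res)  /* pretend IRQ + handler */
{
	res->error = 0;
	res->completed = true;
}

static int cipher_exit_inv(struct safexcel_cipher_req *sreq,
			   struct inv_result *res)
{
	sreq->needs_inv = true;         /* turn the dummy into an inv op */
	printf("enqueue dummy request, kick ring worker\n");

	ring_fires(res);                /* kernel: wait_for_completion() */
	return res->error;
}

int main(void)
{
	struct safexcel_cipher_req sreq;
	struct inv_result res = { 0 };

	return cipher_exit_inv(&sreq, &res);
}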
1138 struct safexcel_cipher_req *sreq, in safexcel_queue_req() argument
1145 sreq->needs_inv = false; in safexcel_queue_req()
1146 sreq->direction = dir; in safexcel_queue_req()
1150 sreq->needs_inv = true; in safexcel_queue_req()
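Finally, safexcel_queue_req() is where every encrypt/decrypt call enters: it stamps the direction on the request and decides whether an invalidation must precede it. Only when a context record is already live in the engine and the tfm-wide state says that record went stale (typically after a setkey(), and only on hardware with the record cache) does it set sreq->needs_inv, consuming the tfm-level flag in the process. A sketch of that decision under those assumptions:

#include <stdbool.h>
#include <stdio.h>

enum safexcel_cipher_direction { SAFEXCEL_ENCRYPT, SAFEXCEL_DECRYPT };

struct safexcel_cipher_req {            /* reduced mock of the real struct */
	bool needs_inv;
	enum safexcel_cipher_direction direction;
};

struct cipher_ctx {                     /* mock of the tfm-wide context */
	bool ctxr_live;                 /* a context record exists on chip */
	bool needs_inv;                 /* e.g. setkey() replaced the key */
	bool trc_cache;                 /* hardware has a record cache */
};

static void queue_req(struct cipher_ctx *ctx, struct safexcel_cipher_req *sreq,
		      enum safexcel_cipher_direction dir)
{
	sreq->needs_inv = false;
	sreq->direction = dir;

	/* Only a live but stale record must be invalidated first. */
	if (ctx->ctxr_live && ctx->trc_cache && ctx->needs_inv) {
		sreq->needs_inv = true;
		ctx->needs_inv = false; /* consume the tfm-level flag */
	}

	printf("enqueue%s\n", sreq->needs_inv ? " (invalidate first)" : "");
}

int main(void)
{
	struct cipher_ctx ctx = { true, true, true };
	struct safexcel_cipher_req sreq;

	queue_req(&ctx, &sreq, SAFEXCEL_ENCRYPT);
	return 0;
}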