Lines Matching refs:creq

29 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_req_iter_init() local
30 unsigned int len = req->nbytes + creq->cache_ptr; in mv_cesa_ahash_req_iter_init()
32 if (!creq->last_req) in mv_cesa_ahash_req_iter_init()
37 iter->src.op_offset = creq->cache_ptr; in mv_cesa_ahash_req_iter_init()
95 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_dma_last_cleanup() local
97 mv_cesa_ahash_dma_free_padding(&creq->req.dma); in mv_cesa_ahash_dma_last_cleanup()
102 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_dma_cleanup() local
104 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_cleanup()
105 mv_cesa_ahash_dma_free_cache(&creq->req.dma); in mv_cesa_ahash_dma_cleanup()
106 mv_cesa_dma_cleanup(&creq->base); in mv_cesa_ahash_dma_cleanup()
111 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_cleanup() local
113 if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) in mv_cesa_ahash_cleanup()
119 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_last_cleanup() local
121 if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) in mv_cesa_ahash_last_cleanup()
125 static int mv_cesa_ahash_pad_len(struct mv_cesa_ahash_req *creq) in mv_cesa_ahash_pad_len() argument
129 index = creq->len & CESA_HASH_BLOCK_SIZE_MSK; in mv_cesa_ahash_pad_len()
135 static int mv_cesa_ahash_pad_req(struct mv_cesa_ahash_req *creq, u8 *buf) in mv_cesa_ahash_pad_req() argument
141 padlen = mv_cesa_ahash_pad_len(creq); in mv_cesa_ahash_pad_req()
144 if (creq->algo_le) { in mv_cesa_ahash_pad_req()
145 __le64 bits = cpu_to_le64(creq->len << 3); in mv_cesa_ahash_pad_req()
149 __be64 bits = cpu_to_be64(creq->len << 3); in mv_cesa_ahash_pad_req()
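
The fragments above from mv_cesa_ahash_pad_len() and mv_cesa_ahash_pad_req() cover the software padding path. A minimal sketch of that MD-strengthening padding, assuming a 64-byte hash block (the SKETCH_* constants below are our stand-ins for the driver's CESA_HASH_BLOCK_SIZE_MSK) and the usual kernel headers (<linux/types.h>, <linux/string.h>, <asm/byteorder.h>):

/* Sketch only: names prefixed "sketch_"/"SKETCH_" are ours, not the driver's. */
#define SKETCH_BLOCK_SIZE       64
#define SKETCH_BLOCK_SIZE_MSK   (SKETCH_BLOCK_SIZE - 1)

static unsigned int sketch_pad_len(u64 msg_len)
{
        unsigned int index = msg_len & SKETCH_BLOCK_SIZE_MSK;

        /* Pad to 56 mod 64 so that 8 bytes remain for the bit count. */
        return (index < 56) ? (56 - index) : (64 + 56 - index);
}

static unsigned int sketch_pad_req(u64 msg_len, bool algo_le, u8 *buf)
{
        unsigned int padlen = sketch_pad_len(msg_len);

        buf[0] = 0x80;                  /* mandatory first padding byte */
        memset(buf + 1, 0, padlen - 1); /* zero fill up to the length field */

        if (algo_le) {                  /* MD5 stores the bit count little-endian */
                __le64 bits = cpu_to_le64(msg_len << 3);

                memcpy(buf + padlen, &bits, sizeof(bits));
        } else {                        /* SHA-1/SHA-256 store it big-endian */
                __be64 bits = cpu_to_be64(msg_len << 3);

                memcpy(buf + padlen, &bits, sizeof(bits));
        }

        return padlen + 8;              /* bytes appended to the message */
}
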
159 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_std_step() local
160 struct mv_cesa_ahash_std_req *sreq = &creq->req.std; in mv_cesa_ahash_std_step()
161 struct mv_cesa_engine *engine = creq->base.engine; in mv_cesa_ahash_std_step()
169 mv_cesa_adjust_op(engine, &creq->op_tmpl); in mv_cesa_ahash_std_step()
170 memcpy_toio(engine->sram, &creq->op_tmpl, sizeof(creq->op_tmpl)); in mv_cesa_ahash_std_step()
175 writel_relaxed(creq->state[i], in mv_cesa_ahash_std_step()
179 if (creq->cache_ptr) in mv_cesa_ahash_std_step()
181 creq->cache, creq->cache_ptr); in mv_cesa_ahash_std_step()
183 len = min_t(size_t, req->nbytes + creq->cache_ptr - sreq->offset, in mv_cesa_ahash_std_step()
186 if (!creq->last_req) { in mv_cesa_ahash_std_step()
191 if (len - creq->cache_ptr) in mv_cesa_ahash_std_step()
192 sreq->offset += sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_std_step()
195 creq->cache_ptr, in mv_cesa_ahash_std_step()
196 len - creq->cache_ptr, in mv_cesa_ahash_std_step()
199 op = &creq->op_tmpl; in mv_cesa_ahash_std_step()
203 if (creq->last_req && sreq->offset == req->nbytes && in mv_cesa_ahash_std_step()
204 creq->len <= CESA_SA_DESC_MAC_SRC_TOTAL_LEN_MAX) { in mv_cesa_ahash_std_step()
214 creq->len <= CESA_SA_DESC_MAC_SRC_TOTAL_LEN_MAX) { in mv_cesa_ahash_std_step()
215 mv_cesa_set_mac_op_total_len(op, creq->len); in mv_cesa_ahash_std_step()
217 int trailerlen = mv_cesa_ahash_pad_len(creq) + 8; in mv_cesa_ahash_std_step()
222 memcpy_fromio(creq->cache, in mv_cesa_ahash_std_step()
227 i = mv_cesa_ahash_pad_req(creq, creq->cache); in mv_cesa_ahash_std_step()
231 creq->cache, i); in mv_cesa_ahash_std_step()
251 creq->cache_ptr = new_cache_ptr; in mv_cesa_ahash_std_step()
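
The mv_cesa_ahash_std_step() fragments show the per-step data budget: each step processes what is left of the request (cached bytes first, then scatterlist data past sreq->offset), capped by the engine's SRAM payload. A one-line sketch of that computation; the cap is a placeholder constant of ours because the second min_t() argument is truncated in this listing:

/* SKETCH_SRAM_PAYLOAD_SIZE is a placeholder for the driver's real SRAM cap. */
static size_t sketch_std_step_len(struct ahash_request *req,
                                  struct mv_cesa_ahash_req *creq,
                                  struct mv_cesa_ahash_std_req *sreq)
{
        return min_t(size_t, req->nbytes + creq->cache_ptr - sreq->offset,
                     SKETCH_SRAM_PAYLOAD_SIZE);
}
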
262 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_std_process() local
263 struct mv_cesa_ahash_std_req *sreq = &creq->req.std; in mv_cesa_ahash_std_process()
265 if (sreq->offset < (req->nbytes - creq->cache_ptr)) in mv_cesa_ahash_std_process()
273 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_dma_prepare() local
274 struct mv_cesa_req *basereq = &creq->base; in mv_cesa_ahash_dma_prepare()
281 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_std_prepare() local
282 struct mv_cesa_ahash_std_req *sreq = &creq->req.std; in mv_cesa_ahash_std_prepare()
289 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_dma_step() local
290 struct mv_cesa_req *base = &creq->base; in mv_cesa_ahash_dma_step()
298 for (i = 0; i < ARRAY_SIZE(creq->state); i++) in mv_cesa_ahash_dma_step()
299 writel_relaxed(creq->state[i], engine->regs + in mv_cesa_ahash_dma_step()
309 struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq); in mv_cesa_ahash_step() local
311 if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) in mv_cesa_ahash_step()
320 struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq); in mv_cesa_ahash_process() local
322 if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) in mv_cesa_ahash_process()
323 return mv_cesa_dma_process(&creq->base, status); in mv_cesa_ahash_process()
331 struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq); in mv_cesa_ahash_complete() local
332 struct mv_cesa_engine *engine = creq->base.engine; in mv_cesa_ahash_complete()
338 if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ && in mv_cesa_ahash_complete()
339 (creq->base.chain.last->flags & CESA_TDMA_TYPE_MSK) == in mv_cesa_ahash_complete()
347 data = creq->base.chain.last->op->ctx.hash.hash; in mv_cesa_ahash_complete()
349 creq->state[i] = le32_to_cpu(data[i]); in mv_cesa_ahash_complete()
354 creq->state[i] = readl_relaxed(engine->regs + in mv_cesa_ahash_complete()
356 if (creq->last_req) { in mv_cesa_ahash_complete()
361 if (creq->algo_le) { in mv_cesa_ahash_complete()
365 result[i] = cpu_to_le32(creq->state[i]); in mv_cesa_ahash_complete()
370 result[i] = cpu_to_be32(creq->state[i]); in mv_cesa_ahash_complete()
382 struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq); in mv_cesa_ahash_prepare() local
384 creq->base.engine = engine; in mv_cesa_ahash_prepare()
386 if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) in mv_cesa_ahash_prepare()
395 struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq); in mv_cesa_ahash_req_cleanup() local
397 if (creq->last_req) in mv_cesa_ahash_req_cleanup()
402 if (creq->cache_ptr) in mv_cesa_ahash_req_cleanup()
403 sg_pcopy_to_buffer(ahashreq->src, creq->src_nents, in mv_cesa_ahash_req_cleanup()
404 creq->cache, in mv_cesa_ahash_req_cleanup()
405 creq->cache_ptr, in mv_cesa_ahash_req_cleanup()
406 ahashreq->nbytes - creq->cache_ptr); in mv_cesa_ahash_req_cleanup()
419 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_init() local
421 memset(creq, 0, sizeof(*creq)); in mv_cesa_ahash_init()
429 creq->op_tmpl = *tmpl; in mv_cesa_ahash_init()
430 creq->len = 0; in mv_cesa_ahash_init()
431 creq->algo_le = algo_le; in mv_cesa_ahash_init()
447 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_cache_req() local
450 if (creq->cache_ptr + req->nbytes < CESA_MAX_HASH_BLOCK_SIZE && in mv_cesa_ahash_cache_req()
451 !creq->last_req) { in mv_cesa_ahash_cache_req()
457 sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_cache_req()
458 creq->cache + creq->cache_ptr, in mv_cesa_ahash_cache_req()
461 creq->cache_ptr += req->nbytes; in mv_cesa_ahash_cache_req()
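
The mv_cesa_ahash_cache_req() fragments show how sub-block updates are absorbed: as long as the pending bytes still fit within one hash block and this is not the final request, they are copied into creq->cache instead of being fed to the engine. A minimal sketch of that pattern, with the return convention simplified:

static bool sketch_cache_req(struct ahash_request *req)
{
        struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);

        if (creq->cache_ptr + req->nbytes >= CESA_MAX_HASH_BLOCK_SIZE ||
            creq->last_req)
                return false;           /* too much data: the engine must run */

        if (req->nbytes)
                sg_pcopy_to_buffer(req->src, creq->src_nents,
                                   creq->cache + creq->cache_ptr,
                                   req->nbytes, 0);

        creq->cache_ptr += req->nbytes;
        return true;                    /* request fully absorbed by the cache */
}
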
497 struct mv_cesa_ahash_req *creq, in mv_cesa_ahash_dma_add_cache() argument
500 struct mv_cesa_ahash_dma_req *ahashdreq = &creq->req.dma; in mv_cesa_ahash_dma_add_cache()
503 if (!creq->cache_ptr) in mv_cesa_ahash_dma_add_cache()
510 memcpy(ahashdreq->cache, creq->cache, creq->cache_ptr); in mv_cesa_ahash_dma_add_cache()
515 creq->cache_ptr, in mv_cesa_ahash_dma_add_cache()
523 struct mv_cesa_ahash_req *creq, in mv_cesa_ahash_dma_last_req() argument
526 struct mv_cesa_ahash_dma_req *ahashdreq = &creq->req.dma; in mv_cesa_ahash_dma_last_req()
535 if (creq->len <= CESA_SA_DESC_MAC_SRC_TOTAL_LEN_MAX && frag_len) { in mv_cesa_ahash_dma_last_req()
536 op = mv_cesa_dma_add_frag(chain, &creq->op_tmpl, frag_len, in mv_cesa_ahash_dma_last_req()
541 mv_cesa_set_mac_op_total_len(op, creq->len); in mv_cesa_ahash_dma_last_req()
565 trailerlen = mv_cesa_ahash_pad_req(creq, ahashdreq->padding); in mv_cesa_ahash_dma_last_req()
578 op = mv_cesa_dma_add_frag(chain, &creq->op_tmpl, frag_len + len, in mv_cesa_ahash_dma_last_req()
599 return mv_cesa_dma_add_frag(chain, &creq->op_tmpl, trailerlen - padoff, in mv_cesa_ahash_dma_last_req()
605 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_dma_req_init() local
608 struct mv_cesa_req *basereq = &creq->base; in mv_cesa_ahash_dma_req_init()
619 if (!mv_cesa_mac_op_is_first_frag(&creq->op_tmpl)) in mv_cesa_ahash_dma_req_init()
622 if (creq->src_nents) { in mv_cesa_ahash_dma_req_init()
623 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_ahash_dma_req_init()
638 ret = mv_cesa_ahash_dma_add_cache(&basereq->chain, creq, flags); in mv_cesa_ahash_dma_req_init()
661 &creq->op_tmpl, in mv_cesa_ahash_dma_req_init()
678 if (creq->last_req) in mv_cesa_ahash_dma_req_init()
679 op = mv_cesa_ahash_dma_last_req(&basereq->chain, &iter, creq, in mv_cesa_ahash_dma_req_init()
682 op = mv_cesa_dma_add_frag(&basereq->chain, &creq->op_tmpl, in mv_cesa_ahash_dma_req_init()
705 if (!creq->last_req) in mv_cesa_ahash_dma_req_init()
706 creq->cache_ptr = req->nbytes + creq->cache_ptr - in mv_cesa_ahash_dma_req_init()
709 creq->cache_ptr = 0; in mv_cesa_ahash_dma_req_init()
729 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_req_init()
739 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_req_init() local
741 creq->src_nents = sg_nents_for_len(req->src, req->nbytes); in mv_cesa_ahash_req_init()
742 if (creq->src_nents < 0) { in mv_cesa_ahash_req_init()
744 return creq->src_nents; in mv_cesa_ahash_req_init()
760 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_queue_req() local
775 ret = mv_cesa_queue_req(&req->base, &creq->base); in mv_cesa_ahash_queue_req()
785 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_update() local
787 creq->len += req->nbytes; in mv_cesa_ahash_update()
794 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_final() local
795 struct mv_cesa_op_ctx *tmpl = &creq->op_tmpl; in mv_cesa_ahash_final()
797 mv_cesa_set_mac_op_total_len(tmpl, creq->len); in mv_cesa_ahash_final()
798 creq->last_req = true; in mv_cesa_ahash_final()
806 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_finup() local
807 struct mv_cesa_op_ctx *tmpl = &creq->op_tmpl; in mv_cesa_ahash_finup()
809 creq->len += req->nbytes; in mv_cesa_ahash_finup()
810 mv_cesa_set_mac_op_total_len(tmpl, creq->len); in mv_cesa_ahash_finup()
811 creq->last_req = true; in mv_cesa_ahash_finup()
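
The mv_cesa_ahash_update()/_final()/_finup() fragments show the bookkeeping kept in creq: update() only accumulates the running length, while final() and finup() also write the total length into the operation template and flag the request as the last one. A condensed sketch of just that bookkeeping (error handling and the call into mv_cesa_ahash_queue_req() are omitted; the with_data flag folding final/finup together is ours):

static void sketch_update_bookkeeping(struct ahash_request *req)
{
        struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);

        creq->len += req->nbytes;       /* running total of hashed bytes */
}

static void sketch_final_bookkeeping(struct ahash_request *req, bool with_data)
{
        struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);

        if (with_data)                  /* finup(): the last chunk counts too */
                creq->len += req->nbytes;

        mv_cesa_set_mac_op_total_len(&creq->op_tmpl, creq->len);
        creq->last_req = true;          /* triggers padding and digest copy-out */
}
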
820 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_export() local
826 *len = creq->len; in mv_cesa_ahash_export()
827 memcpy(hash, creq->state, digsize); in mv_cesa_ahash_export()
829 memcpy(cache, creq->cache, creq->cache_ptr); in mv_cesa_ahash_export()
838 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_import() local
850 mv_cesa_update_op_cfg(&creq->op_tmpl, in mv_cesa_ahash_import()
854 creq->len = len; in mv_cesa_ahash_import()
855 memcpy(creq->state, hash, digsize); in mv_cesa_ahash_import()
856 creq->cache_ptr = 0; in mv_cesa_ahash_import()
862 memcpy(creq->cache, cache, cache_ptr); in mv_cesa_ahash_import()
863 creq->cache_ptr = cache_ptr; in mv_cesa_ahash_import()
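
The mv_cesa_ahash_export()/_import() fragments show what makes up the serialized hash state: the running byte count, the intermediate digest words and the partial-block cache. A sketch of an export/import pair over a hypothetical flat container (the real driver fills algorithm-specific state structs such as struct md5_state and recomputes cache_ptr on import):

struct sketch_hash_state {              /* hypothetical container, ours */
        u64 len;
        u32 hash[8];                    /* large enough for SHA-256 */
        u8  cache[64];
        unsigned int cache_ptr;
};

static void sketch_export(const struct mv_cesa_ahash_req *creq,
                          struct sketch_hash_state *out, unsigned int digsize)
{
        out->len = creq->len;
        memcpy(out->hash, creq->state, digsize);
        memcpy(out->cache, creq->cache, creq->cache_ptr);
        out->cache_ptr = creq->cache_ptr;
}

static void sketch_import(struct mv_cesa_ahash_req *creq,
                          const struct sketch_hash_state *in, unsigned int digsize)
{
        creq->len = in->len;
        memcpy(creq->state, in->hash, digsize);
        memcpy(creq->cache, in->cache, in->cache_ptr);
        creq->cache_ptr = in->cache_ptr;
}
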
870 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_md5_init() local
877 creq->state[0] = MD5_H0; in mv_cesa_md5_init()
878 creq->state[1] = MD5_H1; in mv_cesa_md5_init()
879 creq->state[2] = MD5_H2; in mv_cesa_md5_init()
880 creq->state[3] = MD5_H3; in mv_cesa_md5_init()
940 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_sha1_init() local
947 creq->state[0] = SHA1_H0; in mv_cesa_sha1_init()
948 creq->state[1] = SHA1_H1; in mv_cesa_sha1_init()
949 creq->state[2] = SHA1_H2; in mv_cesa_sha1_init()
950 creq->state[3] = SHA1_H3; in mv_cesa_sha1_init()
951 creq->state[4] = SHA1_H4; in mv_cesa_sha1_init()
1011 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_sha256_init() local
1018 creq->state[0] = SHA256_H0; in mv_cesa_sha256_init()
1019 creq->state[1] = SHA256_H1; in mv_cesa_sha256_init()
1020 creq->state[2] = SHA256_H2; in mv_cesa_sha256_init()
1021 creq->state[3] = SHA256_H3; in mv_cesa_sha256_init()
1022 creq->state[4] = SHA256_H4; in mv_cesa_sha256_init()
1023 creq->state[5] = SHA256_H5; in mv_cesa_sha256_init()
1024 creq->state[6] = SHA256_H6; in mv_cesa_sha256_init()
1025 creq->state[7] = SHA256_H7; in mv_cesa_sha256_init()
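
The trailing fragments from mv_cesa_md5_init(), mv_cesa_sha1_init() and mv_cesa_sha256_init() load the standard initial hash values into creq->state. A sketch of the same seeding collected into one helper, using the MD5_H*/SHA1_H*/SHA256_H* constants from the kernel's crypto headers; the algo selector is ours, and the surrounding template setup is omitted:

enum sketch_algo { SKETCH_MD5, SKETCH_SHA1, SKETCH_SHA256 };

static void sketch_seed_state(struct mv_cesa_ahash_req *creq,
                              enum sketch_algo algo)
{
        static const u32 md5_iv[4]    = { MD5_H0, MD5_H1, MD5_H2, MD5_H3 };
        static const u32 sha1_iv[5]   = {
                SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4,
        };
        static const u32 sha256_iv[8] = {
                SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
                SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
        };

        switch (algo) {
        case SKETCH_MD5:
                memcpy(creq->state, md5_iv, sizeof(md5_iv));
                break;
        case SKETCH_SHA1:
                memcpy(creq->state, sha1_iv, sizeof(sha1_iv));
                break;
        case SKETCH_SHA256:
                memcpy(creq->state, sha256_iv, sizeof(sha256_iv));
                break;
        }
}
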