Lines Matching refs:actx — cross-reference hits for the actx async-context pointer in the MXS DCP crypto driver (drivers/crypto/mxs-dcp.c).

170 static int mxs_dcp_start_dma(struct dcp_async_ctx *actx)  in mxs_dcp_start_dma()  argument
174 const int chan = actx->chan; in mxs_dcp_start_dma()
177 struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan]; in mxs_dcp_start_dma()
219 static int mxs_dcp_run_aes(struct dcp_async_ctx *actx, in mxs_dcp_run_aes() argument
224 struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan]; in mxs_dcp_run_aes()
246 if (actx->fill % AES_BLOCK_SIZE) { in mxs_dcp_run_aes()
275 desc->size = actx->fill; in mxs_dcp_run_aes()
279 ret = mxs_dcp_start_dma(actx); in mxs_dcp_run_aes()
297 struct dcp_async_ctx *actx = crypto_tfm_ctx(arq->tfm); in mxs_dcp_aes_block_crypt() local
319 actx->fill = 0; in mxs_dcp_aes_block_crypt()
322 memcpy(key, actx->key, actx->key_len); in mxs_dcp_aes_block_crypt()
343 if (actx->fill + len > out_off) in mxs_dcp_aes_block_crypt()
344 clen = out_off - actx->fill; in mxs_dcp_aes_block_crypt()
348 memcpy(in_buf + actx->fill, src_buf, clen); in mxs_dcp_aes_block_crypt()
351 actx->fill += clen; in mxs_dcp_aes_block_crypt()
357 if (actx->fill == out_off || sg_is_last(src) || in mxs_dcp_aes_block_crypt()
359 ret = mxs_dcp_run_aes(actx, req, init); in mxs_dcp_aes_block_crypt()
365 actx->fill, dst_off); in mxs_dcp_aes_block_crypt()
366 dst_off += actx->fill; in mxs_dcp_aes_block_crypt()
367 last_out_len = actx->fill; in mxs_dcp_aes_block_crypt()
368 actx->fill = 0; in mxs_dcp_aes_block_crypt()
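
The mxs_dcp_aes_block_crypt() hits above outline a fill-and-flush loop: source data is copied into a bounce buffer until actx->fill reaches the output offset (or the scatterlist ends), the accumulated bytes are handed to mxs_dcp_run_aes(), the result is copied back out, and fill is reset to zero. Below is a minimal, compilable illustration of that buffering pattern; BUF_SZ and process_chunk() are hypothetical stand-ins for the driver's DMA bounce buffer and mxs_dcp_run_aes(), not names taken from the driver.

#include <stddef.h>
#include <string.h>

#define BUF_SZ 4096     /* stand-in for the driver's bounce-buffer size */

/* Hypothetical stand-in for mxs_dcp_run_aes(); here it just copies data through. */
static void process_chunk(const unsigned char *buf, size_t len, unsigned char *out)
{
        memcpy(out, buf, len);
}

/*
 * Fill-and-flush: stage input in a fixed-size buffer and flush it whenever
 * the buffer is full or the input runs out, mirroring how actx->fill is
 * accumulated and reset in mxs_dcp_aes_block_crypt().
 */
static void crypt_buffered(const unsigned char *src, size_t len, unsigned char *dst)
{
        unsigned char in_buf[BUF_SZ];
        size_t fill = 0;

        while (len) {
                size_t clen = len < BUF_SZ - fill ? len : BUF_SZ - fill;

                memcpy(in_buf + fill, src, clen);
                src += clen;
                len -= clen;
                fill += clen;

                if (fill == BUF_SZ || !len) {
                        process_chunk(in_buf, fill, dst);
                        dst += fill;
                        fill = 0;
                }
        }
}

The driver's real loop additionally walks scatterlists and passes an init flag to mxs_dcp_run_aes() on the first chunk, which this sketch omits.
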
451 struct dcp_async_ctx *actx = crypto_tfm_ctx(arq->tfm); in mxs_dcp_aes_enqueue() local
455 if (unlikely(actx->key_len != AES_KEYSIZE_128)) in mxs_dcp_aes_enqueue()
460 actx->chan = DCP_CHAN_CRYPTO; in mxs_dcp_aes_enqueue()
462 spin_lock(&sdcp->lock[actx->chan]); in mxs_dcp_aes_enqueue()
463 ret = crypto_enqueue_request(&sdcp->queue[actx->chan], &req->base); in mxs_dcp_aes_enqueue()
464 spin_unlock(&sdcp->lock[actx->chan]); in mxs_dcp_aes_enqueue()
466 wake_up_process(sdcp->thread[actx->chan]); in mxs_dcp_aes_enqueue()
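
mxs_dcp_aes_enqueue() shows the driver's submission pattern: reject (or divert) key sizes the hardware cannot handle, select the crypto channel, queue the request under the per-channel spinlock, and wake the per-channel kthread. A condensed sketch reconstructed from the lines above follows; the struct dcp layout (lock/queue/thread arrays) and the global_sdcp name are assumptions about driver internals, and the handling of non-128-bit keys is only hinted at.

static int mxs_dcp_aes_enqueue_sketch(struct skcipher_request *req)
{
        struct dcp *sdcp = global_sdcp;         /* assumed driver-global state */
        struct crypto_async_request *arq = &req->base;
        struct dcp_async_ctx *actx = crypto_tfm_ctx(arq->tfm);
        int ret;

        /* Only AES-128 runs in hardware; other keys presumably go to the fallback. */
        if (unlikely(actx->key_len != AES_KEYSIZE_128))
                return -EINVAL;

        actx->chan = DCP_CHAN_CRYPTO;

        /* Queue the request on the channel and kick its worker thread. */
        spin_lock(&sdcp->lock[actx->chan]);
        ret = crypto_enqueue_request(&sdcp->queue[actx->chan], &req->base);
        spin_unlock(&sdcp->lock[actx->chan]);

        wake_up_process(sdcp->thread[actx->chan]);

        return ret;
}

The same lock/enqueue/wake sequence reappears in dcp_sha_update_fx() further down, so both algorithm families share one submission mechanism.
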
494 struct dcp_async_ctx *actx = crypto_skcipher_ctx(tfm); in mxs_dcp_aes_setkey() local
501 actx->key_len = len; in mxs_dcp_aes_setkey()
503 memcpy(actx->key, key, len); in mxs_dcp_aes_setkey()
512 crypto_skcipher_clear_flags(actx->fallback, CRYPTO_TFM_REQ_MASK); in mxs_dcp_aes_setkey()
513 crypto_skcipher_set_flags(actx->fallback, in mxs_dcp_aes_setkey()
515 return crypto_skcipher_setkey(actx->fallback, key, len); in mxs_dcp_aes_setkey()
521 struct dcp_async_ctx *actx = crypto_skcipher_ctx(tfm); in mxs_dcp_aes_fallback_init_tfm() local
528 actx->fallback = blk; in mxs_dcp_aes_fallback_init_tfm()
536 struct dcp_async_ctx *actx = crypto_skcipher_ctx(tfm); in mxs_dcp_aes_fallback_exit_tfm() local
538 crypto_free_skcipher(actx->fallback); in mxs_dcp_aes_fallback_exit_tfm()
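
The setkey and fallback init/exit hits sketch the key-handling policy: a 128-bit key is cached in the context for the hardware path, while any other length is forwarded to a software skcipher that is allocated at tfm init time and freed at exit. A reconstruction pieced together from the lines above; the crypto_alloc_skcipher() call, its flags, and the way request flags are propagated follow the usual kernel fallback idiom and are my assumptions, not quotes from the driver.

static int mxs_dcp_aes_setkey_sketch(struct crypto_skcipher *tfm, const u8 *key,
                                     unsigned int len)
{
        struct dcp_async_ctx *actx = crypto_skcipher_ctx(tfm);

        /* AES-128 is handled by the DCP itself: just cache the key. */
        if (len == AES_KEYSIZE_128) {
                actx->key_len = len;
                memcpy(actx->key, key, len);
                return 0;
        }

        /* Any other key size is programmed into the software fallback. */
        crypto_skcipher_clear_flags(actx->fallback, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(actx->fallback,
                                  crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_REQ_MASK);
        return crypto_skcipher_setkey(actx->fallback, key, len);
}

static int mxs_dcp_aes_fallback_init_tfm_sketch(struct crypto_skcipher *tfm)
{
        struct dcp_async_ctx *actx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *blk;

        /* Allocate a software implementation of the same algorithm as fallback. */
        blk = crypto_alloc_skcipher(crypto_tfm_alg_name(crypto_skcipher_tfm(tfm)),
                                    0, CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(blk))
                return PTR_ERR(blk);

        actx->fallback = blk;
        return 0;
}

static void mxs_dcp_aes_fallback_exit_tfm_sketch(struct crypto_skcipher *tfm)
{
        struct dcp_async_ctx *actx = crypto_skcipher_ctx(tfm);

        crypto_free_skcipher(actx->fallback);
}
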
550 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in mxs_dcp_run_sha() local
552 struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan]; in mxs_dcp_run_sha()
569 desc->control1 = actx->alg; in mxs_dcp_run_sha()
573 desc->size = actx->fill; in mxs_dcp_run_sha()
583 (actx->alg == MXS_DCP_CONTROL1_HASH_SELECT_SHA1) ? in mxs_dcp_run_sha()
602 ret = mxs_dcp_start_dma(actx); in mxs_dcp_run_sha()
620 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in dcp_sha_req_to_buf() local
640 if (actx->fill + len > DCP_BUF_SZ) in dcp_sha_req_to_buf()
641 clen = DCP_BUF_SZ - actx->fill; in dcp_sha_req_to_buf()
645 scatterwalk_map_and_copy(in_buf + actx->fill, src, oft, clen, in dcp_sha_req_to_buf()
650 actx->fill += clen; in dcp_sha_req_to_buf()
656 if (len && actx->fill == DCP_BUF_SZ) { in dcp_sha_req_to_buf()
660 actx->fill = 0; in dcp_sha_req_to_buf()
676 actx->fill = 0; in dcp_sha_req_to_buf()
725 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in dcp_sha_init() local
733 memset(actx, 0, sizeof(*actx)); in dcp_sha_init()
736 actx->alg = MXS_DCP_CONTROL1_HASH_SELECT_SHA1; in dcp_sha_init()
738 actx->alg = MXS_DCP_CONTROL1_HASH_SELECT_SHA256; in dcp_sha_init()
740 actx->fill = 0; in dcp_sha_init()
741 actx->hot = 0; in dcp_sha_init()
742 actx->chan = DCP_CHAN_HASH_SHA; in dcp_sha_init()
744 mutex_init(&actx->mutex); in dcp_sha_init()
755 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in dcp_sha_update_fx() local
766 mutex_lock(&actx->mutex); in dcp_sha_update_fx()
770 if (!actx->hot) { in dcp_sha_update_fx()
771 actx->hot = 1; in dcp_sha_update_fx()
775 spin_lock(&sdcp->lock[actx->chan]); in dcp_sha_update_fx()
776 ret = crypto_enqueue_request(&sdcp->queue[actx->chan], &req->base); in dcp_sha_update_fx()
777 spin_unlock(&sdcp->lock[actx->chan]); in dcp_sha_update_fx()
779 wake_up_process(sdcp->thread[actx->chan]); in dcp_sha_update_fx()
780 mutex_unlock(&actx->mutex); in dcp_sha_update_fx()
817 struct dcp_async_ctx *actx = crypto_ahash_ctx(tfm); in dcp_sha_import() local
821 memset(actx, 0, sizeof(struct dcp_async_ctx)); in dcp_sha_import()
823 memcpy(actx, &export->async_ctx, sizeof(struct dcp_async_ctx)); in dcp_sha_import()
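
Taken together, the references touch a small set of dcp_async_ctx members: the DCP channel, the staging-buffer fill level, the AES key and its length, the hash algorithm selector and the hot flag, the skcipher fallback, and a mutex serializing hash submission (dcp_sha_import() simply restores the whole structure with memcpy()). The layout implied by those accesses looks roughly like the sketch below; field order, grouping, and exact types are inferred from the hits above, not quoted from the driver.

struct dcp_async_ctx {
        /* Common context */
        struct mutex            mutex;          /* taken around dcp_sha_update_fx() submission */
        uint32_t                chan;           /* DCP_CHAN_CRYPTO or DCP_CHAN_HASH_SHA */
        unsigned int            fill;           /* bytes staged in the bounce buffer (AES and SHA paths) */

        /* SHA-specific context */
        unsigned int            alg;            /* MXS_DCP_CONTROL1_HASH_SELECT_SHA1/SHA256 */
        unsigned int            hot:1;          /* hashing session already running */

        /* AES-specific context */
        struct crypto_skcipher  *fallback;      /* software fallback for non-128-bit keys */
        unsigned int            key_len;
        uint8_t                 key[AES_KEYSIZE_128];
};

Because dcp_sha_import() copies the structure wholesale, the context effectively doubles as the exported hash state.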