
Searched for refs:alg_ctx (results 1 – 10 of 10), sorted by relevance

/OK3568_Linux_fs/kernel/drivers/crypto/rockchip/

rk_crypto_core.c
69 struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev->async_req); in rk_load_data()
71 alg_ctx->count = 0; in rk_load_data()
74 if (alg_ctx->total == 0) in rk_load_data()
77 src_nents = alg_ctx->src_nents; in rk_load_data()
78 dst_nents = alg_ctx->dst_nents; in rk_load_data()
81 if (alg_ctx->assoclen && alg_ctx->left_bytes == alg_ctx->total) { in rk_load_data()
84 if (alg_ctx->assoclen > rk_dev->aad_max) { in rk_load_data()
89 if (!sg_pcopy_to_buffer(alg_ctx->req_src, alg_ctx->src_nents, in rk_load_data()
90 rk_dev->addr_aad, alg_ctx->assoclen, 0)) { in rk_load_data()
97 sg_init_one(&alg_ctx->sg_aad, rk_dev->addr_aad, alg_ctx->assoclen); in rk_load_data()
[all …]
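
The rk_load_data() excerpt shows how the core stages AEAD associated data: on the first chunk of a request (left_bytes still equal to total) the assoclen bytes are gathered out of the source scatterlist into a bounce buffer, rejected if they exceed aad_max, and rewrapped as a single-entry scatterlist for the hardware. A minimal sketch of that pattern, assuming the rk_crypto_dev and rk_alg_ctx fields named in the excerpt:

#include <linux/scatterlist.h>
#include <linux/errno.h>

/* Sketch only: struct rk_crypto_dev and struct rk_alg_ctx are the
 * driver's own types; the fields used here all appear in the excerpt. */
static int stage_aad(struct rk_crypto_dev *rk_dev, struct rk_alg_ctx *alg_ctx)
{
	/* AAD is staged once, before any payload has been consumed. */
	if (!alg_ctx->assoclen || alg_ctx->left_bytes != alg_ctx->total)
		return 0;

	/* The bounce buffer has a fixed capacity. */
	if (alg_ctx->assoclen > rk_dev->aad_max)
		return -EINVAL;

	/* Gather the possibly scattered AAD into one linear buffer. */
	if (!sg_pcopy_to_buffer(alg_ctx->req_src, alg_ctx->src_nents,
				rk_dev->addr_aad, alg_ctx->assoclen, 0))
		return -EINVAL;

	/* Hand the hardware a single contiguous segment. */
	sg_init_one(&alg_ctx->sg_aad, rk_dev->addr_aad, alg_ctx->assoclen);
	return 0;
}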

rk_crypto_skcipher_utils.c
116 struct rk_alg_ctx *alg_ctx = &ctx->algs_ctx; in rk_get_new_iv()
117 uint32_t ivsize = alg_ctx->chunk_size; in rk_get_new_iv()
122 sg_dst = alg_ctx->aligned ? alg_ctx->sg_dst : &alg_ctx->sg_tmp; in rk_get_new_iv()
125 alg_ctx->aligned, alg_ctx->count, ivsize, is_enc); in rk_get_new_iv()
129 rk_ctr128_calc(iv, alg_ctx->count); in rk_get_new_iv()
134 sg_pcopy_to_buffer(sg_dst, alg_ctx->map_nents, in rk_get_new_iv()
135 iv, ivsize, alg_ctx->count - ivsize); in rk_get_new_iv()
140 sg_pcopy_to_buffer(sg_dst, alg_ctx->map_nents, in rk_get_new_iv()
141 iv, ivsize, alg_ctx->count - ivsize); in rk_get_new_iv()
183 struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev); in rk_set_data_start()
[all …]
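
The rk_get_new_iv() hits show how the IV for the next chunk of a chained request is derived: counter mode advances the IV arithmetically via rk_ctr128_calc(), while the CBC-style path copies the last ciphertext block (the final ivsize bytes just produced) out of the destination scatterlist. A hypothetical reconstruction of the counter step, assuming rk_ctr128_calc() adds the number of processed blocks to a 128-bit big-endian counter; the driver's exact semantics are not confirmed by the excerpt:

#include <stdint.h>
#include <stddef.h>

#define AES_BLOCK_SIZE 16

/* Hypothetical reconstruction of rk_ctr128_calc(): advance a 128-bit
 * big-endian counter IV by the number of blocks covered by nbytes. */
static void ctr128_calc(uint8_t iv[AES_BLOCK_SIZE], size_t nbytes)
{
	uint64_t add = (nbytes + AES_BLOCK_SIZE - 1) / AES_BLOCK_SIZE;
	int i;

	/* Ripple-carry add, least significant byte at iv[15]. */
	for (i = AES_BLOCK_SIZE - 1; i >= 0 && add; i--) {
		uint64_t sum = (uint64_t)iv[i] + (add & 0xff);

		iv[i] = (uint8_t)sum;
		add = (add >> 8) + (sum >> 8);
	}
}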

rk_crypto_v1_skcipher.c
231 struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev); in crypto_dma_start()
233 CRYPTO_WRITE(rk_dev, RK_CRYPTO_BRDMAS, alg_ctx->addr_in); in crypto_dma_start()
234 CRYPTO_WRITE(rk_dev, RK_CRYPTO_BRDMAL, alg_ctx->count / 4); in crypto_dma_start()
235 CRYPTO_WRITE(rk_dev, RK_CRYPTO_BTDMAS, alg_ctx->addr_out); in crypto_dma_start()
247 struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev); in rk_set_data_start()
249 u8 *src_last_blk = page_address(sg_page(alg_ctx->sg_src)) + in rk_set_data_start()
250 alg_ctx->sg_src->offset + alg_ctx->sg_src->length - ivsize; in rk_set_data_start()
257 sg_pcopy_to_buffer(alg_ctx->req_src, alg_ctx->src_nents, in rk_set_data_start()
258 req->iv, ivsize, alg_ctx->total - ivsize); in rk_set_data_start()
261 err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, alg_ctx->sg_dst); in rk_set_data_start()
[all …]
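
The v1 crypto_dma_start() hit programs three registers: source bus address (RK_CRYPTO_BRDMAS), transfer length in 32-bit words (RK_CRYPTO_BRDMAL, hence count / 4), and destination address (RK_CRYPTO_BTDMAS). The rk_set_data_start() hit above it also shows the CBC trick of saving the last source block as the next IV before the DMA overwrites it. A sketch of the register programming, assuming CRYPTO_WRITE is a writel()-style MMIO helper:

/* Sketch of the v1 DMA kickoff; register names come from the excerpt. */
static void dma_start_sketch(struct rk_crypto_dev *rk_dev,
			     struct rk_alg_ctx *alg_ctx)
{
	/* Source bus address of the mapped chunk. */
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_BRDMAS, alg_ctx->addr_in);

	/* BRDMAL counts 32-bit words, so the byte count must already be
	 * 4-byte aligned when this runs. */
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_BRDMAL, alg_ctx->count / 4);

	/* Destination bus address. */
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_BTDMAS, alg_ctx->addr_out);
}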

rk_crypto_v2_skcipher.c
49 struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev); in rk_crypto_irq_handle()
59 (u32)alg_ctx->addr_in); in rk_crypto_irq_handle()
61 (u32)alg_ctx->addr_out); in rk_crypto_irq_handle()
62 dev_err(rk_dev->dev, "DMA count = %08x\n", alg_ctx->count); in rk_crypto_irq_handle()
205 struct rk_alg_ctx *alg_ctx = &ctx->algs_ctx; in rk_crypto_complete()
213 alg_ctx->aligned, alg_ctx->align_size); in rk_crypto_complete()
215 alg_ctx->total, alg_ctx->left_bytes, alg_ctx->count); in rk_crypto_complete()
342 struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev); in crypto_dma_start()
344 u32 calc_len = alg_ctx->count; in crypto_dma_start()
348 if (alg_ctx->aligned) in crypto_dma_start()
[all …]
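
The v2 hits cover the failure path and the completion bookkeeping: on a DMA error the IRQ handler logs the programmed input/output bus addresses and the byte count, and rk_crypto_complete() traces total, left_bytes, and count so a stalled chain is visible. A sketch of the error dump; the interrupt-status check around it is assumed:

/* Mirrors the dev_err() trio in the excerpt: enough to tell a bad
 * mapping (addresses) from a bad length (count). */
static void dump_dma_state(struct rk_crypto_dev *rk_dev,
			   struct rk_alg_ctx *alg_ctx)
{
	dev_err(rk_dev->dev, "DMA addr_in  = %08x\n", (u32)alg_ctx->addr_in);
	dev_err(rk_dev->dev, "DMA addr_out = %08x\n", (u32)alg_ctx->addr_out);
	dev_err(rk_dev->dev, "DMA count    = %08x\n", alg_ctx->count);
}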

rk_crypto_v3_skcipher.c
48 struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev); in rk_crypto_irq_handle()
58 (u32)alg_ctx->addr_in); in rk_crypto_irq_handle()
60 (u32)alg_ctx->addr_out); in rk_crypto_irq_handle()
61 dev_err(rk_dev->dev, "DMA count = %08x\n", alg_ctx->count); in rk_crypto_irq_handle()
204 struct rk_alg_ctx *alg_ctx = &ctx->algs_ctx; in rk_crypto_complete()
212 alg_ctx->aligned, alg_ctx->align_size); in rk_crypto_complete()
214 alg_ctx->total, alg_ctx->left_bytes, alg_ctx->count); in rk_crypto_complete()
341 struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev); in crypto_dma_start()
343 u32 calc_len = alg_ctx->count; in crypto_dma_start()
347 if (alg_ctx->aligned) in crypto_dma_start()
[all …]

rk_crypto_v2_ahash.c
69 struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev); in rk_crypto_irq_handle()
82 (u32)alg_ctx->addr_in); in rk_crypto_irq_handle()
84 (u32)alg_ctx->addr_out); in rk_crypto_irq_handle()
85 dev_err(rk_dev->dev, "DMA count = %08x\n", alg_ctx->count); in rk_crypto_irq_handle()
113 struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(ctx->rk_dev); in rk_ahash_crypto_complete()
121 alg_ctx->aligned, alg_ctx->align_size); in rk_ahash_crypto_complete()
123 alg_ctx->total, alg_ctx->left_bytes, alg_ctx->count); in rk_ahash_crypto_complete()
214 struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev); in rk_ahash_dma_start()
222 ctx->calc_cnt, alg_ctx->count, is_final); in rk_ahash_dma_start()
224 if (alg_ctx->count % RK_DMA_ALIGNMENT && !is_final) { in rk_ahash_dma_start()
[all …]
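
The v2 ahash hit in rk_ahash_dma_start() encodes a hardware rule: intermediate hash chunks must be a multiple of RK_DMA_ALIGNMENT, and only the final chunk (is_final) may carry a ragged tail. A sketch of that check; the hold-the-tail-back response named in the comment is an assumption, not something the hit shows:

static int check_hash_chunk(struct rk_alg_ctx *alg_ctx, bool is_final)
{
	/* Intermediate chunks must be DMA-aligned; only the final chunk
	 * may be ragged. A driver would hold the unaligned tail back
	 * (e.g. in a bounce buffer) and hash it with the next chunk. */
	if (alg_ctx->count % RK_DMA_ALIGNMENT && !is_final)
		return -EAGAIN;

	return 0;
}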

rk_crypto_v3_ahash.c
136 struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev); in rk_crypto_irq_handle()
149 (u32)alg_ctx->addr_in); in rk_crypto_irq_handle()
151 (u32)alg_ctx->addr_out); in rk_crypto_irq_handle()
152 dev_err(rk_dev->dev, "DMA count = %08x\n", alg_ctx->count); in rk_crypto_irq_handle()
180 struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(ctx->rk_dev); in rk_ahash_crypto_complete()
188 alg_ctx->aligned, alg_ctx->align_size); in rk_ahash_crypto_complete()
190 alg_ctx->total, alg_ctx->left_bytes, alg_ctx->count); in rk_ahash_crypto_complete()
200 if (alg_ctx->total) in rk_ahash_crypto_complete()
288 struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev); in rk_ahash_dma_start()
296 ctx->calc_cnt, alg_ctx->count, is_final); in rk_ahash_dma_start()
[all …]

rk_crypto_v1_ahash.c
98 struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev); in rk_ahash_reg_init()
125 CRYPTO_WRITE(rk_dev, RK_CRYPTO_HASH_MSG_LEN, alg_ctx->total); in rk_ahash_reg_init()
226 struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev); in crypto_ahash_dma_start()
228 CRYPTO_WRITE(rk_dev, RK_CRYPTO_HRDMAS, alg_ctx->addr_in); in crypto_ahash_dma_start()
229 CRYPTO_WRITE(rk_dev, RK_CRYPTO_HRDMAL, (alg_ctx->count + 3) / 4); in crypto_ahash_dma_start()
237 struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev); in rk_ahash_set_data_start()
239 err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, NULL); in rk_ahash_set_data_start()
250 struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev); in rk_ahash_start()
252 alg_ctx->total = req->nbytes; in rk_ahash_start()
253 alg_ctx->left_bytes = req->nbytes; in rk_ahash_start()
[all …]
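
The v1 ahash hits show two details: the total message length is programmed up front (RK_CRYPTO_HASH_MSG_LEN takes alg_ctx->total), and the read-DMA length register HRDMAL counts 32-bit words rather than bytes, so the byte count is rounded up with (count + 3) / 4. A self-contained check of that arithmetic:

#include <assert.h>
#include <stdint.h>

/* HRDMAL counts 32-bit words, so byte counts round up: (n + 3) / 4. */
static uint32_t bytes_to_words(uint32_t nbytes)
{
	return (nbytes + 3) / 4;
}

int main(void)
{
	assert(bytes_to_words(13) == 4); /* 13 bytes need 4 words */
	assert(bytes_to_words(16) == 4); /* exact multiples are unchanged */
	assert(bytes_to_words(0)  == 0);
	return 0;
}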

rk_crypto_ahash_utils.c
24 static void rk_alg_ctx_clear(struct rk_alg_ctx *alg_ctx) in rk_alg_ctx_clear()
26 alg_ctx->total = 0; in rk_alg_ctx_clear()
27 alg_ctx->left_bytes = 0; in rk_alg_ctx_clear()
28 alg_ctx->count = 0; in rk_alg_ctx_clear()
29 alg_ctx->sg_src = 0; in rk_alg_ctx_clear()
30 alg_ctx->req_src = 0; in rk_alg_ctx_clear()
31 alg_ctx->src_nents = 0; in rk_alg_ctx_clear()
69 struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev); in rk_ahash_set_data_start()
73 err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, alg_ctx->sg_dst); in rk_ahash_set_data_start()
75 err = alg_ctx->ops.hw_dma_start(rk_dev, flag); in rk_ahash_set_data_start()
[all …]
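
The rk_crypto_ahash_utils.c hit shows rk_alg_ctx_clear() nearly in full: between hash operations the per-request bookkeeping is wiped so a stale scatterlist pointer cannot leak into the next request. A completed version of the excerpt, using NULL for the pointer members the original assigns 0 to; the field-meaning comments are inferred from how the other excerpts use them:

static void rk_alg_ctx_clear(struct rk_alg_ctx *alg_ctx)
{
	alg_ctx->total      = 0;    /* bytes in the whole request */
	alg_ctx->left_bytes = 0;    /* bytes still to be processed */
	alg_ctx->count      = 0;    /* bytes in the current chunk */
	alg_ctx->sg_src     = NULL; /* scatterlist currently mapped */
	alg_ctx->req_src    = NULL; /* the request's original sg list */
	alg_ctx->src_nents  = 0;    /* entries in req_src */
}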

rk_crypto_v2_akcipher.c
262 struct rk_alg_ctx *alg_ctx = &ctx->algs_ctx; in rk_rsa_init_tfm()
276 alg_ctx->align_size = crypto_tfm_alg_alignmask(&tfm->base) + 1; in rk_rsa_init_tfm()
278 alg_ctx->ops.start = rk_rsa_start; in rk_rsa_init_tfm()
279 alg_ctx->ops.update = rk_rsa_crypto_rx; in rk_rsa_init_tfm()
280 alg_ctx->ops.complete = rk_rsa_complete; in rk_rsa_init_tfm()
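
The final hit, rk_rsa_init_tfm(), shows the wiring that keeps the shared core algorithm-agnostic: each tfm derives its DMA align_size from the crypto API's alignmask (mask + 1 gives the byte alignment) and installs start/update/complete callbacks in alg_ctx->ops for the request machinery to invoke. A sketch of that setup, with the rk_alg_ops layout assumed from the three assignments in the excerpt:

#include <linux/crypto.h>

static int rsa_init_sketch(struct crypto_tfm *tfm, struct rk_alg_ctx *alg_ctx)
{
	/* alignmask is (alignment - 1), so +1 yields the byte alignment. */
	alg_ctx->align_size = crypto_tfm_alg_alignmask(tfm) + 1;

	/* Callbacks the shared request machinery invokes. */
	alg_ctx->ops.start    = rk_rsa_start;     /* kick off the hardware */
	alg_ctx->ops.update   = rk_rsa_crypto_rx; /* drain completed data */
	alg_ctx->ops.complete = rk_rsa_complete;  /* finish the request */

	return 0;
}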