Searched refs:cryptlen (Results 1 – 25 of 102) sorted by relevance


/OK3568_Linux_fs/kernel/crypto/
authencesn.c
97 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_genicv_tail() local
103 scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0); in crypto_authenc_esn_genicv_tail()
106 scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1); in crypto_authenc_esn_genicv_tail()
131 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_genicv() local
141 scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1); in crypto_authenc_esn_genicv()
147 ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen); in crypto_authenc_esn_genicv()
190 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_encrypt() local
210 skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv); in crypto_authenc_esn_encrypt()
231 unsigned int cryptlen = req->cryptlen - authsize; in crypto_authenc_esn_decrypt_tail() local
242 scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0); in crypto_authenc_esn_decrypt_tail()
[all …]
chacha20poly1305.c
39 __le64 cryptlen; member
59 unsigned int cryptlen; member
101 req->assoclen + rctx->cryptlen, in poly_verify_tag()
113 req->assoclen + rctx->cryptlen, in poly_copy_tag()
131 if (rctx->cryptlen == 0) in chacha_decrypt()
145 rctx->cryptlen, creq->iv); in chacha_decrypt()
158 if (rctx->cryptlen == req->cryptlen) /* encrypting */ in poly_tail_continue()
178 preq->tail.cryptlen = cpu_to_le64(rctx->cryptlen); in poly_tail()
207 padlen = -rctx->cryptlen % POLY1305_BLOCK_SIZE; in poly_cipherpad()
236 if (rctx->cryptlen == req->cryptlen) /* encrypting */ in poly_cipher()
[all …]
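
The snippets above implement the RFC 8439 tail: Poly1305 input is zero-padded to 16-byte blocks, and the MAC stream ends with both lengths as little-endian 64-bit words. A minimal userspace model of just the length math (poly_padlen and poly_tail_block are illustrative names, not the kernel's):

#include <stdint.h>

#define POLY1305_BLOCK_SIZE 16

struct poly_tail_block {
	uint64_t assoclen;  /* written little-endian, as cpu_to_le64() does */
	uint64_t cryptlen;
};

/* Zero padding needed to reach the next 16-byte boundary (0..15). */
static unsigned int poly_padlen(unsigned int len)
{
	return -len % POLY1305_BLOCK_SIZE;  /* same unsigned trick as poly_cipherpad() */
}
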
keywrap.c
130 u64 t = 6 * ((req->cryptlen) >> 3); in crypto_kw_decrypt()
138 if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE) in crypto_kw_decrypt()
154 unsigned int nbytes = req->cryptlen; in crypto_kw_decrypt()
208 if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE) in crypto_kw_encrypt()
227 unsigned int nbytes = req->cryptlen; in crypto_kw_encrypt()
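
crypto_kw_encrypt()/crypto_kw_decrypt() enforce the NIST key-wrap length rule seen above: at least two 8-byte semiblocks and a whole number of them, with the unwrap loop running 6 * (cryptlen >> 3) steps. A sketch of those checks, assuming only that SEMIBSIZE is 8 as in keywrap.c (kw_len_ok/kw_rounds are illustrative names):

#include <stdbool.h>
#include <stdint.h>

#define SEMIBSIZE 8  /* one key-wrap semiblock, as in keywrap.c */

/* Valid inputs are >= 2 semiblocks and a whole number of them. */
static bool kw_len_ok(unsigned int cryptlen)
{
	return cryptlen >= 2 * SEMIBSIZE && cryptlen % SEMIBSIZE == 0;
}

/* The unwrap loop runs six steps per semiblock: 6 * (cryptlen >> 3). */
static uint64_t kw_rounds(unsigned int cryptlen)
{
	return 6 * ((uint64_t)cryptlen >> 3);
}
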
aegis128-core.c
72 u64 assoclen, u64 cryptlen);
350 u64 assoclen, u64 cryptlen) in crypto_aegis128_final() argument
353 u64 cryptbits = cryptlen * 8; in crypto_aegis128_final()
398 unsigned int cryptlen = req->cryptlen; in crypto_aegis128_encrypt() local
409 cryptlen); in crypto_aegis128_encrypt()
415 crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen); in crypto_aegis128_encrypt()
418 scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen, in crypto_aegis128_encrypt()
429 unsigned int cryptlen = req->cryptlen - authsize; in crypto_aegis128_decrypt() local
434 scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen, in crypto_aegis128_decrypt()
444 cryptlen); in crypto_aegis128_decrypt()
[all …]
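
The AEGIS-128 snippets show the usual AEAD buffer layout: associated data, then the message, then the tag at offset assoclen + cryptlen, with req->cryptlen including the tag on decrypt. A hedged sketch of that offset arithmetic (aegis_layout is an illustrative helper, not kernel code):

struct aead_layout {
	unsigned int msglen;     /* plaintext/ciphertext without the tag */
	unsigned int tag_offset; /* where the tag is written or read */
};

/* enc: cryptlen is the message; dec: cryptlen includes authsize bytes
 * of tag, which must be peeled off first (callers ensure cryptlen >=
 * authsize). */
static struct aead_layout aegis_layout(unsigned int assoclen,
				       unsigned int cryptlen,
				       unsigned int authsize, int enc)
{
	struct aead_layout l;

	l.msglen = enc ? cryptlen : cryptlen - authsize;
	l.tag_offset = assoclen + l.msglen;
	return l;
}
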
ccm.c
128 unsigned int cryptlen) in format_input() argument
146 return set_msg_len(info + 16 - l, cryptlen, l); in format_input()
169 unsigned int cryptlen) in crypto_ccm_auth() argument
182 err = format_input(odata, req, cryptlen); in crypto_ccm_auth()
218 cryptlen += ilen; in crypto_ccm_auth()
221 ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen); in crypto_ccm_auth()
236 req->assoclen + req->cryptlen, in crypto_ccm_encrypt_done()
292 unsigned int cryptlen = req->cryptlen; in crypto_ccm_encrypt() local
301 err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen); in crypto_ccm_encrypt()
312 skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv); in crypto_ccm_encrypt()
[all …]
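
format_input() builds CCM's B0 block, whose last l bytes carry the message length big-endian; set_msg_len() fails if cryptlen does not fit. A userspace approximation of that encoding (the byte loop replaces the kernel's cpu_to_be32/memcpy, and -1 stands in for -EOVERFLOW):

#include <stdint.h>

/* Encode msglen big-endian into the last csize bytes of B0; mirrors
 * the set_msg_len(info + 16 - l, cryptlen, l) call above. */
static int ccm_set_msg_len(uint8_t *block, unsigned int msglen, int csize)
{
	int i;

	if (csize < 4 && msglen >= (1u << (8 * csize)))
		return -1;  /* length does not fit in csize bytes */

	for (i = csize - 1; i >= 0; i--) {
		block[i] = msglen & 0xff;  /* least significant byte last */
		msglen >>= 8;
	}
	return 0;
}
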
gcm.c
58 unsigned int cryptlen; member
178 unsigned int cryptlen) in crypto_gcm_init_crypt() argument
190 cryptlen + sizeof(pctx->auth_tag), in crypto_gcm_init_crypt()
231 lengths.b = cpu_to_be64(gctx->cryptlen * 8); in gcm_hash_len()
292 remain = gcm_remain(gctx->cryptlen); in gcm_hash_crypt_continue()
321 if (gctx->cryptlen) in gcm_hash_assoc_remain_continue()
323 gctx->src, gctx->cryptlen, flags) ?: in gcm_hash_assoc_remain_continue()
419 req->assoclen + req->cryptlen, in gcm_enc_copy_hash()
430 gctx->cryptlen = req->cryptlen; in gcm_encrypt_continue()
458 crypto_gcm_init_crypt(req, req->cryptlen); in crypto_gcm_encrypt()
[all …]
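
gcm_hash_len() closes GHASH with a final 16-byte block holding the AAD and ciphertext lengths in bits as big-endian 64-bit values, which is what the cpu_to_be64(gctx->cryptlen * 8) line above computes. A small model, with a stand-in be64() that assumes a little-endian host:

#include <stdint.h>

/* Stand-in for the kernel's cpu_to_be64(); assumes a little-endian host. */
static uint64_t be64(uint64_t x)
{
	return __builtin_bswap64(x);
}

struct gcm_len_block {
	uint64_t a;  /* AAD length in bits, big-endian */
	uint64_t b;  /* ciphertext length in bits, big-endian */
};

static struct gcm_len_block gcm_lengths(uint64_t assoclen, uint64_t cryptlen)
{
	struct gcm_len_block lb = {
		.a = be64(assoclen * 8),
		.b = be64(cryptlen * 8),  /* the gcm_hash_len() line above */
	};
	return lb;
}
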
aead.c
87 unsigned int cryptlen = req->cryptlen; in crypto_aead_encrypt() local
95 crypto_stats_aead_encrypt(cryptlen, alg, ret); in crypto_aead_encrypt()
104 unsigned int cryptlen = req->cryptlen; in crypto_aead_decrypt() local
110 else if (req->cryptlen < crypto_aead_authsize(aead)) in crypto_aead_decrypt()
114 crypto_stats_aead_decrypt(cryptlen, alg, ret); in crypto_aead_decrypt()
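
aead.c fixes the meaning of cryptlen for every AEAD: on encrypt it is the plaintext length and the output grows by the tag, on decrypt it covers ciphertext plus tag and must be at least authsize (hence the crypto_aead_authsize() check above). A minimal model of that contract (aead_output_len is an illustrative name):

#include <errno.h>

static int aead_output_len(unsigned int cryptlen, unsigned int authsize,
			   int enc, unsigned int *outlen)
{
	if (enc) {
		*outlen = cryptlen + authsize;  /* ciphertext || tag */
		return 0;
	}
	if (cryptlen < authsize)
		return -EINVAL;                 /* too short to hold a tag */
	*outlen = cryptlen - authsize;          /* recovered plaintext */
	return 0;
}
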
echainiv.c
37 if (req->cryptlen < ivsize) in echainiv_encrypt()
51 req->assoclen + req->cryptlen, in echainiv_encrypt()
62 req->cryptlen, info); in echainiv_encrypt()
94 if (req->cryptlen < ivsize) in echainiv_decrypt()
104 req->cryptlen - ivsize, req->iv); in echainiv_decrypt()
xts.c
87 const bool cts = (req->cryptlen % XTS_BLOCK_SIZE); in xts_xor_tweak()
164 int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1); in xts_cts_final()
167 int tail = req->cryptlen % XTS_BLOCK_SIZE; in xts_cts_final()
209 if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) { in xts_encrypt_done()
229 if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) { in xts_decrypt_done()
247 if (req->cryptlen < XTS_BLOCK_SIZE) in xts_init_crypt()
253 req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL); in xts_init_crypt()
272 if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0)) in xts_encrypt()
289 if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0)) in xts_decrypt()
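
xts.c splits a request into a whole-block bulk pass (cryptlen & ~(XTS_BLOCK_SIZE - 1)) plus an optional ciphertext-stealing tail (cryptlen % XTS_BLOCK_SIZE), rejecting anything under one block. A sketch of that planning step (xts_plan/xts_split are illustrative names):

#include <errno.h>

#define XTS_BLOCK_SIZE 16

struct xts_split {
	unsigned int bulk;  /* whole-block prefix for the fast path */
	unsigned int tail;  /* leftover bytes handled by CTS; 0 = none */
};

static int xts_plan(unsigned int cryptlen, struct xts_split *s)
{
	if (cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;  /* xts_init_crypt() rejects short requests */

	s->bulk = cryptlen & ~(XTS_BLOCK_SIZE - 1);
	s->tail = cryptlen % XTS_BLOCK_SIZE;
	return 0;
}
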
seqiv.c
59 if (req->cryptlen < ivsize) in seqiv_aead_encrypt()
75 req->assoclen + req->cryptlen, in seqiv_aead_encrypt()
97 req->cryptlen - ivsize, info); in seqiv_aead_encrypt()
118 if (req->cryptlen < ivsize + crypto_aead_authsize(geniv)) in seqiv_aead_decrypt()
128 req->cryptlen - ivsize, req->iv); in seqiv_aead_decrypt()
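
seqiv.c and echainiv.c carry the generated IV as the first ivsize bytes of the ciphertext, so encrypt requires cryptlen >= ivsize, decrypt requires cryptlen >= ivsize + authsize, and the inner request sees cryptlen - ivsize. A sketch of those bounds (geniv_check is an illustrative name):

#include <errno.h>

static int geniv_check(unsigned int cryptlen, unsigned int ivsize,
		       unsigned int authsize, int enc)
{
	if (enc)
		return cryptlen < ivsize ? -EINVAL : 0;

	/* decrypt must cover the transmitted IV plus the tag */
	return cryptlen < ivsize + authsize ? -EINVAL : 0;
}
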
/OK3568_Linux_fs/kernel/arch/x86/crypto/
aegis128-aesni-glue.c
44 void *state, void *tag_xor, unsigned int cryptlen,
167 unsigned int cryptlen, in crypto_aegis128_aesni_crypt() argument
182 crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen); in crypto_aegis128_aesni_crypt()
198 unsigned int cryptlen = req->cryptlen; in crypto_aegis128_aesni_encrypt() local
200 crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS); in crypto_aegis128_aesni_encrypt()
203 req->assoclen + cryptlen, authsize, 1); in crypto_aegis128_aesni_encrypt()
220 unsigned int cryptlen = req->cryptlen - authsize; in crypto_aegis128_aesni_decrypt() local
223 req->assoclen + cryptlen, authsize, 0); in crypto_aegis128_aesni_decrypt()
225 crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS); in crypto_aegis128_aesni_decrypt()
/OK3568_Linux_fs/kernel/arch/arm/crypto/
aes-ce-glue.c
271 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
282 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
283 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
298 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
301 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_encrypt()
304 subreq.cryptlen); in cts_cbc_encrypt()
309 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
329 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
340 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
341 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
[all …]
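
The CTS-CBC helpers above reserve the final two blocks (the second possibly partial) for ciphertext stealing, so cbc_blocks = DIV_ROUND_UP(cryptlen, 16) - 2 counts what goes through plain CBC. A model of that computation (cts_cbc_blocks is an illustrative name; a result <= 0 simply means CTS covers the whole request):

#include <errno.h>

#define AES_BLOCK_SIZE 16
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

static int cts_cbc_blocks(unsigned int cryptlen, int *cbc_blocks)
{
	if (cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;  /* CTS needs at least one full block */

	/* everything except the final two (possibly partial) blocks */
	*cbc_blocks = (int)DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;
	return 0;
}
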
/OK3568_Linux_fs/kernel/drivers/crypto/
omap-aes-gcm.c
91 int alen, clen, cryptlen, assoclen, ret; in omap_aes_gcm_copy_buffers() local
99 cryptlen = req->cryptlen; in omap_aes_gcm_copy_buffers()
105 cryptlen -= authlen; in omap_aes_gcm_copy_buffers()
108 clen = ALIGN(cryptlen, AES_BLOCK_SIZE); in omap_aes_gcm_copy_buffers()
110 nsg = !!(assoclen && cryptlen); in omap_aes_gcm_copy_buffers()
128 if (cryptlen) { in omap_aes_gcm_copy_buffers()
134 ret = omap_crypto_align_sg(&tmp, cryptlen, in omap_aes_gcm_copy_buffers()
146 dd->total = cryptlen; in omap_aes_gcm_copy_buffers()
159 if (cryptlen) { in omap_aes_gcm_copy_buffers()
160 ret = omap_crypto_align_sg(&dd->out_sg, cryptlen, in omap_aes_gcm_copy_buffers()
[all …]
/OK3568_Linux_fs/kernel/drivers/crypto/stm32/
stm32-cryp.c
356 return is_encrypt(cryp) ? cryp->areq->cryptlen : in stm32_cryp_get_input_text_len()
357 cryp->areq->cryptlen - cryp->authsize; in stm32_cryp_get_input_text_len()
796 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_ecb_encrypt()
799 if (req->cryptlen == 0) in stm32_cryp_aes_ecb_encrypt()
807 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_ecb_decrypt()
810 if (req->cryptlen == 0) in stm32_cryp_aes_ecb_decrypt()
818 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_cbc_encrypt()
821 if (req->cryptlen == 0) in stm32_cryp_aes_cbc_encrypt()
829 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_cbc_decrypt()
832 if (req->cryptlen == 0) in stm32_cryp_aes_cbc_decrypt()
[all …]
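
Every ECB/CBC entry point in stm32-cryp.c repeats the same gate: reject a cryptlen that is not a whole number of AES blocks, and complete a zero-length request immediately. A sketch of that gate (ecb_cbc_gate and its return convention are illustrative):

#include <errno.h>

#define AES_BLOCK_SIZE 16

/* <0: invalid length, 0: nothing to do, 1: submit to the engine */
static int ecb_cbc_gate(unsigned int cryptlen)
{
	if (cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;
	if (cryptlen == 0)
		return 0;
	return 1;
}
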
/OK3568_Linux_fs/kernel/drivers/crypto/allwinner/sun8i-ce/
sun8i-ce-cipher.c
32 if (areq->cryptlen < crypto_skcipher_ivsize(tfm)) in sun8i_ce_cipher_need_fallback()
35 if (areq->cryptlen == 0 || areq->cryptlen % 16) in sun8i_ce_cipher_need_fallback()
71 areq->cryptlen, areq->iv); in sun8i_ce_cipher_fallback()
102 areq->cryptlen, in sun8i_ce_cipher_prepare()
123 cet->t_dlen = cpu_to_le32(areq->cryptlen); in sun8i_ce_cipher_prepare()
125 cet->t_dlen = cpu_to_le32(areq->cryptlen / 4); in sun8i_ce_cipher_prepare()
166 offset = areq->cryptlen - ivsize; in sun8i_ce_cipher_prepare()
207 len = areq->cryptlen; in sun8i_ce_cipher_prepare()
213 areq->cryptlen, i, cet->t_src[i].len, sg->offset, todo); in sun8i_ce_cipher_prepare()
222 len = areq->cryptlen; in sun8i_ce_cipher_prepare()
[all …]
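
The offset = areq->cryptlen - ivsize lines in this and the following Allwinner/Amlogic drivers all serve CBC chaining: the last ciphertext block becomes the IV of the next request, so it is copied out of the result (or saved from the source before decryption overwrites it). The offset math, as a hypothetical helper:

/* assumes cryptlen >= ivsize; the drivers fall back or error out
 * on shorter requests */
static unsigned int cbc_next_iv_offset(unsigned int cryptlen,
				       unsigned int ivsize)
{
	return cryptlen - ivsize;
}
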
/OK3568_Linux_fs/kernel/drivers/crypto/allwinner/sun4i-ss/
sun4i-ss-cipher.c
31 unsigned int ileft = areq->cryptlen; in sun4i_ss_opti_poll()
32 unsigned int oleft = areq->cryptlen; in sun4i_ss_opti_poll()
40 if (!areq->cryptlen) in sun4i_ss_opti_poll()
52 scatterwalk_map_and_copy(backup_iv, areq->src, areq->cryptlen - ivsize, ivsize, 0); in sun4i_ss_opti_poll()
69 ileft = areq->cryptlen / 4; in sun4i_ss_opti_poll()
70 oleft = areq->cryptlen / 4; in sun4i_ss_opti_poll()
132 scatterwalk_map_and_copy(areq->iv, areq->dst, areq->cryptlen - ivsize, in sun4i_ss_opti_poll()
155 areq->cryptlen, areq->iv); in sun4i_ss_cipher_poll_fallback()
185 unsigned int ileft = areq->cryptlen; in sun4i_ss_cipher_poll()
186 unsigned int oleft = areq->cryptlen; in sun4i_ss_cipher_poll()
[all …]
/OK3568_Linux_fs/kernel/arch/arm64/crypto/
aes-glue.c
276 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
286 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
287 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
302 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
305 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); in cts_cbc_encrypt()
308 subreq.cryptlen); in cts_cbc_encrypt()
313 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
333 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
343 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
344 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
[all …]
/OK3568_Linux_fs/kernel/drivers/crypto/cavium/nitrox/
nitrox_aead.c
166 creq->gph.param0 = cpu_to_be16(rctx->cryptlen); in nitrox_set_creq()
167 creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen); in nitrox_set_creq()
228 rctx->cryptlen = areq->cryptlen; in nitrox_aes_gcm_enc()
230 rctx->srclen = areq->assoclen + areq->cryptlen; in nitrox_aes_gcm_enc()
262 rctx->cryptlen = areq->cryptlen - aead->authsize; in nitrox_aes_gcm_dec()
264 rctx->srclen = areq->cryptlen + areq->assoclen; in nitrox_aes_gcm_dec()
450 aead_rctx->cryptlen = areq->cryptlen; in nitrox_rfc4106_enc()
452 aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen; in nitrox_rfc4106_enc()
482 aead_rctx->cryptlen = areq->cryptlen - aead->authsize; in nitrox_rfc4106_dec()
485 areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen; in nitrox_rfc4106_dec()
/OK3568_Linux_fs/kernel/drivers/crypto/qce/
skcipher.c
78 rctx->cryptlen = req->cryptlen; in qce_skcipher_async_req_handle()
84 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
86 rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen); in qce_skcipher_async_req_handle()
109 sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, req->cryptlen); in qce_skcipher_async_req_handle()
151 ret = qce_start(async_req, tmpl->crypto_alg_type, req->cryptlen, 0); in qce_skcipher_async_req_handle()
241 req->cryptlen <= aes_sw_max_len) || in qce_skcipher_crypt()
242 (IS_XTS(rctx->flags) && req->cryptlen > QCE_SECTOR_SIZE && in qce_skcipher_crypt()
243 req->cryptlen % QCE_SECTOR_SIZE))) { in qce_skcipher_crypt()
250 req->dst, req->cryptlen, req->iv); in qce_skcipher_crypt()
/OK3568_Linux_fs/kernel/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-cipher.c
29 if (areq->cryptlen == 0 || areq->cryptlen % 16) in sun8i_ss_need_fallback()
88 areq->cryptlen, areq->iv); in sun8i_ss_cipher_fallback()
104 unsigned int len = areq->cryptlen; in sun8i_ss_setup_ivs()
113 offset = areq->cryptlen - ivsize; in sun8i_ss_setup_ivs()
179 areq->cryptlen, in sun8i_ss_cipher()
230 len = areq->cryptlen; in sun8i_ss_cipher()
240 areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo); in sun8i_ss_cipher()
252 len = areq->cryptlen; in sun8i_ss_cipher()
262 areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo); in sun8i_ss_cipher()
291 offset = areq->cryptlen - ivsize; in sun8i_ss_cipher()
/OK3568_Linux_fs/kernel/include/linux/
crypto.h
514 void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
515 void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
529 void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
530 void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
536 static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret) in crypto_stats_aead_encrypt() argument
538 static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret) in crypto_stats_aead_decrypt() argument
566 static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg … in crypto_stats_skcipher_encrypt() argument
568 static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg … in crypto_stats_skcipher_decrypt() argument
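
The crypto_stats_*() pairs above exist in two forms: real collectors when CONFIG_CRYPTO_STATS is set, and empty static inline stubs otherwise, so callers like crypto_aead_encrypt() need no #ifdefs. A sketch of that header pattern for one of the functions:

struct crypto_alg;

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_aead_encrypt(unsigned int cryptlen,
			       struct crypto_alg *alg, int ret);
#else
/* no-op stub keeps call sites identical in both configurations */
static inline void crypto_stats_aead_encrypt(unsigned int cryptlen,
					     struct crypto_alg *alg,
					     int ret)
{}
#endif
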
/OK3568_Linux_fs/kernel/drivers/crypto/rockchip/
rk_crypto_skcipher_utils.c
79 req->dst, req->cryptlen, req->iv); in rk_cipher_fallback()
336 alg_ctx->left_bytes = req->cryptlen; in rk_ablk_start()
337 alg_ctx->total = req->cryptlen; in rk_ablk_start()
340 alg_ctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in rk_ablk_start()
343 alg_ctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen); in rk_ablk_start()
359 if (!IS_ALIGNED(req->cryptlen, ctx->algs_ctx.chunk_size) && in rk_skcipher_handle_req()
390 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, req->iv); in rk_aead_fallback()
446 total = req->cryptlen + req->assoclen; in rk_aead_start()
450 alg_ctx->total = req->cryptlen - authsize; in rk_aead_start()
462 ctx->is_enc, authsize, req->cryptlen, alg_ctx->total, alg_ctx->assoclen); in rk_aead_start()
/OK3568_Linux_fs/kernel/drivers/crypto/virtio/
virtio_crypto_algs.c
359 src_nents = sg_nents_for_len(req->src, req->cryptlen); in __virtio_crypto_skcipher_do_req()
401 cpu_to_le32(req->cryptlen); in __virtio_crypto_skcipher_do_req()
410 dst_len = min_t(unsigned int, req->cryptlen, dst_len); in __virtio_crypto_skcipher_do_req()
412 req->cryptlen, dst_len); in __virtio_crypto_skcipher_do_req()
414 if (unlikely(req->cryptlen + dst_len + ivsize + in __virtio_crypto_skcipher_do_req()
443 req->cryptlen - AES_BLOCK_SIZE, in __virtio_crypto_skcipher_do_req()
493 if (!req->cryptlen) in virtio_crypto_skcipher_encrypt()
495 if (req->cryptlen % AES_BLOCK_SIZE) in virtio_crypto_skcipher_encrypt()
518 if (!req->cryptlen) in virtio_crypto_skcipher_decrypt()
520 if (req->cryptlen % AES_BLOCK_SIZE) in virtio_crypto_skcipher_decrypt()
[all …]
/OK3568_Linux_fs/kernel/drivers/crypto/amlogic/
amlogic-gxl-cipher.c
30 if (areq->cryptlen == 0) in meson_cipher_need_fallback()
75 areq->cryptlen, areq->iv); in meson_cipher_do_fallback()
108 areq->cryptlen, in meson_cipher()
131 if (ivsize > areq->cryptlen) { in meson_cipher()
132 dev_err(mc->dev, "invalid ivsize=%d vs len=%d\n", ivsize, areq->cryptlen); in meson_cipher()
144 offset = areq->cryptlen - ivsize; in meson_cipher()
205 len = areq->cryptlen; in meson_cipher()
250 areq->cryptlen - ivsize, in meson_cipher()
/OK3568_Linux_fs/kernel/drivers/crypto/xilinx/
zynqmp-aes-gcm.c
93 dma_size = req->cryptlen + ZYNQMP_AES_KEY_SIZE in zynqmp_aes_aead_cipher()
96 dma_size = req->cryptlen + GCM_AES_IV_SIZE; in zynqmp_aes_aead_cipher()
109 data_size = req->cryptlen; in zynqmp_aes_aead_cipher()
110 scatterwalk_map_and_copy(kbuf, req->src, 0, req->cryptlen, 0); in zynqmp_aes_aead_cipher()
191 req->cryptlen < ZYNQMP_AES_MIN_INPUT_BLK_SIZE) { in zynqmp_fallback_check()
194 if ((req->cryptlen % ZYNQMP_AES_WORD_LEN) != 0) in zynqmp_fallback_check()
198 req->cryptlen <= ZYNQMP_AES_AUTH_SIZE) { in zynqmp_fallback_check()
224 areq->cryptlen, areq->iv); in zynqmp_handle_aes_req()
