| /OK3568_Linux_fs/kernel/crypto/ |
| ctr.c |
    33   u8 *ctrblk = walk->iv;                              in crypto_ctr_crypt_final()   [local]
    40   crypto_cipher_encrypt_one(tfm, keystream, ctrblk);  in crypto_ctr_crypt_final()
    43   crypto_inc(ctrblk, bsize);                          in crypto_ctr_crypt_final()
    52   u8 *ctrblk = walk->iv;                              in crypto_ctr_crypt_segment() [local]
    59   fn(crypto_cipher_tfm(tfm), dst, ctrblk);            in crypto_ctr_crypt_segment()
    63   crypto_inc(ctrblk, bsize);                          in crypto_ctr_crypt_segment()
    80   u8 *ctrblk = walk->iv;                              in crypto_ctr_crypt_inplace() [local]
    87   fn(crypto_cipher_tfm(tfm), keystream, ctrblk);      in crypto_ctr_crypt_inplace()
    91   crypto_inc(ctrblk, bsize);                          in crypto_ctr_crypt_inplace()
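These ctr.c hits trace the core CTR discipline: walk->iv is used directly as the counter block, the block cipher encrypts it into a keystream block, the keystream is XORed over the data, and crypto_inc() advances the counter as a big-endian integer. Below is a minimal standalone sketch of that discipline, not the kernel code itself: block_encrypt is a hypothetical stand-in for crypto_cipher_encrypt_one(), and the 16-byte block size is an assumption.

    #include <stddef.h>
    #include <stdint.h>

    #define BLOCK_SIZE 16

    /* Big-endian increment of the counter block, mirroring what crypto_inc() does. */
    static void ctr_inc(uint8_t *ctrblk, size_t bsize)
    {
        for (size_t i = bsize; i-- > 0; )
            if (++ctrblk[i] != 0)       /* stop once a byte does not wrap around */
                break;
    }

    /* Handle the final (possibly partial) block: XOR with one keystream block. */
    static void ctr_final_block(void (*block_encrypt)(uint8_t out[BLOCK_SIZE],
                                                      const uint8_t in[BLOCK_SIZE]),
                                uint8_t *ctrblk, const uint8_t *src, uint8_t *dst,
                                size_t nbytes)
    {
        uint8_t keystream[BLOCK_SIZE];

        block_encrypt(keystream, ctrblk);          /* keystream = E_k(counter) */
        for (size_t i = 0; i < nbytes; i++)
            dst[i] = src[i] ^ keystream[i];        /* only nbytes of keystream used */
        ctr_inc(ctrblk, BLOCK_SIZE);               /* leave the counter advanced */
    }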
|
| /OK3568_Linux_fs/kernel/arch/x86/crypto/ |
| blowfish_glue.c |
    252  u8 *ctrblk = walk->iv;                              in ctr_crypt_final() [local]
    258  blowfish_enc_blk(ctx, keystream, ctrblk);           in ctr_crypt_final()
    261  crypto_inc(ctrblk, BF_BLOCK_SIZE);                  in ctr_crypt_final()
    270  u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);      in __ctr_crypt() [local]
    284  ctrblocks[0] = cpu_to_be64(ctrblk++);               in __ctr_crypt()
    285  ctrblocks[1] = cpu_to_be64(ctrblk++);               in __ctr_crypt()
    286  ctrblocks[2] = cpu_to_be64(ctrblk++);               in __ctr_crypt()
    287  ctrblocks[3] = cpu_to_be64(ctrblk++);               in __ctr_crypt()
    305  ctrblocks[0] = cpu_to_be64(ctrblk++);               in __ctr_crypt()
    314  *(__be64 *)walk->iv = cpu_to_be64(ctrblk);          in __ctr_crypt()
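blowfish_glue.c handles its 64-bit counter differently: __ctr_crypt() reads walk->iv once with be64_to_cpu(), keeps the counter as a plain u64 while it materialises batches of big-endian counter blocks with cpu_to_be64(ctrblk++) (four at a time in the fast path, one at a time in the tail), and writes the advanced value back into walk->iv at the end. A rough standalone sketch of that batching; the load/store helpers are portable stand-ins for be64_to_cpu()/cpu_to_be64(), and the batch width is whatever the caller asks for.

    #include <stdint.h>

    static uint64_t load_be64(const uint8_t *p)
    {
        uint64_t v = 0;
        for (int i = 0; i < 8; i++)
            v = (v << 8) | p[i];
        return v;
    }

    static void store_be64(uint8_t *p, uint64_t v)
    {
        for (int i = 7; i >= 0; i--) {
            p[i] = (uint8_t)v;
            v >>= 8;
        }
    }

    /* Emit n consecutive big-endian counter blocks and advance the IV past them. */
    static void fill_ctrblocks(uint8_t iv[8], uint8_t (*ctrblocks)[8], int n)
    {
        uint64_t ctrblk = load_be64(iv);            /* counter in host order */

        for (int i = 0; i < n; i++)
            store_be64(ctrblocks[i], ctrblk++);     /* consecutive counter values */

        store_be64(iv, ctrblk);                     /* write back for the next call */
    }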
|
| cast5_avx_glue.c |
    219  u8 *ctrblk = walk->iv;                              in ctr_crypt_final() [local]
    225  __cast5_encrypt(ctx, keystream, ctrblk);            in ctr_crypt_final()
    228  crypto_inc(ctrblk, CAST5_BLOCK_SIZE);               in ctr_crypt_final()
    256  u64 ctrblk;                                         in __ctr_crypt() [local]
    261  ctrblk = *(u64 *)walk->iv;                          in __ctr_crypt()
    264  __cast5_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in __ctr_crypt()
    265  *dst ^= ctrblk;                                     in __ctr_crypt()
|
| glue_helper.c |
    176  le128 ctrblk;                                       in glue_ctr_req_128bit() [local]
    181  be128_to_le128(&ctrblk, (be128 *)walk.iv);          in glue_ctr_req_128bit()
    194  &ctrblk);                                           in glue_ctr_req_128bit()
    204  le128_to_be128((be128 *)walk.iv, &ctrblk);          in glue_ctr_req_128bit()
    211  le128 ctrblk;                                       in glue_ctr_req_128bit() [local]
    214  be128_to_le128(&ctrblk, (be128 *)walk.iv);          in glue_ctr_req_128bit()
    218  &ctrblk);                                           in glue_ctr_req_128bit()
    220  le128_to_be128((be128 *)walk.iv, &ctrblk);          in glue_ctr_req_128bit()
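glue_ctr_req_128bit() keeps its 128-bit counter in a le128 for the whole skcipher walk: the big-endian IV is converted once with be128_to_le128(), the per-cipher CTR routines work on that value, and le128_to_be128() writes it back into walk.iv when the walk (or the final partial block) is done. A rough standalone sketch of the boundary conversions plus a 128-bit increment; struct u128ctr and the helper names are hypothetical, not the kernel's be128/le128 types.

    #include <stdint.h>

    /* Hypothetical host-order view of a 128-bit counter: high/low 64-bit halves. */
    struct u128ctr { uint64_t hi, lo; };

    static uint64_t get_be64(const uint8_t *p)
    {
        uint64_t v = 0;
        for (int i = 0; i < 8; i++)
            v = (v << 8) | p[i];
        return v;
    }

    static void put_be64(uint8_t *p, uint64_t v)
    {
        for (int i = 7; i >= 0; i--) {
            p[i] = (uint8_t)v;
            v >>= 8;
        }
    }

    /* Pull the 16-byte big-endian IV into host order, roughly what be128_to_le128() achieves. */
    static struct u128ctr ctr_from_iv(const uint8_t iv[16])
    {
        struct u128ctr c = { get_be64(iv), get_be64(iv + 8) };
        return c;
    }

    /* 128-bit increment: carry from the low half into the high half. */
    static void ctr128_inc(struct u128ctr *c)
    {
        if (++c->lo == 0)
            c->hi++;
    }

    /* Write the counter back into the IV, the le128_to_be128() step at walk end. */
    static void ctr_to_iv(uint8_t iv[16], const struct u128ctr *c)
    {
        put_be64(iv, c->hi);
        put_be64(iv + 8, c->lo);
    }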
|
| des3_ede_glue.c |
    259  u8 *ctrblk = walk->iv;                              in ctr_crypt_final() [local]
    265  des3_ede_enc_blk(ctx, keystream, ctrblk);           in ctr_crypt_final()
    268  crypto_inc(ctrblk, DES3_EDE_BLOCK_SIZE);            in ctr_crypt_final()
    278  u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);      in __ctr_crypt() [local]
    285  ctrblocks[0] = cpu_to_be64(ctrblk++);               in __ctr_crypt()
    286  ctrblocks[1] = cpu_to_be64(ctrblk++);               in __ctr_crypt()
    287  ctrblocks[2] = cpu_to_be64(ctrblk++);               in __ctr_crypt()
    306  ctrblocks[0] = cpu_to_be64(ctrblk++);               in __ctr_crypt()
    317  *(__be64 *)walk->iv = cpu_to_be64(ctrblk);          in __ctr_crypt()
|
| serpent_sse2_glue.c |
    52   be128 ctrblk;                                           in serpent_crypt_ctr() [local]
    56   le128_to_be128(&ctrblk, iv);                            in serpent_crypt_ctr()
    59   __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);   in serpent_crypt_ctr()
    60   u128_xor(dst, src, (u128 *)&ctrblk);                    in serpent_crypt_ctr()
|
| twofish_glue_3way.c |
    57   be128 ctrblk;                                           in twofish_enc_blk_ctr() [local]
    64   le128_to_be128(&ctrblk, iv);                            in twofish_enc_blk_ctr()
    67   twofish_enc_blk(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);     in twofish_enc_blk_ctr()
    68   u128_xor(dst, dst, (u128 *)&ctrblk);                    in twofish_enc_blk_ctr()
|
| cast6_avx_glue.c |
    53   be128 ctrblk;                                           in cast6_crypt_ctr() [local]
    57   le128_to_be128(&ctrblk, iv);                            in cast6_crypt_ctr()
    60   __cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);     in cast6_crypt_ctr()
    61   u128_xor(dst, src, (u128 *)&ctrblk);                    in cast6_crypt_ctr()
|
| serpent_avx_glue.c |
    49   be128 ctrblk;                                           in __serpent_crypt_ctr() [local]
    53   le128_to_be128(&ctrblk, iv);                            in __serpent_crypt_ctr()
    56   __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);   in __serpent_crypt_ctr()
    57   u128_xor(dst, src, (u128 *)&ctrblk);                    in __serpent_crypt_ctr()
|
| camellia_glue.c |
    1279 be128 ctrblk;                                           in camellia_crypt_ctr() [local]
    1286 le128_to_be128(&ctrblk, iv);                            in camellia_crypt_ctr()
    1289 camellia_enc_blk_xor(ctx, (u8 *)dst, (u8 *)&ctrblk);    in camellia_crypt_ctr()
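The serpent, twofish, cast6 and camellia helpers above all have the same per-block shape: the live counter arrives as le128 *iv, le128_to_be128() builds a temporary big-endian ctrblk from it, that copy (never the counter itself) is encrypted in place, and the result is XORed with the data block via u128_xor() (camellia fuses the XOR into camellia_enc_blk_xor()). A self-contained sketch of that shape; toy_encrypt is a placeholder permutation, not any of the real ciphers.

    #include <stdint.h>

    /* Placeholder single-block "cipher" for the sketch only, NOT a real cipher. */
    static void toy_encrypt(uint8_t blk[16], const uint8_t key[16])
    {
        for (int i = 0; i < 16; i++)
            blk[i] ^= key[i];
    }

    /*
     * One CTR block: serialize the live counter into a big-endian scratch block
     * (the le128_to_be128() step), encrypt the copy rather than the counter, then
     * XOR the keystream with the source block (the u128_xor() step).
     */
    static void crypt_ctr_block(const uint8_t key[16],
                                uint64_t ctr_hi, uint64_t ctr_lo,
                                uint8_t dst[16], const uint8_t src[16])
    {
        uint8_t ctrblk[16];

        for (int i = 0; i < 8; i++) {              /* big-endian image of the counter */
            ctrblk[i]     = (uint8_t)(ctr_hi >> (56 - 8 * i));
            ctrblk[8 + i] = (uint8_t)(ctr_lo >> (56 - 8 * i));
        }
        toy_encrypt(ctrblk, key);                  /* keystream = E_k(counter) */
        for (int i = 0; i < 16; i++)
            dst[i] = src[i] ^ ctrblk[i];           /* dst = src ^ keystream */
    }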
|
| aesni-intel_glue.c |
    464  u8 *ctrblk = walk->iv;                              in ctr_crypt_final() [local]
    470  aesni_enc(ctx, keystream, ctrblk);                  in ctr_crypt_final()
    473  crypto_inc(ctrblk, AES_BLOCK_SIZE);                 in ctr_crypt_final()
|
| /OK3568_Linux_fs/kernel/arch/s390/crypto/ |
| des_s390.c |
    25   static u8 *ctrblk;                                  [variable]
    333  n = __ctrblk_init(ctrblk, walk.iv, nbytes);         in ctr_desall_crypt()
    334  ctrptr = (n > DES_BLOCK_SIZE) ? ctrblk : walk.iv;   in ctr_desall_crypt()
    337  if (ctrptr == ctrblk)                               in ctr_desall_crypt()
    429  if (ctrblk)                                         in des_s390_exit()
    430  free_page((unsigned long) ctrblk);                  in des_s390_exit()
    471  ctrblk = (u8 *) __get_free_page(GFP_KERNEL);        in des_s390_init()
    472  if (!ctrblk) {                                      in des_s390_init()
|
| paes_s390.c |
    39   static u8 *ctrblk;                                  [variable]
    651  n = __ctrblk_init(ctrblk, walk.iv, nbytes);         in ctr_paes_crypt()
    652  ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;   in ctr_paes_crypt()
    656  if (ctrptr == ctrblk)                               in ctr_paes_crypt()
    729  if (ctrblk)                                         in paes_s390_fini()
    730  free_page((unsigned long) ctrblk);                  in paes_s390_fini()
    768  ctrblk = (u8 *) __get_free_page(GFP_KERNEL);        in paes_s390_init()
    769  if (!ctrblk) {                                      in paes_s390_init()
|
| aes_s390.c |
    37   static u8 *ctrblk;                                  [variable]
    588  n = __ctrblk_init(ctrblk, walk.iv, nbytes);         in ctr_aes_crypt()
    589  ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;   in ctr_aes_crypt()
    592  if (ctrptr == ctrblk)                               in ctr_aes_crypt()
    980  if (ctrblk)                                         in aes_s390_fini()
    981  free_page((unsigned long) ctrblk);                  in aes_s390_fini()
    1027 ctrblk = (u8 *) __get_free_page(GFP_KERNEL);        in aes_s390_init()
    1028 if (!ctrblk) {                                      in aes_s390_init()
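The three s390 drivers above share one arrangement around the file-scope ctrblk pointer: a whole page is allocated at module init with __get_free_page() and freed on exit, __ctrblk_init() expands walk.iv into a run of consecutive counter blocks inside that page so the CPACF instruction can consume many blocks per call, and ctrptr falls back to walk.iv itself when no more than one block is left. Below is a sketch of the expansion step only, in the spirit of __ctrblk_init(); the buffer-size handling and the 16-byte big-endian increment are assumptions, not the s390 code.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define BLOCK_SIZE 16

    /* Big-endian increment of one counter block (crypto_inc()-style). */
    static void ctr_inc(uint8_t blk[BLOCK_SIZE])
    {
        for (int i = BLOCK_SIZE - 1; i >= 0; i--)
            if (++blk[i] != 0)
                break;
    }

    /*
     * Expand the current IV into consecutive counter blocks in a scratch buffer
     * and leave the IV holding the next unused counter value.  Returns how many
     * bytes of counter material were written.
     */
    static size_t ctrblk_fill(uint8_t *buf, size_t bufsize,
                              uint8_t iv[BLOCK_SIZE], size_t nbytes)
    {
        size_t n = 0;

        while (n + BLOCK_SIZE <= bufsize && n + BLOCK_SIZE <= nbytes) {
            memcpy(buf + n, iv, BLOCK_SIZE);   /* emit the current counter block */
            ctr_inc(iv);                       /* advance the live counter */
            n += BLOCK_SIZE;
        }
        return n;
    }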
|
| /OK3568_Linux_fs/kernel/drivers/crypto/vmx/ |
| aes_ctr.c |
    72   u8 *ctrblk = walk->iv;                              in p8_aes_ctr_final() [local]
    81   aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);   in p8_aes_ctr_final()
    87   crypto_inc(ctrblk, AES_BLOCK_SIZE);                 in p8_aes_ctr_final()
|
| /OK3568_Linux_fs/kernel/arch/sparc/crypto/ |
| aes_glue.c |
    322  u8 *ctrblk = walk->iv;                                     in ctr_crypt_final() [local]
    328  ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,   in ctr_crypt_final()
    331  crypto_inc(ctrblk, AES_BLOCK_SIZE);                        in ctr_crypt_final()
|