Lines matching refs:walk — uses of the local struct skcipher_walk `walk` in the CAST5 AVX glue code (arch/x86/crypto/cast5_avx_glue.c in the Linux kernel).
35 static inline bool cast5_fpu_begin(bool fpu_enabled, struct skcipher_walk *walk, in cast5_fpu_begin() argument
39 walk, fpu_enabled, nbytes); in cast5_fpu_begin()
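The two matches above come from the helper that lazily enables the FPU before the AVX routines run. A minimal sketch of how the full helper plausibly reads, assuming the glue_fpu_begin()/glue_fpu_end() wrappers and the CAST5_PARALLEL_BLOCKS constant that this kind of glue file normally uses:

static inline bool cast5_fpu_begin(bool fpu_enabled, struct skcipher_walk *walk,
                                   unsigned int nbytes)
{
        /* Only pay the kernel_fpu_begin() cost when there is enough data
         * in this walk chunk to feed the wide AVX path (assumed helper
         * from the x86 glue_helper code). */
        return glue_fpu_begin(CAST5_BLOCK_SIZE, CAST5_PARALLEL_BLOCKS,
                              walk, fpu_enabled, nbytes);
}

static inline void cast5_fpu_end(bool fpu_enabled)
{
        glue_fpu_end(fpu_enabled);
}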
52 struct skcipher_walk walk; in ecb_crypt() local
58 err = skcipher_walk_virt(&walk, req, false); in ecb_crypt()
60 while ((nbytes = walk.nbytes)) { in ecb_crypt()
61 u8 *wsrc = walk.src.virt.addr; in ecb_crypt()
62 u8 *wdst = walk.dst.virt.addr; in ecb_crypt()
64 fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes); in ecb_crypt()
93 err = skcipher_walk_done(&walk, nbytes); in ecb_crypt()
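From these fragments, ecb_crypt() follows the standard skcipher_walk loop: walk the request, enable the FPU per chunk, hand large runs to the 16-way AVX routine and the remainder to the scalar cipher. A hedged reconstruction; the cast5_ecb_enc_16way()/cast5_ecb_dec_16way() names, CAST5_PARALLEL_BLOCKS, and the crypto_skcipher_reqtfm()/crypto_skcipher_ctx() boilerplate are assumptions, not shown in the matches:

static int ecb_crypt(struct skcipher_request *req, bool enc)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
        const unsigned int bsize = CAST5_BLOCK_SIZE;
        bool fpu_enabled = false;
        struct skcipher_walk walk;
        unsigned int nbytes;
        void (*fn)(struct cast5_ctx *ctx, u8 *dst, const u8 *src);
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                u8 *wsrc = walk.src.virt.addr;
                u8 *wdst = walk.dst.virt.addr;

                fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);

                /* Multi-block batch through the AVX implementation (assumed
                 * 16-way asm entry points). */
                if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {
                        fn = enc ? cast5_ecb_enc_16way : cast5_ecb_dec_16way;
                        do {
                                fn(ctx, wdst, wsrc);
                                wsrc += bsize * CAST5_PARALLEL_BLOCKS;
                                wdst += bsize * CAST5_PARALLEL_BLOCKS;
                                nbytes -= bsize * CAST5_PARALLEL_BLOCKS;
                        } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS);

                        if (nbytes < bsize)
                                goto done;
                }

                /* Remaining whole blocks, one at a time with the generic
                 * CAST5 routines. */
                fn = enc ? __cast5_encrypt : __cast5_decrypt;
                do {
                        fn(ctx, wdst, wsrc);
                        wsrc += bsize;
                        wdst += bsize;
                        nbytes -= bsize;
                } while (nbytes >= bsize);

done:
                err = skcipher_walk_done(&walk, nbytes);
        }

        cast5_fpu_end(fpu_enabled);
        return err;
}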
115 struct skcipher_walk walk; in cbc_encrypt() local
119 err = skcipher_walk_virt(&walk, req, false); in cbc_encrypt()
121 while ((nbytes = walk.nbytes)) { in cbc_encrypt()
122 u64 *src = (u64 *)walk.src.virt.addr; in cbc_encrypt()
123 u64 *dst = (u64 *)walk.dst.virt.addr; in cbc_encrypt()
124 u64 *iv = (u64 *)walk.iv; in cbc_encrypt()
135 *(u64 *)walk.iv = *iv; in cbc_encrypt()
136 err = skcipher_walk_done(&walk, nbytes); in cbc_encrypt()
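CBC encryption is inherently serial, so the matches show a plain C loop over 64-bit words (CAST5 has an 8-byte block, hence the u64 casts): XOR each plaintext block with the previous ciphertext, encrypt it with the generic routine, and write the last ciphertext block back to walk.iv for the next chunk. A sketch assuming the usual reqtfm/ctx boilerplate and the exported __cast5_encrypt():

static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
        const unsigned int bsize = CAST5_BLOCK_SIZE;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                u64 *src = (u64 *)walk.src.virt.addr;
                u64 *dst = (u64 *)walk.dst.virt.addr;
                u64 *iv = (u64 *)walk.iv;

                do {
                        *dst = *src ^ *iv;      /* chain with previous block */
                        __cast5_encrypt(ctx, (u8 *)dst, (u8 *)dst);
                        iv = dst;
                        src++;
                        dst++;
                        nbytes -= bsize;
                } while (nbytes >= bsize);

                *(u64 *)walk.iv = *iv;          /* save IV for the next chunk */
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}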
143 struct skcipher_walk *walk) in __cbc_decrypt() argument
146 unsigned int nbytes = walk->nbytes; in __cbc_decrypt()
147 u64 *src = (u64 *)walk->src.virt.addr; in __cbc_decrypt()
148 u64 *dst = (u64 *)walk->dst.virt.addr; in __cbc_decrypt()
190 *dst ^= *(u64 *)walk->iv; in __cbc_decrypt()
191 *(u64 *)walk->iv = last_iv; in __cbc_decrypt()
201 struct skcipher_walk walk; in cbc_decrypt() local
205 err = skcipher_walk_virt(&walk, req, false); in cbc_decrypt()
207 while ((nbytes = walk.nbytes)) { in cbc_decrypt()
208 fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes); in cbc_decrypt()
209 nbytes = __cbc_decrypt(ctx, &walk); in cbc_decrypt()
210 err = skcipher_walk_done(&walk, nbytes); in cbc_decrypt()
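The outer cbc_decrypt() is just the walk loop around that helper: open the FPU section per chunk, let __cbc_decrypt() consume whole blocks, and report the remainder to skcipher_walk_done(). A short sketch, again assuming the reqtfm/ctx boilerplate and cast5_fpu_end():

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
        bool fpu_enabled = false;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
                nbytes = __cbc_decrypt(ctx, &walk);
                err = skcipher_walk_done(&walk, nbytes);
        }

        cast5_fpu_end(fpu_enabled);
        return err;
}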
217 static void ctr_crypt_final(struct skcipher_walk *walk, struct cast5_ctx *ctx) in ctr_crypt_final() argument
219 u8 *ctrblk = walk->iv; in ctr_crypt_final()
221 u8 *src = walk->src.virt.addr; in ctr_crypt_final()
222 u8 *dst = walk->dst.virt.addr; in ctr_crypt_final()
223 unsigned int nbytes = walk->nbytes; in ctr_crypt_final()
231 static unsigned int __ctr_crypt(struct skcipher_walk *walk, in __ctr_crypt() argument
235 unsigned int nbytes = walk->nbytes; in __ctr_crypt()
236 u64 *src = (u64 *)walk->src.virt.addr; in __ctr_crypt()
237 u64 *dst = (u64 *)walk->dst.virt.addr; in __ctr_crypt()
243 (__be64 *)walk->iv); in __ctr_crypt()
261 ctrblk = *(u64 *)walk->iv; in __ctr_crypt()
262 be64_add_cpu((__be64 *)walk->iv, 1); in __ctr_crypt()
281 struct skcipher_walk walk; in ctr_crypt() local
285 err = skcipher_walk_virt(&walk, req, false); in ctr_crypt()
287 while ((nbytes = walk.nbytes) >= CAST5_BLOCK_SIZE) { in ctr_crypt()
288 fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes); in ctr_crypt()
289 nbytes = __ctr_crypt(&walk, ctx); in ctr_crypt()
290 err = skcipher_walk_done(&walk, nbytes); in ctr_crypt()
295 if (walk.nbytes) { in ctr_crypt()
296 ctr_crypt_final(&walk, ctx); in ctr_crypt()
297 err = skcipher_walk_done(&walk, 0); in ctr_crypt()