Lines Matching refs:walk

310 	struct scatter_walk walk;  in gcm_calculate_auth_mac()  local
314 scatterwalk_start(&walk, req->src); in gcm_calculate_auth_mac()
317 u32 n = scatterwalk_clamp(&walk, len); in gcm_calculate_auth_mac()
321 scatterwalk_start(&walk, sg_next(walk.sg)); in gcm_calculate_auth_mac()
322 n = scatterwalk_clamp(&walk, len); in gcm_calculate_auth_mac()
324 p = scatterwalk_map(&walk); in gcm_calculate_auth_mac()
330 scatterwalk_advance(&walk, n); in gcm_calculate_auth_mac()
331 scatterwalk_done(&walk, 0, len); in gcm_calculate_auth_mac()
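
The gcm_calculate_auth_mac() hits above are the usual scatterwalk pattern for feeding the associated data into the MAC: clamp to what is left in the current scatterlist entry, map it, process the chunk, unmap, then advance and tell the walk whether more data is pending. A minimal sketch of that loop, assuming the scatterwalk interface exactly as it appears in these hits; ghash_update_chunk() is a hypothetical stand-in for the driver's real MAC update:

#include <crypto/aead.h>
#include <crypto/scatterwalk.h>

/* hypothetical: fold one mapped chunk into the GHASH/MAC state */
void ghash_update_chunk(const u8 *p, unsigned int n);

static void mac_assoc_data(struct aead_request *req)
{
        struct scatter_walk walk;
        u32 len = req->assoclen;

        scatterwalk_start(&walk, req->src);

        do {
                u32 n = scatterwalk_clamp(&walk, len);
                u8 *p;

                if (!n) {
                        /* current scatterlist entry exhausted: step to the next one */
                        scatterwalk_start(&walk, sg_next(walk.sg));
                        n = scatterwalk_clamp(&walk, len);
                }

                p = scatterwalk_map(&walk);
                ghash_update_chunk(p, n);               /* hypothetical */
                scatterwalk_unmap(p);

                len -= n;
                scatterwalk_advance(&walk, n);
                scatterwalk_done(&walk, 0, len);        /* 0: nothing written; len: more to come? */
        } while (len);
}
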
346 struct skcipher_walk walk; in gcm_encrypt() local
363 err = skcipher_walk_aead_encrypt(&walk, req, false); in gcm_encrypt()
367 const u8 *src = walk.src.virt.addr; in gcm_encrypt()
368 u8 *dst = walk.dst.virt.addr; in gcm_encrypt()
369 int nbytes = walk.nbytes; in gcm_encrypt()
376 } else if (nbytes < walk.total) { in gcm_encrypt()
391 memcpy(walk.dst.virt.addr, in gcm_encrypt()
394 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in gcm_encrypt()
395 } while (walk.nbytes); in gcm_encrypt()
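
The first gcm_encrypt() loop (the do/while ending at the hit above) shows how a skcipher_walk chunk is consumed when the cipher wants to process as much as it can per call: a short final fragment is bounced through a stack buffer so it can still be handled in one pass, an intermediate chunk is trimmed down to whole AES blocks, and skcipher_walk_done() is told how many bytes of the chunk were left over (walk.nbytes - nbytes). A simplified sketch of that control flow, with process_span() as a hypothetical stand-in for the driver's fused encrypt-and-GHASH primitive:

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/string.h>

/* hypothetical: encrypt nbytes from src to dst and update the GHASH state */
void process_span(u8 *dst, const u8 *src, int nbytes);

static int encrypt_walk_sketch(struct aead_request *req)
{
        struct skcipher_walk walk;
        u8 buf[AES_BLOCK_SIZE];
        int err;

        err = skcipher_walk_aead_encrypt(&walk, req, false);

        do {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                int nbytes = walk.nbytes;

                if (!nbytes)
                        break;                  /* empty request: nothing to walk */

                if (nbytes < AES_BLOCK_SIZE) {
                        /* short final fragment: bounce it through a stack buffer */
                        src = dst = memcpy(buf, src, nbytes);
                } else if (nbytes < walk.total) {
                        /* not the last chunk: only consume whole blocks this round */
                        nbytes &= ~(AES_BLOCK_SIZE - 1);
                }

                process_span(dst, src, nbytes);         /* hypothetical */

                if (nbytes < AES_BLOCK_SIZE)
                        memcpy(walk.dst.virt.addr, buf, nbytes);

                /* report how much of this chunk was left unprocessed */
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        } while (walk.nbytes);

        return err;
}
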
397 while (walk.nbytes >= AES_BLOCK_SIZE) { in gcm_encrypt()
398 int blocks = walk.nbytes / AES_BLOCK_SIZE; in gcm_encrypt()
399 const u8 *src = walk.src.virt.addr; in gcm_encrypt()
400 u8 *dst = walk.dst.virt.addr; in gcm_encrypt()
412 ghash_do_update(blocks, dg, walk.dst.virt.addr, in gcm_encrypt()
415 err = skcipher_walk_done(&walk, in gcm_encrypt()
416 walk.nbytes % AES_BLOCK_SIZE); in gcm_encrypt()
420 if (walk.nbytes) { in gcm_encrypt()
423 crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, in gcm_encrypt()
424 buf, walk.nbytes); in gcm_encrypt()
426 memcpy(buf, walk.dst.virt.addr, walk.nbytes); in gcm_encrypt()
427 memset(buf + walk.nbytes, 0, sizeof(buf) - walk.nbytes); in gcm_encrypt()
432 walk.nbytes ? buf : NULL); in gcm_encrypt()
434 if (walk.nbytes) in gcm_encrypt()
435 err = skcipher_walk_done(&walk, 0); in gcm_encrypt()
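
The second gcm_encrypt() loop walks the same data one AES block at a time: each block is CTR-encrypted into walk.dst, GHASH is then updated over the ciphertext just written, and the sub-block remainder is handed back to skcipher_walk_done() until only the partial tail remains, which is encrypted and zero-padded for the closing GHASH block. A sketch of that shape; next_keystream_block() and ghash_blocks() are hypothetical stand-ins for the driver's AES and ghash_do_update() steps, while the skcipher_walk calls and crypto_xor_cpy() are the ones visible in the hits:

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/string.h>

/* hypothetical: emit the next AES-CTR keystream block / update GHASH over blocks */
void next_keystream_block(u8 ks[AES_BLOCK_SIZE]);
void ghash_blocks(const u8 *data, int blocks);

static int encrypt_block_loop_sketch(struct aead_request *req)
{
        struct skcipher_walk walk;
        u8 ks[AES_BLOCK_SIZE], buf[AES_BLOCK_SIZE];
        int err;

        err = skcipher_walk_aead_encrypt(&walk, req, false);

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                int blocks = walk.nbytes / AES_BLOCK_SIZE;
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                int rem = blocks;

                do {
                        next_keystream_block(ks);                       /* hypothetical */
                        crypto_xor_cpy(dst, src, ks, AES_BLOCK_SIZE);   /* CTR-encrypt one block */
                        src += AES_BLOCK_SIZE;
                        dst += AES_BLOCK_SIZE;
                } while (--rem > 0);

                /* GHASH runs over the ciphertext just written to walk.dst */
                ghash_blocks(walk.dst.virt.addr, blocks);               /* hypothetical */

                /* keep the sub-block remainder in the walk for the final pass */
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }

        if (walk.nbytes) {
                /* partial tail: encrypt it with one keystream block, then keep a
                 * zero-padded copy of the ciphertext for the closing GHASH block */
                next_keystream_block(ks);                               /* hypothetical */
                crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr,
                               ks, walk.nbytes);

                memcpy(buf, walk.dst.virt.addr, walk.nbytes);
                memset(buf + walk.nbytes, 0, sizeof(buf) - walk.nbytes);
                ghash_blocks(buf, 1);                                   /* hypothetical */

                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}

skcipher_walk_done() takes the count of bytes not consumed, so passing walk.nbytes % AES_BLOCK_SIZE leaves any trailing partial block in the walk until the loop falls through to the tail handling.
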
460 struct skcipher_walk walk; in gcm_decrypt() local
482 err = skcipher_walk_aead_decrypt(&walk, req, false); in gcm_decrypt()
488 const u8 *src = walk.src.virt.addr; in gcm_decrypt()
489 u8 *dst = walk.dst.virt.addr; in gcm_decrypt()
490 int nbytes = walk.nbytes; in gcm_decrypt()
497 } else if (nbytes < walk.total) { in gcm_decrypt()
513 memcpy(walk.dst.virt.addr, in gcm_decrypt()
516 err = skcipher_walk_done(&walk, walk.nbytes - nbytes); in gcm_decrypt()
517 } while (walk.nbytes); in gcm_decrypt()
524 while (walk.nbytes >= AES_BLOCK_SIZE) { in gcm_decrypt()
525 int blocks = walk.nbytes / AES_BLOCK_SIZE; in gcm_decrypt()
526 const u8 *src = walk.src.virt.addr; in gcm_decrypt()
527 u8 *dst = walk.dst.virt.addr; in gcm_decrypt()
529 ghash_do_update(blocks, dg, walk.src.virt.addr, in gcm_decrypt()
541 err = skcipher_walk_done(&walk, in gcm_decrypt()
542 walk.nbytes % AES_BLOCK_SIZE); in gcm_decrypt()
546 if (walk.nbytes) { in gcm_decrypt()
547 memcpy(buf, walk.src.virt.addr, walk.nbytes); in gcm_decrypt()
548 memset(buf + walk.nbytes, 0, sizeof(buf) - walk.nbytes); in gcm_decrypt()
553 walk.nbytes ? buf : NULL); in gcm_decrypt()
555 if (walk.nbytes) { in gcm_decrypt()
558 crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, in gcm_decrypt()
559 buf, walk.nbytes); in gcm_decrypt()
561 err = skcipher_walk_done(&walk, 0); in gcm_decrypt()
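
gcm_decrypt() drives the walk the same way; the difference the hits above show is ordering: GHASH consumes walk.src (the ciphertext) before those blocks are decrypted, and the zero-padded tail copy is taken from the source before the keystream XOR writes the plaintext into walk.dst, so the tag is always computed over ciphertext in both directions. With the same hypothetical helpers, the decrypt counterpart of the block loop in the previous sketch would look roughly like:

        while (walk.nbytes >= AES_BLOCK_SIZE) {
                int blocks = walk.nbytes / AES_BLOCK_SIZE;
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                int rem = blocks;

                /* GHASH over the incoming ciphertext before it is consumed ... */
                ghash_blocks(src, blocks);                              /* hypothetical */

                /* ... then CTR-decrypt it block by block into walk.dst */
                do {
                        next_keystream_block(ks);                       /* hypothetical */
                        crypto_xor_cpy(dst, src, ks, AES_BLOCK_SIZE);
                        src += AES_BLOCK_SIZE;
                        dst += AES_BLOCK_SIZE;
                } while (--rem > 0);

                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }

The tail handling mirrors the encrypt sketch, with the memcpy into the padded GHASH buffer taken from walk.src instead of walk.dst and the keystream XOR done last to produce the plaintext tail.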