Lines matching refs:dmreq (drivers/md/dm-crypt.c; each entry gives the source line number, the matching code, and the enclosing function)

99 			 struct dm_crypt_request *dmreq);
101 struct dm_crypt_request *dmreq);
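The two prototypes above (source lines 99 and 101) are the generator and post hooks of dm-crypt's IV-operations table. A minimal sketch of that table, assuming the field names used in mainline dm-crypt.c:

    struct crypt_iv_operations {
            int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
                       const char *opts);
            void (*dtr)(struct crypt_config *cc);
            int (*init)(struct crypt_config *cc);
            int (*wipe)(struct crypt_config *cc);
            /* fill *iv before en/decryption of one sector */
            int (*generator)(struct crypt_config *cc, u8 *iv,
                             struct dm_crypt_request *dmreq);
            /* optional fixup after the cipher ran (lmk, tcw, elephant) */
            int (*post)(struct crypt_config *cc, u8 *iv,
                        struct dm_crypt_request *dmreq);
    };
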
310 struct dm_crypt_request *dmreq) in crypt_iv_plain_gen() argument
313 *(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff); in crypt_iv_plain_gen()
319 struct dm_crypt_request *dmreq) in crypt_iv_plain64_gen() argument
322 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_plain64_gen()
328 struct dm_crypt_request *dmreq) in crypt_iv_plain64be_gen() argument
332 *(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector); in crypt_iv_plain64be_gen()
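The three "plain" generators (source lines 310-332) differ only in the width and endianness of the stored sector number. A sketch of the full bodies, assuming each zero-fills the IV first as mainline does:

    static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv,
                                  struct dm_crypt_request *dmreq)
    {
            memset(iv, 0, cc->iv_size);
            /* 32-bit LE sector: wraps on large devices (legacy "plain") */
            *(__le32 *)iv = cpu_to_le32(dmreq->iv_sector & 0xffffffff);
            return 0;
    }

    static int crypt_iv_plain64_gen(struct crypt_config *cc, u8 *iv,
                                    struct dm_crypt_request *dmreq)
    {
            memset(iv, 0, cc->iv_size);
            /* full 64-bit LE sector */
            *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector);
            return 0;
    }

    static int crypt_iv_plain64be_gen(struct crypt_config *cc, u8 *iv,
                                      struct dm_crypt_request *dmreq)
    {
            memset(iv, 0, cc->iv_size);
            /* 64-bit BE sector stored in the *last* 8 bytes of the IV */
            *(__be64 *)&iv[cc->iv_size - sizeof(u64)] =
                    cpu_to_be64(dmreq->iv_sector);
            return 0;
    }
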
338 struct dm_crypt_request *dmreq) in crypt_iv_essiv_gen() argument
345 *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector); in crypt_iv_essiv_gen()
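Note that the essiv generator body at source line 345 is identical to plain64: in current kernels the actual encryption of the sector number under the hashed volume key is delegated to the crypto API's essiv() template, so the generator only stores the plain sector. A sketch under that assumption:

    static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv,
                                  struct dm_crypt_request *dmreq)
    {
            /* the wrapping "essiv(...)" tfm encrypts this with the
             * hashed volume key before it reaches the data cipher */
            memset(iv, 0, cc->iv_size);
            *(__le64 *)iv = cpu_to_le64(dmreq->iv_sector);
            return 0;
    }
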
385 struct dm_crypt_request *dmreq) in crypt_iv_benbi_gen() argument
391 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
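benbi stores a big-endian, 1-based narrow-block number in the tail of the IV; benbi.shift converts the 512-byte sector into the cipher's block count. A sketch assuming the put_unaligned() store used in mainline:

    static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv,
                                  struct dm_crypt_request *dmreq)
    {
            __be64 val;

            memset(iv, 0, cc->iv_size - sizeof(u64)); /* tail is set below */

            val = cpu_to_be64(((u64)dmreq->iv_sector
                               << cc->iv_gen_private.benbi.shift) + 1);
            put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64)));

            return 0;
    }
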
398 struct dm_crypt_request *dmreq) in crypt_iv_null_gen() argument
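The null generator is the degenerate case; presumably just:

    static int crypt_iv_null_gen(struct crypt_config *cc, u8 *iv,
                                 struct dm_crypt_request *dmreq)
    {
            memset(iv, 0, cc->iv_size);
            return 0;
    }
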
474 struct dm_crypt_request *dmreq, in crypt_iv_lmk_one() argument
501 buf[0] = cpu_to_le32(dmreq->iv_sector & 0xFFFFFFFF); in crypt_iv_lmk_one()
502 buf[1] = cpu_to_le32((((u64)dmreq->iv_sector >> 32) & 0x00FFFFFF) | 0x80000000); in crypt_iv_lmk_one()
522 struct dm_crypt_request *dmreq) in crypt_iv_lmk_gen() argument
528 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_lmk_gen()
529 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_lmk_gen()
531 r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset); in crypt_iv_lmk_gen()
540 struct dm_crypt_request *dmreq) in crypt_iv_lmk_post() argument
546 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) in crypt_iv_lmk_post()
549 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_lmk_post()
551 r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset); in crypt_iv_lmk_post()
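The lmk references (source lines 474-551) show the usual generator/post split for this loop-AES-compatible mode: on WRITE the IV is hashed out of the plaintext sector (crypt_iv_lmk_one() runs MD5 over key material, the sector number, and the data) before encryption; on READ the post hook recomputes it from the decrypted data. A sketch of the generator, assuming the kmap_local_page() mapping used in current mainline:

    static int crypt_iv_lmk_gen(struct crypt_config *cc, u8 *iv,
                                struct dm_crypt_request *dmreq)
    {
            struct scatterlist *sg;
            u8 *src;
            int r = 0;

            if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) {
                    sg = crypt_get_sg_data(cc, dmreq->sg_in);
                    src = kmap_local_page(sg_page(sg));
                    r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset);
                    kunmap_local(src);
            } else
                    memset(iv, 0, cc->iv_size);

            return r;
    }
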
631 struct dm_crypt_request *dmreq, in crypt_iv_tcw_whitening() argument
635 __le64 sector = cpu_to_le64(dmreq->iv_sector); in crypt_iv_tcw_whitening()
669 struct dm_crypt_request *dmreq) in crypt_iv_tcw_gen() argument
673 __le64 sector = cpu_to_le64(dmreq->iv_sector); in crypt_iv_tcw_gen()
678 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { in crypt_iv_tcw_gen()
679 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_tcw_gen()
681 r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset); in crypt_iv_tcw_gen()
695 struct dm_crypt_request *dmreq) in crypt_iv_tcw_post() argument
701 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) in crypt_iv_tcw_post()
705 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_tcw_post()
707 r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset); in crypt_iv_tcw_post()
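tcw (TCRYPT whitening, source lines 631-707) inverts the lmk direction test: the generator strips the whitening from ciphertext on READ before decryption, and the post hook re-applies it on WRITE after encryption. The IV itself is the little-endian sector XORed into a keyed seed. A sketch of the generator, assuming mainline's crypto_xor_cpy() helper:

    static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv,
                                struct dm_crypt_request *dmreq)
    {
            struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw;
            __le64 sector = cpu_to_le64(dmreq->iv_sector);
            struct scatterlist *sg;
            u8 *src;
            int r = 0;

            /* remove whitening from ciphertext before decryption */
            if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) {
                    sg = crypt_get_sg_data(cc, dmreq->sg_in);
                    src = kmap_local_page(sg_page(sg));
                    r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset);
                    kunmap_local(src);
            }

            /* IV = iv_seed XOR sector number, repeated across iv_size */
            crypto_xor_cpy(iv, tcw->iv_seed, (u8 *)&sector, 8);
            if (cc->iv_size > 8)
                    crypto_xor_cpy(&iv[8], tcw->iv_seed + 8, (u8 *)&sector,
                                   cc->iv_size - 8);

            return r;
    }
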
714 struct dm_crypt_request *dmreq) in crypt_iv_random_gen() argument
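crypt_iv_random_gen never reads dmreq at all; it backs the "random" IV mode used with authenticated encryption, where the IV is persisted in the integrity metadata instead of being recomputable. Presumably just:

    static int crypt_iv_random_gen(struct crypt_config *cc, u8 *iv,
                                   struct dm_crypt_request *dmreq)
    {
            /* used with AEAD modes; the IV is stored as metadata */
            get_random_bytes(iv, cc->iv_size);
            return 0;
    }
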
739 struct dm_crypt_request *dmreq) in crypt_iv_eboiv_gen() argument
752 *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_eboiv_gen()
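eboiv ("encrypted byte-offset IV", the BitLocker-compatible scheme) encrypts the little-endian byte offset of the sector under the volume key; source line 752 shows the offset computation. A sketch close to the earlier mainline version (current kernels add fallback allocation logic): CBC-encrypting one zero block with the offset as IV yields E_K(offset), which becomes the data IV. any_tfm() is dm-crypt's accessor for the first skcipher tfm.

    static int crypt_iv_eboiv_gen(struct crypt_config *cc, u8 *iv,
                                  struct dm_crypt_request *dmreq)
    {
            struct skcipher_request *req;
            struct scatterlist src, dst;
            u8 buf[MAX_CIPHER_BLOCKSIZE] __aligned(__alignof__(__le64));
            int err;

            req = skcipher_request_alloc(any_tfm(cc), GFP_NOIO);
            if (!req)
                    return -ENOMEM;

            memset(buf, 0, cc->iv_size);
            *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size);

            /* E_K(offset): CBC over a zero block with IV = offset */
            sg_init_one(&src, page_address(ZERO_PAGE(0)), cc->iv_size);
            sg_init_one(&dst, iv, cc->iv_size);
            skcipher_request_set_crypt(req, &src, &dst, cc->iv_size, buf);
            skcipher_request_set_callback(req, 0, NULL, NULL);
            err = crypto_skcipher_encrypt(req);
            skcipher_request_free(req);

            return err;
    }
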
936 static int crypt_iv_elephant(struct crypt_config *cc, struct dm_crypt_request *dmreq) in crypt_iv_elephant() argument
954 *(__le64 *)es = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_elephant()
972 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_elephant()
977 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_elephant()
978 sg2 = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_elephant()
984 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) { in crypt_iv_elephant()
994 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_elephant()
1010 struct dm_crypt_request *dmreq) in crypt_iv_elephant_gen() argument
1014 if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) { in crypt_iv_elephant_gen()
1015 r = crypt_iv_elephant(cc, dmreq); in crypt_iv_elephant_gen()
1020 return crypt_iv_eboiv_gen(cc, iv, dmreq); in crypt_iv_elephant_gen()
1024 struct dm_crypt_request *dmreq) in crypt_iv_elephant_post() argument
1026 if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) in crypt_iv_elephant_post()
1027 return crypt_iv_elephant(cc, dmreq); in crypt_iv_elephant_post()
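crypt_iv_elephant() implements the Elephant diffuser for BitLocker compatibility: it must run over the plaintext before encryption on WRITE and again after decryption on READ, while the IV proper falls back to eboiv. The dispatch around it, reconstructed almost verbatim from the fragments at source lines 1010-1027:

    static int crypt_iv_elephant_gen(struct crypt_config *cc, u8 *iv,
                                     struct dm_crypt_request *dmreq)
    {
            int r;

            if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) {
                    r = crypt_iv_elephant(cc, dmreq); /* diffuse plaintext */
                    if (r)
                            return r;
            }

            return crypt_iv_eboiv_gen(cc, iv, dmreq);
    }

    static int crypt_iv_elephant_post(struct crypt_config *cc, u8 *iv,
                                      struct dm_crypt_request *dmreq)
    {
            if (bio_data_dir(dmreq->ctx->bio_in) != WRITE)
                    return crypt_iv_elephant(cc, dmreq); /* undo diffusion */

            return 0;
    }
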
1228 static void *req_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq) in req_of_dmreq() argument
1230 return (void *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
1234 struct dm_crypt_request *dmreq) in iv_of_dmreq() argument
1237 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
1240 return (u8 *)ALIGN((unsigned long)(dmreq + 1), in iv_of_dmreq()
1245 struct dm_crypt_request *dmreq) in org_iv_of_dmreq() argument
1247 return iv_of_dmreq(cc, dmreq) + cc->iv_size; in org_iv_of_dmreq()
1251 struct dm_crypt_request *dmreq) in org_sector_of_dmreq() argument
1253 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc->iv_size; in org_sector_of_dmreq()
1258 struct dm_crypt_request *dmreq) in org_tag_of_dmreq() argument
1260 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + in org_tag_of_dmreq()
1266 struct dm_crypt_request *dmreq) in tag_from_dmreq() argument
1268 struct convert_context *ctx = dmreq->ctx; in tag_from_dmreq()
1271 return &io->integrity_metadata[*org_tag_of_dmreq(cc, dmreq) * in tag_from_dmreq()
1276 struct dm_crypt_request *dmreq) in iv_tag_from_dmreq() argument
1278 return tag_from_dmreq(cc, dmreq) + cc->integrity_tag_size; in iv_tag_from_dmreq()
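The helpers at source lines 1228-1278 all do pointer arithmetic within a single per-request allocation; the two ALIGN() lines in iv_of_dmreq() are the AEAD and skcipher branches of the same alignment fixup. The layout they imply (a sketch; tag_from_dmreq()/iv_tag_from_dmreq() point into io->integrity_metadata instead, indexed by the stored tag offset):

    /*
     * req_of_dmreq():  dmreq - cc->dmreq_start
     *   |----------------------------------|
     *   | crypto aead/skcipher request     |
     *   | + tfm context                    |
     *   |----------------------------------| <- dmreq
     *   | struct dm_crypt_request          |
     *   |----------------------------------|
     *   | alignment padding (ALIGN())      |
     *   |----------------------------------| <- iv_of_dmreq()
     *   | iv          [cc->iv_size]        |
     *   |----------------------------------| <- org_iv_of_dmreq()
     *   | org_iv      [cc->iv_size]        |
     *   |----------------------------------| <- org_sector_of_dmreq()
     *   | org_sector  (__le64)             |
     *   |----------------------------------| <- org_tag_of_dmreq()
     *   | org tag offset (unsigned int)    |
     *   |----------------------------------|
     */
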
1288 struct dm_crypt_request *dmreq; in crypt_convert_block_aead() local
1299 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_aead()
1300 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block_aead()
1302 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_aead()
1303 dmreq->ctx = ctx; in crypt_convert_block_aead()
1305 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_aead()
1307 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1310 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1311 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1312 tag = tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1313 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1320 sg_init_table(dmreq->sg_in, 4); in crypt_convert_block_aead()
1321 sg_set_buf(&dmreq->sg_in[0], sector, sizeof(uint64_t)); in crypt_convert_block_aead()
1322 sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1323 sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_aead()
1324 sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1326 sg_init_table(dmreq->sg_out, 4); in crypt_convert_block_aead()
1327 sg_set_buf(&dmreq->sg_out[0], sector, sizeof(uint64_t)); in crypt_convert_block_aead()
1328 sg_set_buf(&dmreq->sg_out[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1329 sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_aead()
1330 sg_set_buf(&dmreq->sg_out[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1337 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_aead()
1350 aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out, in crypt_convert_block_aead()
1357 aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out, in crypt_convert_block_aead()
1369 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_aead()
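The AEAD path (source lines 1288-1369) builds mirror-image 4-entry scatterlists: the on-disk sector number and original IV as associated data, then the sector payload, then the tag. The request setup implied by the two aead_request_set_crypt() fragments, roughly (note decrypt must cover payload plus tag):

    aead_request_set_ad(req, sizeof(uint64_t) + cc->iv_size);
    if (bio_data_dir(ctx->bio_in) == WRITE) {
            aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out,
                                   cc->sector_size, iv);
            r = crypto_aead_encrypt(req);
            /* on success, sg_out[3] now holds the freshly computed tag */
    } else {
            /* ciphertext and tag are consumed together; -EBADMSG on
             * authentication failure */
            aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out,
                                   cc->sector_size + cc->integrity_tag_size,
                                   iv);
            r = crypto_aead_decrypt(req);
    }
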
1385 struct dm_crypt_request *dmreq; in crypt_convert_block_skcipher() local
1394 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_skcipher()
1395 dmreq->iv_sector = ctx->cc_sector; in crypt_convert_block_skcipher()
1397 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_skcipher()
1398 dmreq->ctx = ctx; in crypt_convert_block_skcipher()
1400 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_skcipher()
1402 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1403 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1404 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1406 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1410 sg_in = &dmreq->sg_in[0]; in crypt_convert_block_skcipher()
1411 sg_out = &dmreq->sg_out[0]; in crypt_convert_block_skcipher()
1424 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
1446 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
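The skcipher path (source lines 1385-1446) is simpler: one scatterlist entry per side, payload only, with the working IV copied out of org_iv so the post hook can still see the original. A sketch of the setup between the fragments above:

    sg_init_table(sg_in, 1);
    sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset);

    sg_init_table(sg_out, 1);
    sg_set_page(sg_out, bv_out.bv_page, cc->sector_size, bv_out.bv_offset);

    /* generator filled org_iv; the cipher mutates the working copy */
    memcpy(iv, org_iv, cc->iv_size);

    skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv);
    if (bio_data_dir(ctx->bio_in) == WRITE)
            r = crypto_skcipher_encrypt(req);
    else
            r = crypto_skcipher_decrypt(req);
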
2155 struct dm_crypt_request *dmreq = async_req->data; in kcryptd_async_done() local
2156 struct convert_context *ctx = dmreq->ctx; in kcryptd_async_done()
2171 error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
2176 (unsigned long long)le64_to_cpu(*org_sector_of_dmreq(cc, dmreq))); in kcryptd_async_done()
2181 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
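kcryptd_async_done() is the completion callback for requests the crypto driver finished asynchronously. -EINPROGRESS only signals that a backlogged request was accepted, so the real completion is still pending; -EBADMSG is an AEAD authentication failure reported against the original sector stored in the dmreq. A sketch of the control flow, reconstructed from the fragments above (the pending-count handoff at the end is an assumption based on mainline):

    static void kcryptd_async_done(struct crypto_async_request *async_req,
                                   int error)
    {
            struct dm_crypt_request *dmreq = async_req->data;
            struct convert_context *ctx = dmreq->ctx;
            struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx);
            struct crypt_config *cc = io->cc;

            /* backlogged request accepted; a final completion follows */
            if (error == -EINPROGRESS) {
                    complete(&ctx->restart);
                    return;
            }

            if (!error && cc->iv_gen_ops && cc->iv_gen_ops->post)
                    error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq),
                                                 dmreq);

            if (error == -EBADMSG) {
                    DMERR_LIMIT("INTEGRITY AEAD ERROR, sector %llu",
                                (unsigned long long)le64_to_cpu(
                                        *org_sector_of_dmreq(cc, dmreq)));
                    io->error = BLK_STS_PROTECTION;
            } else if (error < 0)
                    io->error = BLK_STS_IOERR;

            crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio);

            /* last outstanding block: hand the bio to read/write completion */
            if (!atomic_dec_and_test(&ctx->cc_pending))
                    return;

            if (bio_data_dir(io->base_bio) == READ)
                    kcryptd_crypt_read_done(io);
            else
                    kcryptd_crypt_write_io_submit(io, 1);
    }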