Lines matching refs: ctx_p
66 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_key_type() local
68 return ctx_p->key_type; in cc_key_type()
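These hits read like cross-reference output for ctx_p, the per-tfm cipher context (struct cc_cipher_ctx), evidently from the Arm TrustZone CryptoCell (ccree) skcipher glue. A minimal sketch of the accessor the two hits above imply; the return type is assumed:

    static enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
    {
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

        /* Recorded at setkey time (unprotected, HW-protected, or
         * policy-protected) and consulted later by the request path.
         */
        return ctx_p->key_type;
    }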
71 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size) in validate_keys_sizes() argument
73 switch (ctx_p->flow_mode) { in validate_keys_sizes()
78 if (ctx_p->cipher_mode != DRV_CIPHER_XTS) in validate_keys_sizes()
85 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || in validate_keys_sizes()
86 ctx_p->cipher_mode == DRV_CIPHER_ESSIV) in validate_keys_sizes()
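Per the hits, validate_keys_sizes() dispatches on ctx_p->flow_mode and rejects AES key sizes that do not fit the mode: single-length 128/192-bit keys are fine except for XTS (which needs a double-length key), and double-length keys are accepted only for XTS and ESSIV. A hedged reconstruction; the DES/SM4 branches and the CC_* size constants are assumed:

    static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
    {
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
            switch (size) {
            case CC_AES_128_BIT_KEY_SIZE:
            case CC_AES_192_BIT_KEY_SIZE:
                if (ctx_p->cipher_mode != DRV_CIPHER_XTS)
                    return 0;    /* single key: any non-XTS mode */
                break;
            case CC_AES_256_BIT_KEY_SIZE:
                return 0;
            case (CC_AES_192_BIT_KEY_SIZE * 2):
            case (CC_AES_256_BIT_KEY_SIZE * 2):
                /* double-length keys: XTS/ESSIV only */
                if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                    ctx_p->cipher_mode == DRV_CIPHER_ESSIV)
                    return 0;
                break;
            }
            break;
        case S_DIN_to_DES:
            if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
                return 0;
            break;
        case S_DIN_to_SM4:
            if (size == SM4_KEY_SIZE)
                return 0;
            break;
        }
        return -EINVAL;
    }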
106 static int validate_data_size(struct cc_cipher_ctx *ctx_p, in validate_data_size() argument
109 switch (ctx_p->flow_mode) { in validate_data_size()
111 switch (ctx_p->cipher_mode) { in validate_data_size()
135 switch (ctx_p->cipher_mode) { in validate_data_size()
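validate_data_size() follows the same two-level switch, this time over the request length. A sketch with the usual rules assumed: XTS/CTS need at least one block, stream-like CTR/OFB take any length, and the block modes require block alignment:

    static int validate_data_size(struct cc_cipher_ctx *ctx_p,
                                  unsigned int size)
    {
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
            switch (ctx_p->cipher_mode) {
            case DRV_CIPHER_XTS:
            case DRV_CIPHER_CBC_CTS:
                if (size >= AES_BLOCK_SIZE)
                    return 0;    /* at least one full block */
                break;
            case DRV_CIPHER_OFB:
            case DRV_CIPHER_CTR:
                return 0;        /* stream-like: any length */
            case DRV_CIPHER_ECB:
            case DRV_CIPHER_CBC:
            case DRV_CIPHER_ESSIV:
                if (IS_ALIGNED(size, AES_BLOCK_SIZE))
                    return 0;
                break;
            }
            break;
        case S_DIN_to_DES:
            if (IS_ALIGNED(size, DES_BLOCK_SIZE))
                return 0;
            break;
        case S_DIN_to_SM4:
            if (ctx_p->cipher_mode == DRV_CIPHER_CTR ||
                IS_ALIGNED(size, SM4_BLOCK_SIZE))
                return 0;
            break;
        }
        return -EINVAL;
    }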
153 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_init() local
161 dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p, in cc_cipher_init()
164 ctx_p->cipher_mode = cc_alg->cipher_mode; in cc_cipher_init()
165 ctx_p->flow_mode = cc_alg->flow_mode; in cc_cipher_init()
166 ctx_p->drvdata = cc_alg->drvdata; in cc_cipher_init()
168 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_init()
172 ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0); in cc_cipher_init()
173 if (IS_ERR(ctx_p->shash_tfm)) { in cc_cipher_init()
175 return PTR_ERR(ctx_p->shash_tfm); in cc_cipher_init()
180 ctx_p->fallback_tfm = in cc_cipher_init()
183 if (IS_ERR(ctx_p->fallback_tfm)) { in cc_cipher_init()
189 ctx_p->fallback_tfm = NULL; in cc_cipher_init()
191 fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm); in cc_cipher_init()
199 ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL); in cc_cipher_init()
200 if (!ctx_p->user.key) in cc_cipher_init()
204 ctx_p->user.key); in cc_cipher_init()
207 ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key, in cc_cipher_init()
210 if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) { in cc_cipher_init()
212 max_key_buf_size, ctx_p->user.key); in cc_cipher_init()
216 max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr); in cc_cipher_init()
221 kfree(ctx_p->user.key); in cc_cipher_init()
223 crypto_free_skcipher(ctx_p->fallback_tfm); in cc_cipher_init()
224 crypto_free_shash(ctx_p->shash_tfm); in cc_cipher_init()
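Stitching the cc_cipher_init() hits together: copy the algorithm template into the context, allocate a sha256 shash plus an async software fallback for ESSIV, then allocate and DMA-map the key staging buffer, unwinding in reverse order on failure. A hedged sketch; the container_of() lookup is assumed, and the reqsize handling behind the crypto_skcipher_reqsize() hit at 191 (sizing the request context to embed a fallback request) is elided:

    static int cc_cipher_init(struct crypto_tfm *tfm)
    {
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct cc_crypto_alg *cc_alg =
            container_of(tfm->__crt_alg, struct cc_crypto_alg,
                         skcipher_alg.base);
        struct device *dev = drvdata_to_dev(cc_alg->drvdata);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

        dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
                crypto_tfm_alg_name(tfm));

        ctx_p->cipher_mode = cc_alg->cipher_mode;
        ctx_p->flow_mode = cc_alg->flow_mode;
        ctx_p->drvdata = cc_alg->drvdata;

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
            /* ESSIV derives its tweak key as sha256(key) at setkey */
            ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0);
            if (IS_ERR(ctx_p->shash_tfm))
                return PTR_ERR(ctx_p->shash_tfm);

            /* sw fallback for key sizes the HW path rejects; a
             * missing fallback is tolerated rather than fatal
             */
            ctx_p->fallback_tfm =
                crypto_alloc_skcipher(crypto_tfm_alg_name(tfm), 0,
                                      CRYPTO_ALG_NEED_FALLBACK |
                                      CRYPTO_ALG_ASYNC);
            if (IS_ERR(ctx_p->fallback_tfm))
                ctx_p->fallback_tfm = NULL;
            /* the real init also doubles max_key_buf_size here,
             * making room for key + sha256(key)
             */
        }

        /* staging buffer the engine reads keys from via DMA */
        ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
        if (!ctx_p->user.key)
            goto free_fallback;

        ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
                                                  max_key_buf_size,
                                                  DMA_TO_DEVICE);
        if (dma_mapping_error(dev, ctx_p->user.key_dma_addr))
            goto free_key;

        return 0;

    free_key:
        kfree(ctx_p->user.key);
    free_fallback:
        crypto_free_skcipher(ctx_p->fallback_tfm);    /* NULL-safe */
        crypto_free_shash(ctx_p->shash_tfm);          /* NULL-safe */
        return -ENOMEM;
    }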
236 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_exit() local
237 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_exit()
242 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_exit()
244 crypto_free_shash(ctx_p->shash_tfm); in cc_cipher_exit()
245 ctx_p->shash_tfm = NULL; in cc_cipher_exit()
246 crypto_free_skcipher(ctx_p->fallback_tfm); in cc_cipher_exit()
247 ctx_p->fallback_tfm = NULL; in cc_cipher_exit()
251 dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size, in cc_cipher_exit()
254 &ctx_p->user.key_dma_addr); in cc_cipher_exit()
257 dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key); in cc_cipher_exit()
258 kfree_sensitive(ctx_p->user.key); in cc_cipher_exit()
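cc_cipher_exit() mirrors init in reverse, and frees the staging buffer with kfree_sensitive() so the key bytes are zeroed before the memory returns to the allocator. Sketch, with max_key_buf_size recomputed from the template as assumed above:

    static void cc_cipher_exit(struct crypto_tfm *tfm)
    {
        struct cc_crypto_alg *cc_alg =
            container_of(tfm->__crt_alg, struct cc_crypto_alg,
                         skcipher_alg.base);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
            crypto_free_shash(ctx_p->shash_tfm);
            ctx_p->shash_tfm = NULL;
            crypto_free_skcipher(ctx_p->fallback_tfm);
            ctx_p->fallback_tfm = NULL;
            /* as in init, the real code doubles max_key_buf_size here */
        }

        dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
                         DMA_TO_DEVICE);

        /* zeroes the buffer before freeing: it held raw key material */
        kfree_sensitive(ctx_p->user.key);
    }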
302 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_sethkey() local
303 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_sethkey()
307 ctx_p, crypto_tfm_alg_name(tfm), keylen); in cc_cipher_sethkey()
325 if (validate_keys_sizes(ctx_p, keylen)) { in cc_cipher_sethkey()
330 ctx_p->keylen = keylen; in cc_cipher_sethkey()
331 ctx_p->fallback_on = false; in cc_cipher_sethkey()
335 if (ctx_p->flow_mode == S_DIN_to_SM4) { in cc_cipher_sethkey()
340 ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1); in cc_cipher_sethkey()
341 if (ctx_p->hw.key1_slot == END_OF_KEYS) { in cc_cipher_sethkey()
347 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || in cc_cipher_sethkey()
348 ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_sethkey()
355 ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2); in cc_cipher_sethkey()
356 if (ctx_p->hw.key2_slot == END_OF_KEYS) { in cc_cipher_sethkey()
363 ctx_p->key_type = CC_HW_PROTECTED_KEY; in cc_cipher_sethkey()
365 ctx_p->hw.key1_slot, ctx_p->hw.key2_slot); in cc_cipher_sethkey()
369 if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) { in cc_cipher_sethkey()
374 if (ctx_p->cipher_mode != DRV_CIPHER_CBC && in cc_cipher_sethkey()
375 ctx_p->cipher_mode != DRV_CIPHER_CTR) { in cc_cipher_sethkey()
380 ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1); in cc_cipher_sethkey()
381 if (ctx_p->flow_mode == S_DIN_to_AES) in cc_cipher_sethkey()
382 ctx_p->cpp.alg = CC_CPP_AES; in cc_cipher_sethkey()
384 ctx_p->cpp.alg = CC_CPP_SM4; in cc_cipher_sethkey()
385 ctx_p->key_type = CC_POLICY_PROTECTED_KEY; in cc_cipher_sethkey()
387 ctx_p->cpp.alg, ctx_p->cpp.slot); in cc_cipher_sethkey()
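In cc_cipher_sethkey() the "key" is not key material but a token (struct cc_hkey_info in these hits) naming key slots inside the hardware. The hits show two accepted kinds: HW-protected keys (AES only; XTS/ESSIV additionally need a second, distinct slot) and policy-protected CPP keys (HW rev 713+, CBC/CTR only). A condensed sketch of the dispatch; cc_slot_to_key_type() and the token layout are assumed from context:

    /* inside cc_cipher_sethkey(), after checking keylen == sizeof(hki) */
    memcpy(&hki, key, sizeof(hki));
    keylen = hki.keylen;    /* the real key length lives in the token */

    if (validate_keys_sizes(ctx_p, keylen))
        return -EINVAL;

    ctx_p->keylen = keylen;
    ctx_p->fallback_on = false;

    switch (cc_slot_to_key_type(hki.hw_key1)) {
    case CC_HW_PROTECTED_KEY:
        if (ctx_p->flow_mode == S_DIN_to_SM4)
            return -EINVAL;    /* AES-only for HW-protected keys */

        ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
        if (ctx_p->hw.key1_slot == END_OF_KEYS)
            return -EINVAL;

        if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
            ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
            /* two-key modes need a second, distinct slot */
            if (hki.hw_key1 == hki.hw_key2)
                return -EINVAL;
            ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
            if (ctx_p->hw.key2_slot == END_OF_KEYS)
                return -EINVAL;
        }

        ctx_p->key_type = CC_HW_PROTECTED_KEY;
        break;

    case CC_POLICY_PROTECTED_KEY:
        if (ctx_p->drvdata->hw_rev < CC_HW_REV_713)
            return -EINVAL;    /* CPP needs rev 713 or later */
        if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
            ctx_p->cipher_mode != DRV_CIPHER_CTR)
            return -EINVAL;    /* CPP is CBC/CTR only */

        ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
        ctx_p->cpp.alg = ctx_p->flow_mode == S_DIN_to_AES ?
                         CC_CPP_AES : CC_CPP_SM4;
        ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
        break;

    default:
        return -EINVAL;
    }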
402 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_setkey() local
403 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_setkey()
410 ctx_p, crypto_tfm_alg_name(tfm), keylen); in cc_cipher_setkey()
415 if (validate_keys_sizes(ctx_p, keylen)) { in cc_cipher_setkey()
420 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_setkey()
426 if (likely(ctx_p->fallback_tfm)) { in cc_cipher_setkey()
427 ctx_p->fallback_on = true; in cc_cipher_setkey()
428 crypto_skcipher_clear_flags(ctx_p->fallback_tfm, in cc_cipher_setkey()
430 crypto_skcipher_clear_flags(ctx_p->fallback_tfm, flags); in cc_cipher_setkey()
431 return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen); in cc_cipher_setkey()
442 ctx_p->fallback_on = false; in cc_cipher_setkey()
443 ctx_p->key_type = CC_UNPROTECTED_KEY; in cc_cipher_setkey()
450 if (ctx_p->flow_mode == S_DIN_to_DES) { in cc_cipher_setkey()
459 if (ctx_p->cipher_mode == DRV_CIPHER_XTS && in cc_cipher_setkey()
466 dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr, in cc_cipher_setkey()
469 memcpy(ctx_p->user.key, key, keylen); in cc_cipher_setkey()
471 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { in cc_cipher_setkey()
475 err = crypto_shash_tfm_digest(ctx_p->shash_tfm, in cc_cipher_setkey()
476 ctx_p->user.key, keylen, in cc_cipher_setkey()
477 ctx_p->user.key + keylen); in cc_cipher_setkey()
485 dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr, in cc_cipher_setkey()
487 ctx_p->keylen = keylen; in cc_cipher_setkey()
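The unprotected path, cc_cipher_setkey(), validates the size, routes unsupported ESSIV key sizes to the software fallback tfm (hits at 426-431), and otherwise copies the key into the DMA-mapped staging buffer. Note how the CPU writes are bracketed by dma_sync_single_for_cpu()/_for_device(), and how ESSIV derives its tweak key as sha256(key) appended directly after the key, doubling keylen. A sketch of the copy phase:

    /* inside cc_cipher_setkey(), after the size and weak-key checks */
    dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
                            max_key_buf_size, DMA_TO_DEVICE);

    memcpy(ctx_p->user.key, key, keylen);

    if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
        /* tweak key = sha256(key), stored directly after the key */
        err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
                                      ctx_p->user.key, keylen,
                                      ctx_p->user.key + keylen);
        if (err)
            return err;
        keylen <<= 1;    /* key + derived tweak key */
    }

    /* flush the CPU writes before the engine may fetch the key */
    dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
                               max_key_buf_size, DMA_TO_DEVICE);
    ctx_p->keylen = keylen;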
493 static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p) in cc_out_setup_mode() argument
495 switch (ctx_p->flow_mode) { in cc_out_setup_mode()
503 return ctx_p->flow_mode; in cc_out_setup_mode()
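cc_out_setup_mode() maps the input (DIN) flow mode to the matching output (DOUT) setup mode. Only the default case appears in the hits, so the case labels here are assumptions based on the S_DIN_to_* / S_*_to_DOUT naming scheme:

    static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
    {
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
            return S_AES_to_DOUT;
        case S_DIN_to_DES:
            return S_DES_to_DOUT;
        case S_DIN_to_SM4:
            return S_SM4_to_DOUT;
        default:
            return ctx_p->flow_mode;
        }
    }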
512 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_readiv_desc() local
513 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_readiv_desc()
514 int cipher_mode = ctx_p->cipher_mode; in cc_setup_readiv_desc()
515 int flow_mode = cc_out_setup_mode(ctx_p); in cc_setup_readiv_desc()
519 if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) in cc_setup_readiv_desc()
541 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_readiv_desc()
554 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_readiv_desc()
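cc_setup_readiv_desc() appends a descriptor that writes the engine's updated state (the next IV) back to memory after the data pass; policy-protected (CPP) flows skip it, as the hit at 519 shows. A schematic sketch; hw_desc_init(), set_dout_dlli() and friends are the driver's HW-queue primitives, and their exact arguments here are assumed (the real function also picks WRITE_STATE0 vs WRITE_STATE1 by cipher mode, hence the two set_queue_last_ind hits):

    if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
        return;    /* CPP flows manage their own state */

    hw_desc_init(&desc[*seq_size]);
    /* DOUT: store the next IV to the request's IV DMA address */
    set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
    set_flow_mode(&desc[*seq_size], cc_out_setup_mode(ctx_p));
    set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
    set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0); /* STATE1 for CTR/OFB */
    /* final descriptor of the sequence raises the completion */
    set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
    (*seq_size)++;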
569 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_state_desc() local
570 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_state_desc()
571 int cipher_mode = ctx_p->cipher_mode; in cc_setup_state_desc()
572 int flow_mode = ctx_p->flow_mode; in cc_setup_state_desc()
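cc_setup_state_desc() is the input-side mirror: it loads the IV or counter into the engine's state registers before any data flows. A rough sketch assuming the same helpers; which of STATE0/STATE1 is loaded again varies by cipher mode:

    switch (ctx_p->cipher_mode) {
    case DRV_CIPHER_ECB:
        break;    /* no chaining state to load */
    case DRV_CIPHER_CBC:
    case DRV_CIPHER_CTR:
    case DRV_CIPHER_OFB:
        hw_desc_init(&desc[*seq_size]);
        /* DIN: fetch the IV/counter from the request's IV buffer */
        set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
                     ivsize, NS_BIT);
        set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
        set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
        set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
        (*seq_size)++;
        break;
    }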
613 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_xex_state_desc() local
614 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_xex_state_desc()
615 int cipher_mode = ctx_p->cipher_mode; in cc_setup_xex_state_desc()
616 int flow_mode = ctx_p->flow_mode; in cc_setup_xex_state_desc()
618 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; in cc_setup_xex_state_desc()
619 unsigned int key_len = (ctx_p->keylen / 2); in cc_setup_xex_state_desc()
643 ctx_p->hw.key2_slot); in cc_setup_xex_state_desc()
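For the two-key modes, the hit at 619 is the telling one: cc_setup_xex_state_desc() treats the staging buffer as two keys back to back, loading the second half (the XEX/tweak key) from key_dma_addr + key_len, or referencing the second HW slot (hit 643) when the key never left the hardware. A sketch of that split, with the surrounding declarations assumed:

    unsigned int key_len = ctx_p->keylen / 2;    /* each half is one key */
    dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;

    hw_desc_init(&desc[*seq_size]);
    set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
    if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY)
        /* key2 stays inside the HW: reference its slot */
        set_hw_crypto_key(&desc[*seq_size], ctx_p->hw.key2_slot);
    else
        /* second half of the staging buffer is the tweak key */
        set_din_type(&desc[*seq_size], DMA_DLLI,
                     key_dma_addr + key_len, key_len, NS_BIT);
    set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
    set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
    (*seq_size)++;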
671 static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p) in cc_out_flow_mode() argument
673 switch (ctx_p->flow_mode) { in cc_out_flow_mode()
681 return ctx_p->flow_mode; in cc_out_flow_mode()
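cc_out_flow_mode() is the twin of cc_out_setup_mode() above, mapping the setup flow to the combined DIN-to-DOUT data-pass flow. Again only the default case is visible in the hits, so the labels are assumed:

    static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
    {
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
            return DIN_AES_DOUT;
        case S_DIN_to_DES:
            return DIN_DES_DOUT;
        case S_DIN_to_SM4:
            return DIN_SM4_DOUT;
        default:
            return ctx_p->flow_mode;
        }
    }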
690 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_key_desc() local
691 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_key_desc()
692 int cipher_mode = ctx_p->cipher_mode; in cc_setup_key_desc()
693 int flow_mode = ctx_p->flow_mode; in cc_setup_key_desc()
695 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; in cc_setup_key_desc()
696 unsigned int key_len = ctx_p->keylen; in cc_setup_key_desc()
713 set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot); in cc_setup_key_desc()
714 flow_mode = cc_out_flow_mode(ctx_p); in cc_setup_key_desc()
719 ctx_p->hw.key1_slot); in cc_setup_key_desc()
752 ctx_p->hw.key1_slot); in cc_setup_key_desc()
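cc_setup_key_desc() is where the three key types from setkey/sethkey converge: CPP keys select a policy slot via set_cpp_crypto_key() (hit 713) and switch to the data-pass flow, HW-protected keys reference key1_slot, and unprotected keys are DMA'd from the staging buffer. A condensed sketch of the dispatch; per-mode details (key size fields, XTS halving) are elided:

    switch (cc_key_type(tfm)) {
    case CC_POLICY_PROTECTED_KEY:
        /* select a policy-controlled (CPP) slot; no key DMA at all */
        hw_desc_init(&desc[*seq_size]);
        set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
        set_flow_mode(&desc[*seq_size], cc_out_flow_mode(ctx_p));
        (*seq_size)++;
        break;
    case CC_HW_PROTECTED_KEY:
        /* reference an internal slot: key material stays in HW */
        hw_desc_init(&desc[*seq_size]);
        set_hw_crypto_key(&desc[*seq_size], ctx_p->hw.key1_slot);
        set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
        set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
        set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
        (*seq_size)++;
        break;
    default:    /* CC_UNPROTECTED_KEY */
        /* DMA the key in from the staging buffer mapped at init */
        hw_desc_init(&desc[*seq_size]);
        set_din_type(&desc[*seq_size], DMA_DLLI,
                     ctx_p->user.key_dma_addr, key_len, NS_BIT);
        set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
        set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
        set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
        (*seq_size)++;
        break;
    }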
773 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_mlli_desc() local
774 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_mlli_desc()
781 ctx_p->drvdata->mlli_sram_addr); in cc_setup_mlli_desc()
787 ctx_p->drvdata->mlli_sram_addr, in cc_setup_mlli_desc()
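When the request's scatterlists were mapped as an MLLI (multi-link-list) table rather than one contiguous buffer, cc_setup_mlli_desc() first copies that table into engine SRAM over the BYPASS flow, so later descriptors can walk it at ctx_p->drvdata->mlli_sram_addr. Sketch, with the req_ctx field names assumed:

    if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
        /* copy the MLLI table into SRAM via the BYPASS flow */
        hw_desc_init(&desc[*seq_size]);
        set_din_type(&desc[*seq_size], DMA_DLLI,
                     req_ctx->mlli_params.mlli_dma_addr,
                     req_ctx->mlli_params.mlli_len, NS_BIT);
        set_dout_sram(&desc[*seq_size],
                      ctx_p->drvdata->mlli_sram_addr,
                      req_ctx->mlli_params.mlli_len);
        set_flow_mode(&desc[*seq_size], BYPASS);
        (*seq_size)++;
    }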
800 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_setup_flow_desc() local
801 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_setup_flow_desc()
802 unsigned int flow_mode = cc_out_flow_mode(ctx_p); in cc_setup_flow_desc()
803 bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY || in cc_setup_flow_desc()
804 ctx_p->cipher_mode == DRV_CIPHER_ECB); in cc_setup_flow_desc()
818 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_flow_desc()
825 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
829 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
830 ctx_p->drvdata->mlli_sram_addr); in cc_setup_flow_desc()
832 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
837 ctx_p->drvdata->mlli_sram_addr, in cc_setup_flow_desc()
838 ctx_p->drvdata->mlli_sram_addr + in cc_setup_flow_desc()
841 (ctx_p->drvdata->mlli_sram_addr + in cc_setup_flow_desc()
848 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); in cc_setup_flow_desc()
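cc_setup_flow_desc() emits the actual data pass: in the simple DLLI case DIN/DOUT point straight at the mapped source and destination, while MLLI requests point into the SRAM copy made above (the repeated mlli_sram_addr hits, with the output table offset past the input one). Per hits 803-804, the descriptor is marked last only when no read-IV descriptor will follow (CPP keys and ECB). A hedged sketch of the DLLI case:

    bool last_desc = ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
                     ctx_p->cipher_mode == DRV_CIPHER_ECB;

    hw_desc_init(&desc[*seq_size]);
    set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
                 nbytes, NS_BIT);
    set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
                  nbytes, NS_BIT, last_desc ? 1 : 0);
    if (last_desc)
        /* no read-IV descriptor follows: complete here */
        set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
    set_flow_mode(&desc[*seq_size], cc_out_flow_mode(ctx_p));
    (*seq_size)++;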
885 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); in cc_cipher_process() local
886 struct device *dev = drvdata_to_dev(ctx_p->drvdata); in cc_cipher_process()
899 if (validate_data_size(ctx_p, nbytes)) { in cc_cipher_process()
910 if (ctx_p->fallback_on) { in cc_cipher_process()
914 skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm); in cc_cipher_process()
935 if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) { in cc_cipher_process()
937 cc_req.cpp.alg = ctx_p->cpp.alg; in cc_cipher_process()
938 cc_req.cpp.slot = ctx_p->cpp.slot; in cc_cipher_process()
946 rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, in cc_cipher_process()
970 rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len, in cc_cipher_process()
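Finally, cc_cipher_process() ties the helpers together: validate the length, hand the request to the software fallback if setkey armed it, stash the CPP slot in the completion request for policy-protected keys, DMA-map the request, build the descriptor sequence, and post it. A condensed sketch; error handling, the completion callback, and some argument lists are assumed or abbreviated:

    if (validate_data_size(ctx_p, nbytes))
        return -EINVAL;

    if (ctx_p->fallback_on) {
        /* HW could not take this key: reuse the sw skcipher */
        struct skcipher_request *subreq = skcipher_request_ctx(req);

        *subreq = *req;
        skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);
        return direction == DRV_CRYPTO_DIRECTION_ENCRYPT ?
               crypto_skcipher_encrypt(subreq) :
               crypto_skcipher_decrypt(subreq);
    }

    if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
        /* completion path must know which CPP slot to acknowledge */
        cc_req.cpp.is_cpp = true;
        cc_req.cpp.alg = ctx_p->cpp.alg;
        cc_req.cpp.slot = ctx_p->cpp.slot;
    }

    rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
                               req->iv, req->src, req->dst, flags);
    if (rc)
        return rc;

    /* IV, MLLI table, key, XEX key, data pass, next-IV read-back */
    cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
    cc_setup_mlli_desc(tfm, req_ctx, req->dst, req->src, nbytes, req,
                       desc, &seq_len);
    cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
    cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
    cc_setup_flow_desc(tfm, req_ctx, req->dst, req->src, nbytes, desc,
                       &seq_len);
    cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

    rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
                         &req->base);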