Searched refs: rk_dev (Results 1 – 22 of 22), sorted by relevance

/OK3568_Linux_fs/kernel/drivers/crypto/rockchip/

rk_crypto_core.c
40 static int rk_crypto_enable_clk(struct rk_crypto_dev *rk_dev) in rk_crypto_enable_clk() argument
44 dev_dbg(rk_dev->dev, "clk_bulk_prepare_enable.\n"); in rk_crypto_enable_clk()
46 ret = clk_bulk_prepare_enable(rk_dev->clks_num, in rk_crypto_enable_clk()
47 rk_dev->clk_bulks); in rk_crypto_enable_clk()
49 dev_err(rk_dev->dev, "failed to enable clks %d\n", ret); in rk_crypto_enable_clk()
54 static void rk_crypto_disable_clk(struct rk_crypto_dev *rk_dev) in rk_crypto_disable_clk() argument
56 dev_dbg(rk_dev->dev, "clk_bulk_disable_unprepare.\n"); in rk_crypto_disable_clk()
58 clk_bulk_disable_unprepare(rk_dev->clks_num, rk_dev->clk_bulks); in rk_crypto_disable_clk()
61 static int rk_load_data(struct rk_crypto_dev *rk_dev, in rk_load_data() argument
68 struct device *dev = rk_dev->dev; in rk_load_data()
[all …]
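
A minimal sketch of the clock-gating pattern this hit shows, assuming only the fields visible in the excerpt (dev, clks_num, clk_bulks); clk_bulk_prepare_enable() and clk_bulk_disable_unprepare() are the standard kernel clk_bulk API:

#include <linux/clk.h>
#include <linux/device.h>

struct rk_crypto_dev {
	struct device *dev;
	int clks_num;
	struct clk_bulk_data *clk_bulks;
	/* ... */
};

static int rk_crypto_enable_clk(struct rk_crypto_dev *rk_dev)
{
	int ret;

	dev_dbg(rk_dev->dev, "clk_bulk_prepare_enable.\n");

	ret = clk_bulk_prepare_enable(rk_dev->clks_num, rk_dev->clk_bulks);
	if (ret)
		dev_err(rk_dev->dev, "failed to enable clks %d\n", ret);

	return ret;
}

static void rk_crypto_disable_clk(struct rk_crypto_dev *rk_dev)
{
	dev_dbg(rk_dev->dev, "clk_bulk_disable_unprepare.\n");
	clk_bulk_disable_unprepare(rk_dev->clks_num, rk_dev->clk_bulks);
}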

rk_crypto_v3_ahash.c
45 static void rk_hash_reset(struct rk_crypto_dev *rk_dev) in rk_hash_reset() argument
51 CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0x00); in rk_hash_reset()
56 CRYPTO_WRITE(rk_dev, CRYPTO_RST_CTL, tmp | tmp_mask); in rk_hash_reset()
60 false, rk_dev, CRYPTO_RST_CTL); in rk_hash_reset()
62 dev_err(rk_dev->dev, "cipher reset pool timeout %ums.", in rk_hash_reset()
65 CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0xffff0000); in rk_hash_reset()
68 static int rk_hash_mid_data_store(struct rk_crypto_dev *rk_dev, struct rk_hash_mid_data *mid_data) in rk_hash_mid_data_store() argument
80 false, rk_dev, CRYPTO_MID_VALID); in rk_hash_mid_data_store()
82 CRYPTO_WRITE(rk_dev, CRYPTO_MID_VALID_SWITCH, in rk_hash_mid_data_store()
89 CRYPTO_WRITE(rk_dev, CRYPTO_MID_VALID, in rk_hash_mid_data_store()
[all …]
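
The rk_hash_reset() excerpt follows the common Rockchip register convention in which the upper 16 bits of a control register act as a write-enable mask, so tmp | (tmp << 16) sets bits and 0xffff0000 clears the whole low half. A minimal sketch of that sequence, assuming an MMIO base at rk_dev->reg, hypothetical register offsets and reset bit, and readl_poll_timeout_atomic() standing in for the driver's own poll helper:

#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/bits.h>
#include <linux/device.h>

#define CRYPTO_RST_CTL		0x0004	/* hypothetical offsets */
#define CRYPTO_DMA_INT_EN	0x0008
#define CRYPTO_HASH_CTL		0x0040
#define CRYPTO_SW_HASH_RESET	BIT(0)	/* hypothetical reset bit */

#define CRYPTO_WRITE(rk, off, val)	writel((val), (rk)->reg + (off))

static void rk_hash_reset_sketch(struct rk_crypto_dev *rk_dev)
{
	u32 tmp = CRYPTO_SW_HASH_RESET;
	u32 tmp_mask = tmp << 16;	/* write-enable the bit being set */
	u32 val;
	int ret;

	/* quiesce DMA interrupts before resetting the hash engine */
	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0x00);
	CRYPTO_WRITE(rk_dev, CRYPTO_RST_CTL, tmp | tmp_mask);

	/* hardware clears the reset bit once the reset has completed */
	ret = readl_poll_timeout_atomic(rk_dev->reg + CRYPTO_RST_CTL, val,
					!(val & tmp), 0, 100);
	if (ret)
		dev_err(rk_dev->dev, "hash reset poll timeout\n");

	/* mask-only write: clear every bit of CRYPTO_HASH_CTL */
	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0xffff0000);
}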

rk_crypto_v2_ahash.c
40 static void rk_hash_reset(struct rk_crypto_dev *rk_dev) in rk_hash_reset() argument
46 CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0x00); in rk_hash_reset()
51 CRYPTO_WRITE(rk_dev, CRYPTO_RST_CTL, tmp | tmp_mask); in rk_hash_reset()
55 false, rk_dev, CRYPTO_RST_CTL); in rk_hash_reset()
57 dev_err(rk_dev->dev, "cipher reset pool timeout %ums.", in rk_hash_reset()
60 CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0xffff0000); in rk_hash_reset()
65 struct rk_crypto_dev *rk_dev = platform_get_drvdata(dev_id); in rk_crypto_irq_handle() local
68 (struct rk_hw_crypto_v2_info *)rk_dev->hw_info; in rk_crypto_irq_handle()
69 struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev); in rk_crypto_irq_handle()
72 CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0); in rk_crypto_irq_handle()
[all …]

rk_crypto_v1_skcipher.c
18 struct rk_crypto_dev *rk_dev) in rk_alg_ctx_cast() argument
21 skcipher_request_cast(rk_dev->async_req); in rk_alg_ctx_cast()
30 struct rk_crypto_dev *rk_dev = platform_get_drvdata(dev_id); in rk_crypto_irq_handle() local
33 interrupt_status = CRYPTO_READ(rk_dev, RK_CRYPTO_INTSTS); in rk_crypto_irq_handle()
34 CRYPTO_WRITE(rk_dev, RK_CRYPTO_INTSTS, interrupt_status); in rk_crypto_irq_handle()
37 dev_warn(rk_dev->dev, "DMA Error\n"); in rk_crypto_irq_handle()
38 rk_dev->err = -EFAULT; in rk_crypto_irq_handle()
50 static int rk_handle_req(struct rk_crypto_dev *rk_dev, in rk_handle_req() argument
58 return rk_dev->enqueue(rk_dev, &req->base); in rk_handle_req()
148 struct rk_crypto_dev *rk_dev = ctx->rk_dev; in rk_cipher_encrypt() local
[all …]
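
Both v1 hits share the same read-then-acknowledge interrupt shape. A sketch of it, reusing the CRYPTO_READ/CRYPTO_WRITE accessors and RK_CRYPTO_INTSTS offset from the excerpts; the RK_CRYPTO_DMA_ERR_INT status bit is a hypothetical name for the error source being tested:

#include <linux/interrupt.h>
#include <linux/platform_device.h>

static irqreturn_t rk_crypto_irq_handle(int irq, void *dev_id)
{
	struct rk_crypto_dev *rk_dev = platform_get_drvdata(dev_id);
	u32 interrupt_status;

	/* read the pending sources, then write them back to acknowledge */
	interrupt_status = CRYPTO_READ(rk_dev, RK_CRYPTO_INTSTS);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_INTSTS, interrupt_status);

	if (interrupt_status & RK_CRYPTO_DMA_ERR_INT) {
		dev_warn(rk_dev->dev, "DMA Error\n");
		rk_dev->err = -EFAULT;	/* picked up by the completion path */
	}

	return IRQ_HANDLED;
}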

rk_crypto_v1_ahash.c
21 struct rk_crypto_dev *rk_dev) in rk_alg_ctx_cast() argument
24 ahash_request_cast(rk_dev->async_req); in rk_alg_ctx_cast()
34 struct rk_crypto_dev *rk_dev = platform_get_drvdata(dev_id); in rk_crypto_irq_handle() local
37 interrupt_status = CRYPTO_READ(rk_dev, RK_CRYPTO_INTSTS); in rk_crypto_irq_handle()
38 CRYPTO_WRITE(rk_dev, RK_CRYPTO_INTSTS, interrupt_status); in rk_crypto_irq_handle()
41 dev_warn(rk_dev->dev, "DMA Error\n"); in rk_crypto_irq_handle()
42 rk_dev->err = -EFAULT; in rk_crypto_irq_handle()
94 static void rk_ahash_reg_init(struct rk_crypto_dev *rk_dev) in rk_ahash_reg_init() argument
96 struct ahash_request *req = ahash_request_cast(rk_dev->async_req); in rk_ahash_reg_init()
98 struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev); in rk_ahash_reg_init()
[all …]

rk_crypto_v2_skcipher.c
45 struct rk_crypto_dev *rk_dev = platform_get_drvdata(dev_id); in rk_crypto_irq_handle() local
48 (struct rk_hw_crypto_v2_info *)rk_dev->hw_info; in rk_crypto_irq_handle()
49 struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev); in rk_crypto_irq_handle()
51 interrupt_status = CRYPTO_READ(rk_dev, CRYPTO_DMA_INT_ST); in rk_crypto_irq_handle()
52 CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_ST, interrupt_status); in rk_crypto_irq_handle()
57 dev_err(rk_dev->dev, "DMA desc = %p\n", hw_info->hw_desc.lli_head); in rk_crypto_irq_handle()
58 dev_err(rk_dev->dev, "DMA addr_in = %08x\n", in rk_crypto_irq_handle()
60 dev_err(rk_dev->dev, "DMA addr_out = %08x\n", in rk_crypto_irq_handle()
62 dev_err(rk_dev->dev, "DMA count = %08x\n", alg_ctx->count); in rk_crypto_irq_handle()
63 dev_err(rk_dev->dev, "DMA desc_dma = %08x\n", in rk_crypto_irq_handle()
[all …]

rk_crypto_v3_skcipher.c
44 struct rk_crypto_dev *rk_dev = platform_get_drvdata(dev_id); in rk_crypto_irq_handle() local
47 (struct rk_hw_crypto_v3_info *)rk_dev->hw_info; in rk_crypto_irq_handle()
48 struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev); in rk_crypto_irq_handle()
50 interrupt_status = CRYPTO_READ(rk_dev, CRYPTO_DMA_INT_ST); in rk_crypto_irq_handle()
51 CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_ST, interrupt_status); in rk_crypto_irq_handle()
56 dev_err(rk_dev->dev, "DMA desc = %p\n", hw_info->hw_desc.lli_head); in rk_crypto_irq_handle()
57 dev_err(rk_dev->dev, "DMA addr_in = %08x\n", in rk_crypto_irq_handle()
59 dev_err(rk_dev->dev, "DMA addr_out = %08x\n", in rk_crypto_irq_handle()
61 dev_err(rk_dev->dev, "DMA count = %08x\n", alg_ctx->count); in rk_crypto_irq_handle()
62 dev_err(rk_dev->dev, "DMA desc_dma = %08x\n", in rk_crypto_irq_handle()
[all …]

procfs.c
49 static void crypto_show_queue_info(struct seq_file *p, struct rk_crypto_dev *rk_dev) in crypto_show_queue_info() argument
55 spin_lock_irqsave(&rk_dev->lock, flags); in crypto_show_queue_info()
57 qlen = rk_dev->queue.qlen; in crypto_show_queue_info()
58 max_qlen = rk_dev->queue.max_qlen; in crypto_show_queue_info()
59 busy = rk_dev->busy; in crypto_show_queue_info()
61 spin_unlock_irqrestore(&rk_dev->lock, flags); in crypto_show_queue_info()
64 qlen, max_qlen, rk_dev->stat.ever_queue_max, busy ? "busy" : "idle"); in crypto_show_queue_info()
109 struct rk_crypto_dev *rk_dev = p->private; in crypto_show_all() local
110 struct rk_crypto_soc_data *soc_data = rk_dev->soc_data; in crypto_show_all()
111 struct rk_crypto_stat *stat = &rk_dev->stat; in crypto_show_all()
[all …]
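
The procfs hit is a textbook snapshot-under-lock: the counters are copied while holding rk_dev->lock and formatted only after the lock is dropped, so seq_printf() never runs with interrupts disabled. A sketch built from the excerpt, with an illustrative format string:

#include <linux/seq_file.h>
#include <linux/spinlock.h>

static void crypto_show_queue_info(struct seq_file *p,
				   struct rk_crypto_dev *rk_dev)
{
	unsigned long flags;
	u32 qlen, max_qlen;
	bool busy;

	spin_lock_irqsave(&rk_dev->lock, flags);
	qlen = rk_dev->queue.qlen;
	max_qlen = rk_dev->queue.max_qlen;
	busy = rk_dev->busy;
	spin_unlock_irqrestore(&rk_dev->lock, flags);

	seq_printf(p, "queue: %u/%u (peak %u), state: %s\n",
		   qlen, max_qlen, rk_dev->stat.ever_queue_max,
		   busy ? "busy" : "idle");
}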

rk_crypto_ahash_utils.c
46 struct rk_ahash_ctx *rk_ahash_ctx_cast(struct rk_crypto_dev *rk_dev) in rk_ahash_ctx_cast() argument
48 struct ahash_request *req = ahash_request_cast(rk_dev->async_req); in rk_ahash_ctx_cast()
54 struct rk_alg_ctx *rk_ahash_alg_ctx(struct rk_crypto_dev *rk_dev) in rk_ahash_alg_ctx() argument
56 return &(rk_ahash_ctx_cast(rk_dev))->algs_ctx; in rk_ahash_alg_ctx()
66 static int rk_ahash_set_data_start(struct rk_crypto_dev *rk_dev, uint32_t flag) in rk_ahash_set_data_start() argument
69 struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev); in rk_ahash_set_data_start()
73 err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, alg_ctx->sg_dst); in rk_ahash_set_data_start()
75 err = alg_ctx->ops.hw_dma_start(rk_dev, flag); in rk_ahash_set_data_start()
213 struct rk_crypto_dev *rk_dev = ctx->rk_dev; in rk_ahash_update() local
221 return rk_dev->enqueue(rk_dev, &req->base); in rk_ahash_update()
[all …]

rk_crypto_skcipher_utils.c
27 struct rk_cipher_ctx *rk_cipher_ctx_cast(struct rk_crypto_dev *rk_dev) in rk_cipher_ctx_cast() argument
29 struct rk_cipher_ctx *ctx = crypto_tfm_ctx(rk_dev->async_req->tfm); in rk_cipher_ctx_cast()
34 struct rk_alg_ctx *rk_cipher_alg_ctx(struct rk_crypto_dev *rk_dev) in rk_cipher_alg_ctx() argument
36 return &(rk_cipher_ctx_cast(rk_dev)->algs_ctx); in rk_cipher_alg_ctx()
151 static void rk_iv_copyback(struct rk_crypto_dev *rk_dev) in rk_iv_copyback() argument
154 struct skcipher_request *req = skcipher_request_cast(rk_dev->async_req); in rk_iv_copyback()
155 struct rk_cipher_ctx *ctx = rk_cipher_ctx_cast(rk_dev); in rk_iv_copyback()
165 static void rk_update_iv(struct rk_crypto_dev *rk_dev) in rk_update_iv() argument
168 struct rk_cipher_ctx *ctx = rk_cipher_ctx_cast(rk_dev); in rk_update_iv()
170 struct skcipher_request *req = skcipher_request_cast(rk_dev->async_req); in rk_update_iv()
[all …]
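
rk_iv_copyback() and rk_update_iv() exist because CBC-style modes chain across requests: after an encrypt, the last ciphertext block must become the next IV. A minimal sketch of that copy-back for the encrypt direction, assuming CBC and using the generic scatterlist helpers; the driver's actual bookkeeping is not shown in this excerpt:

#include <linux/scatterlist.h>
#include <crypto/skcipher.h>

static void rk_iv_copyback_sketch(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (req->cryptlen < ivsize)
		return;

	/* the final ciphertext block in req->dst is the next chaining IV */
	sg_pcopy_to_buffer(req->dst, sg_nents(req->dst), req->iv,
			   ivsize, req->cryptlen - ivsize);
}

A decrypt path would instead save the last block of req->src before in-place processing overwrites it.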

rk_crypto_skcipher_utils.h
21 struct rk_alg_ctx *rk_cipher_alg_ctx(struct rk_crypto_dev *rk_dev);
23 struct rk_cipher_ctx *rk_cipher_ctx_cast(struct rk_crypto_dev *rk_dev);
29 int rk_ablk_rx(struct rk_crypto_dev *rk_dev);
31 int rk_ablk_start(struct rk_crypto_dev *rk_dev);
33 int rk_skcipher_handle_req(struct rk_crypto_dev *rk_dev, struct skcipher_request *req);
39 int rk_aead_start(struct rk_crypto_dev *rk_dev);
43 int rk_aead_handle_req(struct rk_crypto_dev *rk_dev, struct aead_request *req);

rk_crypto_v2_akcipher.c
171 dev_err(ctx->rk_dev->dev, "[%s:%d] sg copy err\n", in rk_rsa_calc()
204 dev_err(ctx->rk_dev->dev, "[%s:%d] sg copy err\n", in rk_rsa_calc()
236 static int rk_rsa_start(struct rk_crypto_dev *rk_dev) in rk_rsa_start() argument
243 static int rk_rsa_crypto_rx(struct rk_crypto_dev *rk_dev) in rk_rsa_crypto_rx() argument
261 struct rk_crypto_dev *rk_dev; in rk_rsa_init_tfm() local
269 rk_dev = algt->rk_dev; in rk_rsa_init_tfm()
271 if (!rk_dev->request_crypto) in rk_rsa_init_tfm()
274 rk_dev->request_crypto(rk_dev, "rsa"); in rk_rsa_init_tfm()
282 ctx->rk_dev = rk_dev; in rk_rsa_init_tfm()
284 rk_pka_set_crypto_base(ctx->rk_dev->pka_reg); in rk_rsa_init_tfm()
[all …]

rk_crypto_v3.c
99 static bool rk_is_cipher_support(struct rk_crypto_dev *rk_dev, u32 algo, u32 mode, u32 key_len) in rk_is_cipher_support() argument
108 version = CRYPTO_READ(rk_dev, CRYPTO_DES_VERSION); in rk_is_cipher_support()
118 version = CRYPTO_READ(rk_dev, CRYPTO_AES_VERSION); in rk_is_cipher_support()
130 version = CRYPTO_READ(rk_dev, CRYPTO_SM4_VERSION); in rk_is_cipher_support()
146 static bool rk_is_hash_support(struct rk_crypto_dev *rk_dev, u32 algo, u32 type) in rk_is_hash_support() argument
152 version = CRYPTO_READ(rk_dev, CRYPTO_HMAC_VERSION); in rk_is_hash_support()
155 version = CRYPTO_READ(rk_dev, CRYPTO_HASH_VERSION); in rk_is_hash_support()
202 bool rk_hw_crypto_v3_algo_valid(struct rk_crypto_dev *rk_dev, struct rk_crypto_algt *aglt) in rk_hw_crypto_v3_algo_valid() argument
206 return rk_is_cipher_support(rk_dev, aglt->algo, aglt->mode, 0); in rk_hw_crypto_v3_algo_valid()
209 return rk_is_hash_support(rk_dev, aglt->algo, aglt->type); in rk_hw_crypto_v3_algo_valid()
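
The v3 hits show capability probing: rather than hard-coding what each SoC variant supports, the driver reads per-algorithm version registers (CRYPTO_AES_VERSION, CRYPTO_HASH_VERSION, ...) and registers only what the silicon reports. A sketch of the idea; the type enum and the one-bit-per-algorithm layout are assumptions:

static bool rk_is_hash_support_sketch(struct rk_crypto_dev *rk_dev,
				      u32 algo, u32 type)
{
	u32 version;

	/* HMAC and plain-hash capabilities live in separate registers */
	if (type == ALG_TYPE_HMAC)	/* hypothetical type enum */
		version = CRYPTO_READ(rk_dev, CRYPTO_HMAC_VERSION);
	else
		version = CRYPTO_READ(rk_dev, CRYPTO_HASH_VERSION);

	return !!(version & BIT(algo));	/* assumed bit-per-algo layout */
}

rk_hw_crypto_v3_algo_valid() then dispatches to the cipher or hash probe based on the algorithm class before registration.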

rk_crypto_core.h
97 void (*request_crypto)(struct rk_crypto_dev *rk_dev, const char *name);
98 void (*release_crypto)(struct rk_crypto_dev *rk_dev, const char *name);
99 int (*load_data)(struct rk_crypto_dev *rk_dev,
102 int (*unload_data)(struct rk_crypto_dev *rk_dev);
103 int (*enqueue)(struct rk_crypto_dev *rk_dev,
120 bool (*hw_is_algo_valid)(struct rk_crypto_dev *rk_dev,
125 int (*start)(struct rk_crypto_dev *rk_dev);
126 int (*update)(struct rk_crypto_dev *rk_dev);
130 int (*hw_write_key)(struct rk_crypto_dev *rk_dev, const u8 *key, u32 key_len);
131 void (*hw_write_iv)(struct rk_crypto_dev *rk_dev, const u8 *iv, u32 iv_len);
[all …]

rk_crypto_ahash_utils.h
13 struct rk_alg_ctx *rk_ahash_alg_ctx(struct rk_crypto_dev *rk_dev);
17 struct rk_ahash_ctx *rk_ahash_ctx_cast(struct rk_crypto_dev *rk_dev);
31 int rk_ahash_crypto_rx(struct rk_crypto_dev *rk_dev);
33 int rk_ahash_start(struct rk_crypto_dev *rk_dev);

rk_crypto_utils.c
31 void rk_crypto_write_regs(struct rk_crypto_dev *rk_dev, u32 base_addr, const u8 *data, u32 bytes) in rk_crypto_write_regs() argument
37 CRYPTO_WRITE(rk_dev, base_addr, byte2word_be(data + i * 4)); in rk_crypto_write_regs()
42 CRYPTO_WRITE(rk_dev, base_addr, byte2word_be(tmp_buf)); in rk_crypto_write_regs()
46 void rk_crypto_clear_regs(struct rk_crypto_dev *rk_dev, u32 base_addr, u32 words) in rk_crypto_clear_regs() argument
51 CRYPTO_WRITE(rk_dev, base_addr, 0); in rk_crypto_clear_regs()
54 void rk_crypto_read_regs(struct rk_crypto_dev *rk_dev, u32 base_addr, u8 *data, u32 bytes) in rk_crypto_read_regs() argument
59 word2byte_be(CRYPTO_READ(rk_dev, base_addr), data + i * 4); in rk_crypto_read_regs()
64 word2byte_be(CRYPTO_READ(rk_dev, base_addr), tmp_buf); in rk_crypto_read_regs()
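
rk_crypto_write_regs()/rk_crypto_read_regs() bridge byte-string keys and IVs to a 32-bit big-endian register file, bouncing any partial tail word through a zero-padded buffer (the tmp_buf visible in the excerpt). A sketch assuming byte2word_be() is a thin wrapper over the kernel's unaligned big-endian accessor:

#include <linux/string.h>
#include <asm/unaligned.h>

static inline u32 byte2word_be(const u8 *ch)
{
	return get_unaligned_be32(ch);
}

static void rk_crypto_write_regs_sketch(struct rk_crypto_dev *rk_dev,
					u32 base_addr, const u8 *data,
					u32 bytes)
{
	u8 tmp_buf[4];
	u32 i;

	/* whole 32-bit words straight from the source buffer */
	for (i = 0; i < bytes / 4; i++, base_addr += 4)
		CRYPTO_WRITE(rk_dev, base_addr, byte2word_be(data + i * 4));

	/* zero-pad a trailing partial word through a bounce buffer */
	if (bytes % 4) {
		memset(tmp_buf, 0x00, sizeof(tmp_buf));
		memcpy(tmp_buf, data + i * 4, bytes % 4);
		CRYPTO_WRITE(rk_dev, base_addr, byte2word_be(tmp_buf));
	}
}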

rk_crypto_utils.h
37 void rk_crypto_write_regs(struct rk_crypto_dev *rk_dev, u32 base_addr, const u8 *data, u32 bytes);
39 void rk_crypto_clear_regs(struct rk_crypto_dev *rk_dev, u32 base_addr, u32 words);
41 void rk_crypto_read_regs(struct rk_crypto_dev *rk_dev, u32 base_addr, u8 *data, u32 bytes);

rk_crypto_v1.h
48 bool rk_hw_crypto_v1_algo_valid(struct rk_crypto_dev *rk_dev, struct rk_crypto_algt *aglt);
56 static inline bool rk_hw_crypto_v1_algo_valid(struct rk_crypto_dev *rk_dev, in rk_hw_crypto_v1_algo_valid() argument

rk_crypto_v3.h
80 bool rk_hw_crypto_v3_algo_valid(struct rk_crypto_dev *rk_dev, struct rk_crypto_algt *aglt);
88 static inline bool rk_hw_crypto_v3_algo_valid(struct rk_crypto_dev *rk_dev, in rk_hw_crypto_v3_algo_valid() argument

rk_crypto_v2.h
79 bool rk_hw_crypto_v2_algo_valid(struct rk_crypto_dev *rk_dev, struct rk_crypto_algt *aglt);
87 static inline bool rk_hw_crypto_v2_algo_valid(struct rk_crypto_dev *rk_dev, in rk_hw_crypto_v2_algo_valid() argument

rk_crypto_v1.c
56 bool rk_hw_crypto_v1_algo_valid(struct rk_crypto_dev *rk_dev, struct rk_crypto_algt *aglt) in rk_hw_crypto_v1_algo_valid() argument

rk_crypto_v2.c
100 bool rk_hw_crypto_v2_algo_valid(struct rk_crypto_dev *rk_dev, struct rk_crypto_algt *aglt) in rk_hw_crypto_v2_algo_valid() argument