/OK3568_Linux_fs/kernel/drivers/crypto/ccp/
ccp-crypto-aes-cmac.c (all hits in ccp_aes_cmac_complete())
     28  struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);   [local]
     34  if (rctx->hash_rem) {
     36  unsigned int offset = rctx->nbytes - rctx->hash_rem;
     38  scatterwalk_map_and_copy(rctx->buf, rctx->src,
     39          offset, rctx->hash_rem, 0);
     40  rctx->buf_count = rctx->hash_rem;
     42  rctx->buf_count = 0;
     46  if (req->result && rctx->final)
     47  memcpy(req->result, rctx->iv, digest_size);
     50  sg_free_table(&rctx->data_sg);
    [all …]
|
ccp-crypto-sha.c (all hits in ccp_sha_complete())
     30  struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);   [local]
     36  if (rctx->hash_rem) {
     38  unsigned int offset = rctx->nbytes - rctx->hash_rem;
     40  scatterwalk_map_and_copy(rctx->buf, rctx->src,
     41          offset, rctx->hash_rem, 0);
     42  rctx->buf_count = rctx->hash_rem;
     44  rctx->buf_count = 0;
     48  if (req->result && rctx->final)
     49  memcpy(req->result, rctx->ctx, digest_size);
     52  sg_free_table(&rctx->data_sg);
    [all …]
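Both CCP hash completions above share one shape: stash the unhashed remainder back into the request-context buffer, copy the digest out only when the request was final, then free the scatter-gather table. Below is a minimal sketch of that pattern with a stub standing in for the driver's real request context; the field names come from the hits (the two files differ only in copying from rctx->iv vs rctx->ctx), everything else is assumed.

```c
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <crypto/scatterwalk.h>

struct ccp_hash_rctx_stub {		/* stand-in for ccp_sha_req_ctx */
	unsigned int nbytes, hash_rem, buf_count;
	bool final;
	u8 buf[128], ctx[64];
	struct scatterlist *src;
	struct sg_table data_sg;
};

static int ccp_hash_complete_sketch(struct ccp_hash_rctx_stub *rctx,
				    u8 *result, unsigned int digest_size,
				    int ret)
{
	if (ret)
		goto e_free;

	if (rctx->hash_rem) {
		/* Save the tail that did not fill a whole block. */
		unsigned int offset = rctx->nbytes - rctx->hash_rem;

		scatterwalk_map_and_copy(rctx->buf, rctx->src,
					 offset, rctx->hash_rem, 0);
		rctx->buf_count = rctx->hash_rem;
	} else {
		rctx->buf_count = 0;
	}

	/* Only final/finup/digest requests hand the digest back. */
	if (result && rctx->final)
		memcpy(result, rctx->ctx, digest_size);

e_free:
	sg_free_table(&rctx->data_sg);
	return ret;
}
```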
|
ccp-crypto-aes-xts.c
  in ccp_aes_xts_complete():
     65  struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);   [local]
     70  memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);
  in ccp_aes_xts_crypt():
    109  struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);   [local]
    151  skcipher_request_set_tfm(&rctx->fallback_req,
    153  skcipher_request_set_callback(&rctx->fallback_req,
    157  skcipher_request_set_crypt(&rctx->fallback_req, req->src,
    159  ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
    160          crypto_skcipher_decrypt(&rctx->fallback_req);
    164  memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);
    165  sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);
    [all …]
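The fallback_req hits in ccp_aes_xts_crypt() are the stock skcipher fallback dance for cases the engine cannot handle: retarget the nested request at the software tfm, forward the caller's flags and completion, and dispatch in the requested direction. A hedged sketch; in the driver the fallback tfm lives in the tfm context rather than being passed in as here.

```c
#include <crypto/skcipher.h>

/* Sketch of the fallback path, assuming the request context embeds a
 * struct skcipher_request fallback_req as the hits indicate. */
static int xts_fallback_sketch(struct skcipher_request *req,
			       struct skcipher_request *fallback_req,
			       struct crypto_skcipher *fallback_tfm,
			       bool encrypt)
{
	skcipher_request_set_tfm(fallback_req, fallback_tfm);
	skcipher_request_set_callback(fallback_req, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(fallback_req, req->src, req->dst,
				   req->cryptlen, req->iv);
	return encrypt ? crypto_skcipher_encrypt(fallback_req) :
			 crypto_skcipher_decrypt(fallback_req);
}
```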
|
ccp-crypto-aes-galois.c (all hits in ccp_aes_gcm_crypt())
     80  struct ccp_aes_req_ctx *rctx = aead_request_ctx(req);   [local]
    105  memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
    107  rctx->iv[i + GCM_AES_IV_SIZE] = 0;
    108  rctx->iv[AES_BLOCK_SIZE - 1] = 1;
    111  iv_sg = &rctx->iv_sg;
    113  sg_init_one(iv_sg, rctx->iv, iv_len);
    116  memset(&rctx->cmd, 0, sizeof(rctx->cmd));
    117  INIT_LIST_HEAD(&rctx->cmd.entry);
    118  rctx->cmd.engine = CCP_ENGINE_AES;
    119  rctx->cmd.u.aes.authsize = crypto_aead_authsize(tfm);
    [all …]
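The rctx->iv hits in ccp_aes_gcm_crypt() build the GCM initial counter block J0: the 12-byte nonce followed by a 32-bit counter initialised to 1, wrapped in a one-entry scatterlist for the engine. A sketch; only the buffer sizes implied by the hits are assumed.

```c
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <crypto/aes.h>		/* AES_BLOCK_SIZE (16) */
#include <crypto/gcm.h>		/* GCM_AES_IV_SIZE (12) */

static void gcm_build_j0_sketch(u8 iv[AES_BLOCK_SIZE], const u8 *nonce,
				struct scatterlist *iv_sg)
{
	int i;

	memcpy(iv, nonce, GCM_AES_IV_SIZE);
	for (i = 0; i < AES_BLOCK_SIZE - GCM_AES_IV_SIZE; i++)
		iv[i + GCM_AES_IV_SIZE] = 0;
	iv[AES_BLOCK_SIZE - 1] = 1;	/* counter starts at 1 */

	sg_init_one(iv_sg, iv, AES_BLOCK_SIZE);
}
```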
|
ccp-crypto-aes.c
  in ccp_aes_complete():
     26  struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);   [local]
     32  memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);
  in ccp_aes_crypt():
     69  struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);   [local]
     86  memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);
     87  iv_sg = &rctx->iv_sg;
     89  sg_init_one(iv_sg, rctx->iv, iv_len);
     92  memset(&rctx->cmd, 0, sizeof(rctx->cmd));
     93  INIT_LIST_HEAD(&rctx->cmd.entry);
     94  rctx->cmd.engine = CCP_ENGINE_AES;
     95  rctx->cmd.u.aes.type = ctx->u.aes.type;
    [all …]
|
ccp-crypto-des3.c
  in ccp_des3_complete():
     25  struct ccp_des3_req_ctx *rctx = skcipher_request_ctx(req);   [local]
     31  memcpy(req->iv, rctx->iv, DES3_EDE_BLOCK_SIZE);
  in ccp_des3_crypt():
     64  struct ccp_des3_req_ctx *rctx = skcipher_request_ctx(req);   [local]
     81  memcpy(rctx->iv, req->iv, DES3_EDE_BLOCK_SIZE);
     82  iv_sg = &rctx->iv_sg;
     84  sg_init_one(iv_sg, rctx->iv, iv_len);
     87  memset(&rctx->cmd, 0, sizeof(rctx->cmd));
     88  INIT_LIST_HEAD(&rctx->cmd.entry);
     89  rctx->cmd.engine = CCP_ENGINE_DES3;
     90  rctx->cmd.u.des3.type = ctx->u.des3.type;
    [all …]
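The three crypt entry points above (ccp_aes_gcm_crypt(), ccp_aes_crypt(), ccp_des3_crypt()) all finish by preparing the ccp_cmd embedded in the request context: zero it, initialise its list head so it can be queued to the device, and select the engine. A sketch with a stub command struct; the real ccp_cmd carries far more state.

```c
#include <linux/list.h>
#include <linux/string.h>

enum ccp_engine_stub { CCP_ENGINE_AES_STUB, CCP_ENGINE_DES3_STUB };

struct ccp_cmd_stub {			/* stand-in for struct ccp_cmd */
	struct list_head entry;
	enum ccp_engine_stub engine;
};

static void ccp_cmd_init_sketch(struct ccp_cmd_stub *cmd,
				enum ccp_engine_stub engine)
{
	memset(cmd, 0, sizeof(*cmd));
	INIT_LIST_HEAD(&cmd->entry);	/* later queued onto the device */
	cmd->engine = engine;
}
```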
|
/OK3568_Linux_fs/kernel/drivers/crypto/qce/
sha.c (all hits in qce_ahash_done())
     35  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);   [local]
     47  dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
     48  dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
     50  memcpy(rctx->digest, result->auth_iv, digestsize);
     54  rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);
     55  rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);
     61  req->src = rctx->src_orig;
     62  req->nbytes = rctx->nbytes_orig;
     63  rctx->last_blk = false;
     64  rctx->first_blk = false;
    [all …]
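qce_ahash_done() mirrors the submit path: unmap the DMA buffers, then capture the intermediate digest and byte count from the hardware result dump so a later update can resume. A sketch with stub types; the field names follow the hits, the layouts are assumed.

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/types.h>

struct qce_result_stub {		/* stand-in for the result dump */
	u8  auth_iv[64];
	u32 auth_byte_count[2];
};

struct qce_sha_rctx_stub {		/* stand-in for qce_sha_reqctx */
	u8 digest[64];
	__be32 byte_count[2];
	int src_nents;
	struct scatterlist result_sg;
	bool first_blk, last_blk;
};

static void qce_ahash_done_sketch(struct device *dev,
				  struct scatterlist *src,
				  struct qce_sha_rctx_stub *rctx,
				  const struct qce_result_stub *result,
				  unsigned int digestsize)
{
	/* Release the mappings created when the request was queued. */
	dma_unmap_sg(dev, src, rctx->src_nents, DMA_TO_DEVICE);
	dma_unmap_sg(dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);

	/* Carry the running digest and byte count into the next block. */
	memcpy(rctx->digest, result->auth_iv, digestsize);
	rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);
	rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);

	rctx->last_blk = false;
	rctx->first_blk = false;
}
```

The driver additionally restores req->src = rctx->src_orig and req->nbytes = rctx->nbytes_orig, undoing the rewrite the submit path performed for partial-block handling.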
|
skcipher.c
  in qce_skcipher_done():
     30  struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);   [local]
     49  dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
     50  dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
     52  sg_free_table(&rctx->dst_tbl);
     58  memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize);
  in qce_skcipher_async_req_handle():
     66  struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);   [local]
     76  rctx->iv = req->iv;
     77  rctx->ivsize = crypto_skcipher_ivsize(skcipher);
     78  rctx->cryptlen = req->cryptlen;
     84  rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);
    [all …]
|
common.c (all hits in qce_setup_regs_ahash())
    147  struct qce_sha_reqctx *rctx = ahash_request_ctx(req);   [local]
    158  if (!rctx->last_blk && req->nbytes % blocksize)
    163  if (IS_CMAC(rctx->flags)) {
    171  auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen);
    174  if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {
    175  u32 authkey_words = rctx->authklen / sizeof(u32);
    177  qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);
    182  if (IS_CMAC(rctx->flags))
    185  if (rctx->first_blk)
    186  memcpy(auth, rctx->digest, digestsize);
    [all …]
|
/OK3568_Linux_fs/kernel/drivers/crypto/cavium/nitrox/
nitrox_aead.c (all hits in nitrox_set_creq())
    152  static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)   [argument]
    154  struct se_crypto_request *creq = &rctx->nkreq.creq;
    158  creq->flags = rctx->flags;
    159  creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
    164  creq->ctrl.s.arg = rctx->ctrl_arg;
    166  creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
    167  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
    168  creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
    170  param3.auth_offset = rctx->ivsize;
    173  creq->ctx_handle = rctx->ctx_handle;
    [all …]
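nitrox_set_creq() translates the request context into the command the SE engine consumes. The notable part is the generic packet header: three 16-bit big-endian parameters encoding the cipher span, the cipher-plus-AAD span, and the IV-plus-AAD prefix length. A sketch with stub structs; the real layouts live in the nitrox request definitions.

```c
#include <linux/crypto.h>	/* CRYPTO_TFM_REQ_MAY_SLEEP */
#include <linux/gfp.h>
#include <linux/types.h>

struct nitrox_gph_stub {		/* stand-in for the packet header */
	__be16 param0, param1, param2;
};

struct nitrox_rctx_stub {		/* stand-in for nitrox_aead_rctx */
	u32 flags;
	unsigned int cryptlen, assoclen, ivsize;
};

static void nitrox_pack_gph_sketch(struct nitrox_gph_stub *gph,
				   const struct nitrox_rctx_stub *rctx,
				   gfp_t *gfp)
{
	/* Allocate atomically unless the caller allows sleeping. */
	*gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
							  GFP_ATOMIC;

	gph->param0 = cpu_to_be16(rctx->cryptlen);
	gph->param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
	gph->param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
}
```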
|
/OK3568_Linux_fs/kernel/drivers/crypto/bcm/
cipher.c (all hits in spu_skcipher_rx_sg_create())
    134  struct iproc_reqctx_s *rctx,   [argument]
    140  struct iproc_ctx_s *ctx = rctx->ctx;
    144  rctx->gfp);
    151  sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
    156  sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak,
    160  datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,
    161          rctx->dst_nents, chunksize);
    169  sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);
    171  memset(rctx->msg_buf.rx_stat, 0, SPU_RX_STATUS_LEN);
    172  sg_set_buf(sg, rctx->msg_buf.rx_stat, spu->spu_rx_status_len());
    [all …]
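spu_skcipher_rx_sg_create() lays out a scatterlist describing where each piece of the SPU response lands: response header, an optional tweak buffer, the payload (spliced from the caller's destination sg via spu_msg_sg_add()), status padding, then the status word. A reduced sketch of the same idea using only core scatterlist calls; the buffers are stand-ins and the payload splice is simplified to a single entry.

```c
#include <linux/scatterlist.h>

static int rx_sg_create_sketch(struct sg_table *tbl, gfp_t gfp,
			       void *resp_hdr, unsigned int hdr_len,
			       struct scatterlist *payload, /* caller's dst */
			       void *rx_stat, unsigned int stat_len)
{
	struct scatterlist *sg;
	int err;

	err = sg_alloc_table(tbl, 3, gfp);
	if (err)
		return err;

	sg = tbl->sgl;
	sg_set_buf(sg, resp_hdr, hdr_len);	/* response header first */
	sg = sg_next(sg);
	sg_set_page(sg, sg_page(payload), payload->length, payload->offset);
	sg = sg_next(sg);
	sg_set_buf(sg, rx_stat, stat_len);	/* status word last */
	return 0;
}
```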
|
/OK3568_Linux_fs/kernel/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-hash.c
  in sun8i_ss_hash_init():
     75  struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);   [local]
     79  memset(rctx, 0, sizeof(struct sun8i_ss_hash_reqctx));
     81  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
     82  rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
     84  return crypto_ahash_init(&rctx->fallback_req);
  in sun8i_ss_hash_export():
     89  struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);   [local]
     93  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
     94  rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
     96  return crypto_ahash_export(&rctx->fallback_req, out);
  in sun8i_ss_hash_import():
    101  struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);   [local]
    [all …]
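Every hook in sun8i-ss-hash.c (and, per the later hits, sun8i-ce-hash.c and the rockchip drivers) delegates wholesale to a software ahash: after zeroing the request context, retarget the embedded fallback request, propagate only CRYPTO_TFM_REQ_MAY_SLEEP from the caller's flags, and invoke the matching fallback operation. A sketch of the init case; export/import differ only in the final call.

```c
#include <crypto/internal/hash.h>

/* Sketch, assuming the request context embeds an ahash_request
 * fallback_req as the hits indicate. */
static int hash_init_fallback_sketch(struct ahash_request *areq,
				     struct ahash_request *fallback_req,
				     struct crypto_ahash *fallback_tfm)
{
	ahash_request_set_tfm(fallback_req, fallback_tfm);
	/* Forward only the sleep flag; completion stays with this driver. */
	fallback_req->base.flags = areq->base.flags &
				   CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(fallback_req);
}
```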
|
sun8i-ss-cipher.c
  in sun8i_ss_cipher_fallback():
     74  struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);   [local]
     84  skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
     85  skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
     87  skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
     89  if (rctx->op_dir & SS_DECRYPTION)
     90  err = crypto_skcipher_decrypt(&rctx->fallback_req);
     92  err = crypto_skcipher_encrypt(&rctx->fallback_req);
  in sun8i_ss_setup_ivs():
    101  struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);   [local]
    106  struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
    111  rctx->ivlen = ivsize;
    [all …]
|
/OK3568_Linux_fs/kernel/drivers/crypto/stm32/
stm32-hash.c
  in stm32_hash_write_ctrl():
    253  struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);   [local]
    260  switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {
    277  reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);
    279  if (rctx->flags & HASH_FLAGS_HMAC) {
  in stm32_hash_append_sg():
    296  static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)   [argument]
    300  while ((rctx->bufcnt < rctx->buflen) && rctx->total) {
    301  count = min(rctx->sg->length - rctx->offset, rctx->total);
    302  count = min(count, rctx->buflen - rctx->bufcnt);
    305  if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
    306  rctx->sg = sg_next(rctx->sg);
    [all …]
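stm32_hash_append_sg() linearises the input scatterlist into the context buffer, clamping each copy by both the remaining input and the remaining buffer space, and advancing to the next sg entry once the current one is consumed. A sketch with a stub context; the field names are from the hits.

```c
#include <linux/minmax.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>

struct stm32_hash_rctx_stub {		/* stand-in for the request ctx */
	struct scatterlist *sg;
	unsigned int offset, total, bufcnt, buflen;
	u8 buffer[256];
};

static void append_sg_sketch(struct stm32_hash_rctx_stub *rctx)
{
	unsigned int count;

	while ((rctx->bufcnt < rctx->buflen) && rctx->total) {
		/* Clamp by remaining sg entry, total input, buffer space. */
		count = min(rctx->sg->length - rctx->offset, rctx->total);
		count = min(count, rctx->buflen - rctx->bufcnt);

		if (count == 0) {
			/* Skip empty entries that are not the last one. */
			if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
				rctx->sg = sg_next(rctx->sg);
				continue;
			}
			break;
		}

		scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt,
					 rctx->sg, rctx->offset, count, 0);

		rctx->bufcnt += count;
		rctx->offset += count;
		rctx->total -= count;

		if (rctx->offset == rctx->sg->length) {
			rctx->sg = sg_next(rctx->sg);
			rctx->offset = 0;
		}
	}
}
```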
|
/OK3568_Linux_fs/kernel/crypto/
chacha20poly1305.c
  in async_done_continue():
     74  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);   [local]
     76  rctx->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  in poly_verify_tag():
     97  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);   [local]
     98  u8 tag[sizeof(rctx->tag)];
    101          req->assoclen + rctx->cryptlen,
    103  if (crypto_memneq(tag, rctx->tag, sizeof(tag)))
  in poly_copy_tag():
    110  struct chachapoly_req_ctx *rctx = aead_request_ctx(req);   [local]
    112  scatterwalk_map_and_copy(rctx->tag, req->dst,
    113          req->assoclen + rctx->cryptlen,
    114          sizeof(rctx->tag), 1);
    [all …]
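poly_verify_tag() is the canonical AEAD decrypt-side check: read the transmitted tag from behind the ciphertext (at offset assoclen + cryptlen into the source) and compare it with crypto_memneq() so the comparison runs in constant time. A sketch; in the real template the computed tag and lengths live in the request context.

```c
#include <linux/errno.h>
#include <crypto/algapi.h>	/* crypto_memneq() */
#include <crypto/scatterwalk.h>

#define TAG_SIZE 16	/* Poly1305 tag length */

static int poly_verify_tag_sketch(struct scatterlist *src,
				  unsigned int assoclen,
				  unsigned int cryptlen,
				  const u8 computed[TAG_SIZE])
{
	u8 tag[TAG_SIZE];

	scatterwalk_map_and_copy(tag, src, assoclen + cryptlen,
				 sizeof(tag), 0);
	if (crypto_memneq(tag, computed, sizeof(tag)))
		return -EBADMSG;
	return 0;
}
```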
|
xts.c
  in xts_xor_tweak():
     85  struct xts_request_ctx *rctx = skcipher_request_ctx(req);   [local]
     90  le128 t = rctx->t;
     94  req = &rctx->subreq;
    113  rctx->t = t;
    118  gf128mul_x_ble(&rctx->t, &t);
  in xts_cts_done():
    149  struct xts_request_ctx *rctx = skcipher_request_ctx(req);   [local]
    151  scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
    152  le128_xor(&b, &rctx->t, &b);
    153  scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
  in xts_cts_final():
    165  struct xts_request_ctx *rctx = skcipher_request_ctx(req);   [local]
    [all …]
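The xts_xor_tweak() hits show the core of XTS: each block is XORed with the running tweak T around the inner ECB pass, and T steps by multiplication by x in GF(2^128) using the little-endian block convention. A sketch of the per-block walk over already-mapped buffers:

```c
#include <crypto/b128ops.h>	/* le128, le128_xor() */
#include <crypto/gf128mul.h>	/* gf128mul_x_ble() */

static void xts_xor_tweak_sketch(le128 *t, le128 *dst, const le128 *src,
				 unsigned int nblocks)
{
	unsigned int i;

	for (i = 0; i < nblocks; i++) {
		le128_xor(&dst[i], t, &src[i]);	/* out = in xor T */
		gf128mul_x_ble(t, t);		/* T = T * x */
	}
}
```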
|
adiantum.c
  in adiantum_hash_header():
    223  struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);   [local]
    243  poly1305_core_emit(&state, NULL, &rctx->header_hash);
  in adiantum_hash_message():
    252  struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);   [local]
    254  struct shash_desc *hash_desc = &rctx->u.hash_desc;
  in adiantum_finish():
    286  struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);   [local]
    292  if (!rctx->enc)
    293  crypto_cipher_decrypt_one(tctx->blockcipher, rctx->rbuf.bytes,
    294          rctx->rbuf.bytes);
    304  le128_add(&digest, &digest, &rctx->header_hash);
    305  le128_sub(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);
    [all …]
|
rmd256.c (all hits in rmd256_init())
    230  struct rmd256_ctx *rctx = shash_desc_ctx(desc);   [local]
    232  rctx->byte_count = 0;
    234  rctx->state[0] = RMD_H0;
    235  rctx->state[1] = RMD_H1;
    236  rctx->state[2] = RMD_H2;
    237  rctx->state[3] = RMD_H3;
    238  rctx->state[4] = RMD_H5;
    239  rctx->state[5] = RMD_H6;
    240  rctx->state[6] = RMD_H7;
    241  rctx->state[7] = RMD_H8;
    [all …]
|
/OK3568_Linux_fs/kernel/drivers/crypto/allwinner/sun8i-ce/
sun8i-ce-cipher.c
  in sun8i_ce_cipher_fallback():
     57  struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);   [local]
     67  skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
     68  skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
     70  skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
     72  if (rctx->op_dir & CE_DECRYPTION)
     73  err = crypto_skcipher_decrypt(&rctx->fallback_req);
     75  err = crypto_skcipher_encrypt(&rctx->fallback_req);
  in sun8i_ce_cipher_prepare():
     85  struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);   [local]
    103          rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
    110  flow = rctx->flow;
    [all …]
|
sun8i-ce-hash.c
  in sun8i_ce_hash_init():
     75  struct sun8i_ce_hash_reqctx *rctx = ahash_request_ctx(areq);   [local]
     79  memset(rctx, 0, sizeof(struct sun8i_ce_hash_reqctx));
     81  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
     82  rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
     84  return crypto_ahash_init(&rctx->fallback_req);
  in sun8i_ce_hash_export():
     89  struct sun8i_ce_hash_reqctx *rctx = ahash_request_ctx(areq);   [local]
     93  ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
     94  rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
     96  return crypto_ahash_export(&rctx->fallback_req, out);
  in sun8i_ce_hash_import():
    101  struct sun8i_ce_hash_reqctx *rctx = ahash_request_ctx(areq);   [local]
    [all …]
|
/OK3568_Linux_fs/kernel/drivers/crypto/
sahara.c
  in sahara_aes_process():
    555  struct sahara_aes_reqctx *rctx;   [local]
    569  rctx = skcipher_request_ctx(req);
    571  rctx->mode &= FLAGS_MODE_MASK;
    572  dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;
  in sahara_aes_crypt():
    629  struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);   [local]
    642  rctx->mode = mode;
  in sahara_aes_ecb_encrypt():
    655  struct sahara_aes_reqctx *rctx = skcipher_request_ctx(req);   [local]
    660  skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
    661  skcipher_request_set_callback(&rctx->fallback_req,
    665  skcipher_request_set_crypt(&rctx->fallback_req, req->src,
    [all …]
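In sahara.c the request context records the requested direction/mode (rctx->mode = mode in sahara_aes_crypt()), and sahara_aes_process() later merges it into the device flags, clearing the previous mode bits first. A sketch; the mask value and device struct are assumptions.

```c
#define FLAGS_MODE_MASK_STUB	0x000fUL  /* assumed: low bits hold the mode */

struct sahara_dev_stub {		/* stand-in for struct sahara_dev */
	unsigned long flags;
};

static void sahara_set_mode_sketch(struct sahara_dev_stub *dev,
				   unsigned long mode)
{
	/* Replace only the mode bits, preserving the rest of the flags. */
	dev->flags = (dev->flags & ~FLAGS_MODE_MASK_STUB) |
		     (mode & FLAGS_MODE_MASK_STUB);
}
```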
|
omap-aes-gcm.c
  in omap_aes_gcm_done_task():
     46  struct omap_aes_reqctx *rctx;   [local]
     50  rctx = aead_request_ctx(dd->aead_req);
     65  scatterwalk_map_and_copy(rctx->auth_tag,
     77  tag = (u8 *)rctx->auth_tag;
  in omap_aes_gcm_dma_out_callback():
    185  struct omap_aes_reqctx *rctx;   [local]
    194  rctx = aead_request_ctx(dd->aead_req);
    195  auth_tag = (u32 *)rctx->auth_tag;
  in omap_aes_gcm_prepare_req():
    219  struct omap_aes_reqctx *rctx = aead_request_ctx(req);   [local]
    220  struct omap_aes_dev *dd = rctx->dd;
    226  rctx->mode &= FLAGS_MODE_MASK;
    [all …]
|
/OK3568_Linux_fs/kernel/drivers/crypto/rockchip/
rk_crypto_v1_ahash.c
  in rk_ahash_reg_init():
     97  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
    118  CRYPTO_WRITE(rk_dev, RK_CRYPTO_HASH_CTRL, rctx->mode |
  in rk_ahash_init():
    130  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
    134  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
    135  rctx->fallback_req.base.flags = req->base.flags &
    138  return crypto_ahash_init(&rctx->fallback_req);
  in rk_ahash_update():
    143  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
    147  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
    148  rctx->fallback_req.base.flags = req->base.flags &
    150  rctx->fallback_req.nbytes = req->nbytes;
    [all …]
|
rk3288_crypto_ahash.c
  in rk_ahash_reg_init():
     50  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
     70  CRYPTO_WRITE(dev, RK_CRYPTO_HASH_CTRL, rctx->mode |
  in rk_ahash_init():
     82  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
     86  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
     87  rctx->fallback_req.base.flags = req->base.flags &
     90  return crypto_ahash_init(&rctx->fallback_req);
  in rk_ahash_update():
     95  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
     99  ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
    100  rctx->fallback_req.base.flags = req->base.flags &
    102  rctx->fallback_req.nbytes = req->nbytes;
    [all …]
|
rk_crypto_ahash_utils.c
  in rk_ahash_init():
    196  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
    202  memset(rctx, 0x00, sizeof(*rctx));
  in rk_ahash_update():
    212  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
    217  memset(rctx, 0x00, sizeof(*rctx));
    219  rctx->flag = RK_FLAG_UPDATE;
  in rk_ahash_final():
    228  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
    233  memset(rctx, 0x00, sizeof(*rctx));
    235  rctx->flag = RK_FLAG_FINAL;
  in rk_ahash_finup():
    252  struct rk_ahash_rctx *rctx = ahash_request_ctx(req);   [local]
    257  memset(rctx, 0x00, sizeof(*rctx));
    [all …]
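Unlike the two register-level files above, the utils variant does not touch a fallback: each entry point zeroes the request context and tags it with the stage being performed (RK_FLAG_UPDATE, RK_FLAG_FINAL) for the shared processing path to act on. A sketch of that preamble; the flag names come from the hits, their encoding and the struct are assumed.

```c
#include <linux/bits.h>
#include <linux/string.h>
#include <linux/types.h>

#define RK_FLAG_UPDATE_STUB	BIT(0)	/* assumed encoding */
#define RK_FLAG_FINAL_STUB	BIT(1)

struct rk_ahash_rctx_stub {		/* stand-in for rk_ahash_rctx */
	u32 flag;
};

static void rk_ahash_start_stage_sketch(struct rk_ahash_rctx_stub *rctx,
					u32 flag)
{
	memset(rctx, 0x00, sizeof(*rctx));
	rctx->flag = flag;	/* RK_FLAG_UPDATE or RK_FLAG_FINAL */
}
```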
|