Lines Matching refs: u_ctx

768 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_wreq() local
774 qid = u_ctx->lldi.rxq_ids[rxqidx]; in create_wreq()
775 fid = u_ctx->lldi.rxq_ids[0]; in create_wreq()
778 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[portno]); in create_wreq()
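
The create_wreq() hits above (768-778) are the queue-selection step shared by the work-request builders: the response queue id for this request, the fid taken from rx queue 0, and the rx channel derived from the port the request is bound to. A minimal sketch of that step, using a hypothetical pick_queues() helper and simplified stand-in types (the real ones are struct cxgb4_lld_info and struct uld_ctx, which carry many more fields):

    /* Stand-ins for illustration only; not the real driver structures. */
    struct net_device;                                             /* opaque here */
    unsigned int cxgb4_port_e2cchan(const struct net_device *dev); /* cxgb4 helper, prototype assumed */

    struct lldi_sketch {
            const unsigned short *rxq_ids;   /* ingress (response) queue ids */
            struct net_device **ports;       /* one netdev per port/channel  */
    };

    /* Hypothetical helper mirroring the create_wreq() lines listed above. */
    static void pick_queues(const struct lldi_sketch *lldi,
                            unsigned int rxqidx, unsigned int portno,
                            unsigned int *qid, unsigned int *fid,
                            unsigned int *rx_channel_id)
    {
            *qid = lldi->rxq_ids[rxqidx];    /* response queue for this request */
            *fid = lldi->rxq_ids[0];         /* fid always taken from queue 0   */
            *rx_channel_id = cxgb4_port_e2cchan(lldi->ports[portno]);
    }
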
809 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_cipher_wr() local
826 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_cipher_wr()
1170 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_handle_cipher_resp() local
1216 wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx]; in chcr_handle_cipher_resp()
1225 skb->dev = u_ctx->lldi.ports[0]; in chcr_handle_cipher_resp()
1376 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_aes_encrypt() local
1388 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_encrypt()
1395 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_encrypt()
1399 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_encrypt()
1418 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_aes_decrypt() local
1434 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_decrypt()
1438 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_decrypt()
1442 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_decrypt()
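
chcr_aes_encrypt() and chcr_aes_decrypt() (1376-1442) use u_ctx the same way: back off if the crypto queue behind port 0 is full and the request cannot be backlogged, build the work request against the per-request rx queue id, then point the skb at port 0's netdev and hand it to the LLD. Roughly, with the error paths trimmed (a sketch, not the full function):

    if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
                                        reqctx->txqidx) &&
                 !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))
            return -ENOSPC;          /* tx queue full and caller will not backlog */

    err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
                         &skb, CHCR_ENCRYPT_OP);      /* or CHCR_DECRYPT_OP */
    if (err || !skb)
            return err;

    skb->dev = u_ctx->lldi.ports[0]; /* transmit via the first port's netdev */
    set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
    chcr_send_wr(skb);
    return -EINPROGRESS;
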
1449 struct uld_ctx *u_ctx = NULL; in chcr_device_init() local
1454 u_ctx = assign_chcr_device(); in chcr_device_init()
1455 if (!u_ctx) { in chcr_device_init()
1460 ctx->dev = &u_ctx->dev; in chcr_device_init()
1461 ntxq = u_ctx->lldi.ntxq; in chcr_device_init()
1462 rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan; in chcr_device_init()
1463 txq_perchan = ntxq / u_ctx->lldi.nchan; in chcr_device_init()
1465 ctx->nrxq = u_ctx->lldi.nrxq; in chcr_device_init()
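
chcr_device_init() (1449-1465) is where a context first binds to a uld_ctx; the per-channel queue counts are a plain even split of the LLD's rx and tx queues across the channels. With hypothetical example numbers:

    /* Hypothetical values: 16 rx queues, 16 tx queues, 2 channels. */
    unsigned int nrxq = 16, ntxq = 16, nchan = 2;
    unsigned int rxq_perchan = nrxq / nchan;    /* 8 rx queues per channel */
    unsigned int txq_perchan = ntxq / nchan;    /* 8 tx queues per channel */

These per-channel counts are what the WR builders later use to turn a request's queue index back into a port/channel number.
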
1574 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_hash_wr() local
1585 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_hash_wr()
1636 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr, in create_hash_wr()
1638 if (dma_mapping_error(&u_ctx->lldi.pdev->dev, in create_hash_wr()
1666 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_update() local
1698 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_update()
1706 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
1743 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_update()
1748 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
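
The chcr_ahash_update() hits (1666-1748) show the DMA discipline around a hash work request: the request is mapped against the LLD's PCI device (u_ctx->lldi.pdev) before the WR is built and unmapped again on the failure path; the matching unmap on completion sits in chcr_handle_ahash_resp() (2096-2136). Sketched, with the parameter setup omitted:

    /* Sketch: map, build and send the WR, unmap again if anything fails. */
    error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
    if (error)
            return -ENOMEM;

    skb = create_hash_wr(req, &params);
    if (IS_ERR(skb)) {
            error = PTR_ERR(skb);
            goto unmap;              /* undo the mapping on failure */
    }

    skb->dev = u_ctx->lldi.ports[0];
    set_wr_txq(skb, CPL_PRIORITY_DATA, req_ctx->txqidx);
    chcr_send_wr(skb);
    return -EINPROGRESS;
unmap:
    chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
    return error;
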
1771 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_final() local
1825 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_final()
1839 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_finup() local
1857 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_finup()
1864 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1918 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_finup()
1923 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1934 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_digest() local
1953 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_digest()
1961 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
2012 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_digest()
2017 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
2029 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_ahash_continue() local
2080 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_continue()
2096 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm)); in chcr_handle_ahash_resp() local
2109 dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr, in chcr_handle_ahash_resp()
2136 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_handle_ahash_resp()
2373 struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm)); in chcr_aead_common_exit() local
2375 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op); in chcr_aead_common_exit()
2444 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_authenc_wr() local
2464 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_authenc_wr()
2718 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_add_aead_dst_ent() local
2722 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in chcr_add_aead_dst_ent()
2762 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_add_cipher_dst_ent() local
2766 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in chcr_add_cipher_dst_ent()
2970 struct uld_ctx *u_ctx = ULD_CTX(ctx); in fill_sec_cpl_for_aead() local
2980 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in fill_sec_cpl_for_aead()
3142 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_gcm_wr() local
3159 rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]); in create_gcm_wr()
3746 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_aead_op() local
3763 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aead_op()
3778 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size); in chcr_aead_op()
3785 skb->dev = u_ctx->lldi.ports[0]; in chcr_aead_op()
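
chcr_aead_op() (3746-3785) is the common AEAD submission point: the caller passes the mode-specific WR builder (create_authenc_wr(), create_gcm_wr(), ...) as create_wr_fn, and the function repeats the same queue-full check and send path seen in the cipher and hash cases. A trimmed sketch (work-request accounting and mode-specific checks omitted):

    if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], reqctx->txqidx) &&
        !(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
            return -ENOSPC;

    /* Form a WR from req via the mode-specific builder. */
    skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size);
    if (IS_ERR_OR_NULL(skb))
            return PTR_ERR_OR_ZERO(skb);

    skb->dev = u_ctx->lldi.ports[0];
    set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
    chcr_send_wr(skb);
    return -EINPROGRESS;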