Lines matching refs: ccb

73 bnad_cq_cleanup(struct bnad *bnad, struct bna_ccb *ccb)  in bnad_cq_cleanup()  argument
78 for (i = 0; i < ccb->q_depth; i++) { in bnad_cq_cleanup()
79 cmpl = &((struct bna_cq_entry *)ccb->sw_q)[i]; in bnad_cq_cleanup()
519 bnad_cq_setup_skb_frags(struct bna_ccb *ccb, struct sk_buff *skb, u32 nvecs) in bnad_cq_setup_skb_frags() argument
527 cq = ccb->sw_q; in bnad_cq_setup_skb_frags()
528 pi = ccb->producer_index; in bnad_cq_setup_skb_frags()
531 rcb = bna_is_small_rxq(cmpl->rxq_id) ? ccb->rcb[1] : ccb->rcb[0]; in bnad_cq_setup_skb_frags()
561 BNA_QE_INDX_INC(pi, ccb->q_depth); in bnad_cq_setup_skb_frags()
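
Both bnad_cq_setup_skb_frags() and bnad_cq_process() advance their position in the ring with BNA_QE_INDX_INC(pi, ccb->q_depth). The macro body is not part of this listing; purely as an illustration, a wrap-around index increment over a power-of-two ring depth is commonly written with a mask, as in the sketch below (the mask form and the QE_INDX_INC name here are assumptions, not a quote from the driver headers).

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for BNA_QE_INDX_INC: advance a queue index and
     * wrap at q_depth. Assumes q_depth is a power of two, so wrapping can be
     * done with a mask instead of a compare. */
    #define QE_INDX_INC(_idx, _q_depth) \
        ((_idx) = ((_idx) + 1) & ((_q_depth) - 1))

    int main(void)
    {
        uint32_t pi = 0, depth = 8;
        int i;

        for (i = 0; i < 10; i++) {
            printf("%u ", pi);
            QE_INDX_INC(pi, depth);
        }
        printf("\n");   /* prints: 0 1 2 3 4 5 6 7 0 1 */
        return 0;
    }
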
587 bnad_cq_process(struct bnad *bnad, struct bna_ccb *ccb, int budget) in bnad_cq_process() argument
594 struct bna_pkt_rate *pkt_rt = &ccb->pkt_rate; in bnad_cq_process()
595 struct bnad_rx_ctrl *rx_ctrl = ccb->ctrl; in bnad_cq_process()
602 cq = ccb->sw_q; in bnad_cq_process()
605 cmpl = &cq[ccb->producer_index]; in bnad_cq_process()
620 rcb = ccb->rcb[1]; in bnad_cq_process()
622 rcb = ccb->rcb[0]; in bnad_cq_process()
649 pi = ccb->producer_index; in bnad_cq_process()
651 BNA_QE_INDX_INC(pi, ccb->q_depth); in bnad_cq_process()
692 bnad_cq_setup_skb_frags(ccb, skb, nvecs); in bnad_cq_process()
696 ccb->bytes_per_intr += totlen; in bnad_cq_process()
722 cmpl = &cq[ccb->producer_index]; in bnad_cq_process()
724 BNA_QE_INDX_INC(ccb->producer_index, ccb->q_depth); in bnad_cq_process()
729 if (likely(test_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags))) in bnad_cq_process()
730 bna_ib_ack_disable_irq(ccb->i_dbell, packets); in bnad_cq_process()
732 bnad_rxq_post(bnad, ccb->rcb[0]); in bnad_cq_process()
733 if (ccb->rcb[1]) in bnad_cq_process()
734 bnad_rxq_post(bnad, ccb->rcb[1]); in bnad_cq_process()
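
Taken together, the bnad_cq_process() references above outline the receive path: walk completion entries in the software queue starting at ccb->producer_index, route each one to the small or large RCB depending on bna_is_small_rxq(), account the bytes in ccb->bytes_per_intr, advance the index, then ack the interrupt block and repost receive buffers. The userspace sketch below mirrors only that control flow; the struct layouts, is_small_rxq(), and the stub names are placeholders, not the driver's definitions.

    #include <stdint.h>
    #include <stdio.h>

    #define CQ_DEPTH 16                               /* power of two */
    #define QE_INDX_INC(i, d) ((i) = ((i) + 1) & ((d) - 1))

    struct cq_entry { uint32_t rxq_id; uint32_t length; uint8_t valid; };
    struct rcb_stub { const char *name; };

    struct ccb_stub {
        struct cq_entry sw_q[CQ_DEPTH];    /* software completion ring */
        uint32_t producer_index;
        uint32_t q_depth;
        uint64_t bytes_per_intr;
        struct rcb_stub *rcb[2];           /* [0] = large rxq, [1] = small rxq */
    };

    /* Placeholder for bna_is_small_rxq(): here, odd rxq ids count as "small". */
    static int is_small_rxq(uint32_t rxq_id) { return rxq_id & 1; }

    static int cq_process(struct ccb_stub *ccb, int budget)
    {
        int packets = 0;

        while (packets < budget) {
            struct cq_entry *cmpl = &ccb->sw_q[ccb->producer_index];
            struct rcb_stub *rcb;

            if (!cmpl->valid)              /* nothing more completed */
                break;

            rcb = is_small_rxq(cmpl->rxq_id) ? ccb->rcb[1] : ccb->rcb[0];
            printf("completion on %s, %u bytes\n", rcb->name, cmpl->length);

            ccb->bytes_per_intr += cmpl->length;
            cmpl->valid = 0;               /* consume the entry */
            QE_INDX_INC(ccb->producer_index, ccb->q_depth);
            packets++;
        }
        /* the driver would now ack the IB doorbell and repost rx buffers */
        return packets;
    }

    int main(void)
    {
        struct rcb_stub large = { "large rxq" }, small = { "small rxq" };
        struct ccb_stub ccb = { .q_depth = CQ_DEPTH, .rcb = { &large, &small } };

        ccb.sw_q[0] = (struct cq_entry){ .rxq_id = 0, .length = 1500, .valid = 1 };
        ccb.sw_q[1] = (struct cq_entry){ .rxq_id = 1, .length = 128,  .valid = 1 };

        printf("processed %d packets\n", cq_process(&ccb, 64));
        return 0;
    }
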
740 bnad_netif_rx_schedule_poll(struct bnad *bnad, struct bna_ccb *ccb) in bnad_netif_rx_schedule_poll() argument
742 struct bnad_rx_ctrl *rx_ctrl = (struct bnad_rx_ctrl *)(ccb->ctrl); in bnad_netif_rx_schedule_poll()
755 struct bna_ccb *ccb = (struct bna_ccb *)data; in bnad_msix_rx() local
757 if (ccb) { in bnad_msix_rx()
758 ((struct bnad_rx_ctrl *)ccb->ctrl)->rx_intr_ctr++; in bnad_msix_rx()
759 bnad_netif_rx_schedule_poll(ccb->bnad, ccb); in bnad_msix_rx()
839 if (rx_ctrl->ccb) in bnad_isr()
841 rx_ctrl->ccb); in bnad_isr()
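
bnad_msix_rx() is the per-CQ MSI-X handler, and the legacy path in bnad_isr() does the same work for each rx_ctrl that has a live ccb: bump the rx interrupt counter and hand the ccb to bnad_netif_rx_schedule_poll(). The cookie registered with the vector is the ccb itself, so no lookup happens in the hot path. A minimal stand-alone sketch of that shape, using stub types rather than the driver's structs:

    #include <stdio.h>

    struct rx_ctrl_stub { unsigned long rx_intr_ctr; };

    struct ccb_stub {
        int id;
        struct rx_ctrl_stub *ctrl;    /* back-pointer set in bnad_cb_ccb_setup() */
    };

    /* Placeholder for bnad_netif_rx_schedule_poll(): the real one arms NAPI. */
    static void schedule_poll(struct ccb_stub *ccb)
    {
        printf("schedule NAPI poll for CQ %d\n", ccb->id);
    }

    /* Shape of the per-CQ interrupt handler: the void * cookie is the ccb. */
    static void msix_rx(void *data)
    {
        struct ccb_stub *ccb = data;

        if (ccb) {
            ccb->ctrl->rx_intr_ctr++;
            schedule_poll(ccb);
        }
    }

    int main(void)
    {
        struct rx_ctrl_stub ctrl = { 0 };
        struct ccb_stub ccb = { 0, &ctrl };

        msix_rx(&ccb);
        printf("rx_intr_ctr = %lu\n", ctrl.rx_intr_ctr);
        return 0;
    }
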
1019 bnad_cb_ccb_setup(struct bnad *bnad, struct bna_ccb *ccb) in bnad_cb_ccb_setup() argument
1022 (struct bnad_rx_info *)ccb->cq->rx->priv; in bnad_cb_ccb_setup()
1024 rx_info->rx_ctrl[ccb->id].ccb = ccb; in bnad_cb_ccb_setup()
1025 ccb->ctrl = &rx_info->rx_ctrl[ccb->id]; in bnad_cb_ccb_setup()
1029 bnad_cb_ccb_destroy(struct bnad *bnad, struct bna_ccb *ccb) in bnad_cb_ccb_destroy() argument
1032 (struct bnad_rx_info *)ccb->cq->rx->priv; in bnad_cb_ccb_destroy()
1034 rx_info->rx_ctrl[ccb->id].ccb = NULL; in bnad_cb_ccb_destroy()
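
bnad_cb_ccb_setup() and bnad_cb_ccb_destroy() only wire up, and later tear down, the two-way link between a ccb and the rx_ctrl slot selected by ccb->id; destroy clears just the rx_ctrl side, which is why the callbacks further down all test rx_ctrl->ccb before using it. A toy version of that pairing, with hypothetical stub types and array size:

    #include <stddef.h>
    #include <stdio.h>

    #define MAX_RXPS 4    /* hypothetical number of CQs per rx object */

    struct ccb_link;

    struct rx_ctrl_slot { struct ccb_link *ccb; };

    struct ccb_link {
        int id;                        /* index into the rx_ctrl[] array */
        struct rx_ctrl_slot *ctrl;     /* back-pointer filled in at setup */
    };

    struct rx_info_stub { struct rx_ctrl_slot rx_ctrl[MAX_RXPS]; };

    static void ccb_setup(struct rx_info_stub *ri, struct ccb_link *ccb)
    {
        ri->rx_ctrl[ccb->id].ccb = ccb;         /* rx_ctrl -> ccb */
        ccb->ctrl = &ri->rx_ctrl[ccb->id];      /* ccb -> rx_ctrl */
    }

    static void ccb_destroy(struct rx_info_stub *ri, struct ccb_link *ccb)
    {
        ri->rx_ctrl[ccb->id].ccb = NULL;        /* later paths test this first */
    }

    int main(void)
    {
        struct rx_info_stub ri = { 0 };
        struct ccb_link ccb = { .id = 2 };

        ccb_setup(&ri, &ccb);
        printf("linked: %d\n", ri.rx_ctrl[2].ccb == &ccb);
        ccb_destroy(&ri, &ccb);
        printf("unlinked: %d\n", ri.rx_ctrl[2].ccb == NULL);
        return 0;
    }
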
1153 struct bna_ccb *ccb; in bnad_cb_rx_stall() local
1159 ccb = rx_ctrl->ccb; in bnad_cb_rx_stall()
1160 if (!ccb) in bnad_cb_rx_stall()
1163 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[0]->flags); in bnad_cb_rx_stall()
1165 if (ccb->rcb[1]) in bnad_cb_rx_stall()
1166 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[1]->flags); in bnad_cb_rx_stall()
1186 if (!rx_ctrl->ccb) in bnad_rx_cleanup()
1189 bnad = rx_ctrl->ccb->bnad; in bnad_rx_cleanup()
1197 bnad_cq_cleanup(bnad, rx_ctrl->ccb); in bnad_rx_cleanup()
1198 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[0]); in bnad_rx_cleanup()
1199 if (rx_ctrl->ccb->rcb[1]) in bnad_rx_cleanup()
1200 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[1]); in bnad_rx_cleanup()
1212 struct bna_ccb *ccb; in bnad_cb_rx_cleanup() local
1218 ccb = rx_ctrl->ccb; in bnad_cb_rx_cleanup()
1219 if (!ccb) in bnad_cb_rx_cleanup()
1222 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags); in bnad_cb_rx_cleanup()
1224 if (ccb->rcb[1]) in bnad_cb_rx_cleanup()
1225 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[1]->flags); in bnad_cb_rx_cleanup()
1235 struct bna_ccb *ccb; in bnad_cb_rx_post() local
1242 ccb = rx_ctrl->ccb; in bnad_cb_rx_post()
1243 if (!ccb) in bnad_cb_rx_post()
1249 rcb = ccb->rcb[j]; in bnad_cb_rx_post()
1567 if (rx_info->rx_ctrl[i].ccb == NULL) in bnad_rx_msix_unregister()
1570 vector_num = rx_info->rx_ctrl[i].ccb->intr_vector; in bnad_rx_msix_unregister()
1572 rx_info->rx_ctrl[i].ccb); in bnad_rx_msix_unregister()
1588 vector_num = rx_info->rx_ctrl[i].ccb->intr_vector; in bnad_rx_msix_register()
1589 sprintf(rx_info->rx_ctrl[i].ccb->name, "%s CQ %d", in bnad_rx_msix_register()
1591 rx_id + rx_info->rx_ctrl[i].ccb->id); in bnad_rx_msix_register()
1594 rx_info->rx_ctrl[i].ccb->name, in bnad_rx_msix_register()
1595 rx_info->rx_ctrl[i].ccb); in bnad_rx_msix_register()
1761 if (!rx_ctrl->ccb) in bnad_dim_timeout()
1763 bna_rx_dim_update(rx_ctrl->ccb); in bnad_dim_timeout()
1869 rcvd = bnad_cq_process(bnad, rx_ctrl->ccb, budget); in bnad_napi_poll_rx()
1878 if (rx_ctrl->ccb) in bnad_napi_poll_rx()
1879 bnad_enable_rx_irq_unsafe(rx_ctrl->ccb); in bnad_napi_poll_rx()
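
bnad_napi_poll_rx() follows the usual NAPI contract: process up to budget completions via bnad_cq_process(), and only when fewer than budget were consumed complete the poll and re-enable the CQ interrupt via bnad_enable_rx_irq_unsafe(). A schematic of that budget logic with the processing function stubbed out (names and numbers here are illustrative only):

    #include <stdio.h>

    /* Stand-in for bnad_cq_process(): pretend 'pending' completions are queued. */
    static int pending = 70;

    static int cq_process_stub(int budget)
    {
        int n = pending < budget ? pending : budget;

        pending -= n;
        return n;
    }

    /* NAPI-style poll: stay in polling mode while the budget is exhausted,
     * re-enable the interrupt only once the queue drains below budget. */
    static int napi_poll_rx(int budget)
    {
        int rcvd = cq_process_stub(budget);

        if (rcvd >= budget)
            return budget;         /* more work left: poll will be called again */

        /* < budget: complete, then re-enable the CQ interrupt */
        printf("poll complete, re-enable rx irq\n");
        return rcvd;
    }

    int main(void)
    {
        int done;

        do {
            done = napi_poll_rx(64);
            printf("polled %d\n", done);
        } while (done == 64);
        return 0;
    }
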
2173 if (rx_info->rx_ctrl[0].ccb->intr_type == BNA_INTR_T_MSIX) in bnad_destroy_rx()
2392 if (bnad->rx_info[i].rx_ctrl[j].ccb) { in bnad_netdev_qstats_fill()
2394 rx_ctrl[j].ccb->rcb[0]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2396 rx_ctrl[j].ccb->rcb[0]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
2397 if (bnad->rx_info[i].rx_ctrl[j].ccb->rcb[1] && in bnad_netdev_qstats_fill()
2398 bnad->rx_info[i].rx_ctrl[j].ccb-> in bnad_netdev_qstats_fill()
2402 ccb->rcb[1]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2405 ccb->rcb[1]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
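
bnad_netdev_qstats_fill() walks every rx_ctrl slot and, when a ccb is present, adds the packet and byte counters of rcb[0] and, if configured, rcb[1] (the small-buffer queue) into the netdev stats. Roughly, with stub types standing in for the driver's structures:

    #include <stdint.h>
    #include <stdio.h>

    struct rxq_stats { uint64_t rx_packets, rx_bytes; };
    struct rcb_stat  { struct rxq_stats *rxq; };
    struct ccb_stat  { struct rcb_stat *rcb[2]; };    /* rcb[1] may be NULL */

    static void add_ccb_stats(const struct ccb_stat *ccb,
                              uint64_t *pkts, uint64_t *bytes)
    {
        *pkts  += ccb->rcb[0]->rxq->rx_packets;
        *bytes += ccb->rcb[0]->rxq->rx_bytes;
        if (ccb->rcb[1] && ccb->rcb[1]->rxq) {        /* optional small rxq */
            *pkts  += ccb->rcb[1]->rxq->rx_packets;
            *bytes += ccb->rcb[1]->rxq->rx_bytes;
        }
    }

    int main(void)
    {
        struct rxq_stats large = { 100, 150000 }, small = { 7, 448 };
        struct rcb_stat rcb0 = { &large }, rcb1 = { &small };
        struct ccb_stat ccb = { { &rcb0, &rcb1 } };
        uint64_t pkts = 0, bytes = 0;

        add_ccb_stats(&ccb, &pkts, &bytes);
        printf("%llu packets, %llu bytes\n",
               (unsigned long long)pkts, (unsigned long long)bytes);
        return 0;
    }
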
3397 if (rx_ctrl->ccb) in bnad_netpoll()
3399 rx_ctrl->ccb); in bnad_netpoll()