Lines matching refs: next2fill

405 while (tq->tx_ring.next2comp != tq->tx_ring.next2fill) { in vmxnet3_tq_cleanup()
425 tq->tx_ring.next2fill = tq->tx_ring.next2comp = 0; in vmxnet3_tq_cleanup()
483 tq->tx_ring.next2fill = tq->tx_ring.next2comp = 0; in vmxnet3_tq_init()
578 rbi = rbi_base + ring->next2fill; in vmxnet3_rq_alloc_rx_buf()
579 gd = ring->base + ring->next2fill; in vmxnet3_rq_alloc_rx_buf()
649 num_allocated, ring->next2fill, ring->next2comp); in vmxnet3_rq_alloc_rx_buf()
652 BUG_ON(num_allocated != 0 && ring->next2fill == ring->next2comp); in vmxnet3_rq_alloc_rx_buf()
691 ctx->sop_txd = tq->tx_ring.base + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
697 tq->tx_ring.next2fill * in vmxnet3_map_pkt()
702 tbi = tq->buf_info + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
707 tq->tx_ring.next2fill, in vmxnet3_map_pkt()
730 tbi = tq->buf_info + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
740 gdesc = tq->tx_ring.base + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
749 tq->tx_ring.next2fill, le64_to_cpu(gdesc->txd.addr), in vmxnet3_map_pkt()
765 tbi = tq->buf_info + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
782 gdesc = tq->tx_ring.base + tq->tx_ring.next2fill; in vmxnet3_map_pkt()
791 tq->tx_ring.next2fill, le64_to_cpu(gdesc->txd.addr), in vmxnet3_map_pkt()
950 tq->tx_ring.next2fill * in vmxnet3_copy_hdr()
956 ctx->copy_size, tq->tx_ring.next2fill); in vmxnet3_copy_hdr()
1107 tq->tx_ring.next2comp, tq->tx_ring.next2fill); in vmxnet3_tq_xmit()
1203 tq->tx_ring.next2fill); in vmxnet3_tq_xmit()
1633 vmxnet3_getRxDesc(rxd, &ring->base[ring->next2fill].rxd, in vmxnet3_rq_rx_complete()
1647 ring->next2fill); in vmxnet3_rq_rx_complete()
1694 rq->rx_ring[ring_idx].next2fill = in vmxnet3_rq_cleanup()
1807 rq->rx_ring[i].next2fill = rq->rx_ring[i].next2comp = 0; in vmxnet3_rq_init()
2735 adapter->rx_queue[i].rx_ring[0].next2fill); in vmxnet3_activate_dev()
2738 adapter->rx_queue[i].rx_ring[1].next2fill); in vmxnet3_activate_dev()
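
All of these hits are uses of the same two-index ring scheme: next2fill marks the slot the driver fills next, next2comp marks the next slot to be retired, and equal indexes mean the ring is empty. That convention explains the cleanup loop at line 405 (walk next2comp forward until it catches next2fill) and the BUG_ON at line 652 (after a non-zero number of allocations the indexes must not be equal, i.e. the ring is never filled to the point where "full" becomes indistinguishable from "empty"). Below is a minimal user-space sketch of that scheme, not the driver's own code: the demo_ring type and demo_ring_* helpers are hypothetical stand-ins, though the wrap-advance and desc-avail arithmetic mirrors the inline helpers (such as vmxnet3_cmd_ring_adv_next2fill) in drivers/net/vmxnet3/vmxnet3_int.h.

    #include <assert.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the driver's ring structs. */
    struct demo_ring {
            unsigned int next2fill;   /* producer: next slot to fill */
            unsigned int next2comp;   /* consumer: next slot to retire */
            unsigned int size;
    };

    /* Advance the producer index, wrapping at the ring size. */
    static void demo_ring_adv_next2fill(struct demo_ring *ring)
    {
            if (++ring->next2fill == ring->size)
                    ring->next2fill = 0;
    }

    /* Advance the consumer index, wrapping at the ring size. */
    static void demo_ring_adv_next2comp(struct demo_ring *ring)
    {
            if (++ring->next2comp == ring->size)
                    ring->next2comp = 0;
    }

    /*
     * Slots still available to fill. Because equal indexes mean
     * "empty", one slot is always kept in reserve, so a ring of
     * size N holds at most N - 1 in-flight descriptors.
     */
    static unsigned int demo_ring_desc_avail(const struct demo_ring *ring)
    {
            return (ring->next2comp > ring->next2fill ? 0 : ring->size) +
                   ring->next2comp - ring->next2fill - 1;
    }

    int main(void)
    {
            struct demo_ring ring = { .next2fill = 0, .next2comp = 0,
                                      .size = 8 };
            unsigned int filled = 0;

            /* Fill until no slot is available; stops at size - 1. */
            while (demo_ring_desc_avail(&ring) > 0) {
                    demo_ring_adv_next2fill(&ring);
                    filled++;
            }
            printf("filled %u of %u slots\n", filled, ring.size);

            /* Retire everything, mirroring the loop at line 405. */
            while (ring.next2comp != ring.next2fill)
                    demo_ring_adv_next2comp(&ring);
            assert(demo_ring_desc_avail(&ring) == ring.size - 1);
            return 0;
    }

Keeping one slot unused is the standard way to disambiguate full from empty without a separate count field; the alternative (a fill counter alongside the indexes) costs an extra cache line touch on both the producer and consumer paths.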