Lines Matching refs:flaglen
344 __le32 flaglen; member
351 __le32 flaglen; member
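The two "member" hits above are the flaglen field of the driver's two ring descriptor layouts; all of the hits in this listing appear to come from the forcedeth (NVIDIA nForce) Ethernet driver, drivers/net/ethernet/nvidia/forcedeth.c. A rough user-space sketch of those layouts follows; field names are as they appear in the driver, but treat the exact definitions as an assumption rather than a quote.

/* Illustrative user-space model of the two descriptor layouts, not the
 * kernel definitions themselves (the driver uses __le32 fields). */
#include <stdint.h>

struct ring_desc {            /* legacy descriptor format */
	uint32_t buf;         /* DMA address of the packet buffer */
	uint32_t flaglen;     /* status/command flags ORed with the length */
};

struct ring_desc_ex {         /* extended (64-bit address / VLAN) format */
	uint32_t bufhigh;     /* high 32 bits of the DMA address */
	uint32_t buflow;      /* low 32 bits of the DMA address */
	uint32_t txvlan;      /* VLAN tag for transmit */
	uint32_t flaglen;     /* flags + length, same role as in the legacy format */
};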
952 return le32_to_cpu(prd->flaglen) in nv_descr_getlength()
958 return le32_to_cpu(prd->flaglen) & LEN_MASK_V2; in nv_descr_getlength_ex()
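The getlength helpers recover the packet length by converting flaglen from little-endian and masking off the flag bits; the hit at line 952 is cut short presumably because the mask expression continues on the next source line. A minimal sketch of that pattern: LEN_MASK_V2 appears in the listing, but the mask values and the descr_getlength() name below are placeholders, not the driver's constants.

/* Minimal model of the getlength helpers: convert flaglen from little-endian
 * and mask off the flag bits to leave only the length.  The mask values are
 * placeholders, not the driver's real LEN_MASK_V1/LEN_MASK_V2 values. */
#include <stdint.h>
#include <endian.h>                 /* le32toh(): stand-in for le32_to_cpu() */

#define LEN_MASK_V1 0x000007ffu     /* assumed width for v1 descriptors */
#define LEN_MASK_V2 0x00003fffu     /* assumed width for v2/v3 descriptors */

static uint32_t descr_getlength(uint32_t flaglen_le, int desc_ver)
{
	uint32_t flaglen = le32toh(flaglen_le);

	/* Bits above the mask are flag/status bits; the low bits hold the
	 * packet length. */
	return flaglen & (desc_ver == 1 ? LEN_MASK_V1 : LEN_MASK_V2);
}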
1852 np->put_rx.orig->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX_AVAIL); in nv_alloc_rx()
1894 np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL); in nv_alloc_rx_optimized()
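In the rx refill paths, writing flaglen is what hands a freshly allocated buffer to the NIC: the buffer size is ORed with an "available" flag. A sketch of that pattern; NV_RX_AVAIL's value is assumed and publish_rx_buffer() is an illustrative name, not a driver function.

/* Sketch of the rx refill step: a fresh buffer is published to the NIC by
 * writing the buffer size together with the "available" bit into flaglen. */
#include <stdint.h>
#include <endian.h>                 /* htole32(): stand-in for cpu_to_le32() */

#define NV_RX_AVAIL (1u << 31)      /* assumed: descriptor owned by the NIC */

struct ring_desc {
	uint32_t buf;
	uint32_t flaglen;
};

static void publish_rx_buffer(struct ring_desc *desc, uint32_t dma_addr,
			      uint32_t rx_buf_sz)
{
	desc->buf = htole32(dma_addr);
	/* flaglen is written last: the buffer address must be visible to the
	 * device before the AVAIL bit makes the descriptor live (the driver
	 * puts a write barrier between the two stores). */
	desc->flaglen = htole32(rx_buf_sz | NV_RX_AVAIL);
}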
1937 np->rx_ring.orig[i].flaglen = 0; in nv_init_rx()
1940 np->rx_ring.ex[i].flaglen = 0; in nv_init_rx()
1973 np->tx_ring.orig[i].flaglen = 0; in nv_init_tx()
1976 np->tx_ring.ex[i].flaglen = 0; in nv_init_tx()
2036 np->tx_ring.orig[i].flaglen = 0; in nv_drain_tx()
2039 np->tx_ring.ex[i].flaglen = 0; in nv_drain_tx()
2067 np->rx_ring.orig[i].flaglen = 0; in nv_drain_rx()
2070 np->rx_ring.ex[i].flaglen = 0; in nv_drain_rx()
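The init and drain hits all store 0 to flaglen, which clears any AVAIL/VALID flag and so takes every descriptor away from the hardware while the rings are rebuilt or torn down. A trivial sketch of that idea; quiesce_ring() is an illustrative name, not a driver function.

/* Clearing flaglen across the whole ring revokes ownership of every
 * descriptor, so the NIC will not touch the buffers during setup/teardown. */
#include <stdint.h>
#include <stddef.h>

struct ring_desc {
	uint32_t buf;
	uint32_t flaglen;
};

static void quiesce_ring(struct ring_desc *ring, size_t ring_size)
{
	for (size_t i = 0; i < ring_size; i++)
		ring[i].flaglen = 0;    /* no flags set => not owned by the NIC */
}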
2276 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit()
2326 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit()
2348 prev_tx->flaglen |= cpu_to_le32(tx_flags_extra); in nv_start_xmit()
2362 start_tx->flaglen |= cpu_to_le32(tx_flags | tx_flags_extra); in nv_start_xmit()
2454 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit_optimized()
2504 put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); in nv_start_xmit_optimized()
2526 prev_tx->flaglen |= cpu_to_le32(NV_TX2_LASTPACKET); in nv_start_xmit_optimized()
2567 start_tx->flaglen |= cpu_to_le32(tx_flags | tx_flags_extra); in nv_start_xmit_optimized()
2594 np->tx_change_owner->first_tx_desc->flaglen |= in nv_tx_flip_ownership()
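The transmit hits show the fill order: each fragment gets (byte count - 1) ORed with the tx flags, the last fragment additionally gets NV_TX2_LASTPACKET (line 2526), and only at the end does start_tx (or, for deferred packets, nv_tx_flip_ownership() at line 2594) receive the flags that make the first descriptor valid, so the NIC never starts on a half-written chain. A sketch of that ordering; the flag values are placeholders and fill_tx_chain() is an illustrative helper, not driver code.

/* Sketch of the transmit fill order on extended descriptors. */
#include <stdint.h>
#include <stddef.h>
#include <endian.h>

#define NV_TX2_VALID      (1u << 31)   /* assumed: descriptor owned by NIC */
#define NV_TX2_LASTPACKET (1u << 29)   /* assumed: last fragment of a packet */

struct ring_desc_ex {
	uint32_t bufhigh, buflow, txvlan, flaglen;
};

static void fill_tx_chain(struct ring_desc_ex *desc, const uint32_t *frag_len,
			  size_t nfrags, uint32_t tx_flags)
{
	/* The first descriptor is written without the ownership flags; the
	 * remaining fragments carry the full flag set straight away. */
	desc[0].flaglen = htole32(frag_len[0] - 1);
	for (size_t i = 1; i < nfrags; i++)
		desc[i].flaglen = htole32((frag_len[i] - 1) | tx_flags);

	/* Mark the end of the packet on the last fragment... */
	desc[nfrags - 1].flaglen |= htole32(NV_TX2_LASTPACKET);

	/* ...and only then make the first descriptor valid, so the NIC never
	 * starts on a partially written chain. */
	desc[0].flaglen |= htole32(tx_flags | NV_TX2_VALID);
}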
2620 !((flags = le32_to_cpu(np->get_tx.orig->flaglen)) & NV_TX_VALID) && in nv_tx_done()
2690 !((flags = le32_to_cpu(np->get_tx.ex->flaglen)) & NV_TX2_VALID) && in nv_tx_done_optimized()
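nv_tx_done*() reclaim descriptors by testing the same word from the other side: once the NIC has transmitted a descriptor it clears the VALID bit, so "VALID no longer set" means the buffer can be unmapped and the skb freed. A one-function sketch with an assumed bit value and an illustrative function name.

/* Sketch of the tx completion test. */
#include <stdint.h>
#include <stdbool.h>
#include <endian.h>

#define NV_TX_VALID (1u << 31)      /* assumed ownership bit */

static bool tx_descriptor_completed(uint32_t flaglen_le)
{
	uint32_t flags = le32toh(flaglen_le);

	return !(flags & NV_TX_VALID);  /* hardware has given the descriptor back */
}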
2780 le32_to_cpu(np->tx_ring.orig[i].flaglen), in nv_tx_timeout()
2782 le32_to_cpu(np->tx_ring.orig[i+1].flaglen), in nv_tx_timeout()
2784 le32_to_cpu(np->tx_ring.orig[i+2].flaglen), in nv_tx_timeout()
2786 le32_to_cpu(np->tx_ring.orig[i+3].flaglen)); in nv_tx_timeout()
2796 le32_to_cpu(np->tx_ring.ex[i].flaglen), in nv_tx_timeout()
2799 le32_to_cpu(np->tx_ring.ex[i+1].flaglen), in nv_tx_timeout()
2802 le32_to_cpu(np->tx_ring.ex[i+2].flaglen), in nv_tx_timeout()
2805 le32_to_cpu(np->tx_ring.ex[i+3].flaglen)); in nv_tx_timeout()
2905 !((flags = le32_to_cpu(np->get_rx.orig->flaglen)) & NV_RX_AVAIL) && in nv_rx_process()
3007 !((flags = le32_to_cpu(np->get_rx.ex->flaglen)) & NV_RX2_AVAIL) && in nv_rx_process_optimized()
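The receive loops are symmetric: while NV_RX_AVAIL/NV_RX2_AVAIL is still set the NIC owns the descriptor; once it is cleared, the rest of flaglen carries the status flags and the received length in its low bits. A sketch combining the ownership test with the length extraction; bit and mask values are assumed and rx_descriptor_ready() is an illustrative name.

/* Sketch of the rx completion test plus length extraction. */
#include <stdint.h>
#include <stdbool.h>
#include <endian.h>

#define NV_RX2_AVAIL (1u << 31)     /* assumed: still owned by the NIC */
#define LEN_MASK_V2  0x00003fffu    /* assumed length mask */

static bool rx_descriptor_ready(uint32_t flaglen_le, uint32_t *len)
{
	uint32_t flags = le32toh(flaglen_le);

	if (flags & NV_RX2_AVAIL)
		return false;           /* the NIC has not filled this one yet */

	*len = flags & LEN_MASK_V2;     /* received length lives in the low bits */
	return true;
}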
5197 np->tx_ring.orig[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); in nv_loopback_test()
5201 np->tx_ring.ex[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); in nv_loopback_test()
5210 flags = le32_to_cpu(np->rx_ring.orig[0].flaglen); in nv_loopback_test()
5214 flags = le32_to_cpu(np->rx_ring.ex[0].flaglen); in nv_loopback_test()