Searched refs:hwq (Results 1 – 25 of 56) sorted by relevance


/OK3568_Linux_fs/kernel/drivers/infiniband/hw/bnxt_re/
qplib_res.h
72 #define HWQ_CMP(idx, hwq) ((idx) & ((hwq)->max_elements - 1)) argument
74 #define HWQ_FREE_SLOTS(hwq) (hwq->max_elements - \ argument
75 ((HWQ_CMP(hwq->prod, hwq)\
76 - HWQ_CMP(hwq->cons, hwq))\
77 & (hwq->max_elements - 1)))
174 struct bnxt_qplib_hwq *hwq; member
292 static inline u8 bnxt_qplib_base_pg_size(struct bnxt_qplib_hwq *hwq) in bnxt_qplib_base_pg_size() argument
297 pbl = &hwq->pbl[PBL_LVL_0]; in bnxt_qplib_base_pg_size()
324 static inline void *bnxt_qplib_get_qe(struct bnxt_qplib_hwq *hwq, in bnxt_qplib_get_qe() argument
329 pg_num = (indx / hwq->qe_ppg); in bnxt_qplib_get_qe()
[all …]
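
The qplib_res.h hits above show the ring arithmetic the rest of the bnxt_re driver builds on: HWQ_CMP() masks a free-running index into a power-of-two ring, and HWQ_FREE_SLOTS() derives the remaining capacity from the masked producer/consumer distance. A minimal user-space sketch of the same arithmetic follows; struct ring and the helper names are invented for illustration, not part of the driver.

#include <stdint.h>
#include <stdio.h>

/* Power-of-two ring, mirroring HWQ_CMP() / HWQ_FREE_SLOTS() above.
 * prod and cons are free-running counters; the mask folds them into
 * the ring because max_elements is a power of two. */
struct ring {
    uint32_t prod;
    uint32_t cons;
    uint32_t max_elements;   /* must be a power of two */
};

static uint32_t ring_cmp(uint32_t idx, const struct ring *r)
{
    return idx & (r->max_elements - 1);             /* HWQ_CMP() */
}

static uint32_t ring_free_slots(const struct ring *r)
{
    uint32_t used = (ring_cmp(r->prod, r) - ring_cmp(r->cons, r)) &
                    (r->max_elements - 1);          /* occupied entries */

    return r->max_elements - used;                  /* HWQ_FREE_SLOTS() */
}

int main(void)
{
    struct ring r = { .prod = 10, .cons = 7, .max_elements = 8 };

    printf("prod slot %u, %u free\n", ring_cmp(r.prod, &r), ring_free_slots(&r));
    return 0;
}

Because the ring size is a power of two, the mask replaces a modulo and the free-running counters may wrap without any special handling.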
qplib_fp.c
75 dev_dbg(&scq->hwq.pdev->dev, in __bnxt_qplib_add_flush_qp()
83 dev_dbg(&rcq->hwq.pdev->dev, in __bnxt_qplib_add_flush_qp()
142 qp->sq.hwq.prod = 0; in bnxt_qplib_clean_qp()
143 qp->sq.hwq.cons = 0; in bnxt_qplib_clean_qp()
145 qp->rq.hwq.prod = 0; in bnxt_qplib_clean_qp()
146 qp->rq.hwq.cons = 0; in bnxt_qplib_clean_qp()
236 struct bnxt_qplib_hwq *hwq = &nq->hwq; in clean_nq() local
243 spin_lock_bh(&hwq->lock); in clean_nq()
245 raw_cons = hwq->cons; in clean_nq()
247 sw_cons = HWQ_CMP(raw_cons, hwq); in clean_nq()
[all …]
qplib_res.c
157 struct bnxt_qplib_hwq *hwq) in bnxt_qplib_free_hwq() argument
161 if (!hwq->max_elements) in bnxt_qplib_free_hwq()
163 if (hwq->level >= PBL_LVL_MAX) in bnxt_qplib_free_hwq()
166 for (i = 0; i < hwq->level + 1; i++) { in bnxt_qplib_free_hwq()
167 if (i == hwq->level) in bnxt_qplib_free_hwq()
168 __free_pbl(res, &hwq->pbl[i], hwq->is_user); in bnxt_qplib_free_hwq()
170 __free_pbl(res, &hwq->pbl[i], false); in bnxt_qplib_free_hwq()
173 hwq->level = PBL_LVL_MAX; in bnxt_qplib_free_hwq()
174 hwq->max_elements = 0; in bnxt_qplib_free_hwq()
175 hwq->element_size = 0; in bnxt_qplib_free_hwq()
[all …]
qplib_rcfw.c
92 struct bnxt_qplib_hwq *hwq = &cmdq->hwq; in __send_message() local
126 spin_lock_irqsave(&hwq->lock, flags); in __send_message()
127 if (req->cmd_size >= HWQ_FREE_SLOTS(hwq)) { in __send_message()
129 spin_unlock_irqrestore(&hwq->lock, flags); in __send_message()
143 spin_unlock_irqrestore(&hwq->lock, flags); in __send_message()
168 sw_prod = HWQ_CMP(hwq->prod, hwq); in __send_message()
169 cmdqe = bnxt_qplib_get_qe(hwq, sw_prod, NULL); in __send_message()
180 hwq->prod++; in __send_message()
184 cmdq_prod = hwq->prod; in __send_message()
200 spin_unlock_irqrestore(&hwq->lock, flags); in __send_message()
[all …]
qplib_fp.h
99 struct bnxt_qplib_hwq hwq; member
247 struct bnxt_qplib_hwq hwq; member
356 struct bnxt_qplib_hwq *hwq; in bnxt_qplib_queue_full() local
359 hwq = &que->hwq; in bnxt_qplib_queue_full()
361 avail = hwq->cons - hwq->prod; in bnxt_qplib_queue_full()
362 if (hwq->cons <= hwq->prod) in bnxt_qplib_queue_full()
363 avail += hwq->depth; in bnxt_qplib_queue_full()
400 struct bnxt_qplib_hwq hwq; member
473 struct bnxt_qplib_hwq hwq; member
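bnxt_qplib_queue_full() in qplib_fp.h (lines 356-363 above) computes availability differently from HWQ_FREE_SLOTS(): here prod and cons are in-range indices, so the free space is cons - prod, corrected by depth whenever the consumer sits at or behind the producer. A hedged, standalone restatement of that check (queue_space()/queue_full() are hypothetical names):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Free slots between producer and consumer indices in a ring of
 * 'depth' entries, mirroring the avail computation in
 * bnxt_qplib_queue_full(). */
static int queue_space(uint32_t prod, uint32_t cons, uint32_t depth)
{
    int avail = (int)cons - (int)prod;

    if (cons <= prod)            /* consumer wrapped (or ring is empty) */
        avail += (int)depth;
    return avail;
}

static bool queue_full(uint32_t prod, uint32_t cons, uint32_t depth,
                       uint8_t slots)
{
    return queue_space(prod, cons, depth) <= slots;
}

int main(void)
{
    printf("space: %d\n", queue_space(6, 2, 8));      /* 4 free slots */
    printf("full for 4 slots: %d\n", queue_full(6, 2, 8, 4));  /* 1 */
    return 0;
}

With prod = 6, cons = 2 and depth = 8 there are 4 free slots, so a request needing 4 slots is rejected by the <= comparison.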
qplib_sp.c
584 if (mrw->hwq.max_elements) in bnxt_qplib_free_mrw()
585 bnxt_qplib_free_hwq(res, &mrw->hwq); in bnxt_qplib_free_mrw()
643 if (mrw->hwq.max_elements) { in bnxt_qplib_dereg_mrw()
646 bnxt_qplib_free_hwq(res, &mrw->hwq); in bnxt_qplib_dereg_mrw()
681 if (mr->hwq.max_elements) in bnxt_qplib_reg_mr()
682 bnxt_qplib_free_hwq(res, &mr->hwq); in bnxt_qplib_reg_mr()
692 rc = bnxt_qplib_alloc_init_hwq(&mr->hwq, &hwq_attr); in bnxt_qplib_reg_mr()
699 pbl_ptr = (dma_addr_t **)mr->hwq.pbl_ptr; in bnxt_qplib_reg_mr()
708 if (mr->hwq.level == PBL_LVL_MAX) { in bnxt_qplib_reg_mr()
714 level = mr->hwq.level + 1; in bnxt_qplib_reg_mr()
[all …]
qplib_rcfw.h
148 struct bnxt_qplib_hwq hwq; member
167 struct bnxt_qplib_hwq hwq; member
qplib_sp.h
119 struct bnxt_qplib_hwq hwq; member
124 struct bnxt_qplib_hwq hwq; member
main.c
1041 nq->hwq.max_elements = BNXT_QPLIB_NQE_MAX_CNT; in bnxt_re_alloc_res()
1049 rattr.dma_arr = nq->hwq.pbl[PBL_LVL_0].pg_map_arr; in bnxt_re_alloc_res()
1050 rattr.pages = nq->hwq.pbl[rdev->nq[i].hwq.level].pg_count; in bnxt_re_alloc_res()
1453 rattr.dma_arr = creq->hwq.pbl[PBL_LVL_0].pg_map_arr; in bnxt_re_dev_init()
1454 rattr.pages = creq->hwq.pbl[creq->hwq.level].pg_count; in bnxt_re_dev_init()
/OK3568_Linux_fs/kernel/drivers/scsi/cxlflash/
main.c
159 struct hwq *hwq = get_hwq(afu, cmd->hwq_index); in cmd_complete() local
161 spin_lock_irqsave(&hwq->hsq_slock, lock_flags); in cmd_complete()
163 spin_unlock_irqrestore(&hwq->hsq_slock, lock_flags); in cmd_complete()
191 static void flush_pending_cmds(struct hwq *hwq) in flush_pending_cmds() argument
193 struct cxlflash_cfg *cfg = hwq->afu->parent; in flush_pending_cmds()
198 list_for_each_entry_safe(cmd, tmp, &hwq->pending_cmds, list) { in flush_pending_cmds()
235 static int context_reset(struct hwq *hwq, __be64 __iomem *reset_reg) in context_reset() argument
237 struct cxlflash_cfg *cfg = hwq->afu->parent; in context_reset()
244 dev_dbg(dev, "%s: hwq=%p\n", __func__, hwq); in context_reset()
246 spin_lock_irqsave(&hwq->hsq_slock, lock_flags); in context_reset()
[all …]
common.h
196 struct hwq { struct
231 struct hwq hwqs[CXLFLASH_MAX_HWQS]; argument
233 int (*context_reset)(struct hwq *hwq);
255 static inline struct hwq *get_hwq(struct afu *afu, u32 index) in get_hwq()
superpipe.c
267 struct hwq *hwq = get_hwq(afu, PRIMARY_HWQ); in afu_attach() local
291 val = hwq->ctx_hndl; in afu_attach()
298 val = SISL_RHT_CNT_ID((u64)MAX_RHT_PER_CONTEXT, (u64)(hwq->ctx_hndl)); in afu_attach()
1659 struct hwq *hwq = get_hwq(afu, PRIMARY_HWQ); in cxlflash_afu_recover() local
1736 reg = readq_be(&hwq->ctrl_map->mbox_r); in cxlflash_afu_recover()
/OK3568_Linux_fs/kernel/drivers/net/wireless/mediatek/mt76/mt7915/
dma.c
11 struct mt76_queue *hwq; in mt7915_init_tx_queues() local
14 hwq = devm_kzalloc(dev->mt76.dev, sizeof(*hwq), GFP_KERNEL); in mt7915_init_tx_queues()
15 if (!hwq) in mt7915_init_tx_queues()
18 err = mt76_queue_alloc(dev, hwq, MT7915_TXQ_BAND0, n_desc, 0, in mt7915_init_tx_queues()
24 dev->mt76.q_tx[i] = hwq; in mt7915_init_tx_queues()
32 struct mt76_queue *hwq; in mt7915_init_mcu_queue() local
35 hwq = devm_kzalloc(dev->mt76.dev, sizeof(*hwq), GFP_KERNEL); in mt7915_init_mcu_queue()
36 if (!hwq) in mt7915_init_mcu_queue()
39 err = mt76_queue_alloc(dev, hwq, idx, n_desc, 0, MT_TX_RING_BASE); in mt7915_init_mcu_queue()
43 dev->mt76.q_tx[qid] = hwq; in mt7915_init_mcu_queue()
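In the mt7915 hits above, a single mt76_queue is allocated with devm_kzalloc() and every per-band software TX queue id is then pointed at it (dev->mt76.q_tx[i] = hwq), so many logical queues share one hardware ring. A deliberately generic, hypothetical sketch of that share-one-ring pattern (struct hw_ring and all names below are invented, not mt76 API):

#include <stdio.h>
#include <stdlib.h>

#define NUM_SW_QUEUES 4

/* Stand-in for the driver's hardware TX ring descriptor. */
struct hw_ring {
    int idx;       /* hardware ring index, e.g. the band-0 data ring */
    int n_desc;    /* number of descriptors in the ring */
};

int main(void)
{
    struct hw_ring *hwq = calloc(1, sizeof(*hwq));
    struct hw_ring *q_tx[NUM_SW_QUEUES];

    if (!hwq)
        return 1;

    hwq->idx = 0;
    hwq->n_desc = 2048;

    /* Every software queue id maps to the same hardware ring. */
    for (int i = 0; i < NUM_SW_QUEUES; i++)
        q_tx[i] = hwq;

    printf("all %d software queues share ring %d\n",
           NUM_SW_QUEUES, q_tx[NUM_SW_QUEUES - 1]->idx);
    free(hwq);
    return 0;
}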
/OK3568_Linux_fs/kernel/include/scsi/
scsi_tcq.h
26 u16 hwq; in scsi_host_find_tag() local
31 hwq = blk_mq_unique_tag_to_hwq(tag); in scsi_host_find_tag()
32 if (hwq < shost->tag_set.nr_hw_queues) { in scsi_host_find_tag()
33 req = blk_mq_tag_to_rq(shost->tag_set.tags[hwq], in scsi_host_find_tag()
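scsi_host_find_tag() above relies on the blk-mq "unique tag" layout: blk_mq_unique_tag_to_hwq() recovers the hardware-queue index from the upper half of a 32-bit tag, while the per-queue tag lives in the lower half. A small standalone restatement of that split, assuming the standard 16/16-bit layout (the helper names below are local to this sketch):

#include <stdint.h>
#include <stdio.h>

#define UNIQUE_TAG_BITS 16
#define UNIQUE_TAG_MASK ((1U << UNIQUE_TAG_BITS) - 1)

/* Mirror of blk_mq_unique_tag_to_hwq(): the hwq index sits in the
 * upper 16 bits of the unique tag. */
static uint16_t unique_tag_to_hwq(uint32_t unique_tag)
{
    return (uint16_t)(unique_tag >> UNIQUE_TAG_BITS);
}

/* Mirror of blk_mq_unique_tag_to_tag(): the per-queue tag sits in the
 * lower 16 bits. */
static uint16_t unique_tag_to_tag(uint32_t unique_tag)
{
    return (uint16_t)(unique_tag & UNIQUE_TAG_MASK);
}

int main(void)
{
    uint32_t unique_tag = (3u << UNIQUE_TAG_BITS) | 42u;   /* hwq 3, tag 42 */

    printf("hwq=%u tag=%u\n", (unsigned)unique_tag_to_hwq(unique_tag),
           (unsigned)unique_tag_to_tag(unique_tag));
    return 0;
}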
/OK3568_Linux_fs/kernel/drivers/net/wireless/mediatek/mt76/mt7603/
dma.c
10 struct mt76_queue *hwq; in mt7603_init_tx_queue() local
13 hwq = devm_kzalloc(dev->mt76.dev, sizeof(*hwq), GFP_KERNEL); in mt7603_init_tx_queue()
14 if (!hwq) in mt7603_init_tx_queue()
17 err = mt76_queue_alloc(dev, hwq, idx, n_desc, 0, MT_TX_RING_BASE); in mt7603_init_tx_queue()
21 dev->mt76.q_tx[qid] = hwq; in mt7603_init_tx_queue()
/OK3568_Linux_fs/kernel/drivers/net/wireless/mediatek/mt76/mt7615/
dma.c
17 struct mt76_queue *hwq; in mt7615_init_tx_queue() local
20 hwq = devm_kzalloc(dev->mt76.dev, sizeof(*hwq), GFP_KERNEL); in mt7615_init_tx_queue()
21 if (!hwq) in mt7615_init_tx_queue()
24 err = mt76_queue_alloc(dev, hwq, idx, n_desc, 0, MT_TX_RING_BASE); in mt7615_init_tx_queue()
28 dev->mt76.q_tx[qid] = hwq; in mt7615_init_tx_queue()
/OK3568_Linux_fs/kernel/drivers/net/wireless/mediatek/mt76/
tx.c
347 struct mt76_queue *hwq = dev->q_tx[MT_TXQ_PSD]; in mt76_release_buffered_frames() local
350 spin_lock_bh(&hwq->lock); in mt76_release_buffered_frames()
374 dev->queue_ops->kick(dev, hwq); in mt76_release_buffered_frames()
379 spin_unlock_bh(&hwq->lock); in mt76_release_buffered_frames()
548 struct mt76_queue *hwq; in mt76_stop_tx_queues() local
554 hwq = dev->q_tx[mt76_txq_get_qid(txq)]; in mt76_stop_tx_queues()
557 spin_lock_bh(&hwq->lock); in mt76_stop_tx_queues()
559 spin_unlock_bh(&hwq->lock); in mt76_stop_tx_queues()
mt76x02_mmio.c
109 struct mt76_queue *hwq; in mt76x02_init_tx_queue() local
112 hwq = devm_kzalloc(dev->mt76.dev, sizeof(*hwq), GFP_KERNEL); in mt76x02_init_tx_queue()
113 if (!hwq) in mt76x02_init_tx_queue()
116 err = mt76_queue_alloc(dev, hwq, idx, n_desc, 0, MT_TX_RING_BASE); in mt76x02_init_tx_queue()
120 dev->mt76.q_tx[qid] = hwq; in mt76x02_init_tx_queue()
/OK3568_Linux_fs/kernel/drivers/net/wireless/ti/wlcore/
tx.c
1201 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_stop_queue_locked() local
1202 bool stopped = !!wl->queue_stop_reasons[hwq]; in wlcore_stop_queue_locked()
1205 WARN_ON_ONCE(test_and_set_bit(reason, &wl->queue_stop_reasons[hwq])); in wlcore_stop_queue_locked()
1210 ieee80211_stop_queue(wl->hw, hwq); in wlcore_stop_queue_locked()
1227 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_wake_queue() local
1232 WARN_ON_ONCE(!test_and_clear_bit(reason, &wl->queue_stop_reasons[hwq])); in wlcore_wake_queue()
1234 if (wl->queue_stop_reasons[hwq]) in wlcore_wake_queue()
1237 ieee80211_wake_queue(wl->hw, hwq); in wlcore_wake_queue()
1304 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_is_queue_stopped_by_reason_locked() local
1307 return test_bit(reason, &wl->queue_stop_reasons[hwq]); in wlcore_is_queue_stopped_by_reason_locked()
[all …]
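
The wlcore hits above keep a per-hwq bitmask of stop reasons: the mac80211 queue is stopped when the first reason bit appears and woken only when the last one clears. A minimal sketch of that pattern, with hypothetical names and plain callbacks standing in for ieee80211_stop_queue()/ieee80211_wake_queue():

#include <stdbool.h>
#include <stdio.h>

#define NUM_HWQ 8

/* One bit per stop reason, per hardware queue. */
static unsigned long queue_stop_reasons[NUM_HWQ];

static void stop_queue(int hwq, int reason, void (*notify_stop)(int))
{
    bool already_stopped = queue_stop_reasons[hwq] != 0;

    queue_stop_reasons[hwq] |= 1UL << reason;
    if (!already_stopped)
        notify_stop(hwq);      /* only the first reason stops the queue */
}

static void wake_queue(int hwq, int reason, void (*notify_wake)(int))
{
    queue_stop_reasons[hwq] &= ~(1UL << reason);
    if (!queue_stop_reasons[hwq])
        notify_wake(hwq);      /* wake only once every reason is cleared */
}

static void hw_stop(int q) { printf("stop hwq %d\n", q); }
static void hw_wake(int q) { printf("wake hwq %d\n", q); }

int main(void)
{
    stop_queue(2, 0, hw_stop);   /* first reason: queue stops */
    stop_queue(2, 1, hw_stop);   /* second reason: already stopped */
    wake_queue(2, 0, hw_wake);   /* one reason still pending */
    wake_queue(2, 1, hw_wake);   /* last reason cleared: queue wakes */
    return 0;
}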
/OK3568_Linux_fs/kernel/drivers/scsi/
virtio_scsi.c
549 u16 hwq = blk_mq_unique_tag_to_hwq(tag); in virtscsi_pick_vq_mq() local
551 return &vscsi->req_vqs[hwq]; in virtscsi_pick_vq_mq()
724 static void virtscsi_commit_rqs(struct Scsi_Host *shost, u16 hwq) in virtscsi_commit_rqs() argument
728 virtscsi_kick_vq(&vscsi->req_vqs[hwq]); in virtscsi_commit_rqs()
/OK3568_Linux_fs/kernel/drivers/net/wireless/rockchip_wlan/rtl8723bs/core/
rtw_xmit.c
5598 void rtw_tx_desc_backup(_adapter *padapter, struct xmit_frame *pxmitframe, u8 desc_size, u8 hwq) in rtw_tx_desc_backup() argument
5610 _rtw_memcpy(tx_backup[hwq][backup_idx[hwq]].tx_bak_desc, pxmit_buf, desc_size); in rtw_tx_desc_backup()
5611 _rtw_memcpy(tx_backup[hwq][backup_idx[hwq]].tx_bak_data_hdr, pxmit_buf+desc_size, TX_BAK_DATA_LEN); in rtw_tx_desc_backup()
5613 tmp32 = rtw_read32(padapter, get_txbd_rw_reg(hwq)); in rtw_tx_desc_backup()
5615 tx_backup[hwq][backup_idx[hwq]].tx_bak_rp = (tmp32>>16)&0xfff; in rtw_tx_desc_backup()
5616 tx_backup[hwq][backup_idx[hwq]].tx_bak_wp = tmp32&0xfff; in rtw_tx_desc_backup()
5618 tx_backup[hwq][backup_idx[hwq]].tx_desc_size = desc_size; in rtw_tx_desc_backup()
5620 backup_idx[hwq] = (backup_idx[hwq] + 1) % TX_BAK_FRMAE_CNT; in rtw_tx_desc_backup()
5635 u8 rtw_get_tx_desc_backup(_adapter *padapter, u8 hwq, struct rtw_tx_desc_backup **pbak) in rtw_get_tx_desc_backup() argument
5637 *pbak = &tx_backup[hwq][0]; in rtw_get_tx_desc_backup()
[all …]
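
rtw_tx_desc_backup() above (duplicated verbatim in the rtl8822bs, rtl8188fu and rtl8189fs copies of the driver below) keeps a small per-queue circular history of recent TX descriptors plus the hardware read/write pointers decoded from a 32-bit ring register (rp in bits 27..16, wp in bits 11..0). A hedged sketch of that bookkeeping; the buffer sizes and names are assumptions, not the driver's values:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NUM_HWQ        4
#define BAK_FRAME_CNT  8
#define BAK_DESC_LEN   40

struct tx_desc_bak {
    uint8_t  desc[BAK_DESC_LEN];
    uint16_t rp;           /* hardware read pointer, bits 27..16 of the register */
    uint16_t wp;           /* hardware write pointer, bits 11..0 of the register */
    uint8_t  desc_size;
};

static struct tx_desc_bak tx_backup[NUM_HWQ][BAK_FRAME_CNT];
static unsigned int backup_idx[NUM_HWQ];

/* Record one descriptor and the current ring pointers for queue 'hwq',
 * then advance the per-queue history index modulo BAK_FRAME_CNT. */
static void tx_desc_backup(uint8_t hwq, const void *desc, uint8_t desc_size,
                           uint32_t rwreg)
{
    struct tx_desc_bak *bak = &tx_backup[hwq][backup_idx[hwq]];

    memcpy(bak->desc, desc, desc_size);
    bak->rp = (rwreg >> 16) & 0xfff;
    bak->wp = rwreg & 0xfff;
    bak->desc_size = desc_size;

    backup_idx[hwq] = (backup_idx[hwq] + 1) % BAK_FRAME_CNT;
}

int main(void)
{
    uint8_t desc[BAK_DESC_LEN] = { 0xAA };

    tx_desc_backup(1, desc, sizeof(desc), (0x123u << 16) | 0x045u);
    printf("rp=0x%x wp=0x%x next idx=%u\n", (unsigned)tx_backup[1][0].rp,
           (unsigned)tx_backup[1][0].wp, backup_idx[1]);
    return 0;
}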
/OK3568_Linux_fs/kernel/drivers/net/wireless/intel/iwlegacy/
common.h
2245 il_set_swq_id(struct il_tx_queue *txq, u8 ac, u8 hwq) in il_set_swq_id() argument
2248 BUG_ON(hwq > 31); /* only use 5 bits */ in il_set_swq_id()
2250 txq->swq_id = (hwq << 2) | ac; in il_set_swq_id()
2271 u8 hwq = (queue >> 2) & 0x1f; in il_wake_queue() local
2273 if (test_and_clear_bit(hwq, il->queue_stopped)) in il_wake_queue()
2282 u8 hwq = (queue >> 2) & 0x1f; in il_stop_queue() local
2284 if (!test_and_set_bit(hwq, il->queue_stopped)) in il_stop_queue()
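il_set_swq_id() above packs a software queue id from a 2-bit access category in the low bits and a 5-bit hardware queue number above it; il_wake_queue() and il_stop_queue() reverse the encoding. A standalone restatement of that packing (helper names are local to this sketch):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* swq_id layout used by iwlegacy: bits 1..0 = AC, bits 6..2 = hwq. */
static uint8_t set_swq_id(uint8_t ac, uint8_t hwq)
{
    assert(ac <= 3 && hwq <= 31);    /* 2-bit AC, 5-bit hwq */
    return (uint8_t)((hwq << 2) | ac);
}

static uint8_t swq_id_to_hwq(uint8_t swq_id) { return (swq_id >> 2) & 0x1f; }
static uint8_t swq_id_to_ac(uint8_t swq_id)  { return swq_id & 0x3; }

int main(void)
{
    uint8_t id = set_swq_id(2, 17);

    printf("hwq=%u ac=%u\n", (unsigned)swq_id_to_hwq(id),
           (unsigned)swq_id_to_ac(id));
    return 0;
}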
/OK3568_Linux_fs/kernel/drivers/net/wireless/rockchip_wlan/rtl8822bs/core/
rtw_xmit.c
6027 void rtw_tx_desc_backup(_adapter *padapter, struct xmit_frame *pxmitframe, u8 desc_size, u8 hwq) in rtw_tx_desc_backup() argument
6037 _rtw_memcpy(tx_backup[hwq][backup_idx[hwq]].tx_bak_desc, pxmit_buf, desc_size); in rtw_tx_desc_backup()
6038 _rtw_memcpy(tx_backup[hwq][backup_idx[hwq]].tx_bak_data_hdr, pxmit_buf+desc_size, TX_BAK_DATA_LEN); in rtw_tx_desc_backup()
6040 tmp32 = rtw_read32(padapter, get_txbd_rw_reg(hwq)); in rtw_tx_desc_backup()
6042 tx_backup[hwq][backup_idx[hwq]].tx_bak_rp = (tmp32>>16)&0xfff; in rtw_tx_desc_backup()
6043 tx_backup[hwq][backup_idx[hwq]].tx_bak_wp = tmp32&0xfff; in rtw_tx_desc_backup()
6045 tx_backup[hwq][backup_idx[hwq]].tx_desc_size = desc_size; in rtw_tx_desc_backup()
6047 backup_idx[hwq] = (backup_idx[hwq] + 1) % TX_BAK_FRMAE_CNT; in rtw_tx_desc_backup()
6062 u8 rtw_get_tx_desc_backup(_adapter *padapter, u8 hwq, struct rtw_tx_desc_backup **pbak) in rtw_get_tx_desc_backup() argument
6064 *pbak = &tx_backup[hwq][0]; in rtw_get_tx_desc_backup()
[all …]
/OK3568_Linux_fs/kernel/drivers/net/wireless/rockchip_wlan/rtl8188fu/core/
rtw_xmit.c
5909 void rtw_tx_desc_backup(_adapter *padapter, struct xmit_frame *pxmitframe, u8 desc_size, u8 hwq) in rtw_tx_desc_backup() argument
5919 _rtw_memcpy(tx_backup[hwq][backup_idx[hwq]].tx_bak_desc, pxmit_buf, desc_size); in rtw_tx_desc_backup()
5920 _rtw_memcpy(tx_backup[hwq][backup_idx[hwq]].tx_bak_data_hdr, pxmit_buf+desc_size, TX_BAK_DATA_LEN); in rtw_tx_desc_backup()
5922 tmp32 = rtw_read32(padapter, get_txbd_rw_reg(hwq)); in rtw_tx_desc_backup()
5924 tx_backup[hwq][backup_idx[hwq]].tx_bak_rp = (tmp32>>16)&0xfff; in rtw_tx_desc_backup()
5925 tx_backup[hwq][backup_idx[hwq]].tx_bak_wp = tmp32&0xfff; in rtw_tx_desc_backup()
5927 tx_backup[hwq][backup_idx[hwq]].tx_desc_size = desc_size; in rtw_tx_desc_backup()
5929 backup_idx[hwq] = (backup_idx[hwq] + 1) % TX_BAK_FRMAE_CNT; in rtw_tx_desc_backup()
5944 u8 rtw_get_tx_desc_backup(_adapter *padapter, u8 hwq, struct rtw_tx_desc_backup **pbak) in rtw_get_tx_desc_backup() argument
5946 *pbak = &tx_backup[hwq][0]; in rtw_get_tx_desc_backup()
[all …]
/OK3568_Linux_fs/kernel/drivers/net/wireless/rockchip_wlan/rtl8189fs/core/
rtw_xmit.c
5909 void rtw_tx_desc_backup(_adapter *padapter, struct xmit_frame *pxmitframe, u8 desc_size, u8 hwq) in rtw_tx_desc_backup() argument
5919 _rtw_memcpy(tx_backup[hwq][backup_idx[hwq]].tx_bak_desc, pxmit_buf, desc_size); in rtw_tx_desc_backup()
5920 _rtw_memcpy(tx_backup[hwq][backup_idx[hwq]].tx_bak_data_hdr, pxmit_buf+desc_size, TX_BAK_DATA_LEN); in rtw_tx_desc_backup()
5922 tmp32 = rtw_read32(padapter, get_txbd_rw_reg(hwq)); in rtw_tx_desc_backup()
5924 tx_backup[hwq][backup_idx[hwq]].tx_bak_rp = (tmp32>>16)&0xfff; in rtw_tx_desc_backup()
5925 tx_backup[hwq][backup_idx[hwq]].tx_bak_wp = tmp32&0xfff; in rtw_tx_desc_backup()
5927 tx_backup[hwq][backup_idx[hwq]].tx_desc_size = desc_size; in rtw_tx_desc_backup()
5929 backup_idx[hwq] = (backup_idx[hwq] + 1) % TX_BAK_FRMAE_CNT; in rtw_tx_desc_backup()
5944 u8 rtw_get_tx_desc_backup(_adapter *padapter, u8 hwq, struct rtw_tx_desc_backup **pbak) in rtw_get_tx_desc_backup() argument
5946 *pbak = &tx_backup[hwq][0]; in rtw_get_tx_desc_backup()
[all …]
