
Searched refs:hwq (Results 1 – 25 of 31) sorted by relevance


/linux-6.6.21/drivers/ufs/core/
ufs-mcq.c
115 u32 hwq = blk_mq_unique_tag_to_hwq(utag); in ufshcd_mcq_req_to_hwq() local
117 return &hba->uhq[hwq]; in ufshcd_mcq_req_to_hwq()
201 struct ufs_hw_queue *hwq; in ufshcd_mcq_memory_alloc() local
206 hwq = &hba->uhq[i]; in ufshcd_mcq_memory_alloc()
209 hwq->max_entries; in ufshcd_mcq_memory_alloc()
210 hwq->sqe_base_addr = dmam_alloc_coherent(hba->dev, utrdl_size, in ufshcd_mcq_memory_alloc()
211 &hwq->sqe_dma_addr, in ufshcd_mcq_memory_alloc()
213 if (!hwq->sqe_dma_addr) { in ufshcd_mcq_memory_alloc()
218 cqe_size = sizeof(struct cq_entry) * hwq->max_entries; in ufshcd_mcq_memory_alloc()
219 hwq->cqe_base_addr = dmam_alloc_coherent(hba->dev, cqe_size, in ufshcd_mcq_memory_alloc()
[all …]
ufshcd-priv.h
76 struct ufs_hw_queue *hwq);
78 struct ufs_hw_queue *hwq);
ufshcd.c
444 struct ufs_hw_queue *hwq = ufshcd_mcq_req_to_hwq(hba, rq); in ufshcd_add_command_trace() local
446 hwq_id = hwq->id; in ufshcd_add_command_trace()
2158 struct ufs_hw_queue *hwq) in ufshcd_send_command() argument
2177 spin_lock(&hwq->sq_lock); in ufshcd_send_command()
2178 dest = hwq->sqe_base_addr + hwq->sq_tail_slot; in ufshcd_send_command()
2180 ufshcd_inc_sq_tail(hwq); in ufshcd_send_command()
2181 spin_unlock(&hwq->sq_lock); in ufshcd_send_command()
2825 struct ufs_hw_queue *hwq = NULL; in ufshcd_queuecommand() local
2895 hwq = ufshcd_mcq_req_to_hwq(hba, scsi_cmd_to_rq(cmd)); in ufshcd_queuecommand()
2897 ufshcd_send_command(hba, tag, hwq); in ufshcd_queuecommand()
[all …]
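
Note: the ufs-mcq.c and ufshcd.c hits above route a request to its hardware queue by decoding the block layer's "unique tag". blk_mq_unique_tag_to_hwq() is the real blk-mq helper; the standalone sketch below reimplements its bit split (upper 16 bits = queue index, lower 16 bits = per-queue tag, per BLK_MQ_UNIQUE_TAG_BITS in include/linux/blk-mq.h) so the indexing into hba->uhq[hwq] is easy to follow. The demo values are illustrative.

/* Sketch: how a blk-mq unique tag splits into (hwq index, per-queue tag). */
#include <stdint.h>
#include <stdio.h>

#define BLK_MQ_UNIQUE_TAG_BITS 16
#define BLK_MQ_UNIQUE_TAG_MASK ((1u << BLK_MQ_UNIQUE_TAG_BITS) - 1)

static uint16_t unique_tag_to_hwq(uint32_t utag)
{
        return utag >> BLK_MQ_UNIQUE_TAG_BITS;   /* upper bits: hardware queue */
}

static uint16_t unique_tag_to_tag(uint32_t utag)
{
        return utag & BLK_MQ_UNIQUE_TAG_MASK;    /* lower bits: tag within it */
}

int main(void)
{
        uint32_t utag = (3u << BLK_MQ_UNIQUE_TAG_BITS) | 42;  /* hwq 3, tag 42 */

        printf("hwq=%u tag=%u\n", unique_tag_to_hwq(utag), unique_tag_to_tag(utag));
        return 0;
}
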
/linux-6.6.21/drivers/infiniband/hw/bnxt_re/
qplib_res.h
83 #define HWQ_CMP(idx, hwq) ((idx) & ((hwq)->max_elements - 1)) argument
85 #define HWQ_FREE_SLOTS(hwq) (hwq->max_elements - \ argument
86 ((HWQ_CMP(hwq->prod, hwq)\
87 - HWQ_CMP(hwq->cons, hwq))\
88 & (hwq->max_elements - 1)))
186 struct bnxt_qplib_hwq *hwq; member
311 static inline u8 bnxt_qplib_base_pg_size(struct bnxt_qplib_hwq *hwq) in bnxt_qplib_base_pg_size() argument
316 pbl = &hwq->pbl[PBL_LVL_0]; in bnxt_qplib_base_pg_size()
343 static inline void *bnxt_qplib_get_qe(struct bnxt_qplib_hwq *hwq, in bnxt_qplib_get_qe() argument
348 pg_num = (indx / hwq->qe_ppg); in bnxt_qplib_get_qe()
[all …]
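
Note: HWQ_CMP() and HWQ_FREE_SLOTS() above are standard power-of-two ring arithmetic: a free-running index is wrapped with '& (max_elements - 1)', and the free count is the depth minus the masked producer/consumer distance. A minimal self-contained sketch of the same math (struct name and demo values are illustrative):

/* Requires max_elements to be a power of two so the mask acts as modulo. */
#include <stdint.h>
#include <stdio.h>

struct ring { uint32_t prod, cons, max_elements; };

static uint32_t ring_cmp(uint32_t idx, const struct ring *r)
{
        return idx & (r->max_elements - 1);        /* HWQ_CMP() */
}

static uint32_t ring_free_slots(const struct ring *r)
{
        uint32_t used = (ring_cmp(r->prod, r) - ring_cmp(r->cons, r)) &
                        (r->max_elements - 1);     /* (prod - cons) mod n */

        return r->max_elements - used;             /* HWQ_FREE_SLOTS() */
}

int main(void)
{
        struct ring r = { .prod = 260, .cons = 250, .max_elements = 256 };

        printf("free slots: %u\n", ring_free_slots(&r));  /* 10 used -> 246 free */
        return 0;
}
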
qplib_fp.c
76 dev_dbg(&scq->hwq.pdev->dev, in __bnxt_qplib_add_flush_qp()
84 dev_dbg(&rcq->hwq.pdev->dev, in __bnxt_qplib_add_flush_qp()
143 qp->sq.hwq.prod = 0; in bnxt_qplib_clean_qp()
144 qp->sq.hwq.cons = 0; in bnxt_qplib_clean_qp()
146 qp->rq.hwq.prod = 0; in bnxt_qplib_clean_qp()
147 qp->rq.hwq.cons = 0; in bnxt_qplib_clean_qp()
237 struct bnxt_qplib_hwq *hwq = &nq->hwq; in clean_nq() local
244 spin_lock_bh(&hwq->lock); in clean_nq()
246 raw_cons = hwq->cons; in clean_nq()
248 sw_cons = HWQ_CMP(raw_cons, hwq); in clean_nq()
[all …]
qplib_res.c
157 struct bnxt_qplib_hwq *hwq) in bnxt_qplib_free_hwq() argument
161 if (!hwq->max_elements) in bnxt_qplib_free_hwq()
163 if (hwq->level >= PBL_LVL_MAX) in bnxt_qplib_free_hwq()
166 for (i = 0; i < hwq->level + 1; i++) { in bnxt_qplib_free_hwq()
167 if (i == hwq->level) in bnxt_qplib_free_hwq()
168 __free_pbl(res, &hwq->pbl[i], hwq->is_user); in bnxt_qplib_free_hwq()
170 __free_pbl(res, &hwq->pbl[i], false); in bnxt_qplib_free_hwq()
173 hwq->level = PBL_LVL_MAX; in bnxt_qplib_free_hwq()
174 hwq->max_elements = 0; in bnxt_qplib_free_hwq()
175 hwq->element_size = 0; in bnxt_qplib_free_hwq()
[all …]
qplib_rcfw.c
239 struct bnxt_qplib_hwq *hwq = &cmdq->hwq; in __send_message_no_waiter() local
264 sw_prod = HWQ_CMP(hwq->prod, hwq); in __send_message_no_waiter()
265 cmdqe = bnxt_qplib_get_qe(hwq, sw_prod, NULL); in __send_message_no_waiter()
271 hwq->prod++; in __send_message_no_waiter()
275 cmdq_prod = hwq->prod; in __send_message_no_waiter()
290 struct bnxt_qplib_hwq *hwq; in __send_message() local
298 hwq = &cmdq->hwq; in __send_message()
304 spin_lock_irqsave(&hwq->lock, flags); in __send_message()
306 free_slots = HWQ_FREE_SLOTS(hwq); in __send_message()
314 spin_unlock_irqrestore(&hwq->lock, flags); in __send_message()
[all …]
qplib_fp.h
101 struct bnxt_qplib_hwq hwq; member
249 struct bnxt_qplib_hwq hwq; member
358 struct bnxt_qplib_hwq *hwq; in bnxt_qplib_queue_full() local
361 hwq = &que->hwq; in bnxt_qplib_queue_full()
363 avail = hwq->cons - hwq->prod; in bnxt_qplib_queue_full()
364 if (hwq->cons <= hwq->prod) in bnxt_qplib_queue_full()
365 avail += hwq->depth; in bnxt_qplib_queue_full()
402 struct bnxt_qplib_hwq hwq; member
476 struct bnxt_qplib_hwq hwq; member
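
Note: bnxt_qplib_queue_full() in the qplib_fp.h hits above takes the complementary approach to HWQ_FREE_SLOTS(): instead of masking, it computes cons - prod and adds the depth back when the subtraction wraps, which also works when the depth is not a power of two. A hedged sketch of that check (struct layout and the slots threshold are reduced stand-ins):

#include <stdbool.h>
#include <stdint.h>

struct que { uint32_t prod, cons, depth; };

static bool queue_full(const struct que *q, uint32_t slots)
{
        int32_t avail = q->cons - q->prod;   /* free entries ahead of prod */

        if (q->cons <= q->prod)
                avail += q->depth;           /* wrapped: add one full lap back */
        return (uint32_t)avail <= slots;
}
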
qplib_sp.c
520 if (mrw->hwq.max_elements) in bnxt_qplib_free_mrw()
521 bnxt_qplib_free_hwq(res, &mrw->hwq); in bnxt_qplib_free_mrw()
585 if (mrw->hwq.max_elements) { in bnxt_qplib_dereg_mrw()
588 bnxt_qplib_free_hwq(res, &mrw->hwq); in bnxt_qplib_dereg_mrw()
613 if (mr->hwq.max_elements) in bnxt_qplib_reg_mr()
614 bnxt_qplib_free_hwq(res, &mr->hwq); in bnxt_qplib_reg_mr()
624 rc = bnxt_qplib_alloc_init_hwq(&mr->hwq, &hwq_attr); in bnxt_qplib_reg_mr()
637 if (mr->hwq.level == PBL_LVL_MAX) { in bnxt_qplib_reg_mr()
643 level = mr->hwq.level; in bnxt_qplib_reg_mr()
644 req.pbl = cpu_to_le64(mr->hwq.pbl[PBL_LVL_0].pg_map_arr[0]); in bnxt_qplib_reg_mr()
[all …]
qplib_rcfw.h
188 struct bnxt_qplib_hwq hwq; member
207 struct bnxt_qplib_hwq hwq; member
qplib_sp.h
119 struct bnxt_qplib_hwq hwq; member
124 struct bnxt_qplib_hwq hwq; member
main.c
1174 nq->hwq.max_elements = BNXT_QPLIB_NQE_MAX_CNT; in bnxt_re_alloc_res()
1182 rattr.dma_arr = nq->hwq.pbl[PBL_LVL_0].pg_map_arr; in bnxt_re_alloc_res()
1183 rattr.pages = nq->hwq.pbl[rdev->nq[i].hwq.level].pg_count; in bnxt_re_alloc_res()
1500 rattr.dma_arr = creq->hwq.pbl[PBL_LVL_0].pg_map_arr; in bnxt_re_dev_init()
1501 rattr.pages = creq->hwq.pbl[creq->hwq.level].pg_count; in bnxt_re_dev_init()
ib_verbs.c
2567 wqe->frmr.pbl_ptr = (__le64 *)qplib_frpl->hwq.pbl_ptr[0]; in bnxt_re_build_reg_wqe()
2568 wqe->frmr.pbl_dma_ptr = qplib_frpl->hwq.pbl_dma_ptr[0]; in bnxt_re_build_reg_wqe()
2571 wqe->frmr.levels = qplib_frpl->hwq.level; in bnxt_re_build_reg_wqe()
3007 resp.tail = cq->qplib_cq.hwq.cons; in bnxt_re_create_cq()
3613 lib_qp->id, lib_qp->sq.hwq.prod, in send_phantom_wqe()
3614 HWQ_CMP(lib_qp->sq.hwq.prod, &lib_qp->sq.hwq), in send_phantom_wqe()
3811 mr->qplib_mr.hwq.level = PBL_LVL_MAX; in bnxt_re_get_dma_mr()
/linux-6.6.21/drivers/scsi/cxlflash/
main.c
159 struct hwq *hwq = get_hwq(afu, cmd->hwq_index); in cmd_complete() local
161 spin_lock_irqsave(&hwq->hsq_slock, lock_flags); in cmd_complete()
163 spin_unlock_irqrestore(&hwq->hsq_slock, lock_flags); in cmd_complete()
191 static void flush_pending_cmds(struct hwq *hwq) in flush_pending_cmds() argument
193 struct cxlflash_cfg *cfg = hwq->afu->parent; in flush_pending_cmds()
198 list_for_each_entry_safe(cmd, tmp, &hwq->pending_cmds, list) { in flush_pending_cmds()
235 static int context_reset(struct hwq *hwq, __be64 __iomem *reset_reg) in context_reset() argument
237 struct cxlflash_cfg *cfg = hwq->afu->parent; in context_reset()
244 dev_dbg(dev, "%s: hwq=%p\n", __func__, hwq); in context_reset()
246 spin_lock_irqsave(&hwq->hsq_slock, lock_flags); in context_reset()
[all …]
common.h
196 struct hwq { struct
231 struct hwq hwqs[CXLFLASH_MAX_HWQS]; argument
233 int (*context_reset)(struct hwq *hwq);
255 static inline struct hwq *get_hwq(struct afu *afu, u32 index) in get_hwq()
superpipe.c
267 struct hwq *hwq = get_hwq(afu, PRIMARY_HWQ); in afu_attach() local
291 val = hwq->ctx_hndl; in afu_attach()
298 val = SISL_RHT_CNT_ID((u64)MAX_RHT_PER_CONTEXT, (u64)(hwq->ctx_hndl)); in afu_attach()
1660 struct hwq *hwq = get_hwq(afu, PRIMARY_HWQ); in cxlflash_afu_recover() local
1737 reg = readq_be(&hwq->ctrl_map->mbox_r); in cxlflash_afu_recover()
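
Note: in cxlflash, struct afu embeds a fixed array of struct hwq (hwqs[CXLFLASH_MAX_HWQS], visible in the common.h hits above), and the get_hwq() calls in main.c and superpipe.c are simply checked indexes into it. A minimal sketch of that accessor shape, assuming only the fields visible above (the bounds assertion stands in for a kernel-style WARN):

#include <assert.h>

#define CXLFLASH_MAX_HWQS 8      /* illustrative value, not the driver's literal */

struct hwq { unsigned int index; /* ... per-queue context, locks, ctx_hndl ... */ };
struct afu { struct hwq hwqs[CXLFLASH_MAX_HWQS]; };

static inline struct hwq *get_hwq(struct afu *afu, unsigned int index)
{
        assert(index < CXLFLASH_MAX_HWQS);   /* don't walk off the array */
        return &afu->hwqs[index];
}
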
/linux-6.6.21/include/scsi/
scsi_tcq.h
26 u16 hwq; in scsi_host_find_tag() local
31 hwq = blk_mq_unique_tag_to_hwq(tag); in scsi_host_find_tag()
32 if (hwq < shost->tag_set.nr_hw_queues) { in scsi_host_find_tag()
33 req = blk_mq_tag_to_rq(shost->tag_set.tags[hwq], in scsi_host_find_tag()
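
Note: scsi_host_find_tag() above (and nbd_handle_reply() in drivers/block/nbd.c further down) share a defensive lookup pattern: decode the hwq index from the unique tag, bounds-check it against nr_hw_queues, and only then resolve the request from that queue's tag set. A reduced sketch of the pattern, with a function pointer standing in for blk_mq_tag_to_rq():

#include <stddef.h>

struct request;

struct tag_set {
        unsigned int nr_hw_queues;
        struct request *(*tag_to_rq)(unsigned int hwq, unsigned int tag);
};

static struct request *find_req(const struct tag_set *set, unsigned int utag)
{
        unsigned int hwq = utag >> 16;        /* blk_mq_unique_tag_to_hwq() */
        unsigned int tag = utag & 0xffff;     /* blk_mq_unique_tag_to_tag() */

        if (hwq >= set->nr_hw_queues)         /* reject stale or corrupt tags */
                return NULL;
        return set->tag_to_rq(hwq, tag);
}
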
/linux-6.6.21/drivers/net/wireless/ti/wlcore/
tx.c
1199 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_stop_queue_locked() local
1200 bool stopped = !!wl->queue_stop_reasons[hwq]; in wlcore_stop_queue_locked()
1203 WARN_ON_ONCE(test_and_set_bit(reason, &wl->queue_stop_reasons[hwq])); in wlcore_stop_queue_locked()
1208 ieee80211_stop_queue(wl->hw, hwq); in wlcore_stop_queue_locked()
1225 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_wake_queue() local
1230 WARN_ON_ONCE(!test_and_clear_bit(reason, &wl->queue_stop_reasons[hwq])); in wlcore_wake_queue()
1232 if (wl->queue_stop_reasons[hwq]) in wlcore_wake_queue()
1235 ieee80211_wake_queue(wl->hw, hwq); in wlcore_wake_queue()
1302 int hwq = wlcore_tx_get_mac80211_queue(wlvif, queue); in wlcore_is_queue_stopped_by_reason_locked() local
1305 return test_bit(reason, &wl->queue_stop_reasons[hwq]); in wlcore_is_queue_stopped_by_reason_locked()
[all …]
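
Note: the wlcore hits above implement queue flow control with one reason bitmask per hardware queue: the mac80211 queue is stopped when the first reason bit is set and woken only when the mask returns to zero, so independent stop reasons cannot wake the queue early. A simplified sketch of that bookkeeping (the callbacks stand in for ieee80211_stop_queue()/ieee80211_wake_queue(); the driver uses atomic test_and_set_bit() under the queue lock):

#define NUM_HW_QUEUES 16

static unsigned long queue_stop_reasons[NUM_HW_QUEUES];

static void stop_queue(int hwq, int reason, void (*stop_cb)(int))
{
        int was_stopped = queue_stop_reasons[hwq] != 0;

        queue_stop_reasons[hwq] |= 1UL << reason;
        if (!was_stopped)
                stop_cb(hwq);           /* first reason actually stops the queue */
}

static void wake_queue(int hwq, int reason, void (*wake_cb)(int))
{
        queue_stop_reasons[hwq] &= ~(1UL << reason);
        if (queue_stop_reasons[hwq])
                return;                 /* other stop reasons still outstanding */
        wake_cb(hwq);                   /* last reason cleared: wake the queue */
}
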
/linux-6.6.21/drivers/net/wireless/mediatek/mt76/
tx.c
411 struct mt76_queue *hwq = phy->q_tx[MT_TXQ_PSD]; in mt76_release_buffered_frames() local
414 spin_lock_bh(&hwq->lock); in mt76_release_buffered_frames()
438 dev->queue_ops->kick(dev, hwq); in mt76_release_buffered_frames()
443 spin_unlock_bh(&hwq->lock); in mt76_release_buffered_frames()
644 struct mt76_queue *hwq; in mt76_stop_tx_queues() local
650 hwq = phy->q_tx[mt76_txq_get_qid(txq)]; in mt76_stop_tx_queues()
653 spin_lock_bh(&hwq->lock); in mt76_stop_tx_queues()
655 spin_unlock_bh(&hwq->lock); in mt76_stop_tx_queues()
mac80211.c
1683 struct mt76_queue *hwq; in mt76_init_queue() local
1686 hwq = devm_kzalloc(dev->dev, sizeof(*hwq), GFP_KERNEL); in mt76_init_queue()
1687 if (!hwq) in mt76_init_queue()
1690 hwq->flags = flags; in mt76_init_queue()
1692 err = dev->queue_ops->alloc(dev, hwq, idx, n_desc, 0, ring_base); in mt76_init_queue()
1696 return hwq; in mt76_init_queue()
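
Note: mt76_init_queue() above allocates the queue with devm_kzalloc(), tying the memory to the device lifetime, and defers ring setup to the bus-specific queue_ops->alloc() hook. A reduced userspace sketch of that ops-table shape (signatures are illustrative stand-ins; plain calloc()/free() replace the devm-managed allocation, which the kernel instead releases automatically at device teardown):

#include <stdlib.h>

struct queue { int flags; /* ... descriptor ring state ... */ };

struct queue_ops {
        int (*alloc)(struct queue *q, int idx, int n_desc, int ring_base);
};

static struct queue *init_queue(const struct queue_ops *ops, int idx,
                                int n_desc, int ring_base, int flags)
{
        struct queue *q = calloc(1, sizeof(*q));   /* devm_kzalloc() in the driver */

        if (!q)
                return NULL;
        q->flags = flags;
        if (ops->alloc(q, idx, n_desc, ring_base)) {
                free(q);   /* the driver leaves this cleanup to devm instead */
                return NULL;
        }
        return q;
}
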
/linux-6.6.21/drivers/scsi/
virtio_scsi.c
556 u16 hwq = blk_mq_unique_tag_to_hwq(tag); in virtscsi_pick_vq_mq() local
558 return &vscsi->req_vqs[hwq]; in virtscsi_pick_vq_mq()
730 static void virtscsi_commit_rqs(struct Scsi_Host *shost, u16 hwq) in virtscsi_commit_rqs() argument
734 virtscsi_kick_vq(&vscsi->req_vqs[hwq]); in virtscsi_commit_rqs()
/linux-6.6.21/drivers/net/wireless/intel/iwlegacy/
common.h
2246 il_set_swq_id(struct il_tx_queue *txq, u8 ac, u8 hwq) in il_set_swq_id() argument
2249 BUG_ON(hwq > 31); /* only use 5 bits */ in il_set_swq_id()
2251 txq->swq_id = (hwq << 2) | ac; in il_set_swq_id()
2272 u8 hwq = (queue >> 2) & 0x1f; in il_wake_queue() local
2274 if (test_and_clear_bit(hwq, il->queue_stopped)) in il_wake_queue()
2283 u8 hwq = (queue >> 2) & 0x1f; in il_stop_queue() local
2285 if (!test_and_set_bit(hwq, il->queue_stopped)) in il_stop_queue()
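
Note: il_set_swq_id() above packs two identifiers into one byte: the hardware queue number in bits 2..6 (five bits, hence the BUG_ON(hwq > 31)) and the access category in the low two bits; il_wake_queue()/il_stop_queue() recover the queue with (queue >> 2) & 0x1f. A worked sketch of that packing:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

static uint8_t set_swq_id(uint8_t ac, uint8_t hwq)
{
        assert(ac <= 3 && hwq <= 31);        /* 2 bits for ac, 5 bits for hwq */
        return (hwq << 2) | ac;
}

int main(void)
{
        uint8_t swq_id = set_swq_id(2, 17);  /* ac 2 on hardware queue 17 */
        uint8_t hwq = (swq_id >> 2) & 0x1f;  /* unpack as il_wake_queue() does */
        uint8_t ac = swq_id & 3;

        printf("swq_id=0x%02x hwq=%u ac=%u\n", swq_id, hwq, ac);  /* 0x46 17 2 */
        return 0;
}
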
/linux-6.6.21/Documentation/devicetree/bindings/media/
mediatek,vcodec-decoder.yaml
37 - const: hwq
/linux-6.6.21/drivers/ufs/host/
ufs-mediatek.c
1592 struct ufs_hw_queue *hwq; in ufs_mtk_mcq_intr() local
1596 hwq = &hba->uhq[qid]; in ufs_mtk_mcq_intr()
1603 ufshcd_mcq_poll_cqe_lock(hba, hwq); in ufs_mtk_mcq_intr()
/linux-6.6.21/drivers/block/
nbd.c
753 u16 hwq; in nbd_handle_reply() local
759 hwq = blk_mq_unique_tag_to_hwq(tag); in nbd_handle_reply()
760 if (hwq < nbd->tag_set.nr_hw_queues) in nbd_handle_reply()
761 req = blk_mq_tag_to_rq(nbd->tag_set.tags[hwq], in nbd_handle_reply()
