/linux-6.1.9/drivers/infiniband/sw/rxe/

rxe_req.c
      18  struct rxe_send_wqe *wqe, int npsn)  in retry_first_write_send()
      40  struct rxe_send_wqe *wqe;  in req_retry()  local
     113  struct rxe_send_wqe *wqe;  in req_next_wqe()  local
     179  static int rxe_wqe_is_fenced(struct rxe_qp *qp, struct rxe_send_wqe *wqe)  in rxe_wqe_is_fenced()
     320  static int next_opcode(struct rxe_qp *qp, struct rxe_send_wqe *wqe,  in next_opcode()
     350  static inline int check_init_depth(struct rxe_qp *qp, struct rxe_send_wqe *wqe)  in check_init_depth()
     382  struct rxe_send_wqe *wqe,  in init_req_packet()
     458  struct rxe_send_wqe *wqe, struct rxe_pkt_info *pkt,  in finish_packet()
     493  struct rxe_send_wqe *wqe,  in update_wqe_state()
     505  struct rxe_send_wqe *wqe,  in update_wqe_psn()
    [all …]

rxe_comp.c
     142  struct rxe_send_wqe *wqe;  in get_wqe()  local
     175  struct rxe_send_wqe *wqe)  in check_psn()
     214  struct rxe_send_wqe *wqe)  in check_ack()
     345  struct rxe_send_wqe *wqe)  in do_read()
     365  struct rxe_send_wqe *wqe)  in do_atomic()
     382  static void make_send_cqe(struct rxe_qp *qp, struct rxe_send_wqe *wqe,  in make_send_cqe()
     425  static void do_complete(struct rxe_qp *qp, struct rxe_send_wqe *wqe)  in do_complete()
     461  struct rxe_send_wqe *wqe)  in complete_ack()
     505  struct rxe_send_wqe *wqe)  in complete_wqe()
     527  struct rxe_send_wqe *wqe;  in rxe_drain_resp_pkts()  local
    [all …]

rxe_mw.c
      50  static int rxe_check_bind_mw(struct rxe_qp *qp, struct rxe_send_wqe *wqe,  in rxe_check_bind_mw()
     135  static void rxe_do_bind_mw(struct rxe_qp *qp, struct rxe_send_wqe *wqe,  in rxe_do_bind_mw()
     164  int rxe_bind_mw(struct rxe_qp *qp, struct rxe_send_wqe *wqe)  in rxe_bind_mw()

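The rxe entries above (req_next_wqe(), req_retry(), check_psn(), complete_wqe()) are the software requester and completer walking a ring of rxe_send_wqe entries. As a rough orientation only, here is a minimal, userspace C sketch of such a send-queue ring with separate posted, in-flight and completion positions; every name and field below is illustrative and is not the rxe driver's actual layout.

    /*
     * Minimal userspace model of a send work-queue ring, loosely inspired by
     * the way a soft-RoCE requester walks its send WQEs.  All names and
     * fields are invented for this sketch.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define SQ_DEPTH 8  /* power of two so index masking works */

    struct fake_send_wqe {
        uint32_t opcode;     /* e.g. send, write, read */
        uint32_t first_psn;  /* PSN assigned when first transmitted */
        uint32_t state;      /* posted / processing / complete */
    };

    struct fake_sq {
        struct fake_send_wqe ring[SQ_DEPTH];
        uint32_t producer;   /* next free slot (post_send side) */
        uint32_t consumer;   /* oldest un-completed WQE (completer side) */
        uint32_t next;       /* next WQE the requester will transmit */
    };

    /* Requester side: return the next WQE to transmit, or NULL if caught up. */
    static struct fake_send_wqe *sq_next_wqe(struct fake_sq *sq)
    {
        if (sq->next == sq->producer)
            return NULL;
        return &sq->ring[sq->next++ & (SQ_DEPTH - 1)];
    }

    /* Retry path: rewind the requester to the oldest un-acked WQE. */
    static void sq_retry(struct fake_sq *sq)
    {
        sq->next = sq->consumer;
    }

    int main(void)
    {
        struct fake_sq sq = { .producer = 3 };  /* pretend three WQEs were posted */
        struct fake_send_wqe *wqe;

        while ((wqe = sq_next_wqe(&sq)))
            printf("transmitting WQE in slot %u\n", (unsigned)(wqe - sq.ring));
        sq_retry(&sq);  /* e.g. after a NAK or timeout */
        printf("after retry, next = %u\n", sq.next);
        return 0;
    }
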
/linux-6.1.9/io_uring/

io-wq.c
      50  struct io_wqe *wqe;  member
     162  static inline struct io_wqe_acct *io_get_acct(struct io_wqe *wqe, bool bound)  in io_get_acct()
     167  static inline struct io_wqe_acct *io_work_get_acct(struct io_wqe *wqe,  in io_work_get_acct()
     187  struct io_wqe *wqe = worker->wqe;  in io_worker_cancel_cb()  local
     211  struct io_wqe *wqe = worker->wqe;  in io_worker_exit()  local
     259  static bool io_wqe_activate_free_worker(struct io_wqe *wqe,  in io_wqe_activate_free_worker()
     292  static bool io_wqe_create_worker(struct io_wqe *wqe, struct io_wqe_acct *acct)  in io_wqe_create_worker()
     324  struct io_wqe *wqe;  in create_worker_cb()  local
     352  struct io_wqe *wqe = worker->wqe;  in io_queue_worker_create()  local
     398  struct io_wqe *wqe = worker->wqe;  in io_wqe_dec_running()  local
    [all …]

/linux-6.1.9/drivers/infiniband/hw/irdma/

uk.c
      15  static void irdma_set_fragment(__le64 *wqe, u32 offset, struct ib_sge *sge,  in irdma_set_fragment()
      39  static void irdma_set_fragment_gen_1(__le64 *wqe, u32 offset,  in irdma_set_fragment_gen_1()
      61  __le64 *wqe;  in irdma_nop_1()  local
      96  __le64 *wqe;  in irdma_clr_wqes()  local
     161  void irdma_qp_push_wqe(struct irdma_qp_uk *qp, __le64 *wqe, u16 quanta,  in irdma_qp_push_wqe()
     191  __le64 *wqe;  in irdma_qp_get_next_send_wqe()  local
     246  __le64 *wqe;  in irdma_qp_get_next_recv_wqe()  local
     274  __le64 *wqe;  in irdma_uk_rdma_write()  local
     379  __le64 *wqe;  in irdma_uk_rdma_read()  local
     460  __le64 *wqe;  in irdma_uk_send()  local
    [all …]

ctrl.c
     186  __le64 *wqe;  in irdma_sc_add_arp_cache_entry()  local
     222  __le64 *wqe;  in irdma_sc_del_arp_cache_entry()  local
     256  __le64 *wqe;  in irdma_sc_manage_apbvt_entry()  local
     304  __le64 *wqe;  in irdma_sc_manage_qhash_table_entry()  local
     452  __le64 *wqe;  in irdma_sc_qp_create()  local
     502  __le64 *wqe;  in irdma_sc_qp_modify()  local
     572  __le64 *wqe;  in irdma_sc_qp_destroy()  local
     764  __le64 *wqe;  in irdma_sc_alloc_local_mac_entry()  local
     799  __le64 *wqe;  in irdma_sc_add_local_mac_entry()  local
     837  __le64 *wqe;  in irdma_sc_del_local_mac_entry()  local
    [all …]

uda.c
      23  __le64 *wqe;  in irdma_sc_access_ah()  local
     119  __le64 *wqe;  in irdma_access_mcast_grp()  local

puda.c
      82  __le64 *wqe;  in irdma_puda_post_recvbuf()  local
     421  __le64 *wqe;  in irdma_puda_send()  local
     602  __le64 *wqe;  in irdma_puda_qp_wqe()  local
     719  __le64 *wqe;  in irdma_puda_cq_wqe()  local
    1117  __le64 *wqe;  in irdma_ilq_putback_rcvbuf()  local

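Throughout irdma the WQE is handled as a bare __le64 *, i.e. an array of little-endian 64-bit words that helpers such as irdma_set_fragment() fill in. The sketch below shows that general packing idea in portable userspace C; the bit layout chosen for the length and key is invented for the example and is not the hardware's real format.

    /*
     * Sketch of packing a scatter/gather fragment into little-endian 64-bit
     * WQE words, in the spirit of helpers such as irdma_set_fragment().
     * The field layout below is made up for illustration.
     */
    #include <endian.h>   /* htole64(); available on glibc */
    #include <stdint.h>
    #include <stdio.h>

    struct sge {
        uint64_t addr;    /* buffer address */
        uint32_t length;  /* buffer length in bytes */
        uint32_t lkey;    /* local memory key */
    };

    /* Write one fragment into two consecutive 64-bit words at wqe[offset]. */
    static void set_fragment(uint64_t *wqe, uint32_t offset, const struct sge *sge)
    {
        wqe[offset]     = htole64(sge->addr);
        wqe[offset + 1] = htole64(((uint64_t)sge->lkey << 32) | sge->length);
    }

    int main(void)
    {
        uint64_t wqe[4] = { 0 };
        struct sge sge = { .addr = 0x1000, .length = 512, .lkey = 0xabcd };

        set_fragment(wqe, 0, &sge);
        printf("wqe[0]=%#llx wqe[1]=%#llx\n",
               (unsigned long long)wqe[0], (unsigned long long)wqe[1]);
        return 0;
    }
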
/linux-6.1.9/drivers/infiniband/hw/qib/

qib_rc.c
      42  static u32 restart_sge(struct rvt_sge_state *ss, struct rvt_swqe *wqe,  in restart_sge()
     222  struct rvt_swqe *wqe;  in qib_make_rc_req()  local
     739  struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, n);  in reset_psn()  local
     823  struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_acked);  in qib_restart_rc()  local
     859  struct rvt_swqe *wqe;  in reset_sending_psn()  local
     885  struct rvt_swqe *wqe;  in qib_rc_send_complete()  local
     952  struct rvt_swqe *wqe,  in do_rc_completion()
    1012  struct rvt_swqe *wqe;  in do_rc_ack()  local
    1227  struct rvt_swqe *wqe;  in rdma_seq_err()  local
    1277  struct rvt_swqe *wqe;  in qib_rc_rcv_resp()  local

/linux-6.1.9/drivers/infiniband/sw/siw/

siw_qp_tx.c
      42  struct siw_wqe *wqe = &c_tx->wqe_active;  in siw_try_1seg()  local
     121  struct siw_wqe *wqe = &c_tx->wqe_active;  in siw_qp_prepare_tx()  local
     433  struct siw_wqe *wqe = &c_tx->wqe_active;  in siw_tx_hdt()  local
     713  static void siw_prepare_fpdu(struct siw_qp *qp, struct siw_wqe *wqe)  in siw_prepare_fpdu()
     771  static int siw_check_sgl_tx(struct ib_pd *pd, struct siw_wqe *wqe,  in siw_check_sgl_tx()
     801  static int siw_qp_sq_proc_tx(struct siw_qp *qp, struct siw_wqe *wqe)  in siw_qp_sq_proc_tx()
     986  static int siw_qp_sq_proc_local(struct siw_qp *qp, struct siw_wqe *wqe)  in siw_qp_sq_proc_local()
    1034  struct siw_wqe *wqe = tx_wqe(qp);  in siw_qp_sq_process()  local

siw_qp_rx.c
     169  struct siw_wqe *wqe = &frx->wqe_active;  in siw_rresp_check_ntoh()  local
     281  struct siw_wqe *wqe = &frx->wqe_active;  in siw_send_check_ntoh()  local
     334  struct siw_wqe *wqe = NULL;  in siw_rqe_get()  local
     425  struct siw_wqe *wqe;  in siw_proc_send()  local
     560  struct siw_wqe *wqe = rx_wqe(frx);  in siw_proc_write()  local
     746  struct siw_wqe *wqe = NULL;  in siw_orqe_start_rx()  local
     790  struct siw_wqe *wqe = rx_wqe(frx);  in siw_proc_rresp()  local
    1222  struct siw_wqe *wqe = rx_wqe(qp->rx_fpdu);  in siw_rdmap_complete()  local

siw_qp.c
     262  struct siw_wqe *wqe = tx_wqe(qp);  in siw_qp_mpa_rts()  local
     462  struct siw_wqe *wqe = tx_wqe(qp);  in siw_send_terminate()  local
     884  struct siw_wqe *wqe = tx_wqe(qp);  in siw_activate_tx_from_sq()  local
     980  struct siw_wqe *wqe = tx_wqe(qp);  in siw_activate_tx()  local
    1191  struct siw_wqe *wqe = tx_wqe(qp);  in siw_sq_flush()  local
    1266  struct siw_wqe *wqe = &qp->rx_untagged.wqe_active;  in siw_rq_flush()  local

/linux-6.1.9/drivers/infiniband/hw/cxgb4/

qp.c
     489  static int build_rdma_send(struct t4_sq *sq, union t4_wr *wqe,  in build_rdma_send()
     556  static int build_rdma_write(struct t4_sq *sq, union t4_wr *wqe,  in build_rdma_write()
     660  static int build_rdma_read(union t4_wr *wqe, const struct ib_send_wr *wr,  in build_rdma_read()
     697  union t4_wr *wqe;  in post_write_cmpl()  local
     759  static int build_rdma_recv(struct c4iw_qp *qhp, union t4_recv_wr *wqe,  in build_rdma_recv()
     774  static int build_srq_recv(union t4_recv_wr *wqe, const struct ib_recv_wr *wr,  in build_srq_recv()
     820  static int build_memreg(struct t4_sq *sq, union t4_wr *wqe,  in build_memreg()
     884  static int build_inv_stag(union t4_wr *wqe, const struct ib_send_wr *wr,  in build_inv_stag()
    1086  union t4_wr *wqe = NULL;  in c4iw_post_send()  local
    1266  union t4_recv_wr *wqe = NULL;  in c4iw_post_receive()  local
    [all …]

/linux-6.1.9/drivers/infiniband/hw/mlx5/

umr.c
     228  struct mlx5r_umr_wqe *wqe, bool with_data)  in mlx5r_umr_post_send()
     289  struct mlx5r_umr_wqe *wqe, bool with_data)  in mlx5r_umr_post_send_wait()
     361  struct mlx5r_umr_wqe wqe = {};  in mlx5r_umr_revoke_mr()  local
     398  struct mlx5r_umr_wqe wqe = {};  in mlx5r_umr_rereg_pd_access()  local
     571  struct mlx5r_umr_wqe *wqe,  in mlx5r_umr_final_update_xlt()
     612  struct mlx5r_umr_wqe wqe = {};  in mlx5r_umr_update_mr_pas()  local
     697  struct mlx5r_umr_wqe wqe = {};  in mlx5r_umr_update_xlt()  local

/linux-6.1.9/drivers/infiniband/hw/hfi1/

rc.c
     394  struct rvt_swqe *wqe;  in hfi1_make_rc_req()  local
    1418  struct rvt_swqe *wqe)  in update_num_rd_atomic()
    1461  struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, n);  in reset_psn()  local
    1567  struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_acked);  in hfi1_restart_rc()  local
    1637  struct rvt_swqe *wqe;  in reset_sending_psn()  local
    1697  struct rvt_swqe *wqe;  in hfi1_rc_send_complete()  local
    1825  struct rvt_swqe *wqe,  in do_rc_completion()
    1970  struct rvt_swqe *wqe;  in do_rc_ack()  local
    2266  struct rvt_swqe *wqe;  in rdma_seq_err()  local
    2310  struct rvt_swqe *wqe;  in rc_rcv_resp()  local

tid_rdma.c
     378  struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, i);  in hfi1_qp_priv_init()  local
     416  struct rvt_swqe *wqe;  in hfi1_qp_priv_tid_free()  local
    1621  void __trdma_clean_swqe(struct rvt_qp *qp, struct rvt_swqe *wqe)  in __trdma_clean_swqe()
    1703  u32 hfi1_build_tid_rdma_read_packet(struct rvt_swqe *wqe,  in hfi1_build_tid_rdma_read_packet()
    1787  u32 hfi1_build_tid_rdma_read_req(struct rvt_qp *qp, struct rvt_swqe *wqe,  in hfi1_build_tid_rdma_read_req()
    2418  struct rvt_swqe *wqe;  in find_tid_request()  local
    2582  struct rvt_swqe *wqe;  in hfi1_kern_read_tid_flow_free()  local
    2630  struct rvt_qp *qp, struct rvt_swqe *wqe)  in restart_tid_rdma_read_req()
    2663  struct rvt_swqe *wqe;  in handle_read_kdeth_eflags()  local
    3046  void hfi1_tid_rdma_restart_req(struct rvt_qp *qp, struct rvt_swqe *wqe,  in hfi1_tid_rdma_restart_req()
    [all …]

ud.c
     224  static void hfi1_make_bth_deth(struct rvt_qp *qp, struct rvt_swqe *wqe,  in hfi1_make_bth_deth()
     262  struct rvt_swqe *wqe)  in hfi1_make_ud_req_9B()
     333  struct rvt_swqe *wqe)  in hfi1_make_ud_req_16B()
     441  struct rvt_swqe *wqe;  in hfi1_make_ud_req()  local

/linux-6.1.9/drivers/infiniband/sw/rdmavt/

qp.c
     593  struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_last);  in rvt_clear_mr_refs()  local
     623  static bool rvt_swqe_has_lkey(struct rvt_swqe *wqe, u32 lkey)  in rvt_swqe_has_lkey()
     646  struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, s_last);  in rvt_qp_sends_has_lkey()  local
     981  struct rvt_swqe *wqe;  in free_ud_wq_attr()  local
    1001  struct rvt_swqe *wqe;  in alloc_ud_wq_attr()  local
    1799  struct rvt_rwqe *wqe;  in rvt_post_recv()  local
    1966  struct rvt_swqe *wqe;  in rvt_post_one_wr()  local
    2214  struct rvt_rwqe *wqe;  in rvt_post_srq_recv()  local
    2269  static int init_sge(struct rvt_qp *qp, struct rvt_rwqe *wqe)  in init_sge()
    2352  struct rvt_rwqe *wqe;  in rvt_get_rwqe()  local
    [all …]

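rvt_get_swqe_ptr(qp, n) shows up in the qib, hfi1 and rdmavt listings alike. Since each send WQE carries a per-QP number of SGEs, indexing presumably uses a runtime stride rather than plain array arithmetic; the sketch below illustrates that pattern with simplified stand-in structures. The layout is an assumption made for illustration, not the rdmavt definitions.

    /*
     * Sketch of variable-stride indexing into a send queue whose entries each
     * reserve room for a per-QP number of SGEs.  Structures are simplified
     * stand-ins, not the rdmavt types.
     */
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct fake_sge { uint64_t addr; uint32_t length; uint32_t lkey; };

    struct fake_swqe {
        uint32_t opcode;
        uint32_t num_sge;
        struct fake_sge sg_list[];   /* up to s_max_sge entries follow */
    };

    struct fake_qp {
        char     *s_wq;       /* raw send-queue buffer */
        uint32_t  s_size;     /* number of WQE slots */
        uint32_t  s_max_sge;  /* SGEs reserved per slot */
    };

    /* Each slot is sizeof(header) + s_max_sge SGEs wide, so compute a stride. */
    static struct fake_swqe *get_swqe_ptr(struct fake_qp *qp, uint32_t n)
    {
        size_t stride = sizeof(struct fake_swqe) +
                        qp->s_max_sge * sizeof(struct fake_sge);

        return (struct fake_swqe *)(qp->s_wq + (size_t)n * stride);
    }

    int main(void)
    {
        struct fake_qp qp = { .s_size = 16, .s_max_sge = 4 };
        size_t stride = sizeof(struct fake_swqe) +
                        qp.s_max_sge * sizeof(struct fake_sge);

        qp.s_wq = calloc(qp.s_size, stride);
        get_swqe_ptr(&qp, 3)->opcode = 42;
        printf("slot 3 opcode = %u\n", get_swqe_ptr(&qp, 3)->opcode);
        free(qp.s_wq);
        return 0;
    }
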
/linux-6.1.9/drivers/infiniband/hw/bnxt_re/

ib_verbs.c
     392  struct bnxt_qplib_swqe *wqe = &fence->bind_wqe;  in bnxt_re_create_fence_wqe()  local
     420  struct bnxt_qplib_swqe wqe;  in bnxt_re_bind_fence_mw()  local
    1752  struct bnxt_qplib_swqe wqe;  in bnxt_re_post_srq_recv()  local
    2112  struct bnxt_qplib_swqe *wqe,  in bnxt_re_build_qp1_send_v2()
    2286  struct bnxt_qplib_swqe *wqe,  in bnxt_re_build_qp1_shadow_qp_recv()
    2335  struct bnxt_qplib_swqe *wqe)  in bnxt_re_build_send_wqe()
    2373  struct bnxt_qplib_swqe *wqe)  in bnxt_re_build_rdma_wqe()
    2405  struct bnxt_qplib_swqe *wqe)  in bnxt_re_build_atomic_wqe()
    2432  struct bnxt_qplib_swqe *wqe)  in bnxt_re_build_inv_wqe()
    2451  struct bnxt_qplib_swqe *wqe)  in bnxt_re_build_reg_wqe()
    [all …]

/linux-6.1.9/drivers/net/ethernet/huawei/hinic/

hinic_hw_cmdq.c
      54  #define CMDQ_WQE_HEADER(wqe) ((struct hinic_cmdq_header *)(wqe))  argument
     177  static void cmdq_prepare_wqe_ctrl(struct hinic_cmdq_wqe *wqe, int wrapped,  in cmdq_prepare_wqe_ctrl()
     234  static void cmdq_set_direct_wqe_data(struct hinic_cmdq_direct_wqe *wqe,  in cmdq_set_direct_wqe_data()
     243  static void cmdq_set_lcmd_wqe(struct hinic_cmdq_wqe *wqe,  in cmdq_set_lcmd_wqe()
     271  static void cmdq_set_direct_wqe(struct hinic_cmdq_wqe *wqe,  in cmdq_set_direct_wqe()
     529  struct hinic_cmdq_wqe *wqe)  in clear_wqe_complete_bit()
     563  struct hinic_cmdq_wqe *wqe)  in cmdq_arm_ceq_handler()

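cmdq_prepare_wqe_ctrl(wqe, wrapped, ...) and clear_wqe_complete_bit() hint at the usual command-queue trick of stamping each entry with a wrap/ownership bit that flips on every pass around the ring, so the consumer can distinguish freshly written entries from stale ones. The sketch below demonstrates that idea with invented field names and layout; it is not the hinic format.

    /*
     * Sketch of a "wrap bit" in a command-queue ring: the producer stamps
     * each entry with a bit that flips every time it wraps, and the consumer
     * only trusts entries whose bit matches the current pass.  All names and
     * the layout are invented for illustration.
     */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define CMDQ_DEPTH 4

    struct fake_cmdq_wqe {
        uint32_t ctrl;       /* bit 0 used as the wrap/ownership bit here */
        uint32_t cmd;
    };

    struct fake_cmdq {
        struct fake_cmdq_wqe ring[CMDQ_DEPTH];
        uint32_t prod_idx;   /* free-running producer counter */
        int      wrapped;    /* current value of the producer's wrap bit */
    };

    static void cmdq_post(struct fake_cmdq *cmdq, uint32_t cmd)
    {
        struct fake_cmdq_wqe *wqe = &cmdq->ring[cmdq->prod_idx % CMDQ_DEPTH];

        wqe->cmd  = cmd;
        wqe->ctrl = (uint32_t)cmdq->wrapped;    /* stamp current wrap bit */

        if (++cmdq->prod_idx % CMDQ_DEPTH == 0)
            cmdq->wrapped = !cmdq->wrapped;     /* flip on every wrap */
    }

    /* Consumer: an entry is valid only if its wrap bit matches the expected one. */
    static bool cmdq_entry_valid(const struct fake_cmdq_wqe *wqe, int expect_wrapped)
    {
        return (wqe->ctrl & 1) == (uint32_t)expect_wrapped;
    }

    int main(void)
    {
        struct fake_cmdq cmdq = { .wrapped = 1 };

        for (uint32_t i = 0; i < 6; i++)
            cmdq_post(&cmdq, i);

        /* Slots 0 and 1 were rewritten on the second pass (wrap bit 0). */
        printf("slot 0 valid for pass 2: %d\n", cmdq_entry_valid(&cmdq.ring[0], 0));
        printf("slot 2 valid for pass 2: %d\n", cmdq_entry_valid(&cmdq.ring[2], 0));
        return 0;
    }
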
/linux-6.1.9/drivers/infiniband/hw/mthca/

mthca_srq.c
      92  static inline int *wqe_to_link(void *wqe)  in wqe_to_link()
     158  void *wqe;  in mthca_alloc_srq_buf()  local
     495  void *wqe;  in mthca_tavor_post_srq_recv()  local
     588  void *wqe;  in mthca_arbel_post_srq_recv()  local

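wqe_to_link(void *wqe) returning an int * suggests the SRQ free list is threaded through the currently unused WQE memory itself, rather than kept in a separate array. That reading of the name is an assumption; the sketch below shows the general technique with made-up sizes and offsets.

    /*
     * Sketch of a free list threaded through unused receive-queue entries:
     * a few bytes inside each idle WQE hold the index of the next free slot.
     * Sizes, offsets and names are invented for illustration.
     */
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define SRQ_WQES     4
    #define SRQ_WQE_SIZE 64

    static _Alignas(8) char srq_buf[SRQ_WQES * SRQ_WQE_SIZE];

    static void *get_wqe(int n)
    {
        return srq_buf + n * SRQ_WQE_SIZE;
    }

    /* Reuse a word inside an unused WQE to hold the next-free index. */
    static int *wqe_to_link(void *wqe)
    {
        return (int *)((char *)wqe + sizeof(uint64_t));
    }

    int main(void)
    {
        int first_free = 0;
        int i;

        /* Build the initial free list: 0 -> 1 -> 2 -> 3 -> end (-1). */
        for (i = 0; i < SRQ_WQES; i++)
            *wqe_to_link(get_wqe(i)) = (i < SRQ_WQES - 1) ? i + 1 : -1;

        /* "Post" two receives by popping slots off the free list. */
        for (i = 0; i < 2; i++) {
            int slot = first_free;

            first_free = *wqe_to_link(get_wqe(slot));
            memset(get_wqe(slot), 0, SRQ_WQE_SIZE);   /* slot now in use */
            printf("posted into slot %d, next free is %d\n", slot, first_free);
        }
        return 0;
    }
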
/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/en_accel/

ktls_txrx.c
      74  mlx5e_ktls_build_static_params(struct mlx5e_set_tls_static_params_wqe *wqe,  in mlx5e_ktls_build_static_params()
     117  mlx5e_ktls_build_progress_params(struct mlx5e_set_tls_progress_params_wqe *wqe,  in mlx5e_ktls_build_progress_params()

/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/

en_tx.c
     353  struct mlx5e_tx_wqe *wqe;  in mlx5e_tx_flush()  local
     416  struct mlx5e_tx_wqe *wqe, u16 pi, bool xmit_more)  in mlx5e_sq_xmit_wqe()
     514  struct mlx5e_tx_wqe *wqe;  in mlx5e_tx_mpwqe_session_start()  local
     669  struct mlx5e_tx_wqe *wqe;  in mlx5e_xmit()  local
     727  struct mlx5e_tx_wqe *wqe;  in mlx5e_sq_xmit_simple()  local
     978  struct mlx5i_tx_wqe *wqe;  in mlx5i_sq_xmit()  local

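mlx5e_sq_xmit_wqe(..., u16 pi, bool xmit_more) carries a producer index and an xmit_more hint. A common TX-path optimization, sketched below with an invented queue structure, is to defer the (relatively expensive) doorbell write while the stack promises more packets are coming and to ring it once per burst.

    /*
     * Sketch of doorbell batching keyed off an "xmit_more" hint: skip the
     * doorbell while more packets are about to be queued, ring it once for
     * the whole burst.  The queue structure is invented for illustration.
     */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct fake_txq {
        uint16_t pc;          /* producer counter (free-running) */
        uint16_t db_rec;      /* last producer value rung on the doorbell */
        unsigned doorbells;   /* number of doorbell writes (for the demo) */
    };

    static void ring_doorbell(struct fake_txq *sq)
    {
        sq->db_rec = sq->pc;  /* in a driver this would be an MMIO write */
        sq->doorbells++;
    }

    static void xmit_wqe(struct fake_txq *sq, bool xmit_more)
    {
        sq->pc++;             /* one WQE consumed per packet in this model */
        if (!xmit_more)
            ring_doorbell(sq);
    }

    int main(void)
    {
        struct fake_txq sq = { 0 };

        /* A burst of four packets: only the last one rings the doorbell. */
        xmit_wqe(&sq, true);
        xmit_wqe(&sq, true);
        xmit_wqe(&sq, true);
        xmit_wqe(&sq, false);

        printf("pc=%u, doorbells=%u\n", sq.pc, sq.doorbells);
        return 0;
    }
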
/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/

rx.c
     168  struct mlx5e_rx_wqe_cyc *wqe;  in mlx5e_xsk_alloc_rx_wqes_batched()  local
     190  struct mlx5e_rx_wqe_cyc *wqe;  in mlx5e_xsk_alloc_rx_wqes()  local

/linux-6.1.9/drivers/scsi/lpfc/

lpfc_nvme.c
     397  union lpfc_wqe128 *wqe;  in lpfc_nvme_gen_req()  local
     709  struct lpfc_iocbq *wqe, *next_wqe;  in __lpfc_nvme_ls_abort()  local
     829  union lpfc_wqe128 *wqe;  in lpfc_nvme_adj_fcp_sgls()  local
    1212  union lpfc_wqe128 *wqe = &pwqeq->wqe;  in lpfc_nvme_prep_io_cmd()  local
    1338  union lpfc_wqe128 *wqe = &lpfc_ncmd->cur_iocbq.wqe;  in lpfc_nvme_prep_io_dma()  local
    2051  union lpfc_wqe128 *wqe;  in lpfc_get_nvme_buf()  local