Home
last modified time | relevance | path

Searched refs:cqe64 (Results 1 – 5 of 5) sorted by relevance

/linux-6.1.9/drivers/infiniband/hw/mlx5/
cq.c:82 struct mlx5_cqe64 *cqe64; in get_sw_cqe() local
84 cqe64 = (cq->mcq.cqe_sz == 64) ? cqe : cqe + 64; in get_sw_cqe()
86 if (likely(get_cqe_opcode(cqe64) != MLX5_CQE_INVALID) && in get_sw_cqe()
87 !((cqe64->op_own & MLX5_CQE_OWNER_MASK) ^ !!(n & (cq->ibcq.cqe + 1)))) { in get_sw_cqe()
338 static void handle_atomics(struct mlx5_ib_qp *qp, struct mlx5_cqe64 *cqe64, in handle_atomics() argument
450 struct mlx5_cqe64 *cqe64; in mlx5_poll_one() local
464 cqe64 = (cq->mcq.cqe_sz == 64) ? cqe : cqe + 64; in mlx5_poll_one()
473 opcode = get_cqe_opcode(cqe64); in mlx5_poll_one()
486 qpn = ntohl(cqe64->sop_drop_qpn) & 0xffffff; in mlx5_poll_one()
500 wqe_ctr = be16_to_cpu(cqe64->wqe_counter); in mlx5_poll_one()
[all …]
/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_send.c:52 static int dr_parse_cqe(struct mlx5dr_cq *dr_cq, struct mlx5_cqe64 *cqe64) in dr_parse_cqe() argument
57 opcode = get_cqe_opcode(cqe64); in dr_parse_cqe()
59 idx = be16_to_cpu(cqe64->wqe_counter) & in dr_parse_cqe()
65 idx = be16_to_cpu(cqe64->wqe_counter) & in dr_parse_cqe()
77 struct mlx5_cqe64 *cqe64; in dr_cq_poll_one() local
80 cqe64 = mlx5_cqwq_get_cqe(&dr_cq->wq); in dr_cq_poll_one()
81 if (!cqe64) in dr_cq_poll_one()
85 err = dr_parse_cqe(dr_cq, cqe64); in dr_cq_poll_one()
/linux-6.1.9/drivers/vfio/pci/mlx5/
cmd.c:615 struct mlx5_cqe64 *cqe64; in init_cq_frag_buf() local
621 cqe64 = buf->cqe_size == 64 ? cqe : cqe + 64; in init_cq_frag_buf()
622 cqe64->op_own = MLX5_CQE_INVALID << 4; in init_cq_frag_buf()
1265 struct mlx5_cqe64 *cqe64; in get_sw_cqe() local
1267 cqe64 = (cq->mcq.cqe_sz == 64) ? cqe : cqe + 64; in get_sw_cqe()
1269 if (likely(get_cqe_opcode(cqe64) != MLX5_CQE_INVALID) && in get_sw_cqe()
1270 !((cqe64->op_own & MLX5_CQE_OWNER_MASK) ^ !!(n & (cq->ncqe)))) { in get_sw_cqe()
1271 return cqe64; in get_sw_cqe()
/linux-6.1.9/drivers/vdpa/mlx5/net/
mlx5_vnet.c:368 struct mlx5_cqe64 *cqe64; in cq_frag_buf_init() local
374 cqe64 = cqe; in cq_frag_buf_init()
375 cqe64->op_own = MLX5_CQE_INVALID << 4; in cq_frag_buf_init()
381 struct mlx5_cqe64 *cqe64 = get_cqe(cq, n & (cq->cqe - 1)); in get_sw_cqe() local
383 if (likely(get_cqe_opcode(cqe64) != MLX5_CQE_INVALID) && in get_sw_cqe()
384 !((cqe64->op_own & MLX5_CQE_OWNER_MASK) ^ !!(n & cq->cqe))) in get_sw_cqe()
385 return cqe64; in get_sw_cqe()
524 struct mlx5_cqe64 *cqe64; in mlx5_vdpa_poll_one() local
526 cqe64 = next_cqe_sw(vcq); in mlx5_vdpa_poll_one()
527 if (!cqe64) in mlx5_vdpa_poll_one()
/linux-6.1.9/include/linux/mlx5/
device.h:1036 struct mlx5_cqe64 cqe64; member