Home
last modified time | relevance | path

Searched refs:MLX5_SEND_WQE_BB (Results 1 – 14 of 14) sorted by relevance

/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_utils.h:54 (DIV_ROUND_UP(sizeof(struct mlx5e_set_tls_static_params_wqe), MLX5_SEND_WQE_BB))
57 (DIV_ROUND_UP(sizeof(struct mlx5e_set_tls_progress_params_wqe), MLX5_SEND_WQE_BB))
60 (DIV_ROUND_UP(sizeof(struct mlx5e_get_tls_progress_params_wqe), MLX5_SEND_WQE_BB))
ktls_tx.c:14 (DIV_ROUND_UP(sizeof(struct mlx5e_dump_wqe), MLX5_SEND_WQE_BB))
/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/lib/
aso.h:11 (DIV_ROUND_UP(sizeof(struct mlx5_aso_wqe), MLX5_SEND_WQE_BB))
13 (DIV_ROUND_UP(sizeof(struct mlx5_aso_wqe_data), MLX5_SEND_WQE_BB))
aso.c:280 MLX5_SET(wq, wq, log_wq_stride, ilog2(MLX5_SEND_WQE_BB)); in mlx5_aso_create_sq()
/linux-6.1.9/include/linux/mlx5/
qp.h:41 #define MLX5_SIG_WQE_SIZE (MLX5_SEND_WQE_BB * 8)
156 MLX5_SEND_WQE_BB = 64, enumerator
159 #define MLX5_SEND_WQEBB_NUM_DS (MLX5_SEND_WQE_BB / MLX5_SEND_WQE_DS)
165 #define MLX5_SEND_WQE_MAX_SIZE (MLX5_SEND_WQE_MAX_WQEBBS * MLX5_SEND_WQE_BB)
/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/en/
params.c:108 max_wqe_size = mlx5e_get_max_sq_aligned_wqebbs(mdev) * MLX5_SEND_WQE_BB; in mlx5e_mpwrq_log_wqe_sz()
160 MLX5_SEND_WQE_BB); in mlx5e_mpwrq_umr_wqebbs()
174 MLX5_SEND_WQE_BB / mlx5e_mpwrq_umr_entry_size(umr_mode)); in mlx5e_mpwrq_mtts_per_wqe()
979 MLX5_SET(wq, wq, log_wq_stride, ilog2(MLX5_SEND_WQE_BB)); in mlx5e_build_sq_param_common()
1158 useful_space = PAGE_SIZE - mlx5e_get_max_sq_wqebbs(mdev) + MLX5_SEND_WQE_BB; in mlx5e_build_icosq_log_wq_sz()
1159 total_pages = DIV_ROUND_UP(wqebbs * MLX5_SEND_WQE_BB, useful_space); in mlx5e_build_icosq_log_wq_sz()
1160 wqebbs = total_pages * (PAGE_SIZE / MLX5_SEND_WQE_BB); in mlx5e_build_icosq_log_wq_sz()
txrx.h:448 WARN_ON_ONCE(PAGE_SIZE / MLX5_SEND_WQE_BB < mlx5e_get_max_sq_wqebbs(mdev)); in mlx5e_stop_room_for_wqe()
reporter_tx.c:272 sq_stride = MLX5_SEND_WQE_BB; in mlx5e_tx_reporter_diagnose_generic_txqsq()
/linux-6.1.9/drivers/net/ethernet/mellanox/mlx5/core/
wq.c:105 u8 log_sq_stride = ilog2(MLX5_SEND_WQE_BB); in mlx5_wq_qp_create()
134 u16 sq_strides_offset = rq_byte_size / MLX5_SEND_WQE_BB; in mlx5_wq_qp_create()
en.h:154 (DIV_ROUND_UP(MLX5E_KLM_UMR_WQE_SZ(klm_entries), MLX5_SEND_WQE_BB))
166 MLX5E_KLM_ENTRIES_PER_WQE(MLX5_SEND_WQE_BB * mlx5e_get_max_sq_aligned_wqebbs(mdev))
217 MLX5_CAP_GEN(mdev, max_wqe_sz_sq) / MLX5_SEND_WQE_BB); in mlx5e_get_max_sq_wqebbs()
/linux-6.1.9/drivers/infiniband/hw/mlx5/
wr.h:41 return fragment_end + MLX5_SEND_WQE_BB; in get_sq_edge()
mem.c:132 MLX5_SEND_WQE_BB); in post_send_nop()
wr.c:683 *size = ALIGN(*size, MLX5_SEND_WQE_BB >> 4); in set_reg_wr()
774 qp->sq.cur_post += DIV_ROUND_UP(size * 16, MLX5_SEND_WQE_BB); in mlx5r_finish_wqe()
780 seg = PTR_ALIGN(seg, MLX5_SEND_WQE_BB); in mlx5r_finish_wqe()
qp.c:154 min_t(size_t, buflen - bytes_copied, MLX5_SEND_WQE_BB); in mlx5_ib_read_kernel_wqe_sq()
392 wq_size = max_t(int, wq_size, MLX5_SEND_WQE_BB); in set_rq_size()
481 ALIGN(max_t(int, inl_size, size), MLX5_SEND_WQE_BB) < MLX5_SIG_WQE_SIZE) in calc_send_wqe()
484 return ALIGN(max_t(int, inl_size, size), MLX5_SEND_WQE_BB); in calc_send_wqe()
535 qp->sq.wqe_cnt = wq_size / MLX5_SEND_WQE_BB; in calc_sq_size()
538 attr->cap.max_send_wr, wqe_size, MLX5_SEND_WQE_BB, in calc_sq_size()
543 qp->sq.wqe_shift = ilog2(MLX5_SEND_WQE_BB); in calc_sq_size()
913 qp->sq.wqe_shift = ilog2(MLX5_SEND_WQE_BB); in _create_user_qp()
1063 MLX5_SEND_WQE_BB; in _create_kernel_qp()
1066 ilog2(MLX5_SEND_WQE_BB), in _create_kernel_qp()
[all …]