/linux-6.6.21/drivers/net/ethernet/mellanox/mlx5/core/en_accel/ |
D | ipsec_rxtx.h |
     82  static inline bool mlx5e_ipsec_eseg_meta(struct mlx5_wqe_eth_seg *eseg)  in mlx5e_ipsec_eseg_meta() argument
     84  return eseg->flow_table_metadata & cpu_to_be32(MLX5_ETH_WQE_FT_META_IPSEC);  in mlx5e_ipsec_eseg_meta()
     88  struct mlx5_wqe_eth_seg *eseg);
    126  struct mlx5_wqe_eth_seg *eseg)  in mlx5e_ipsec_txwqe_build_eseg_csum() argument
    130  if (!mlx5e_ipsec_eseg_meta(eseg))  in mlx5e_ipsec_txwqe_build_eseg_csum()
    133  eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM;  in mlx5e_ipsec_txwqe_build_eseg_csum()
    136  eseg->cs_flags |= MLX5_ETH_WQE_L3_INNER_CSUM;  in mlx5e_ipsec_txwqe_build_eseg_csum()
    138  eseg->cs_flags |= MLX5_ETH_WQE_L4_INNER_CSUM;  in mlx5e_ipsec_txwqe_build_eseg_csum()
    140  eseg->cs_flags |= MLX5_ETH_WQE_L4_CSUM;  in mlx5e_ipsec_txwqe_build_eseg_csum()
    153  static inline bool mlx5e_ipsec_eseg_meta(struct mlx5_wqe_eth_seg *eseg)  in mlx5e_ipsec_eseg_meta() argument
    [all …]
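For orientation, a small userspace-compilable sketch of the pattern these hits show: the IPsec TX path tags the WQE Ethernet segment through flow_table_metadata, and the checksum builder only adds the inner-checksum flags when that tag is present. The struct layout, flag values, and the branch conditions elided by the truncated listing are illustrative placeholders here, not the real mlx5 definitions from include/linux/mlx5/qp.h.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>			/* htonl() stands in for cpu_to_be32() */

/* Placeholder bit values; the real ones live in include/linux/mlx5/qp.h. */
#define FT_META_IPSEC	(1u << 0)
#define L3_CSUM		(1u << 6)
#define L3_INNER_CSUM	(1u << 4)
#define L4_INNER_CSUM	(1u << 5)
#define L4_CSUM		(1u << 7)

struct eth_seg_model {			/* simplified stand-in for mlx5_wqe_eth_seg */
	uint8_t  cs_flags;
	uint32_t flow_table_metadata;	/* stored big-endian, as on the wire */
};

/* Shape of mlx5e_ipsec_eseg_meta(): test the IPsec tag in the metadata word. */
bool eseg_meta(const struct eth_seg_model *eseg)
{
	return eseg->flow_table_metadata & htonl(FT_META_IPSEC);
}

/* Shape of mlx5e_ipsec_txwqe_build_eseg_csum(): untagged packets fall back to
 * the regular csum builder (return false); tagged ones get the L3 flag plus
 * either the inner or the outer L4 flag, depending on the inner protocol. */
bool ipsec_build_eseg_csum(struct eth_seg_model *eseg, bool inner_l3, bool inner_l4)
{
	if (!eseg_meta(eseg))
		return false;

	eseg->cs_flags = L3_CSUM;
	if (inner_l3)
		eseg->cs_flags |= L3_INNER_CSUM;
	if (inner_l4)
		eseg->cs_flags |= L4_INNER_CSUM;
	else
		eseg->cs_flags |= L4_CSUM;
	return true;
}

int main(void)
{
	struct eth_seg_model eseg = {
		.flow_table_metadata = htonl(FT_META_IPSEC),
	};

	ipsec_build_eseg_csum(&eseg, true, true);
	printf("cs_flags=%#x\n", (unsigned int)eseg.cs_flags);
	return 0;
}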
|
D | ipsec_rxtx.c |
     76  struct mlx5_wqe_eth_seg *eseg, u8 mode,  in mlx5e_ipsec_set_swp() argument
     93  eseg->swp_outer_l3_offset = skb_network_offset(skb) / 2;  in mlx5e_ipsec_set_swp()
     95  eseg->swp_flags |= MLX5_ETH_WQE_SWP_OUTER_L3_IPV6;  in mlx5e_ipsec_set_swp()
     99  eseg->swp_inner_l3_offset = skb_inner_network_offset(skb) / 2;  in mlx5e_ipsec_set_swp()
    101  eseg->swp_flags |= MLX5_ETH_WQE_SWP_INNER_L3_IPV6;  in mlx5e_ipsec_set_swp()
    105  eseg->swp_flags |= MLX5_ETH_WQE_SWP_INNER_L4_UDP;  in mlx5e_ipsec_set_swp()
    109  eseg->swp_inner_l4_offset = skb_inner_transport_offset(skb) / 2;  in mlx5e_ipsec_set_swp()
    124  eseg->swp_flags |= MLX5_ETH_WQE_SWP_OUTER_L4_UDP;  in mlx5e_ipsec_set_swp()
    128  eseg->swp_outer_l4_offset = skb_inner_transport_offset(skb) / 2;  in mlx5e_ipsec_set_swp()
    137  eseg->swp_flags |= MLX5_ETH_WQE_SWP_INNER_L4_UDP;  in mlx5e_ipsec_set_swp()
    [all …]
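These ipsec_rxtx.c hits are the software-parser (SWP) setup: header offsets taken from the skb are written into the eseg in units of 16-bit words (hence the `/ 2`), and per-layer flags mark IPv6 and UDP. A self-contained sketch of that arithmetic, with made-up byte offsets and placeholder flag bits:

#include <stdint.h>
#include <stdio.h>

/* Placeholder SWP flag bits; real values are in include/linux/mlx5/qp.h. */
#define SWP_OUTER_L3_IPV6	(1u << 4)
#define SWP_INNER_L3_IPV6	(1u << 0)
#define SWP_INNER_L4_UDP	(1u << 1)

struct swp_model {			/* the SWP fields of the eth segment, simplified */
	uint8_t swp_outer_l3_offset;	/* all offsets in 2-byte units */
	uint8_t swp_outer_l4_offset;
	uint8_t swp_inner_l3_offset;
	uint8_t swp_inner_l4_offset;
	uint8_t swp_flags;
};

/* Mirrors the shape of mlx5e_ipsec_set_swp(): convert byte offsets from the
 * skb into 2-byte units and set the matching per-layer flags. */
void set_swp(struct swp_model *swp, unsigned int net_off,
	     unsigned int inner_net_off, unsigned int inner_trans_off,
	     int outer_ipv6, int inner_udp)
{
	swp->swp_outer_l3_offset = net_off / 2;
	if (outer_ipv6)
		swp->swp_flags |= SWP_OUTER_L3_IPV6;

	swp->swp_inner_l3_offset = inner_net_off / 2;
	swp->swp_inner_l4_offset = inner_trans_off / 2;
	if (inner_udp)
		swp->swp_flags |= SWP_INNER_L4_UDP;
}

int main(void)
{
	/* Example: outer header at byte 14, inner IP at 50, inner UDP at 70. */
	struct swp_model swp = { 0 };

	set_swp(&swp, 14, 50, 70, 0, 1);
	printf("outer_l3=%u inner_l3=%u inner_l4=%u flags=%#x\n",
	       swp.swp_outer_l3_offset, swp.swp_inner_l3_offset,
	       swp.swp_inner_l4_offset, (unsigned int)swp.swp_flags);
	return 0;
}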
|
D | en_accel.h |
     55  mlx5e_tx_tunnel_accel(struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, u16 ihs)  in mlx5e_tx_tunnel_accel() argument
     88  mlx5e_set_eseg_swp(skb, eseg, &swp_spec);  in mlx5e_tx_tunnel_accel()
     90  mlx5e_eseg_swp_offsets_add_vlan(eseg);  in mlx5e_tx_tunnel_accel()
    169  struct mlx5_wqe_eth_seg *eseg, u16 ihs)  in mlx5e_accel_tx_eseg() argument
    173  mlx5e_ipsec_tx_build_eseg(priv, skb, eseg);  in mlx5e_accel_tx_eseg()
    178  mlx5e_macsec_tx_build_eseg(priv->macsec, skb, eseg);  in mlx5e_accel_tx_eseg()
    183  mlx5e_tx_tunnel_accel(skb, eseg, ihs);  in mlx5e_accel_tx_eseg()
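The en_accel.h hits are the single dispatch point where the accelerated TX path lets each offload (IPsec, MACsec, tunnel SWP) fill in its part of the eseg before the WQE is posted. A hedged sketch of that dispatch pattern, with the per-offload predicates reduced to booleans and the builders replaced by hypothetical stubs:

#include <stdbool.h>
#include <stdio.h>

struct eth_seg { unsigned char cs_flags; };	/* minimal stand-in */

/* Hypothetical stand-ins for mlx5e_ipsec_tx_build_eseg(),
 * mlx5e_macsec_tx_build_eseg() and mlx5e_tx_tunnel_accel(). */
static void ipsec_build_eseg(struct eth_seg *eseg)  { (void)eseg; puts("ipsec eseg"); }
static void macsec_build_eseg(struct eth_seg *eseg) { (void)eseg; puts("macsec eseg"); }
static void tunnel_build_eseg(struct eth_seg *eseg, unsigned short ihs)
{
	(void)eseg;
	printf("tunnel eseg, inline header %u bytes\n", ihs);
}

/* Shape of mlx5e_accel_tx_eseg(): each enabled offload gets one pass over the
 * Ethernet segment before the WQE is written out. */
static void accel_tx_eseg(struct eth_seg *eseg, unsigned short ihs,
			  bool ipsec_pkt, bool macsec_pkt, bool tunneled)
{
	if (ipsec_pkt)
		ipsec_build_eseg(eseg);
	if (macsec_pkt)
		macsec_build_eseg(eseg);
	if (tunneled)
		tunnel_build_eseg(eseg, ihs);
}

int main(void)
{
	struct eth_seg eseg = { 0 };

	accel_tx_eseg(&eseg, 78, true, false, true);
	return 0;
}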
|
D | macsec.h |
     23  struct mlx5_wqe_eth_seg *eseg);
|
D | macsec.c |
   1644  struct mlx5_wqe_eth_seg *eseg)  in mlx5e_macsec_tx_build_eseg() argument
   1654  eseg->flow_table_metadata = cpu_to_be32(MLX5_ETH_WQE_FT_META_MACSEC | fs_id << 2);  in mlx5e_macsec_tx_build_eseg()
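The macsec.c hit packs the MACsec flow-steering id into the same flow_table_metadata word the IPsec path tags above: a marker bit OR'd with fs_id shifted left by two, converted to big-endian. A tiny self-contained demo of that packing; the marker value here is a placeholder, not the real MLX5_ETH_WQE_FT_META_MACSEC constant.

#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>		/* htonl() stands in for cpu_to_be32() */

/* Placeholder for MLX5_ETH_WQE_FT_META_MACSEC (defined in include/linux/mlx5/qp.h). */
#define FT_META_MACSEC	(1u << 1)

/* Shape of the assignment in mlx5e_macsec_tx_build_eseg(): tag the packet as
 * MACsec and carry the flow-steering id in the higher bits. */
uint32_t macsec_ft_metadata(uint32_t fs_id)
{
	return htonl(FT_META_MACSEC | fs_id << 2);
}

int main(void)
{
	printf("fs_id=5 -> metadata=%#x (big-endian)\n", macsec_ft_metadata(5));
	return 0;
}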
|
/linux-6.6.21/drivers/net/ethernet/mellanox/mlx5/core/ |
D | en_tx.c |
    121  struct mlx5_wqe_eth_seg *eseg)  in mlx5e_txwqe_build_eseg_csum() argument
    123  if (unlikely(mlx5e_ipsec_txwqe_build_eseg_csum(sq, skb, eseg)))  in mlx5e_txwqe_build_eseg_csum()
    127  eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM;  in mlx5e_txwqe_build_eseg_csum()
    129  eseg->cs_flags |= MLX5_ETH_WQE_L3_INNER_CSUM |  in mlx5e_txwqe_build_eseg_csum()
    133  eseg->cs_flags |= MLX5_ETH_WQE_L4_CSUM;  in mlx5e_txwqe_build_eseg_csum()
    138  eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM | MLX5_ETH_WQE_L4_CSUM;  in mlx5e_txwqe_build_eseg_csum()
    375  struct mlx5_wqe_eth_seg *eseg, bool xmit_more)  in mlx5e_txwqe_complete() argument
    399  u8 metadata_index = be32_to_cpu(eseg->flow_table_metadata);  in mlx5e_txwqe_complete()
    424  struct mlx5_wqe_eth_seg *eseg;  in mlx5e_sq_xmit_wqe() local
    437  eseg = &wqe->eth;  in mlx5e_sq_xmit_wqe()
    [all …]
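en_tx.c holds the generic TX checksum builder: it first offers the packet to the IPsec variant shown earlier and only falls back to the plain outer/inner flags when that declines (returns false). A compressed sketch of that ordering, with the same caveat that the flag values and the conditions trimmed from the listing are stand-ins:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define L3_CSUM		(1u << 6)	/* placeholder flag values */
#define L4_CSUM		(1u << 7)
#define L3_INNER_CSUM	(1u << 4)
#define L4_INNER_CSUM	(1u << 5)

struct eth_seg_model { uint8_t cs_flags; };

/* Stand-in for mlx5e_ipsec_txwqe_build_eseg_csum(); returns true only when it
 * handled the packet (see the ipsec_rxtx.h sketch above). */
bool ipsec_build_eseg_csum(struct eth_seg_model *eseg, bool ipsec_pkt)
{
	if (!ipsec_pkt)
		return false;
	eseg->cs_flags = L3_CSUM | L4_INNER_CSUM;
	return true;
}

/* Shape of mlx5e_txwqe_build_eseg_csum(): IPsec gets first refusal, then the
 * ordinary outer/inner checksum flags are set for CHECKSUM_PARTIAL packets. */
void build_eseg_csum(struct eth_seg_model *eseg, bool ipsec_pkt,
		     bool csum_partial, bool encapsulated)
{
	if (ipsec_build_eseg_csum(eseg, ipsec_pkt))
		return;
	if (!csum_partial)
		return;

	eseg->cs_flags = L3_CSUM;
	if (encapsulated)
		eseg->cs_flags |= L3_INNER_CSUM | L4_INNER_CSUM;
	else
		eseg->cs_flags |= L4_CSUM;
}

int main(void)
{
	struct eth_seg_model eseg = { 0 };

	build_eseg_csum(&eseg, false, true, true);	/* plain encapsulated TX */
	printf("cs_flags=%#x\n", (unsigned int)eseg.cs_flags);
	return 0;
}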
|
D | en_main.c |
   1888  struct mlx5_wqe_eth_seg *eseg = &wqe->eth;  in mlx5e_open_xdpsq() local
   1896  eseg->inline_hdr.sz = cpu_to_be16(inline_hdr_sz);  in mlx5e_open_xdpsq()
|
/linux-6.6.21/drivers/net/ethernet/mellanox/mlx5/core/en/ |
D | txrx.h |
    421  static inline void mlx5e_eseg_swp_offsets_add_vlan(struct mlx5_wqe_eth_seg *eseg)  in mlx5e_eseg_swp_offsets_add_vlan() argument
    424  eseg->swp_outer_l3_offset += VLAN_HLEN / 2;  in mlx5e_eseg_swp_offsets_add_vlan()
    425  eseg->swp_outer_l4_offset += VLAN_HLEN / 2;  in mlx5e_eseg_swp_offsets_add_vlan()
    426  eseg->swp_inner_l3_offset += VLAN_HLEN / 2;  in mlx5e_eseg_swp_offsets_add_vlan()
    427  eseg->swp_inner_l4_offset += VLAN_HLEN / 2;  in mlx5e_eseg_swp_offsets_add_vlan()
    431  mlx5e_set_eseg_swp(struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg,  in mlx5e_set_eseg_swp() argument
    435  eseg->swp_outer_l3_offset = skb_network_offset(skb) / 2;  in mlx5e_set_eseg_swp()
    437  eseg->swp_flags |= MLX5_ETH_WQE_SWP_OUTER_L3_IPV6;  in mlx5e_set_eseg_swp()
    439  eseg->swp_outer_l4_offset = skb_transport_offset(skb) / 2;  in mlx5e_set_eseg_swp()
    441  eseg->swp_flags |= MLX5_ETH_WQE_SWP_OUTER_L4_UDP;  in mlx5e_set_eseg_swp()
    [all …]
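txrx.h holds the generic SWP helpers: mlx5e_set_eseg_swp() writes the outer/inner offsets (again in 2-byte units) from an swp_spec, and mlx5e_eseg_swp_offsets_add_vlan() shifts all four offsets by VLAN_HLEN / 2 when a VLAN tag is inserted ahead of the parsed headers. A minimal self-contained demo of the VLAN adjustment:

#include <stdint.h>
#include <stdio.h>

#define VLAN_HLEN	4	/* 802.1Q tag length in bytes, as in the kernel */

struct swp_offsets {		/* the four SWP offset fields, in 2-byte units */
	uint8_t outer_l3, outer_l4, inner_l3, inner_l4;
};

/* Shape of mlx5e_eseg_swp_offsets_add_vlan(): an inserted VLAN tag pushes
 * every parsed header two 16-bit words further into the packet. */
void swp_offsets_add_vlan(struct swp_offsets *o)
{
	o->outer_l3 += VLAN_HLEN / 2;
	o->outer_l4 += VLAN_HLEN / 2;
	o->inner_l3 += VLAN_HLEN / 2;
	o->inner_l4 += VLAN_HLEN / 2;
}

int main(void)
{
	struct swp_offsets o = { .outer_l3 = 7, .outer_l4 = 17,
				 .inner_l3 = 25, .inner_l4 = 35 };

	swp_offsets_add_vlan(&o);
	printf("outer_l3=%u outer_l4=%u inner_l3=%u inner_l4=%u\n",
	       o.outer_l3, o.outer_l4, o.inner_l3, o.inner_l4);
	return 0;
}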
|
D | xdp.c |
    490  struct mlx5_wqe_eth_seg *eseg;  in mlx5e_xmit_xdp_frame() local
    545  eseg = &wqe->eth;  in mlx5e_xmit_xdp_frame()
    550  memcpy(eseg->inline_hdr.start, xdptxd->data, sizeof(eseg->inline_hdr.start));  in mlx5e_xmit_xdp_frame()
    551  memcpy(dseg, xdptxd->data + sizeof(eseg->inline_hdr.start),  in mlx5e_xmit_xdp_frame()
    552  inline_hdr_sz - sizeof(eseg->inline_hdr.start));  in mlx5e_xmit_xdp_frame()
    572  memset(eseg, 0, sizeof(*eseg) - sizeof(eseg->trailer));  in mlx5e_xmit_xdp_frame()
    574  eseg->inline_hdr.sz = cpu_to_be16(inline_hdr_sz);  in mlx5e_xmit_xdp_frame()
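The xdp.c hits show how the XDP transmit path inlines packet headers: the first sizeof(eseg->inline_hdr.start) bytes go into the Ethernet segment itself and the remainder of the inline header spills into the following data segment. A self-contained model of that split copy; the 2-byte start area and the field layout are assumptions standing in for the real WQE format.

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Simplified stand-in: in the real WQE the inline header begins inside the
 * Ethernet segment (inline_hdr.start) and continues in the next segment. */
struct inline_hdr_model {
	uint16_t sz;		/* stored big-endian in the real eseg */
	uint8_t  start[2];	/* first bytes of the inlined headers */
};

/* Shape of the copies in mlx5e_xmit_xdp_frame(): split inline_hdr_sz bytes of
 * packet data between inline_hdr.start and the following dseg area. */
void copy_inline_hdr(struct inline_hdr_model *ih, uint8_t *dseg_area,
		     const uint8_t *data, uint16_t inline_hdr_sz)
{
	memcpy(ih->start, data, sizeof(ih->start));
	memcpy(dseg_area, data + sizeof(ih->start),
	       inline_hdr_sz - sizeof(ih->start));
	ih->sz = inline_hdr_sz;		/* the driver stores this with cpu_to_be16() */
}

int main(void)
{
	uint8_t pkt[32], dseg[32];
	struct inline_hdr_model ih = { 0 };

	for (unsigned int i = 0; i < sizeof(pkt); i++)
		pkt[i] = (uint8_t)i;
	copy_inline_hdr(&ih, dseg, pkt, 18);	/* e.g. Ethernet header + VLAN tag */
	printf("start=%02x%02x, %u bytes spilled to dseg\n",
	       ih.start[0], ih.start[1], ih.sz - (unsigned int)sizeof(ih.start));
	return 0;
}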
|
/linux-6.6.21/drivers/infiniband/hw/mlx5/ |
D | wr.c |
     57  struct mlx5_wqe_eth_seg *eseg = *seg;  in set_eth_seg() local
     59  memset(eseg, 0, sizeof(struct mlx5_wqe_eth_seg));  in set_eth_seg()
     62  eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM |  in set_eth_seg()
     72  eseg->mss = cpu_to_be16(ud_wr->mss);  in set_eth_seg()
     73  eseg->inline_hdr.sz = cpu_to_be16(left);  in set_eth_seg()
     79  copysz = min_t(u64, *cur_edge - (void *)eseg->inline_hdr.start,  in set_eth_seg()
     81  memcpy(eseg->inline_hdr.start, pdata, copysz);  in set_eth_seg()
     83  sizeof(eseg->inline_hdr.start) + copysz, 16);  in set_eth_seg()
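The InfiniBand wr.c hit is the same idea on the RDMA side: for a UD LSO work request the MSS and inline header length are written into the eseg, and the header copy is clamped to the end of the current WQE fragment (*cur_edge) because a WQE can wrap around the queue buffer. A hedged, self-contained sketch of just that clamping step:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Shape of the first copy in set_eth_seg(): copy at most the bytes that fit
 * between the inline-header start and the end of the current queue fragment
 * (cur_edge); in the real driver the rest of the header is copied after
 * advancing to the next fragment. */
size_t copy_to_edge(uint8_t *inline_start, const void *cur_edge,
		    const uint8_t *pdata, size_t left)
{
	size_t room = (size_t)((const uint8_t *)cur_edge - inline_start);
	size_t copysz = left < room ? left : room;	/* the min_t(u64, ...) above */

	memcpy(inline_start, pdata, copysz);
	return copysz;
}

int main(void)
{
	uint8_t wqe_frag[64], hdr[80];

	memset(hdr, 0xab, sizeof(hdr));
	/* Pretend the inline area starts 16 bytes into a 64-byte fragment. */
	size_t copied = copy_to_edge(wqe_frag + 16, wqe_frag + sizeof(wqe_frag),
				     hdr, sizeof(hdr));
	printf("copied %zu of %zu header bytes before hitting the edge\n",
	       copied, sizeof(hdr));
	return 0;
}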
|