1 // SPDX-License-Identifier: BSD-3-Clause-Clear
2 /*
3 * Copyright (c) 2018-2021 The Linux Foundation. All rights reserved.
4 * Copyright (c) 2021-2022 Qualcomm Innovation Center, Inc. All rights reserved.
5 */
6 #include <linux/dma-mapping.h>
7 #include "hal_tx.h"
8 #include "hal_rx.h"
9 #include "debug.h"
10 #include "hal_desc.h"
11 #include "hif.h"
12
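/* Template of the host SRNG configurations. The create_srng_config()
 * callbacks below duplicate this table per device and fill in the
 * chip-specific register offsets for the rings programmed by the host.
 */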
13 static const struct hal_srng_config hw_srng_config_template[] = {
14 /* TODO: max_rings can be populated by querying HW capabilities */
15 [HAL_REO_DST] = {
16 .start_ring_id = HAL_SRNG_RING_ID_REO2SW1,
17 .max_rings = 8,
18 .entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
19 .mac_type = ATH12K_HAL_SRNG_UMAC,
20 .ring_dir = HAL_SRNG_DIR_DST,
21 .max_size = HAL_REO_REO2SW1_RING_BASE_MSB_RING_SIZE,
22 },
23 [HAL_REO_EXCEPTION] = {
24 /* Designating REO2SW0 ring as exception ring.
25 * Any of the REO2SW rings can be used as the exception ring.
26 */
27 .start_ring_id = HAL_SRNG_RING_ID_REO2SW0,
28 .max_rings = 1,
29 .entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
30 .mac_type = ATH12K_HAL_SRNG_UMAC,
31 .ring_dir = HAL_SRNG_DIR_DST,
32 .max_size = HAL_REO_REO2SW0_RING_BASE_MSB_RING_SIZE,
33 },
34 [HAL_REO_REINJECT] = {
35 .start_ring_id = HAL_SRNG_RING_ID_SW2REO,
36 .max_rings = 4,
37 .entry_size = sizeof(struct hal_reo_entrance_ring) >> 2,
38 .mac_type = ATH12K_HAL_SRNG_UMAC,
39 .ring_dir = HAL_SRNG_DIR_SRC,
40 .max_size = HAL_REO_SW2REO_RING_BASE_MSB_RING_SIZE,
41 },
42 [HAL_REO_CMD] = {
43 .start_ring_id = HAL_SRNG_RING_ID_REO_CMD,
44 .max_rings = 1,
45 .entry_size = (sizeof(struct hal_tlv_64_hdr) +
46 sizeof(struct hal_reo_get_queue_stats)) >> 2,
47 .mac_type = ATH12K_HAL_SRNG_UMAC,
48 .ring_dir = HAL_SRNG_DIR_SRC,
49 .max_size = HAL_REO_CMD_RING_BASE_MSB_RING_SIZE,
50 },
51 [HAL_REO_STATUS] = {
52 .start_ring_id = HAL_SRNG_RING_ID_REO_STATUS,
53 .max_rings = 1,
54 .entry_size = (sizeof(struct hal_tlv_64_hdr) +
55 sizeof(struct hal_reo_get_queue_stats_status)) >> 2,
56 .mac_type = ATH12K_HAL_SRNG_UMAC,
57 .ring_dir = HAL_SRNG_DIR_DST,
58 .max_size = HAL_REO_STATUS_RING_BASE_MSB_RING_SIZE,
59 },
60 [HAL_TCL_DATA] = {
61 .start_ring_id = HAL_SRNG_RING_ID_SW2TCL1,
62 .max_rings = 6,
63 .entry_size = sizeof(struct hal_tcl_data_cmd) >> 2,
64 .mac_type = ATH12K_HAL_SRNG_UMAC,
65 .ring_dir = HAL_SRNG_DIR_SRC,
66 .max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
67 },
68 [HAL_TCL_CMD] = {
69 .start_ring_id = HAL_SRNG_RING_ID_SW2TCL_CMD,
70 .max_rings = 1,
71 .entry_size = sizeof(struct hal_tcl_gse_cmd) >> 2,
72 .mac_type = ATH12K_HAL_SRNG_UMAC,
73 .ring_dir = HAL_SRNG_DIR_SRC,
74 .max_size = HAL_SW2TCL1_CMD_RING_BASE_MSB_RING_SIZE,
75 },
76 [HAL_TCL_STATUS] = {
77 .start_ring_id = HAL_SRNG_RING_ID_TCL_STATUS,
78 .max_rings = 1,
79 .entry_size = (sizeof(struct hal_tlv_hdr) +
80 sizeof(struct hal_tcl_status_ring)) >> 2,
81 .mac_type = ATH12K_HAL_SRNG_UMAC,
82 .ring_dir = HAL_SRNG_DIR_DST,
83 .max_size = HAL_TCL_STATUS_RING_BASE_MSB_RING_SIZE,
84 },
85 [HAL_CE_SRC] = {
86 .start_ring_id = HAL_SRNG_RING_ID_CE0_SRC,
87 .max_rings = 16,
88 .entry_size = sizeof(struct hal_ce_srng_src_desc) >> 2,
89 .mac_type = ATH12K_HAL_SRNG_UMAC,
90 .ring_dir = HAL_SRNG_DIR_SRC,
91 .max_size = HAL_CE_SRC_RING_BASE_MSB_RING_SIZE,
92 },
93 [HAL_CE_DST] = {
94 .start_ring_id = HAL_SRNG_RING_ID_CE0_DST,
95 .max_rings = 16,
96 .entry_size = sizeof(struct hal_ce_srng_dest_desc) >> 2,
97 .mac_type = ATH12K_HAL_SRNG_UMAC,
98 .ring_dir = HAL_SRNG_DIR_SRC,
99 .max_size = HAL_CE_DST_RING_BASE_MSB_RING_SIZE,
100 },
101 [HAL_CE_DST_STATUS] = {
102 .start_ring_id = HAL_SRNG_RING_ID_CE0_DST_STATUS,
103 .max_rings = 16,
104 .entry_size = sizeof(struct hal_ce_srng_dst_status_desc) >> 2,
105 .mac_type = ATH12K_HAL_SRNG_UMAC,
106 .ring_dir = HAL_SRNG_DIR_DST,
107 .max_size = HAL_CE_DST_STATUS_RING_BASE_MSB_RING_SIZE,
108 },
109 [HAL_WBM_IDLE_LINK] = {
110 .start_ring_id = HAL_SRNG_RING_ID_WBM_IDLE_LINK,
111 .max_rings = 1,
112 .entry_size = sizeof(struct hal_wbm_link_desc) >> 2,
113 .mac_type = ATH12K_HAL_SRNG_UMAC,
114 .ring_dir = HAL_SRNG_DIR_SRC,
115 .max_size = HAL_WBM_IDLE_LINK_RING_BASE_MSB_RING_SIZE,
116 },
117 [HAL_SW2WBM_RELEASE] = {
118 .start_ring_id = HAL_SRNG_RING_ID_WBM_SW0_RELEASE,
119 .max_rings = 2,
120 .entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
121 .mac_type = ATH12K_HAL_SRNG_UMAC,
122 .ring_dir = HAL_SRNG_DIR_SRC,
123 .max_size = HAL_SW2WBM_RELEASE_RING_BASE_MSB_RING_SIZE,
124 },
125 [HAL_WBM2SW_RELEASE] = {
126 .start_ring_id = HAL_SRNG_RING_ID_WBM2SW0_RELEASE,
127 .max_rings = 8,
128 .entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
129 .mac_type = ATH12K_HAL_SRNG_UMAC,
130 .ring_dir = HAL_SRNG_DIR_DST,
131 .max_size = HAL_WBM2SW_RELEASE_RING_BASE_MSB_RING_SIZE,
132 },
133 [HAL_RXDMA_BUF] = {
134 .start_ring_id = HAL_SRNG_SW2RXDMA_BUF0,
135 .max_rings = 1,
136 .entry_size = sizeof(struct hal_wbm_buffer_ring) >> 2,
137 .mac_type = ATH12K_HAL_SRNG_DMAC,
138 .ring_dir = HAL_SRNG_DIR_SRC,
139 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
140 },
141 [HAL_RXDMA_DST] = {
142 .start_ring_id = HAL_SRNG_RING_ID_WMAC1_RXDMA2SW0,
143 .max_rings = 0,
144 .entry_size = 0,
145 .mac_type = ATH12K_HAL_SRNG_PMAC,
146 .ring_dir = HAL_SRNG_DIR_DST,
147 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
148 },
149 [HAL_RXDMA_MONITOR_BUF] = {
150 .start_ring_id = HAL_SRNG_SW2RXMON_BUF0,
151 .max_rings = 1,
152 .entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
153 .mac_type = ATH12K_HAL_SRNG_PMAC,
154 .ring_dir = HAL_SRNG_DIR_SRC,
155 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
156 },
157 [HAL_RXDMA_MONITOR_STATUS] = { 0, },
158 [HAL_RXDMA_MONITOR_DESC] = { 0, },
159 [HAL_RXDMA_DIR_BUF] = {
160 .start_ring_id = HAL_SRNG_RING_ID_RXDMA_DIR_BUF,
161 .max_rings = 2,
162 .entry_size = 8 >> 2, /* TODO: Define the struct */
163 .mac_type = ATH12K_HAL_SRNG_PMAC,
164 .ring_dir = HAL_SRNG_DIR_SRC,
165 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
166 },
167 [HAL_PPE2TCL] = {
168 .start_ring_id = HAL_SRNG_RING_ID_PPE2TCL1,
169 .max_rings = 1,
170 .entry_size = sizeof(struct hal_tcl_entrance_from_ppe_ring) >> 2,
171 .mac_type = ATH12K_HAL_SRNG_PMAC,
172 .ring_dir = HAL_SRNG_DIR_SRC,
173 .max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
174 },
175 [HAL_PPE_RELEASE] = {
176 .start_ring_id = HAL_SRNG_RING_ID_WBM_PPE_RELEASE,
177 .max_rings = 1,
178 .entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
179 .mac_type = ATH12K_HAL_SRNG_PMAC,
180 .ring_dir = HAL_SRNG_DIR_SRC,
181 .max_size = HAL_WBM2PPE_RELEASE_RING_BASE_MSB_RING_SIZE,
182 },
183 [HAL_TX_MONITOR_BUF] = {
184 .start_ring_id = HAL_SRNG_SW2TXMON_BUF0,
185 .max_rings = 1,
186 .entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
187 .mac_type = ATH12K_HAL_SRNG_PMAC,
188 .ring_dir = HAL_SRNG_DIR_SRC,
189 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
190 },
191 [HAL_RXDMA_MONITOR_DST] = {
192 .start_ring_id = HAL_SRNG_RING_ID_WMAC1_SW2RXMON_BUF0,
193 .max_rings = 1,
194 .entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
195 .mac_type = ATH12K_HAL_SRNG_PMAC,
196 .ring_dir = HAL_SRNG_DIR_DST,
197 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
198 },
199 [HAL_TX_MONITOR_DST] = {
200 .start_ring_id = HAL_SRNG_RING_ID_WMAC1_TXMON2SW0_BUF0,
201 .max_rings = 1,
202 .entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
203 .mac_type = ATH12K_HAL_SRNG_PMAC,
204 .ring_dir = HAL_SRNG_DIR_DST,
205 .max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
206 }
207 };
208
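/* Per-chip mapping from TCL data ring index to the corresponding WBM2SW
 * release ring number and return buffer manager (RBM) ID.
 */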
209 static const struct ath12k_hal_tcl_to_wbm_rbm_map
210 ath12k_hal_qcn9274_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
211 {
212 .wbm_ring_num = 0,
213 .rbm_id = HAL_RX_BUF_RBM_SW0_BM,
214 },
215 {
216 .wbm_ring_num = 1,
217 .rbm_id = HAL_RX_BUF_RBM_SW1_BM,
218 },
219 {
220 .wbm_ring_num = 2,
221 .rbm_id = HAL_RX_BUF_RBM_SW2_BM,
222 },
223 {
224 .wbm_ring_num = 4,
225 .rbm_id = HAL_RX_BUF_RBM_SW4_BM,
226 }
227 };
228
229 static const struct ath12k_hal_tcl_to_wbm_rbm_map
230 ath12k_hal_wcn7850_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
231 {
232 .wbm_ring_num = 0,
233 .rbm_id = HAL_RX_BUF_RBM_SW0_BM,
234 },
235 {
236 .wbm_ring_num = 2,
237 .rbm_id = HAL_RX_BUF_RBM_SW2_BM,
238 },
239 {
240 .wbm_ring_num = 4,
241 .rbm_id = HAL_RX_BUF_RBM_SW4_BM,
242 },
243 };
244
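/* Offsets of the REO destination ring registers relative to the ring's
 * BASE_LSB register; ath12k_hal_srng_dst_hw_init() adds them to a ring's
 * hwreg_base so the same code can program any destination ring.
 */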
245 static unsigned int ath12k_hal_reo1_ring_id_offset(struct ath12k_base *ab)
246 {
247 return HAL_REO1_RING_ID(ab) - HAL_REO1_RING_BASE_LSB(ab);
248 }
249
250 static unsigned int ath12k_hal_reo1_ring_msi1_base_lsb_offset(struct ath12k_base *ab)
251 {
252 return HAL_REO1_RING_MSI1_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
253 }
254
255 static unsigned int ath12k_hal_reo1_ring_msi1_base_msb_offset(struct ath12k_base *ab)
256 {
257 return HAL_REO1_RING_MSI1_BASE_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
258 }
259
260 static unsigned int ath12k_hal_reo1_ring_msi1_data_offset(struct ath12k_base *ab)
261 {
262 return HAL_REO1_RING_MSI1_DATA(ab) - HAL_REO1_RING_BASE_LSB(ab);
263 }
264
265 static unsigned int ath12k_hal_reo1_ring_base_msb_offset(struct ath12k_base *ab)
266 {
267 return HAL_REO1_RING_BASE_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
268 }
269
270 static unsigned int ath12k_hal_reo1_ring_producer_int_setup_offset(struct ath12k_base *ab)
271 {
272 return HAL_REO1_RING_PRODUCER_INT_SETUP(ab) - HAL_REO1_RING_BASE_LSB(ab);
273 }
274
275 static unsigned int ath12k_hal_reo1_ring_hp_addr_lsb_offset(struct ath12k_base *ab)
276 {
277 return HAL_REO1_RING_HP_ADDR_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
278 }
279
280 static unsigned int ath12k_hal_reo1_ring_hp_addr_msb_offset(struct ath12k_base *ab)
281 {
282 return HAL_REO1_RING_HP_ADDR_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
283 }
284
285 static unsigned int ath12k_hal_reo1_ring_misc_offset(struct ath12k_base *ab)
286 {
287 return HAL_REO1_RING_MISC(ab) - HAL_REO1_RING_BASE_LSB(ab);
288 }
289
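/* QCN9274 specific accessors for the hardware RX descriptor fields,
 * exported through hal_qcn9274_ops below.
 */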
290 static bool ath12k_hw_qcn9274_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
291 {
292 return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
293 RX_MSDU_END_INFO5_FIRST_MSDU);
294 }
295
296 static bool ath12k_hw_qcn9274_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
297 {
298 return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
299 RX_MSDU_END_INFO5_LAST_MSDU);
300 }
301
302 static u8 ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
303 {
304 return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
305 RX_MSDU_END_INFO5_L3_HDR_PADDING);
306 }
307
308 static bool ath12k_hw_qcn9274_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
309 {
310 return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
311 RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
312 }
313
314 static u32 ath12k_hw_qcn9274_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
315 {
316 return le32_get_bits(desc->u.qcn9274.mpdu_start.info2,
317 RX_MPDU_START_INFO2_ENC_TYPE);
318 }
319
320 static u8 ath12k_hw_qcn9274_rx_desc_get_decap_type(struct hal_rx_desc *desc)
321 {
322 return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
323 RX_MSDU_END_INFO11_DECAP_FORMAT);
324 }
325
326 static u8 ath12k_hw_qcn9274_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
327 {
328 return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
329 RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
330 }
331
332 static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
333 {
334 return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
335 RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
336 }
337
338 static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
339 {
340 return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
341 RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
342 }
343
344 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
345 {
346 return le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
347 RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
348 }
349
350 static u16 ath12k_hw_qcn9274_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
351 {
352 return le32_get_bits(desc->u.qcn9274.msdu_end.info10,
353 RX_MSDU_END_INFO10_MSDU_LENGTH);
354 }
355
356 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
357 {
358 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
359 RX_MSDU_END_INFO12_SGI);
360 }
361
362 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
363 {
364 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
365 RX_MSDU_END_INFO12_RATE_MCS);
366 }
367
368 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
369 {
370 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
371 RX_MSDU_END_INFO12_RECV_BW);
372 }
373
374 static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
375 {
376 return __le32_to_cpu(desc->u.qcn9274.msdu_end.phy_meta_data);
377 }
378
379 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
380 {
381 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
382 RX_MSDU_END_INFO12_PKT_TYPE);
383 }
384
385 static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
386 {
387 return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
388 RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
389 }
390
391 static u8 ath12k_hw_qcn9274_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
392 {
393 return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
394 RX_MSDU_END_INFO5_TID);
395 }
396
397 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
398 {
399 return __le16_to_cpu(desc->u.qcn9274.mpdu_start.sw_peer_id);
400 }
401
402 static void ath12k_hw_qcn9274_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
403 struct hal_rx_desc *ldesc)
404 {
405 memcpy(&fdesc->u.qcn9274.msdu_end, &ldesc->u.qcn9274.msdu_end,
406 sizeof(struct rx_msdu_end_qcn9274));
407 }
408
409 static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
410 {
411 return __le16_to_cpu(desc->u.qcn9274.mpdu_start.phy_ppdu_id);
412 }
413
414 static void ath12k_hw_qcn9274_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
415 {
416 u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info10);
417
418 info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
419 info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);
420
421 desc->u.qcn9274.msdu_end.info10 = __cpu_to_le32(info);
422 }
423
424 static u8 *ath12k_hw_qcn9274_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
425 {
426 return &desc->u.qcn9274.msdu_payload[0];
427 }
428
429 static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset(void)
430 {
431 return offsetof(struct hal_rx_desc_qcn9274, mpdu_start);
432 }
433
434 static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset(void)
435 {
436 return offsetof(struct hal_rx_desc_qcn9274, msdu_end);
437 }
438
439 static bool ath12k_hw_qcn9274_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
440 {
441 return __le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
442 RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
443 }
444
445 static u8 *ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
446 {
447 return desc->u.qcn9274.mpdu_start.addr2;
448 }
449
450 static bool ath12k_hw_qcn9274_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
451 {
452 return __le16_to_cpu(desc->u.qcn9274.msdu_end.info5) &
453 RX_MSDU_END_INFO5_DA_IS_MCBC;
454 }
455
456 static void ath12k_hw_qcn9274_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
457 struct ieee80211_hdr *hdr)
458 {
459 hdr->frame_control = desc->u.qcn9274.mpdu_start.frame_ctrl;
460 hdr->duration_id = desc->u.qcn9274.mpdu_start.duration;
461 ether_addr_copy(hdr->addr1, desc->u.qcn9274.mpdu_start.addr1);
462 ether_addr_copy(hdr->addr2, desc->u.qcn9274.mpdu_start.addr2);
463 ether_addr_copy(hdr->addr3, desc->u.qcn9274.mpdu_start.addr3);
464 if (__le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
465 RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
466 ether_addr_copy(hdr->addr4, desc->u.qcn9274.mpdu_start.addr4);
467 }
468 hdr->seq_ctrl = desc->u.qcn9274.mpdu_start.seq_ctrl;
469 }
470
471 static void ath12k_hw_qcn9274_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
472 u8 *crypto_hdr,
473 enum hal_encrypt_type enctype)
474 {
475 unsigned int key_id;
476
477 switch (enctype) {
478 case HAL_ENCRYPT_TYPE_OPEN:
479 return;
480 case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
481 case HAL_ENCRYPT_TYPE_TKIP_MIC:
482 crypto_hdr[0] =
483 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
484 crypto_hdr[1] = 0;
485 crypto_hdr[2] =
486 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
487 break;
488 case HAL_ENCRYPT_TYPE_CCMP_128:
489 case HAL_ENCRYPT_TYPE_CCMP_256:
490 case HAL_ENCRYPT_TYPE_GCMP_128:
491 case HAL_ENCRYPT_TYPE_AES_GCMP_256:
492 crypto_hdr[0] =
493 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
494 crypto_hdr[1] =
495 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
496 crypto_hdr[2] = 0;
497 break;
498 case HAL_ENCRYPT_TYPE_WEP_40:
499 case HAL_ENCRYPT_TYPE_WEP_104:
500 case HAL_ENCRYPT_TYPE_WEP_128:
501 case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
502 case HAL_ENCRYPT_TYPE_WAPI:
503 return;
504 }
505 key_id = le32_get_bits(desc->u.qcn9274.mpdu_start.info5,
506 RX_MPDU_START_INFO5_KEY_ID);
507 crypto_hdr[3] = 0x20 | (key_id << 6);
508 crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.qcn9274.mpdu_start.pn[0]);
509 crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.qcn9274.mpdu_start.pn[0]);
510 crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[1]);
511 crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[1]);
512 }
513
514 static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_frame_ctl(struct hal_rx_desc *desc)
515 {
516 return __le16_to_cpu(desc->u.qcn9274.mpdu_start.frame_ctrl);
517 }
518
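/* Duplicate the SRNG template and fill in the QCN9274 UMAC register offsets
 * for the rings accessed by the host.
 */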
519 static int ath12k_hal_srng_create_config_qcn9274(struct ath12k_base *ab)
520 {
521 struct ath12k_hal *hal = &ab->hal;
522 struct hal_srng_config *s;
523
524 hal->srng_config = kmemdup(hw_srng_config_template,
525 sizeof(hw_srng_config_template),
526 GFP_KERNEL);
527 if (!hal->srng_config)
528 return -ENOMEM;
529
530 s = &hal->srng_config[HAL_REO_DST];
531 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
532 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
533 s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
534 s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;
535
536 s = &hal->srng_config[HAL_REO_EXCEPTION];
537 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
538 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;
539
540 s = &hal->srng_config[HAL_REO_REINJECT];
541 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
542 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
543 s->reg_size[0] = HAL_SW2REO1_RING_BASE_LSB(ab) - HAL_SW2REO_RING_BASE_LSB(ab);
544 s->reg_size[1] = HAL_SW2REO1_RING_HP - HAL_SW2REO_RING_HP;
545
546 s = &hal->srng_config[HAL_REO_CMD];
547 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
548 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;
549
550 s = &hal->srng_config[HAL_REO_STATUS];
551 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
552 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;
553
554 s = &hal->srng_config[HAL_TCL_DATA];
555 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB;
556 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
557 s->reg_size[0] = HAL_TCL2_RING_BASE_LSB - HAL_TCL1_RING_BASE_LSB;
558 s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;
559
560 s = &hal->srng_config[HAL_TCL_CMD];
561 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
562 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;
563
564 s = &hal->srng_config[HAL_TCL_STATUS];
565 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
566 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;
567
568 s = &hal->srng_config[HAL_CE_SRC];
569 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_BASE_LSB;
570 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_HP;
571 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
572 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
573 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
574 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
575
576 s = &hal->srng_config[HAL_CE_DST];
577 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_BASE_LSB;
578 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_HP;
579 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
580 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
581 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
582 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
583
584 s = &hal->srng_config[HAL_CE_DST_STATUS];
585 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG +
586 HAL_CE_DST_STATUS_RING_BASE_LSB;
587 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_STATUS_RING_HP;
588 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
589 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
590 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
591 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
592
593 s = &hal->srng_config[HAL_WBM_IDLE_LINK];
594 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
595 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;
596
597 s = &hal->srng_config[HAL_SW2WBM_RELEASE];
598 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
599 HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
600 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
601 s->reg_size[0] = HAL_WBM_SW1_RELEASE_RING_BASE_LSB(ab) -
602 HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
603 s->reg_size[1] = HAL_WBM_SW1_RELEASE_RING_HP - HAL_WBM_SW_RELEASE_RING_HP;
604
605 s = &hal->srng_config[HAL_WBM2SW_RELEASE];
606 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
607 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
608 s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
609 HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
610 s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;
611
612 /* Some LMAC rings are not accessed from the host:
613 * RXDMA_BUF, RXDMA_DST, RXDMA_MONITOR_BUF, RXDMA_MONITOR_STATUS,
614 * RXDMA_MONITOR_DST, RXDMA_MONITOR_DESC, RXDMA_DIR_BUF_SRC,
615 * RXDMA_RX_MONITOR_BUF, TX_MONITOR_BUF, TX_MONITOR_DST, SW2RXDMA
616 */
617 s = &hal->srng_config[HAL_PPE2TCL];
618 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_BASE_LSB;
619 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_HP;
620
621 s = &hal->srng_config[HAL_PPE_RELEASE];
622 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
623 HAL_WBM_PPE_RELEASE_RING_BASE_LSB(ab);
624 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_PPE_RELEASE_RING_HP;
625
626 return 0;
627 }
628
629 static bool ath12k_hw_qcn9274_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
630 {
631 return !!le32_get_bits(desc->u.qcn9274.msdu_end.info14,
632 RX_MSDU_END_INFO14_MSDU_DONE);
633 }
634
635 static bool ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
636 {
637 return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
638 RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
639 }
640
641 static bool ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
642 {
643 return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
644 RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
645 }
646
647 static bool ath12k_hw_qcn9274_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
648 {
649 return (le32_get_bits(desc->u.qcn9274.msdu_end.info14,
650 RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
651 RX_DESC_DECRYPT_STATUS_CODE_OK);
652 }
653
654 static u32 ath12k_hw_qcn9274_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
655 {
656 u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info13);
657 u32 errmap = 0;
658
659 if (info & RX_MSDU_END_INFO13_FCS_ERR)
660 errmap |= HAL_RX_MPDU_ERR_FCS;
661
662 if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
663 errmap |= HAL_RX_MPDU_ERR_DECRYPT;
664
665 if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
666 errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
667
668 if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
669 errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
670
671 if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
672 errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
673
674 if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
675 errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
676
677 if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
678 errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
679
680 return errmap;
681 }
682
683 const struct hal_ops hal_qcn9274_ops = {
684 .rx_desc_get_first_msdu = ath12k_hw_qcn9274_rx_desc_get_first_msdu,
685 .rx_desc_get_last_msdu = ath12k_hw_qcn9274_rx_desc_get_last_msdu,
686 .rx_desc_get_l3_pad_bytes = ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes,
687 .rx_desc_encrypt_valid = ath12k_hw_qcn9274_rx_desc_encrypt_valid,
688 .rx_desc_get_encrypt_type = ath12k_hw_qcn9274_rx_desc_get_encrypt_type,
689 .rx_desc_get_decap_type = ath12k_hw_qcn9274_rx_desc_get_decap_type,
690 .rx_desc_get_mesh_ctl = ath12k_hw_qcn9274_rx_desc_get_mesh_ctl,
691 .rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld,
692 .rx_desc_get_mpdu_fc_valid = ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid,
693 .rx_desc_get_mpdu_start_seq_no = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no,
694 .rx_desc_get_msdu_len = ath12k_hw_qcn9274_rx_desc_get_msdu_len,
695 .rx_desc_get_msdu_sgi = ath12k_hw_qcn9274_rx_desc_get_msdu_sgi,
696 .rx_desc_get_msdu_rate_mcs = ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs,
697 .rx_desc_get_msdu_rx_bw = ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw,
698 .rx_desc_get_msdu_freq = ath12k_hw_qcn9274_rx_desc_get_msdu_freq,
699 .rx_desc_get_msdu_pkt_type = ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type,
700 .rx_desc_get_msdu_nss = ath12k_hw_qcn9274_rx_desc_get_msdu_nss,
701 .rx_desc_get_mpdu_tid = ath12k_hw_qcn9274_rx_desc_get_mpdu_tid,
702 .rx_desc_get_mpdu_peer_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id,
703 .rx_desc_copy_end_tlv = ath12k_hw_qcn9274_rx_desc_copy_end_tlv,
704 .rx_desc_get_mpdu_ppdu_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id,
705 .rx_desc_set_msdu_len = ath12k_hw_qcn9274_rx_desc_set_msdu_len,
706 .rx_desc_get_msdu_payload = ath12k_hw_qcn9274_rx_desc_get_msdu_payload,
707 .rx_desc_get_mpdu_start_offset = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset,
708 .rx_desc_get_msdu_end_offset = ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset,
709 .rx_desc_mac_addr2_valid = ath12k_hw_qcn9274_rx_desc_mac_addr2_valid,
710 .rx_desc_mpdu_start_addr2 = ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2,
711 .rx_desc_is_da_mcbc = ath12k_hw_qcn9274_rx_desc_is_da_mcbc,
712 .rx_desc_get_dot11_hdr = ath12k_hw_qcn9274_rx_desc_get_dot11_hdr,
713 .rx_desc_get_crypto_header = ath12k_hw_qcn9274_rx_desc_get_crypto_hdr,
714 .rx_desc_get_mpdu_frame_ctl = ath12k_hw_qcn9274_rx_desc_get_mpdu_frame_ctl,
715 .create_srng_config = ath12k_hal_srng_create_config_qcn9274,
716 .tcl_to_wbm_rbm_map = ath12k_hal_qcn9274_tcl_to_wbm_rbm_map,
717 .dp_rx_h_msdu_done = ath12k_hw_qcn9274_dp_rx_h_msdu_done,
718 .dp_rx_h_l4_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail,
719 .dp_rx_h_ip_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail,
720 .dp_rx_h_is_decrypted = ath12k_hw_qcn9274_dp_rx_h_is_decrypted,
721 .dp_rx_h_mpdu_err = ath12k_hw_qcn9274_dp_rx_h_mpdu_err,
722 };
723
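/* WCN7850 specific RX descriptor accessors and SRNG configuration,
 * exported through hal_wcn7850_ops below.
 */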
724 static bool ath12k_hw_wcn7850_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
725 {
726 return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
727 RX_MSDU_END_INFO5_FIRST_MSDU);
728 }
729
730 static bool ath12k_hw_wcn7850_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
731 {
732 return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
733 RX_MSDU_END_INFO5_LAST_MSDU);
734 }
735
736 static u8 ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
737 {
738 return le16_get_bits(desc->u.wcn7850.msdu_end.info5,
739 RX_MSDU_END_INFO5_L3_HDR_PADDING);
740 }
741
742 static bool ath12k_hw_wcn7850_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
743 {
744 return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
745 RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
746 }
747
748 static u32 ath12k_hw_wcn7850_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
749 {
750 return le32_get_bits(desc->u.wcn7850.mpdu_start.info2,
751 RX_MPDU_START_INFO2_ENC_TYPE);
752 }
753
754 static u8 ath12k_hw_wcn7850_rx_desc_get_decap_type(struct hal_rx_desc *desc)
755 {
756 return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
757 RX_MSDU_END_INFO11_DECAP_FORMAT);
758 }
759
760 static u8 ath12k_hw_wcn7850_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
761 {
762 return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
763 RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
764 }
765
766 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
767 {
768 return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
769 RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
770 }
771
772 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
773 {
774 return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
775 RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
776 }
777
778 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
779 {
780 return le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
781 RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
782 }
783
784 static u16 ath12k_hw_wcn7850_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
785 {
786 return le32_get_bits(desc->u.wcn7850.msdu_end.info10,
787 RX_MSDU_END_INFO10_MSDU_LENGTH);
788 }
789
790 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
791 {
792 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
793 RX_MSDU_END_INFO12_SGI);
794 }
795
796 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
797 {
798 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
799 RX_MSDU_END_INFO12_RATE_MCS);
800 }
801
802 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
803 {
804 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
805 RX_MSDU_END_INFO12_RECV_BW);
806 }
807
808 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
809 {
810 return __le32_to_cpu(desc->u.wcn7850.msdu_end.phy_meta_data);
811 }
812
813 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
814 {
815 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
816 RX_MSDU_END_INFO12_PKT_TYPE);
817 }
818
819 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
820 {
821 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
822 RX_MSDU_END_INFO12_MIMO_SS_BITMAP);
823 }
824
825 static u8 ath12k_hw_wcn7850_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
826 {
827 return le16_get_bits(desc->u.wcn7850.msdu_end.info5,
828 RX_MSDU_END_INFO5_TID);
829 }
830
831 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
832 {
833 return __le16_to_cpu(desc->u.wcn7850.mpdu_start.sw_peer_id);
834 }
835
836 static void ath12k_hw_wcn7850_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
837 struct hal_rx_desc *ldesc)
838 {
839 memcpy(&fdesc->u.wcn7850.msdu_end, &ldesc->u.wcn7850.msdu_end,
840 sizeof(struct rx_msdu_end_qcn9274));
841 }
842
843 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag(struct hal_rx_desc *desc)
844 {
845 return le64_get_bits(desc->u.wcn7850.mpdu_start_tag,
846 HAL_TLV_HDR_TAG);
847 }
848
849 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
850 {
851 return __le16_to_cpu(desc->u.wcn7850.mpdu_start.phy_ppdu_id);
852 }
853
854 static void ath12k_hw_wcn7850_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
855 {
856 u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info10);
857
858 info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
859 info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);
860
861 desc->u.wcn7850.msdu_end.info10 = __cpu_to_le32(info);
862 }
863
864 static u8 *ath12k_hw_wcn7850_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
865 {
866 return &desc->u.wcn7850.msdu_payload[0];
867 }
868
869 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset(void)
870 {
871 return offsetof(struct hal_rx_desc_wcn7850, mpdu_start_tag);
872 }
873
874 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset(void)
875 {
876 return offsetof(struct hal_rx_desc_wcn7850, msdu_end_tag);
877 }
878
879 static bool ath12k_hw_wcn7850_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
880 {
881 return __le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
882 RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
883 }
884
885 static u8 *ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
886 {
887 return desc->u.wcn7850.mpdu_start.addr2;
888 }
889
890 static bool ath12k_hw_wcn7850_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
891 {
892 return __le32_to_cpu(desc->u.wcn7850.msdu_end.info13) &
893 RX_MSDU_END_INFO13_MCAST_BCAST;
894 }
895
896 static void ath12k_hw_wcn7850_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
897 struct ieee80211_hdr *hdr)
898 {
899 hdr->frame_control = desc->u.wcn7850.mpdu_start.frame_ctrl;
900 hdr->duration_id = desc->u.wcn7850.mpdu_start.duration;
901 ether_addr_copy(hdr->addr1, desc->u.wcn7850.mpdu_start.addr1);
902 ether_addr_copy(hdr->addr2, desc->u.wcn7850.mpdu_start.addr2);
903 ether_addr_copy(hdr->addr3, desc->u.wcn7850.mpdu_start.addr3);
904 if (__le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
905 RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
906 ether_addr_copy(hdr->addr4, desc->u.wcn7850.mpdu_start.addr4);
907 }
908 hdr->seq_ctrl = desc->u.wcn7850.mpdu_start.seq_ctrl;
909 }
910
911 static void ath12k_hw_wcn7850_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
912 u8 *crypto_hdr,
913 enum hal_encrypt_type enctype)
914 {
915 unsigned int key_id;
916
917 switch (enctype) {
918 case HAL_ENCRYPT_TYPE_OPEN:
919 return;
920 case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
921 case HAL_ENCRYPT_TYPE_TKIP_MIC:
922 crypto_hdr[0] =
923 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
924 crypto_hdr[1] = 0;
925 crypto_hdr[2] =
926 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
927 break;
928 case HAL_ENCRYPT_TYPE_CCMP_128:
929 case HAL_ENCRYPT_TYPE_CCMP_256:
930 case HAL_ENCRYPT_TYPE_GCMP_128:
931 case HAL_ENCRYPT_TYPE_AES_GCMP_256:
932 crypto_hdr[0] =
933 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
934 crypto_hdr[1] =
935 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
936 crypto_hdr[2] = 0;
937 break;
938 case HAL_ENCRYPT_TYPE_WEP_40:
939 case HAL_ENCRYPT_TYPE_WEP_104:
940 case HAL_ENCRYPT_TYPE_WEP_128:
941 case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
942 case HAL_ENCRYPT_TYPE_WAPI:
943 return;
944 }
945 key_id = u32_get_bits(__le32_to_cpu(desc->u.wcn7850.mpdu_start.info5),
946 RX_MPDU_START_INFO5_KEY_ID);
947 crypto_hdr[3] = 0x20 | (key_id << 6);
948 crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.wcn7850.mpdu_start.pn[0]);
949 crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.wcn7850.mpdu_start.pn[0]);
950 crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[1]);
951 crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[1]);
952 }
953
954 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_frame_ctl(struct hal_rx_desc *desc)
955 {
956 return __le16_to_cpu(desc->u.wcn7850.mpdu_start.frame_ctrl);
957 }
958
959 static int ath12k_hal_srng_create_config_wcn7850(struct ath12k_base *ab)
960 {
961 struct ath12k_hal *hal = &ab->hal;
962 struct hal_srng_config *s;
963
964 hal->srng_config = kmemdup(hw_srng_config_template,
965 sizeof(hw_srng_config_template),
966 GFP_KERNEL);
967 if (!hal->srng_config)
968 return -ENOMEM;
969
970 s = &hal->srng_config[HAL_REO_DST];
971 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
972 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
973 s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
974 s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;
975
976 s = &hal->srng_config[HAL_REO_EXCEPTION];
977 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
978 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;
979
980 s = &hal->srng_config[HAL_REO_REINJECT];
981 s->max_rings = 1;
982 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
983 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
984
985 s = &hal->srng_config[HAL_REO_CMD];
986 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
987 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;
988
989 s = &hal->srng_config[HAL_REO_STATUS];
990 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
991 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;
992
993 s = &hal->srng_config[HAL_TCL_DATA];
994 s->max_rings = 5;
995 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB;
996 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
997 s->reg_size[0] = HAL_TCL2_RING_BASE_LSB - HAL_TCL1_RING_BASE_LSB;
998 s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;
999
1000 s = &hal->srng_config[HAL_TCL_CMD];
1001 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
1002 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;
1003
1004 s = &hal->srng_config[HAL_TCL_STATUS];
1005 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
1006 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;
1007
1008 s = &hal->srng_config[HAL_CE_SRC];
1009 s->max_rings = 12;
1010 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_BASE_LSB;
1011 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG + HAL_CE_DST_RING_HP;
1012 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
1013 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
1014 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG -
1015 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG;
1016
1017 s = &hal->srng_config[HAL_CE_DST];
1018 s->max_rings = 12;
1019 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_BASE_LSB;
1020 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_RING_HP;
1021 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
1022 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
1023 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
1024 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
1025
1026 s = &hal->srng_config[HAL_CE_DST_STATUS];
1027 s->max_rings = 12;
1028 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG +
1029 HAL_CE_DST_STATUS_RING_BASE_LSB;
1030 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG + HAL_CE_DST_STATUS_RING_HP;
1031 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
1032 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
1033 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG -
1034 HAL_SEQ_WCSS_UMAC_CE0_DST_REG;
1035
1036 s = &hal->srng_config[HAL_WBM_IDLE_LINK];
1037 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
1038 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;
1039
1040 s = &hal->srng_config[HAL_SW2WBM_RELEASE];
1041 s->max_rings = 1;
1042 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
1043 HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
1044 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
1045
1046 s = &hal->srng_config[HAL_WBM2SW_RELEASE];
1047 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1048 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
1049 s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
1050 HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1051 s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;
1052
1053 s = &hal->srng_config[HAL_RXDMA_BUF];
1054 s->max_rings = 2;
1055 s->mac_type = ATH12K_HAL_SRNG_PMAC;
1056
1057 s = &hal->srng_config[HAL_RXDMA_DST];
1058 s->max_rings = 1;
1059 s->entry_size = sizeof(struct hal_reo_entrance_ring) >> 2;
1060
1061 /* below rings are not used */
1062 s = &hal->srng_config[HAL_RXDMA_DIR_BUF];
1063 s->max_rings = 0;
1064
1065 s = &hal->srng_config[HAL_PPE2TCL];
1066 s->max_rings = 0;
1067
1068 s = &hal->srng_config[HAL_PPE_RELEASE];
1069 s->max_rings = 0;
1070
1071 s = &hal->srng_config[HAL_TX_MONITOR_BUF];
1072 s->max_rings = 0;
1073
1074 s = &hal->srng_config[HAL_TX_MONITOR_DST];
1075 s->max_rings = 0;
1076
1077 s = &hal->srng_config[HAL_PPE2TCL];
1078 s->max_rings = 0;
1079
1080 return 0;
1081 }
1082
1083 static bool ath12k_hw_wcn7850_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
1084 {
1085 return !!le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1086 RX_MSDU_END_INFO14_MSDU_DONE);
1087 }
1088
1089 static bool ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
1090 {
1091 return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1092 RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
1093 }
1094
1095 static bool ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
1096 {
1097 return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1098 RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
1099 }
1100
1101 static bool ath12k_hw_wcn7850_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
1102 {
1103 return (le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1104 RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
1105 RX_DESC_DECRYPT_STATUS_CODE_OK);
1106 }
1107
1108 static u32 ath12k_hw_wcn7850_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
1109 {
1110 u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info13);
1111 u32 errmap = 0;
1112
1113 if (info & RX_MSDU_END_INFO13_FCS_ERR)
1114 errmap |= HAL_RX_MPDU_ERR_FCS;
1115
1116 if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1117 errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1118
1119 if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1120 errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1121
1122 if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1123 errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1124
1125 if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1126 errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1127
1128 if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1129 errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1130
1131 if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1132 errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1133
1134 return errmap;
1135 }
1136
1137 const struct hal_ops hal_wcn7850_ops = {
1138 .rx_desc_get_first_msdu = ath12k_hw_wcn7850_rx_desc_get_first_msdu,
1139 .rx_desc_get_last_msdu = ath12k_hw_wcn7850_rx_desc_get_last_msdu,
1140 .rx_desc_get_l3_pad_bytes = ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes,
1141 .rx_desc_encrypt_valid = ath12k_hw_wcn7850_rx_desc_encrypt_valid,
1142 .rx_desc_get_encrypt_type = ath12k_hw_wcn7850_rx_desc_get_encrypt_type,
1143 .rx_desc_get_decap_type = ath12k_hw_wcn7850_rx_desc_get_decap_type,
1144 .rx_desc_get_mesh_ctl = ath12k_hw_wcn7850_rx_desc_get_mesh_ctl,
1145 .rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld,
1146 .rx_desc_get_mpdu_fc_valid = ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid,
1147 .rx_desc_get_mpdu_start_seq_no = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no,
1148 .rx_desc_get_msdu_len = ath12k_hw_wcn7850_rx_desc_get_msdu_len,
1149 .rx_desc_get_msdu_sgi = ath12k_hw_wcn7850_rx_desc_get_msdu_sgi,
1150 .rx_desc_get_msdu_rate_mcs = ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs,
1151 .rx_desc_get_msdu_rx_bw = ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw,
1152 .rx_desc_get_msdu_freq = ath12k_hw_wcn7850_rx_desc_get_msdu_freq,
1153 .rx_desc_get_msdu_pkt_type = ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type,
1154 .rx_desc_get_msdu_nss = ath12k_hw_wcn7850_rx_desc_get_msdu_nss,
1155 .rx_desc_get_mpdu_tid = ath12k_hw_wcn7850_rx_desc_get_mpdu_tid,
1156 .rx_desc_get_mpdu_peer_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id,
1157 .rx_desc_copy_end_tlv = ath12k_hw_wcn7850_rx_desc_copy_end_tlv,
1158 .rx_desc_get_mpdu_start_tag = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag,
1159 .rx_desc_get_mpdu_ppdu_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id,
1160 .rx_desc_set_msdu_len = ath12k_hw_wcn7850_rx_desc_set_msdu_len,
1161 .rx_desc_get_msdu_payload = ath12k_hw_wcn7850_rx_desc_get_msdu_payload,
1162 .rx_desc_get_mpdu_start_offset = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset,
1163 .rx_desc_get_msdu_end_offset = ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset,
1164 .rx_desc_mac_addr2_valid = ath12k_hw_wcn7850_rx_desc_mac_addr2_valid,
1165 .rx_desc_mpdu_start_addr2 = ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2,
1166 .rx_desc_is_da_mcbc = ath12k_hw_wcn7850_rx_desc_is_da_mcbc,
1167 .rx_desc_get_dot11_hdr = ath12k_hw_wcn7850_rx_desc_get_dot11_hdr,
1168 .rx_desc_get_crypto_header = ath12k_hw_wcn7850_rx_desc_get_crypto_hdr,
1169 .rx_desc_get_mpdu_frame_ctl = ath12k_hw_wcn7850_rx_desc_get_mpdu_frame_ctl,
1170 .create_srng_config = ath12k_hal_srng_create_config_wcn7850,
1171 .tcl_to_wbm_rbm_map = ath12k_hal_wcn7850_tcl_to_wbm_rbm_map,
1172 .dp_rx_h_msdu_done = ath12k_hw_wcn7850_dp_rx_h_msdu_done,
1173 .dp_rx_h_l4_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail,
1174 .dp_rx_h_ip_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail,
1175 .dp_rx_h_is_decrypted = ath12k_hw_wcn7850_dp_rx_h_is_decrypted,
1176 .dp_rx_h_mpdu_err = ath12k_hw_wcn7850_dp_rx_h_mpdu_err,
1177 };
1178
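/* rdp is a DMA-coherent array holding the ring pointers that the target
 * updates (destination ring HP, source ring TP, as programmed in the
 * hw_init helpers below); wrp provides the corresponding shared memory
 * for ring pointers updated by the host.
 */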
1179 static int ath12k_hal_alloc_cont_rdp(struct ath12k_base *ab)
1180 {
1181 struct ath12k_hal *hal = &ab->hal;
1182 size_t size;
1183
1184 size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1185 hal->rdp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->rdp.paddr,
1186 GFP_KERNEL);
1187 if (!hal->rdp.vaddr)
1188 return -ENOMEM;
1189
1190 return 0;
1191 }
1192
1193 static void ath12k_hal_free_cont_rdp(struct ath12k_base *ab)
1194 {
1195 struct ath12k_hal *hal = &ab->hal;
1196 size_t size;
1197
1198 if (!hal->rdp.vaddr)
1199 return;
1200
1201 size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1202 dma_free_coherent(ab->dev, size,
1203 hal->rdp.vaddr, hal->rdp.paddr);
1204 hal->rdp.vaddr = NULL;
1205 }
1206
1207 static int ath12k_hal_alloc_cont_wrp(struct ath12k_base *ab)
1208 {
1209 struct ath12k_hal *hal = &ab->hal;
1210 size_t size;
1211
1212 size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1213 hal->wrp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->wrp.paddr,
1214 GFP_KERNEL);
1215 if (!hal->wrp.vaddr)
1216 return -ENOMEM;
1217
1218 return 0;
1219 }
1220
1221 static void ath12k_hal_free_cont_wrp(struct ath12k_base *ab)
1222 {
1223 struct ath12k_hal *hal = &ab->hal;
1224 size_t size;
1225
1226 if (!hal->wrp.vaddr)
1227 return;
1228
1229 size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1230 dma_free_coherent(ab->dev, size,
1231 hal->wrp.vaddr, hal->wrp.paddr);
1232 hal->wrp.vaddr = NULL;
1233 }
1234
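/* Program the maximum receive buffer length into the DEST_CTRL register of
 * the given CE destination ring.
 */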
1235 static void ath12k_hal_ce_dst_setup(struct ath12k_base *ab,
1236 struct hal_srng *srng, int ring_num)
1237 {
1238 struct hal_srng_config *srng_config = &ab->hal.srng_config[HAL_CE_DST];
1239 u32 addr;
1240 u32 val;
1241
1242 addr = HAL_CE_DST_RING_CTRL +
1243 srng_config->reg_start[HAL_SRNG_REG_GRP_R0] +
1244 ring_num * srng_config->reg_size[HAL_SRNG_REG_GRP_R0];
1245
1246 val = ath12k_hif_read32(ab, addr);
1247 val &= ~HAL_CE_DST_R0_DEST_CTRL_MAX_LEN;
1248 val |= u32_encode_bits(srng->u.dst_ring.max_buffer_length,
1249 HAL_CE_DST_R0_DEST_CTRL_MAX_LEN);
1250 ath12k_hif_write32(ab, addr, val);
1251 }
1252
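/* Program a destination ring: base address and size, optional MSI address
 * and data, interrupt thresholds, the HW-updated head pointer location
 * inside the rdp area, and finally the MISC enable bit.
 */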
1253 static void ath12k_hal_srng_dst_hw_init(struct ath12k_base *ab,
1254 struct hal_srng *srng)
1255 {
1256 struct ath12k_hal *hal = &ab->hal;
1257 u32 val;
1258 u64 hp_addr;
1259 u32 reg_base;
1260
1261 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1262
1263 if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1264 ath12k_hif_write32(ab, reg_base +
1265 ath12k_hal_reo1_ring_msi1_base_lsb_offset(ab),
1266 srng->msi_addr);
1267
1268 val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1269 HAL_REO1_RING_MSI1_BASE_MSB_ADDR) |
1270 HAL_REO1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1271 ath12k_hif_write32(ab, reg_base +
1272 ath12k_hal_reo1_ring_msi1_base_msb_offset(ab), val);
1273
1274 ath12k_hif_write32(ab,
1275 reg_base + ath12k_hal_reo1_ring_msi1_data_offset(ab),
1276 srng->msi_data);
1277 }
1278
1279 ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1280
1281 val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1282 HAL_REO1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1283 u32_encode_bits((srng->entry_size * srng->num_entries),
1284 HAL_REO1_RING_BASE_MSB_RING_SIZE);
1285 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_base_msb_offset(ab), val);
1286
1287 val = u32_encode_bits(srng->ring_id, HAL_REO1_RING_ID_RING_ID) |
1288 u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1289 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_id_offset(ab), val);
1290
1291 /* interrupt setup */
1292 val = u32_encode_bits((srng->intr_timer_thres_us >> 3),
1293 HAL_REO1_RING_PRDR_INT_SETUP_INTR_TMR_THOLD);
1294
1295 val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1296 HAL_REO1_RING_PRDR_INT_SETUP_BATCH_COUNTER_THOLD);
1297
1298 ath12k_hif_write32(ab,
1299 reg_base + ath12k_hal_reo1_ring_producer_int_setup_offset(ab),
1300 val);
1301
1302 hp_addr = hal->rdp.paddr +
1303 ((unsigned long)srng->u.dst_ring.hp_addr -
1304 (unsigned long)hal->rdp.vaddr);
1305 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_hp_addr_lsb_offset(ab),
1306 hp_addr & HAL_ADDR_LSB_REG_MASK);
1307 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_hp_addr_msb_offset(ab),
1308 hp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1309
1310 /* Initialize head and tail pointers to indicate ring is empty */
1311 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1312 ath12k_hif_write32(ab, reg_base, 0);
1313 ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_TP_OFFSET, 0);
1314 *srng->u.dst_ring.hp_addr = 0;
1315
1316 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1317 val = 0;
1318 if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1319 val |= HAL_REO1_RING_MISC_DATA_TLV_SWAP;
1320 if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1321 val |= HAL_REO1_RING_MISC_HOST_FW_SWAP;
1322 if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1323 val |= HAL_REO1_RING_MISC_MSI_SWAP;
1324 val |= HAL_REO1_RING_MISC_SRNG_ENABLE;
1325
1326 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_misc_offset(ab), val);
1327 }
1328
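/* Program a source ring: base address and size, optional MSI parameters,
 * interrupt and low-threshold setup, the HW-updated tail pointer location
 * inside the rdp area, and the MISC enable bit.
 */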
1329 static void ath12k_hal_srng_src_hw_init(struct ath12k_base *ab,
1330 struct hal_srng *srng)
1331 {
1332 struct ath12k_hal *hal = &ab->hal;
1333 u32 val;
1334 u64 tp_addr;
1335 u32 reg_base;
1336
1337 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1338
1339 if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1340 ath12k_hif_write32(ab, reg_base +
1341 HAL_TCL1_RING_MSI1_BASE_LSB_OFFSET(ab),
1342 srng->msi_addr);
1343
1344 val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1345 HAL_TCL1_RING_MSI1_BASE_MSB_ADDR) |
1346 HAL_TCL1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1347 ath12k_hif_write32(ab, reg_base +
1348 HAL_TCL1_RING_MSI1_BASE_MSB_OFFSET(ab),
1349 val);
1350
1351 ath12k_hif_write32(ab, reg_base +
1352 HAL_TCL1_RING_MSI1_DATA_OFFSET(ab),
1353 srng->msi_data);
1354 }
1355
1356 ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1357
1358 val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1359 HAL_TCL1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1360 u32_encode_bits((srng->entry_size * srng->num_entries),
1361 HAL_TCL1_RING_BASE_MSB_RING_SIZE);
1362 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_BASE_MSB_OFFSET, val);
1363
1364 val = u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1365 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_ID_OFFSET(ab), val);
1366
1367 val = u32_encode_bits(srng->intr_timer_thres_us,
1368 HAL_TCL1_RING_CONSR_INT_SETUP_IX0_INTR_TMR_THOLD);
1369
1370 val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1371 HAL_TCL1_RING_CONSR_INT_SETUP_IX0_BATCH_COUNTER_THOLD);
1372
1373 ath12k_hif_write32(ab,
1374 reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX0_OFFSET(ab),
1375 val);
1376
1377 val = 0;
1378 if (srng->flags & HAL_SRNG_FLAGS_LOW_THRESH_INTR_EN) {
1379 val |= u32_encode_bits(srng->u.src_ring.low_threshold,
1380 HAL_TCL1_RING_CONSR_INT_SETUP_IX1_LOW_THOLD);
1381 }
1382 ath12k_hif_write32(ab,
1383 reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX1_OFFSET(ab),
1384 val);
1385
1386 if (srng->ring_id != HAL_SRNG_RING_ID_WBM_IDLE_LINK) {
1387 tp_addr = hal->rdp.paddr +
1388 ((unsigned long)srng->u.src_ring.tp_addr -
1389 (unsigned long)hal->rdp.vaddr);
1390 ath12k_hif_write32(ab,
1391 reg_base + HAL_TCL1_RING_TP_ADDR_LSB_OFFSET(ab),
1392 tp_addr & HAL_ADDR_LSB_REG_MASK);
1393 ath12k_hif_write32(ab,
1394 reg_base + HAL_TCL1_RING_TP_ADDR_MSB_OFFSET(ab),
1395 tp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1396 }
1397
1398 /* Initialize head and tail pointers to indicate ring is empty */
1399 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1400 ath12k_hif_write32(ab, reg_base, 0);
1401 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_TP_OFFSET, 0);
1402 *srng->u.src_ring.tp_addr = 0;
1403
1404 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1405 val = 0;
1406 if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1407 val |= HAL_TCL1_RING_MISC_DATA_TLV_SWAP;
1408 if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1409 val |= HAL_TCL1_RING_MISC_HOST_FW_SWAP;
1410 if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1411 val |= HAL_TCL1_RING_MISC_MSI_SWAP;
1412
1413 /* Loop count is not used for SRC rings */
1414 val |= HAL_TCL1_RING_MISC_MSI_LOOPCNT_DISABLE;
1415
1416 val |= HAL_TCL1_RING_MISC_SRNG_ENABLE;
1417
1418 if (srng->ring_id == HAL_SRNG_RING_ID_WBM_IDLE_LINK)
1419 val |= HAL_TCL1_RING_MISC_MSI_RING_ID_DISABLE;
1420
1421 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_MISC_OFFSET(ab), val);
1422 }
1423
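/* Program a ring into HW according to its direction (SRC vs DST). */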
1424 static void ath12k_hal_srng_hw_init(struct ath12k_base *ab,
1425 struct hal_srng *srng)
1426 {
1427 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1428 ath12k_hal_srng_src_hw_init(ab, srng);
1429 else
1430 ath12k_hal_srng_dst_hw_init(ab, srng);
1431 }
1432
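/* Map (ring type, ring number, mac id) to a unique SRNG ring id; PMAC ring
 * ids are additionally offset by HAL_SRNG_RINGS_PER_PMAC per mac.
 */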
1433 static int ath12k_hal_srng_get_ring_id(struct ath12k_base *ab,
1434 enum hal_ring_type type,
1435 int ring_num, int mac_id)
1436 {
1437 struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
1438 int ring_id;
1439
1440 if (ring_num >= srng_config->max_rings) {
1441 ath12k_warn(ab, "invalid ring number: %d\n", ring_num);
1442 return -EINVAL;
1443 }
1444
1445 ring_id = srng_config->start_ring_id + ring_num;
1446 if (srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
1447 ring_id += mac_id * HAL_SRNG_RINGS_PER_PMAC;
1448
1449 if (WARN_ON(ring_id >= HAL_SRNG_RING_ID_MAX))
1450 return -EINVAL;
1451
1452 return ring_id;
1453 }
1454
1455 int ath12k_hal_srng_get_entrysize(struct ath12k_base *ab, u32 ring_type)
1456 {
1457 struct hal_srng_config *srng_config;
1458
1459 if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1460 return -EINVAL;
1461
1462 srng_config = &ab->hal.srng_config[ring_type];
1463
1464 return (srng_config->entry_size << 2);
1465 }
1466
1467 int ath12k_hal_srng_get_max_entries(struct ath12k_base *ab, u32 ring_type)
1468 {
1469 struct hal_srng_config *srng_config;
1470
1471 if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1472 return -EINVAL;
1473
1474 srng_config = &ab->hal.srng_config[ring_type];
1475
1476 return (srng_config->max_size / srng_config->entry_size);
1477 }
1478
1479 void ath12k_hal_srng_get_params(struct ath12k_base *ab, struct hal_srng *srng,
1480 struct hal_srng_params *params)
1481 {
1482 params->ring_base_paddr = srng->ring_base_paddr;
1483 params->ring_base_vaddr = srng->ring_base_vaddr;
1484 params->num_entries = srng->num_entries;
1485 params->intr_timer_thres_us = srng->intr_timer_thres_us;
1486 params->intr_batch_cntr_thres_entries =
1487 srng->intr_batch_cntr_thres_entries;
1488 params->low_threshold = srng->u.src_ring.low_threshold;
1489 params->msi_addr = srng->msi_addr;
1490 params->msi2_addr = srng->msi2_addr;
1491 params->msi_data = srng->msi_data;
1492 params->msi2_data = srng->msi2_data;
1493 params->flags = srng->flags;
1494 }
1495
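/* Return the DMA address of an LMAC ring's head pointer slot (0 for
 * non-LMAC rings): SRC ring HPs live in hal->wrp, DST ring HPs in hal->rdp.
 */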
1496 dma_addr_t ath12k_hal_srng_get_hp_addr(struct ath12k_base *ab,
1497 struct hal_srng *srng)
1498 {
1499 if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1500 return 0;
1501
1502 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1503 return ab->hal.wrp.paddr +
1504 ((unsigned long)srng->u.src_ring.hp_addr -
1505 (unsigned long)ab->hal.wrp.vaddr);
1506 else
1507 return ab->hal.rdp.paddr +
1508 ((unsigned long)srng->u.dst_ring.hp_addr -
1509 (unsigned long)ab->hal.rdp.vaddr);
1510 }
1511
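/* Return the DMA address of an LMAC ring's tail pointer slot (0 for
 * non-LMAC rings): SRC ring TPs live in hal->rdp, DST ring TPs in hal->wrp.
 */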
1512 dma_addr_t ath12k_hal_srng_get_tp_addr(struct ath12k_base *ab,
1513 struct hal_srng *srng)
1514 {
1515 if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1516 return 0;
1517
1518 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1519 return ab->hal.rdp.paddr +
1520 ((unsigned long)srng->u.src_ring.tp_addr -
1521 (unsigned long)ab->hal.rdp.vaddr);
1522 else
1523 return ab->hal.wrp.paddr +
1524 ((unsigned long)srng->u.dst_ring.tp_addr -
1525 (unsigned long)ab->hal.wrp.vaddr);
1526 }
1527
1528 u32 ath12k_hal_ce_get_desc_size(enum hal_ce_desc type)
1529 {
1530 switch (type) {
1531 case HAL_CE_DESC_SRC:
1532 return sizeof(struct hal_ce_srng_src_desc);
1533 case HAL_CE_DESC_DST:
1534 return sizeof(struct hal_ce_srng_dest_desc);
1535 case HAL_CE_DESC_DST_STATUS:
1536 return sizeof(struct hal_ce_srng_dst_status_desc);
1537 }
1538
1539 return 0;
1540 }
1541
1542 void ath12k_hal_ce_src_set_desc(struct hal_ce_srng_src_desc *desc, dma_addr_t paddr,
1543 u32 len, u32 id, u8 byte_swap_data)
1544 {
1545 desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1546 desc->buffer_addr_info =
1547 le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1548 HAL_CE_SRC_DESC_ADDR_INFO_ADDR_HI) |
1549 le32_encode_bits(byte_swap_data,
1550 HAL_CE_SRC_DESC_ADDR_INFO_BYTE_SWAP) |
1551 le32_encode_bits(0, HAL_CE_SRC_DESC_ADDR_INFO_GATHER) |
1552 le32_encode_bits(len, HAL_CE_SRC_DESC_ADDR_INFO_LEN);
1553 desc->meta_info = le32_encode_bits(id, HAL_CE_SRC_DESC_META_INFO_DATA);
1554 }
1555
1556 void ath12k_hal_ce_dst_set_desc(struct hal_ce_srng_dest_desc *desc, dma_addr_t paddr)
1557 {
1558 desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1559 desc->buffer_addr_info =
1560 le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1561 HAL_CE_DEST_DESC_ADDR_INFO_ADDR_HI);
1562 }
1563
1564 u32 ath12k_hal_ce_dst_status_get_length(struct hal_ce_srng_dst_status_desc *desc)
1565 {
1566 u32 len;
1567
1568 len = le32_get_bits(desc->flags, HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
1569 desc->flags &= ~cpu_to_le32(HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
1570
1571 return len;
1572 }
1573
1574 void ath12k_hal_set_link_desc_addr(struct hal_wbm_link_desc *desc, u32 cookie,
1575 dma_addr_t paddr)
1576 {
1577 desc->buf_addr_info.info0 = le32_encode_bits((paddr & HAL_ADDR_LSB_REG_MASK),
1578 BUFFER_ADDR_INFO0_ADDR);
1579 desc->buf_addr_info.info1 =
1580 le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1581 BUFFER_ADDR_INFO1_ADDR) |
1582 le32_encode_bits(1, BUFFER_ADDR_INFO1_RET_BUF_MGR) |
1583 le32_encode_bits(cookie, BUFFER_ADDR_INFO1_SW_COOKIE);
1584 }
1585
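/* Peek at the next unprocessed DST ring descriptor without advancing the
 * tail pointer; returns NULL if the cached head pointer says the ring is
 * empty.
 */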
1586 void *ath12k_hal_srng_dst_peek(struct ath12k_base *ab, struct hal_srng *srng)
1587 {
1588 lockdep_assert_held(&srng->lock);
1589
1590 if (srng->u.dst_ring.tp != srng->u.dst_ring.cached_hp)
1591 return (srng->ring_base_vaddr + srng->u.dst_ring.tp);
1592
1593 return NULL;
1594 }
1595
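/* Return the next unprocessed DST ring descriptor and advance the SW tail
 * pointer past it; returns NULL if the ring is empty.
 */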
1596 void *ath12k_hal_srng_dst_get_next_entry(struct ath12k_base *ab,
1597 struct hal_srng *srng)
1598 {
1599 void *desc;
1600
1601 lockdep_assert_held(&srng->lock);
1602
1603 if (srng->u.dst_ring.tp == srng->u.dst_ring.cached_hp)
1604 return NULL;
1605
1606 desc = srng->ring_base_vaddr + srng->u.dst_ring.tp;
1607
1608 srng->u.dst_ring.tp = (srng->u.dst_ring.tp + srng->entry_size) %
1609 srng->ring_size;
1610
1611 return desc;
1612 }
1613
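/* Return the number of DST ring entries between the SW tail pointer and the
 * head pointer (optionally re-read from the HW-updated shared location),
 * i.e. how many completed entries are waiting to be processed.
 */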
1614 int ath12k_hal_srng_dst_num_free(struct ath12k_base *ab, struct hal_srng *srng,
1615 bool sync_hw_ptr)
1616 {
1617 u32 tp, hp;
1618
1619 lockdep_assert_held(&srng->lock);
1620
1621 tp = srng->u.dst_ring.tp;
1622
1623 if (sync_hw_ptr) {
1624 hp = *srng->u.dst_ring.hp_addr;
1625 srng->u.dst_ring.cached_hp = hp;
1626 } else {
1627 hp = srng->u.dst_ring.cached_hp;
1628 }
1629
1630 if (hp >= tp)
1631 return (hp - tp) / srng->entry_size;
1632 else
1633 return (srng->ring_size - tp + hp) / srng->entry_size;
1634 }
1635
1636 /* Returns number of available entries in src ring */
1637 int ath12k_hal_srng_src_num_free(struct ath12k_base *ab, struct hal_srng *srng,
1638 bool sync_hw_ptr)
1639 {
1640 u32 tp, hp;
1641
1642 lockdep_assert_held(&srng->lock);
1643
1644 hp = srng->u.src_ring.hp;
1645
1646 if (sync_hw_ptr) {
1647 tp = *srng->u.src_ring.tp_addr;
1648 srng->u.src_ring.cached_tp = tp;
1649 } else {
1650 tp = srng->u.src_ring.cached_tp;
1651 }
1652
1653 if (tp > hp)
1654 return ((tp - hp) / srng->entry_size) - 1;
1655 else
1656 return ((srng->ring_size - hp + tp) / srng->entry_size) - 1;
1657 }
1658
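/* Claim the next free SRC ring entry for the host to fill. Returns NULL
 * when advancing HP would collide with the cached TP (ring full); otherwise
 * advances both hp and reap_hp and returns the descriptor.
 */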
1659 void *ath12k_hal_srng_src_get_next_entry(struct ath12k_base *ab,
1660 struct hal_srng *srng)
1661 {
1662 void *desc;
1663 u32 next_hp;
1664
1665 lockdep_assert_held(&srng->lock);
1666
1667 /* TODO: Using % is expensive, but we have to do this since the size of
1668 * some SRNG rings is not a power of 2 (due to descriptor sizes). Consider
1669 * adding a separate function for rings whose size is a power of 2
1670 * (TCL2SW, REO2SW, SW2RXDMA and CE rings) so that the overhead of % can
1671 * be avoided by masking (with &).
1672 */
1673 next_hp = (srng->u.src_ring.hp + srng->entry_size) % srng->ring_size;
1674
1675 if (next_hp == srng->u.src_ring.cached_tp)
1676 return NULL;
1677
1678 desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
1679 srng->u.src_ring.hp = next_hp;
1680
1681 /* TODO: Reap functionality is not used by all rings. If a particular
1682 * ring does not use it, reap_hp need not be updated with next_hp.
1683 * Make sure a separate function is used for such rings before
1684 * optimizing away the reap_hp update below.
1685 */
1687 srng->u.src_ring.reap_hp = next_hp;
1688
1689 return desc;
1690 }
1691
1692 void *ath12k_hal_srng_src_reap_next(struct ath12k_base *ab,
1693 struct hal_srng *srng)
1694 {
1695 void *desc;
1696 u32 next_reap_hp;
1697
1698 lockdep_assert_held(&srng->lock);
1699
1700 next_reap_hp = (srng->u.src_ring.reap_hp + srng->entry_size) %
1701 srng->ring_size;
1702
1703 if (next_reap_hp == srng->u.src_ring.cached_tp)
1704 return NULL;
1705
1706 desc = srng->ring_base_vaddr + next_reap_hp;
1707 srng->u.src_ring.reap_hp = next_reap_hp;
1708
1709 return desc;
1710 }
1711
1712 void *ath12k_hal_srng_src_get_next_reaped(struct ath12k_base *ab,
1713 struct hal_srng *srng)
1714 {
1715 void *desc;
1716
1717 lockdep_assert_held(&srng->lock);
1718
1719 if (srng->u.src_ring.hp == srng->u.src_ring.reap_hp)
1720 return NULL;
1721
1722 desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
1723 srng->u.src_ring.hp = (srng->u.src_ring.hp + srng->entry_size) %
1724 srng->ring_size;
1725
1726 return desc;
1727 }
1728
1729 void ath12k_hal_srng_access_begin(struct ath12k_base *ab, struct hal_srng *srng)
1730 {
1731 lockdep_assert_held(&srng->lock);
1732
1733 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1734 srng->u.src_ring.cached_tp =
1735 *(volatile u32 *)srng->u.src_ring.tp_addr;
1736 else
1737 srng->u.dst_ring.cached_hp = *srng->u.dst_ring.hp_addr;
1738 }
1739
1740 /* Update cached ring head/tail pointers to HW. ath12k_hal_srng_access_begin()
1741 * should have been called before this.
1742 */
1743 void ath12k_hal_srng_access_end(struct ath12k_base *ab, struct hal_srng *srng)
1744 {
1745 lockdep_assert_held(&srng->lock);
1746
1747 /* TODO: See if we need a write memory barrier here */
1748 if (srng->flags & HAL_SRNG_FLAGS_LMAC_RING) {
1749 /* For LMAC rings, ring pointer updates are done through FW and
1750 * hence written to a shared memory location that is read by FW
1751 */
1752 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
1753 srng->u.src_ring.last_tp =
1754 *(volatile u32 *)srng->u.src_ring.tp_addr;
1755 *srng->u.src_ring.hp_addr = srng->u.src_ring.hp;
1756 } else {
1757 srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
1758 *srng->u.dst_ring.tp_addr = srng->u.dst_ring.tp;
1759 }
1760 } else {
1761 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
1762 srng->u.src_ring.last_tp =
1763 *(volatile u32 *)srng->u.src_ring.tp_addr;
1764 ath12k_hif_write32(ab,
1765 (unsigned long)srng->u.src_ring.hp_addr -
1766 (unsigned long)ab->mem,
1767 srng->u.src_ring.hp);
1768 } else {
1769 srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
1770 ath12k_hif_write32(ab,
1771 (unsigned long)srng->u.dst_ring.tp_addr -
1772 (unsigned long)ab->mem,
1773 srng->u.dst_ring.tp);
1774 }
1775 }
1776
1777 srng->timestamp = jiffies;
1778 }
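/* A minimal sketch of the intended caller pattern for a DST ring, assuming
 * the caller already holds srng->lock and process() is a placeholder for
 * per-descriptor handling:
 *
 *	ath12k_hal_srng_access_begin(ab, srng);
 *	while ((desc = ath12k_hal_srng_dst_get_next_entry(ab, srng)))
 *		process(desc);
 *	ath12k_hal_srng_access_end(ab, srng);
 */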
1779
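/* Chain the WBM idle-link scatter buffers together, then program the idle
 * list base, size and head/tail pointers into the WBM registers and enable
 * the idle link ring.
 */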
1780 void ath12k_hal_setup_link_idle_list(struct ath12k_base *ab,
1781 struct hal_wbm_idle_scatter_list *sbuf,
1782 u32 nsbufs, u32 tot_link_desc,
1783 u32 end_offset)
1784 {
1785 struct ath12k_buffer_addr *link_addr;
1786 int i;
1787 u32 reg_scatter_buf_sz = HAL_WBM_IDLE_SCATTER_BUF_SIZE / 64;
1788 u32 val;
1789
1790 link_addr = (void *)sbuf[0].vaddr + HAL_WBM_IDLE_SCATTER_BUF_SIZE;
1791
1792 for (i = 1; i < nsbufs; i++) {
1793 link_addr->info0 = cpu_to_le32(sbuf[i].paddr & HAL_ADDR_LSB_REG_MASK);
1794
1795 link_addr->info1 =
1796 le32_encode_bits((u64)sbuf[i].paddr >> HAL_ADDR_MSB_REG_SHIFT,
1797 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
1798 le32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
1799 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG);
1800
1801 link_addr = (void *)sbuf[i].vaddr +
1802 HAL_WBM_IDLE_SCATTER_BUF_SIZE;
1803 }
1804
1805 val = u32_encode_bits(reg_scatter_buf_sz, HAL_WBM_SCATTER_BUFFER_SIZE) |
1806 u32_encode_bits(0x1, HAL_WBM_LINK_DESC_IDLE_LIST_MODE);
1807
1808 ath12k_hif_write32(ab,
1809 HAL_SEQ_WCSS_UMAC_WBM_REG +
1810 HAL_WBM_R0_IDLE_LIST_CONTROL_ADDR(ab),
1811 val);
1812
1813 val = u32_encode_bits(reg_scatter_buf_sz * nsbufs,
1814 HAL_WBM_SCATTER_RING_SIZE_OF_IDLE_LINK_DESC_LIST);
1815 ath12k_hif_write32(ab,
1816 HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_R0_IDLE_LIST_SIZE_ADDR(ab),
1817 val);
1818
1819 val = u32_encode_bits(sbuf[0].paddr & HAL_ADDR_LSB_REG_MASK,
1820 BUFFER_ADDR_INFO0_ADDR);
1821 ath12k_hif_write32(ab,
1822 HAL_SEQ_WCSS_UMAC_WBM_REG +
1823 HAL_WBM_SCATTERED_RING_BASE_LSB(ab),
1824 val);
1825
1826 val = u32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
1827 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG) |
1828 u32_encode_bits((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT,
1829 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32);
1830 ath12k_hif_write32(ab,
1831 HAL_SEQ_WCSS_UMAC_WBM_REG +
1832 HAL_WBM_SCATTERED_RING_BASE_MSB(ab),
1833 val);
1834
1835 /* Setup head and tail pointers for the idle list */
1836 val = u32_encode_bits(sbuf[nsbufs - 1].paddr, BUFFER_ADDR_INFO0_ADDR);
1837 ath12k_hif_write32(ab,
1838 HAL_SEQ_WCSS_UMAC_WBM_REG +
1839 HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
1840 val);
1841
1842 val = u32_encode_bits(((u64)sbuf[nsbufs - 1].paddr >> HAL_ADDR_MSB_REG_SHIFT),
1843 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
1844 u32_encode_bits((end_offset >> 2),
1845 HAL_WBM_SCATTERED_DESC_HEAD_P_OFFSET_IX1);
1846 ath12k_hif_write32(ab,
1847 HAL_SEQ_WCSS_UMAC_WBM_REG +
1848 HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX1(ab),
1849 val);
1850
1851 val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
1852 ath12k_hif_write32(ab,
1853 HAL_SEQ_WCSS_UMAC_WBM_REG +
1854 HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
1855 val);
1856
1857 val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
1858 ath12k_hif_write32(ab,
1859 HAL_SEQ_WCSS_UMAC_WBM_REG +
1860 HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX0(ab),
1861 val);
1862
1863 val = u32_encode_bits(((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT),
1864 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
1865 u32_encode_bits(0, HAL_WBM_SCATTERED_DESC_TAIL_P_OFFSET_IX1);
1866 ath12k_hif_write32(ab,
1867 HAL_SEQ_WCSS_UMAC_WBM_REG +
1868 HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX1(ab),
1869 val);
1870
1871 val = 2 * tot_link_desc;
1872 ath12k_hif_write32(ab,
1873 HAL_SEQ_WCSS_UMAC_WBM_REG +
1874 HAL_WBM_SCATTERED_DESC_PTR_HP_ADDR(ab),
1875 val);
1876
1877 /* Enable the SRNG */
1878 val = u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_SRNG_ENABLE) |
1879 u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_RIND_ID_DISABLE);
1880 ath12k_hif_write32(ab,
1881 HAL_SEQ_WCSS_UMAC_WBM_REG +
1882 HAL_WBM_IDLE_LINK_RING_MISC_ADDR(ab),
1883 val);
1884 }
1885
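/* Initialize the SW state of one SRNG (pointers, thresholds, hp/tp shadow
 * locations) and, for UMAC rings, program the ring registers into HW.
 * Returns the ring id on success or a negative error code.
 */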
1886 int ath12k_hal_srng_setup(struct ath12k_base *ab, enum hal_ring_type type,
1887 int ring_num, int mac_id,
1888 struct hal_srng_params *params)
1889 {
1890 struct ath12k_hal *hal = &ab->hal;
1891 struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
1892 struct hal_srng *srng;
1893 int ring_id;
1894 u32 idx;
1895 int i;
1896 u32 reg_base;
1897
1898 ring_id = ath12k_hal_srng_get_ring_id(ab, type, ring_num, mac_id);
1899 if (ring_id < 0)
1900 return ring_id;
1901
1902 srng = &hal->srng_list[ring_id];
1903
1904 srng->ring_id = ring_id;
1905 srng->ring_dir = srng_config->ring_dir;
1906 srng->ring_base_paddr = params->ring_base_paddr;
1907 srng->ring_base_vaddr = params->ring_base_vaddr;
1908 srng->entry_size = srng_config->entry_size;
1909 srng->num_entries = params->num_entries;
1910 srng->ring_size = srng->entry_size * srng->num_entries;
1911 srng->intr_batch_cntr_thres_entries =
1912 params->intr_batch_cntr_thres_entries;
1913 srng->intr_timer_thres_us = params->intr_timer_thres_us;
1914 srng->flags = params->flags;
1915 srng->msi_addr = params->msi_addr;
1916 srng->msi2_addr = params->msi2_addr;
1917 srng->msi_data = params->msi_data;
1918 srng->msi2_data = params->msi2_data;
1919 srng->initialized = 1;
1920 spin_lock_init(&srng->lock);
1921 lockdep_set_class(&srng->lock, &srng->lock_key);
1922
1923 for (i = 0; i < HAL_SRNG_NUM_REG_GRP; i++) {
1924 srng->hwreg_base[i] = srng_config->reg_start[i] +
1925 (ring_num * srng_config->reg_size[i]);
1926 }
1927
1928 memset(srng->ring_base_vaddr, 0,
1929 (srng->entry_size * srng->num_entries) << 2);
1930
1931 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1932
1933 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
1934 srng->u.src_ring.hp = 0;
1935 srng->u.src_ring.cached_tp = 0;
1936 srng->u.src_ring.reap_hp = srng->ring_size - srng->entry_size;
1937 srng->u.src_ring.tp_addr = (void *)(hal->rdp.vaddr + ring_id);
1938 srng->u.src_ring.low_threshold = params->low_threshold *
1939 srng->entry_size;
1940 if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
1941 if (!ab->hw_params->supports_shadow_regs)
1942 srng->u.src_ring.hp_addr =
1943 (u32 *)((unsigned long)ab->mem + reg_base);
1944 else
1945 ath12k_dbg(ab, ATH12K_DBG_HAL,
1946 "hal type %d ring_num %d reg_base 0x%x shadow 0x%lx\n",
1947 type, ring_num,
1948 reg_base,
1949 (unsigned long)srng->u.src_ring.hp_addr -
1950 (unsigned long)ab->mem);
1951 } else {
1952 idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
1953 srng->u.src_ring.hp_addr = (void *)(hal->wrp.vaddr +
1954 idx);
1955 srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
1956 }
1957 } else {
1958 /* During initialization the loop count in all descriptors is
1959 * set to zero. HW sets it to 1 when it completes the descriptor
1960 * update in the first loop and increments it by 1 on each
1961 * subsequent loop (the loop count wraps around after reaching
1962 * 0xffff). The 'loop_cnt' in the SW ring state is the expected
1963 * loop count in descriptors updated by HW (to be processed
1964 * by SW).
1965 */
1966 srng->u.dst_ring.loop_cnt = 1;
1967 srng->u.dst_ring.tp = 0;
1968 srng->u.dst_ring.cached_hp = 0;
1969 srng->u.dst_ring.hp_addr = (void *)(hal->rdp.vaddr + ring_id);
1970 if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
1971 if (!ab->hw_params->supports_shadow_regs)
1972 srng->u.dst_ring.tp_addr =
1973 (u32 *)((unsigned long)ab->mem + reg_base +
1974 (HAL_REO1_RING_TP - HAL_REO1_RING_HP));
1975 else
1976 ath12k_dbg(ab, ATH12K_DBG_HAL,
1977 "type %d ring_num %d target_reg 0x%x shadow 0x%lx\n",
1978 type, ring_num,
1979 reg_base + HAL_REO1_RING_TP - HAL_REO1_RING_HP,
1980 (unsigned long)srng->u.dst_ring.tp_addr -
1981 (unsigned long)ab->mem);
1982 } else {
1983 /* For PMAC & DMAC rings, tail pointer updates will be done
1984 * through FW by writing to a shared memory location
1985 */
1986 idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
1987 srng->u.dst_ring.tp_addr = (void *)(hal->wrp.vaddr +
1988 idx);
1989 srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
1990 }
1991 }
1992
1993 if (srng_config->mac_type != ATH12K_HAL_SRNG_UMAC)
1994 return ring_id;
1995
1996 ath12k_hal_srng_hw_init(ab, srng);
1997
1998 if (type == HAL_CE_DST) {
1999 srng->u.dst_ring.max_buffer_length = params->max_buffer_len;
2000 ath12k_hal_ce_dst_setup(ab, srng, ring_num);
2001 }
2002
2003 return ring_id;
2004 }
2005
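/* Redirect the ring's host-visible HP (SRC) or TP (DST) pointer to the
 * shadow register slot selected for it.
 */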
2006 static void ath12k_hal_srng_update_hp_tp_addr(struct ath12k_base *ab,
2007 int shadow_cfg_idx,
2008 enum hal_ring_type ring_type,
2009 int ring_num)
2010 {
2011 struct hal_srng *srng;
2012 struct ath12k_hal *hal = &ab->hal;
2013 int ring_id;
2014 struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2015
2016 ring_id = ath12k_hal_srng_get_ring_id(ab, ring_type, ring_num, 0);
2017 if (ring_id < 0)
2018 return;
2019
2020 srng = &hal->srng_list[ring_id];
2021
2022 if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
2023 srng->u.dst_ring.tp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
2024 (unsigned long)ab->mem);
2025 else
2026 srng->u.src_ring.hp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
2027 (unsigned long)ab->mem);
2028 }
2029
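/* Reserve the next shadow register slot for a ring: record the target HP/TP
 * register address in hal->shadow_reg_addr and point the ring's hp/tp at
 * the shadow register.
 */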
2030 int ath12k_hal_srng_update_shadow_config(struct ath12k_base *ab,
2031 enum hal_ring_type ring_type,
2032 int ring_num)
2033 {
2034 struct ath12k_hal *hal = &ab->hal;
2035 struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2036 int shadow_cfg_idx = hal->num_shadow_reg_configured;
2037 u32 target_reg;
2038
2039 if (shadow_cfg_idx >= HAL_SHADOW_NUM_REGS)
2040 return -EINVAL;
2041
2042 hal->num_shadow_reg_configured++;
2043
2044 target_reg = srng_config->reg_start[HAL_HP_OFFSET_IN_REG_START];
2045 target_reg += srng_config->reg_size[HAL_HP_OFFSET_IN_REG_START] *
2046 ring_num;
2047
2048 /* For destination ring, shadow the TP */
2049 if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
2050 target_reg += HAL_OFFSET_FROM_HP_TO_TP;
2051
2052 hal->shadow_reg_addr[shadow_cfg_idx] = target_reg;
2053
2054 /* update hp/tp addr in the hal structure */
2055 ath12k_hal_srng_update_hp_tp_addr(ab, shadow_cfg_idx, ring_type,
2056 ring_num);
2057
2058 ath12k_dbg(ab, ATH12K_DBG_HAL,
2059 "target_reg %x, shadow reg 0x%x shadow_idx 0x%x, ring_type %d, ring num %d",
2060 target_reg,
2061 HAL_SHADOW_REG(shadow_cfg_idx),
2062 shadow_cfg_idx,
2063 ring_type, ring_num);
2064
2065 return 0;
2066 }
2067
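/* Configure shadow registers for all UMAC rings except the CE rings. */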
2068 void ath12k_hal_srng_shadow_config(struct ath12k_base *ab)
2069 {
2070 struct ath12k_hal *hal = &ab->hal;
2071 int ring_type, ring_num;
2072
2073 /* update all the non-CE srngs. */
2074 for (ring_type = 0; ring_type < HAL_MAX_RING_TYPES; ring_type++) {
2075 struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2076
2077 if (ring_type == HAL_CE_SRC ||
2078 ring_type == HAL_CE_DST ||
2079 ring_type == HAL_CE_DST_STATUS)
2080 continue;
2081
2082 if (srng_config->mac_type == ATH12K_HAL_SRNG_DMAC ||
2083 srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
2084 continue;
2085
2086 for (ring_num = 0; ring_num < srng_config->max_rings; ring_num++)
2087 ath12k_hal_srng_update_shadow_config(ab, ring_type, ring_num);
2088 }
2089 }
2090
2091 void ath12k_hal_srng_get_shadow_config(struct ath12k_base *ab,
2092 u32 **cfg, u32 *len)
2093 {
2094 struct ath12k_hal *hal = &ab->hal;
2095
2096 *len = hal->num_shadow_reg_configured;
2097 *cfg = hal->shadow_reg_addr;
2098 }
2099
2100 void ath12k_hal_srng_shadow_update_hp_tp(struct ath12k_base *ab,
2101 struct hal_srng *srng)
2102 {
2103 lockdep_assert_held(&srng->lock);
2104
2105 /* Check whether the ring is empty. Update the shadow
2106 * HP only when the ring isn't empty.
2107 */
2108 if (srng->ring_dir == HAL_SRNG_DIR_SRC &&
2109 *srng->u.src_ring.tp_addr != srng->u.src_ring.hp)
2110 ath12k_hal_srng_access_end(ab, srng);
2111 }
2112
2113 static void ath12k_hal_register_srng_lock_keys(struct ath12k_base *ab)
2114 {
2115 struct ath12k_hal *hal = &ab->hal;
2116 u32 ring_id;
2117
2118 for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2119 lockdep_register_key(&hal->srng_list[ring_id].lock_key);
2120 }
2121
2122 static void ath12k_hal_unregister_srng_lock_keys(struct ath12k_base *ab)
2123 {
2124 struct ath12k_hal *hal = &ab->hal;
2125 u32 ring_id;
2126
2127 for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2128 lockdep_unregister_key(&hal->srng_list[ring_id].lock_key);
2129 }
2130
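/* Build the per-chip SRNG configuration, allocate the shared read/write
 * pointer areas and register lockdep keys for the ring locks.
 */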
2131 int ath12k_hal_srng_init(struct ath12k_base *ab)
2132 {
2133 struct ath12k_hal *hal = &ab->hal;
2134 int ret;
2135
2136 memset(hal, 0, sizeof(*hal));
2137
2138 ret = ab->hw_params->hal_ops->create_srng_config(ab);
2139 if (ret)
2140 goto err_hal;
2141
2142 ret = ath12k_hal_alloc_cont_rdp(ab);
2143 if (ret)
2144 goto err_hal;
2145
2146 ret = ath12k_hal_alloc_cont_wrp(ab);
2147 if (ret)
2148 goto err_free_cont_rdp;
2149
2150 ath12k_hal_register_srng_lock_keys(ab);
2151
2152 return 0;
2153
2154 err_free_cont_rdp:
2155 ath12k_hal_free_cont_rdp(ab);
2156
2157 err_hal:
2158 return ret;
2159 }
2160
2161 void ath12k_hal_srng_deinit(struct ath12k_base *ab)
2162 {
2163 struct ath12k_hal *hal = &ab->hal;
2164
2165 ath12k_hal_unregister_srng_lock_keys(ab);
2166 ath12k_hal_free_cont_rdp(ab);
2167 ath12k_hal_free_cont_wrp(ab);
2168 kfree(hal->srng_config);
2169 hal->srng_config = NULL;
2170 }
2171
2172 void ath12k_hal_dump_srng_stats(struct ath12k_base *ab)
2173 {
2174 struct hal_srng *srng;
2175 struct ath12k_ext_irq_grp *irq_grp;
2176 struct ath12k_ce_pipe *ce_pipe;
2177 int i;
2178
2179 ath12k_err(ab, "Last interrupt received for each CE:\n");
2180 for (i = 0; i < ab->hw_params->ce_count; i++) {
2181 ce_pipe = &ab->ce.ce_pipe[i];
2182
2183 if (ath12k_ce_get_attr_flags(ab, i) & CE_ATTR_DIS_INTR)
2184 continue;
2185
2186 ath12k_err(ab, "CE_id %d pipe_num %d %ums before\n",
2187 i, ce_pipe->pipe_num,
2188 jiffies_to_msecs(jiffies - ce_pipe->timestamp));
2189 }
2190
2191 ath12k_err(ab, "\nLast interrupt received for each group:\n");
2192 for (i = 0; i < ATH12K_EXT_IRQ_GRP_NUM_MAX; i++) {
2193 irq_grp = &ab->ext_irq_grp[i];
2194 ath12k_err(ab, "group_id %d %ums before\n",
2195 irq_grp->grp_id,
2196 jiffies_to_msecs(jiffies - irq_grp->timestamp));
2197 }
2198
2199 for (i = 0; i < HAL_SRNG_RING_ID_MAX; i++) {
2200 srng = &ab->hal.srng_list[i];
2201
2202 if (!srng->initialized)
2203 continue;
2204
2205 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
2206 ath12k_err(ab,
2207 "src srng id %u hp %u, reap_hp %u, cur tp %u, cached tp %u last tp %u napi processed before %ums\n",
2208 srng->ring_id, srng->u.src_ring.hp,
2209 srng->u.src_ring.reap_hp,
2210 *srng->u.src_ring.tp_addr, srng->u.src_ring.cached_tp,
2211 srng->u.src_ring.last_tp,
2212 jiffies_to_msecs(jiffies - srng->timestamp));
2213 else if (srng->ring_dir == HAL_SRNG_DIR_DST)
2214 ath12k_err(ab,
2215 "dst srng id %u tp %u, cur hp %u, cached hp %u last hp %u napi processed before %ums\n",
2216 srng->ring_id, srng->u.dst_ring.tp,
2217 *srng->u.dst_ring.hp_addr,
2218 srng->u.dst_ring.cached_hp,
2219 srng->u.dst_ring.last_hp,
2220 jiffies_to_msecs(jiffies - srng->timestamp));
2221 }
2222 }
2223