// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 MediaTek Inc. */

#include "mt7921.h"
#include "../dma.h"
#include "mac.h"

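/* NAPI TX poll handler: if the device cannot be kept awake, complete NAPI
 * and defer to the PM wake worker; otherwise reap completed TX descriptors
 * and re-arm the TX-done interrupt once polling is finished.
 */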
static int mt7921_poll_tx(struct napi_struct *napi, int budget)
{
	struct mt7921_dev *dev;

	dev = container_of(napi, struct mt7921_dev, mt76.tx_napi);

	if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) {
		napi_complete(napi);
		queue_work(dev->mt76.wq, &dev->pm.wake_work);
		return 0;
	}

	mt76_connac_tx_cleanup(&dev->mt76);
	if (napi_complete(napi))
		mt7921_irq_enable(dev, MT_INT_TX_DONE_ALL);
	mt76_connac_pm_unref(&dev->mphy, &dev->pm);

	return 0;
}

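/* NAPI RX poll handler: defer to the PM wake worker while the device is in
 * low-power state, otherwise process up to @budget frames through the
 * generic mt76 DMA RX poll routine.
 */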
static int mt7921_poll_rx(struct napi_struct *napi, int budget)
{
	struct mt7921_dev *dev;
	int done;

	dev = container_of(napi->dev, struct mt7921_dev, mt76.napi_dev);

	if (!mt76_connac_pm_ref(&dev->mphy, &dev->pm)) {
		napi_complete(napi);
		queue_work(dev->mt76.wq, &dev->pm.wake_work);
		return 0;
	}
	done = mt76_dma_rx_poll(napi, budget);
	mt76_connac_pm_unref(&dev->mphy, &dev->pm);

	return done;
}

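/* Program per-ring WFDMA prefetch settings: PREFETCH() packs the buffer
 * base offset into the upper half-word and the prefetch depth into the
 * lower one.
 */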
static void mt7921_dma_prefetch(struct mt7921_dev *dev)
{
#define PREFETCH(base, depth)	((base) << 16 | (depth))

	mt76_wr(dev, MT_WFDMA0_RX_RING0_EXT_CTRL, PREFETCH(0x0, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING2_EXT_CTRL, PREFETCH(0x40, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING3_EXT_CTRL, PREFETCH(0x80, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING4_EXT_CTRL, PREFETCH(0xc0, 0x4));
	mt76_wr(dev, MT_WFDMA0_RX_RING5_EXT_CTRL, PREFETCH(0x100, 0x4));

	mt76_wr(dev, MT_WFDMA0_TX_RING0_EXT_CTRL, PREFETCH(0x140, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING1_EXT_CTRL, PREFETCH(0x180, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING2_EXT_CTRL, PREFETCH(0x1c0, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING3_EXT_CTRL, PREFETCH(0x200, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING4_EXT_CTRL, PREFETCH(0x240, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING5_EXT_CTRL, PREFETCH(0x280, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING6_EXT_CTRL, PREFETCH(0x2c0, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING16_EXT_CTRL, PREFETCH(0x340, 0x4));
	mt76_wr(dev, MT_WFDMA0_TX_RING17_EXT_CTRL, PREFETCH(0x380, 0x4));
}

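/* Quiesce the WFDMA engine: optionally pulse the DMASHDL/logic reset,
 * bypass the TX DMA scheduler, clear the global TX/RX enables and wait
 * for the DMA busy bits to clear.
 */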
static int mt7921_dma_disable(struct mt7921_dev *dev, bool force)
{
	if (force) {
		/* reset */
		mt76_clear(dev, MT_WFDMA0_RST,
			   MT_WFDMA0_RST_DMASHDL_ALL_RST |
			   MT_WFDMA0_RST_LOGIC_RST);

		mt76_set(dev, MT_WFDMA0_RST,
			 MT_WFDMA0_RST_DMASHDL_ALL_RST |
			 MT_WFDMA0_RST_LOGIC_RST);
	}

	/* disable dmashdl */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG_EXT0,
		   MT_WFDMA0_CSR_TX_DMASHDL_ENABLE);
	mt76_set(dev, MT_DMASHDL_SW_CONTROL, MT_DMASHDL_DMASHDL_BYPASS);

	/* disable WFDMA0 */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG,
		   MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
		   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (!mt76_poll(dev, MT_WFDMA0_GLO_CFG,
		       MT_WFDMA0_GLO_CFG_TX_DMA_BUSY |
		       MT_WFDMA0_GLO_CFG_RX_DMA_BUSY, 0, 1000))
		return -ETIMEDOUT;

	return 0;
}

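/* Bring the WFDMA engine back up: reprogram prefetch, reset the descriptor
 * index pointers, disable delayed interrupts, enable TX/RX DMA and unmask
 * the TX/RX/MCU interrupt sources.
 */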
static int mt7921_dma_enable(struct mt7921_dev *dev)
{
	/* configure prefetch settings */
	mt7921_dma_prefetch(dev);

	/* reset dma idx */
	mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0);

	/* configure delay interrupt */
	mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0);

	mt76_set(dev, MT_WFDMA0_GLO_CFG,
		 MT_WFDMA0_GLO_CFG_TX_WB_DDONE |
		 MT_WFDMA0_GLO_CFG_FIFO_LITTLE_ENDIAN |
		 MT_WFDMA0_GLO_CFG_CLK_GAT_DIS |
		 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		 MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
		 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	mt76_set(dev, MT_WFDMA0_GLO_CFG,
		 MT_WFDMA0_GLO_CFG_TX_DMA_EN | MT_WFDMA0_GLO_CFG_RX_DMA_EN);

	mt76_set(dev, MT_WFDMA_DUMMY_CR, MT_WFDMA_NEED_REINIT);

	/* enable interrupts for TX/RX rings */
	mt7921_irq_enable(dev,
			  MT_INT_RX_DONE_ALL | MT_INT_TX_DONE_ALL |
			  MT_INT_MCU_CMD);
	mt76_set(dev, MT_MCU2HOST_SW_INT_ENA, MT_MCU_CMD_WAKE_RX_PCIE);

	return 0;
}

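/* Full DMA reset: stop the engine, reset all TX, MCU and RX rings, flush
 * pending TX status and re-enable DMA.
 */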
static int mt7921_dma_reset(struct mt7921_dev *dev, bool force)
{
	int i, err;

	err = mt7921_dma_disable(dev, force);
	if (err)
		return err;

	/* reset hw queues */
	for (i = 0; i < __MT_TXQ_MAX; i++)
		mt76_queue_reset(dev, dev->mphy.q_tx[i]);

	for (i = 0; i < __MT_MCUQ_MAX; i++)
		mt76_queue_reset(dev, dev->mt76.q_mcu[i]);

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_reset(dev, &dev->mt76.q_rx[i]);

	mt76_tx_status_check(&dev->mt76, true);

	return mt7921_dma_enable(dev);
}

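/* Assert the WFSYS software reset, hold it for 50ms, then release it and
 * wait up to 500ms for the init-done bit to be reported.
 */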
int mt7921_wfsys_reset(struct mt7921_dev *dev)
{
	mt76_clear(dev, MT_WFSYS_SW_RST_B, WFSYS_SW_RST_B);
	msleep(50);
	mt76_set(dev, MT_WFSYS_SW_RST_B, WFSYS_SW_RST_B);

	if (!__mt76_poll_msec(&dev->mt76, MT_WFSYS_SW_RST_B,
			      WFSYS_SW_INIT_DONE, WFSYS_SW_INIT_DONE, 500))
		return -ETIMEDOUT;

	return 0;
}

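/* Reset the WPDMA: drain every TX, MCU and RX queue, optionally reset the
 * whole WFSYS first, then run the DMA reset and reset the RX queues.
 */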
int mt7921_wpdma_reset(struct mt7921_dev *dev, bool force)
{
	int i, err;

	/* clean up hw queues */
	for (i = 0; i < ARRAY_SIZE(dev->mt76.phy.q_tx); i++)
		mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true);

	for (i = 0; i < ARRAY_SIZE(dev->mt76.q_mcu); i++)
		mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true);

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]);

	if (force) {
		err = mt7921_wfsys_reset(dev);
		if (err)
			return err;
	}
	err = mt7921_dma_reset(dev, force);
	if (err)
		return err;

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_rx_reset(dev, i);

	return 0;
}

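/* Re-initialize the WPDMA when mt7921_dma_need_reinit() reports it is
 * required (typically after a low-power wake): interrupts are masked
 * around the reset and the low-power wake counter is updated.
 */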
int mt7921_wpdma_reinit_cond(struct mt7921_dev *dev)
{
	struct mt76_connac_pm *pm = &dev->pm;
	int err;

	/* check if the wpdma must be reinitialized */
	if (mt7921_dma_need_reinit(dev)) {
		/* disable interrupts */
		mt76_wr(dev, MT_WFDMA0_HOST_INT_ENA, 0);
		mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0x0);

		err = mt7921_wpdma_reset(dev, false);
		if (err) {
			dev_err(dev->mt76.dev, "wpdma reset failed\n");
			return err;
		}

		/* enable interrupts */
		mt76_wr(dev, MT_PCIE_MAC_INT_ENABLE, 0xff);
		pm->stats.lp_wake++;
	}

	return 0;
}

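/* One-time DMA bring-up: attach the mt76 DMA ops, quiesce the engine and
 * reset WFSYS, allocate the TX, MCU, FWDL and RX rings, register the NAPI
 * handlers and finally enable DMA and interrupts.
 */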
int mt7921_dma_init(struct mt7921_dev *dev)
{
	int ret;

	mt76_dma_attach(&dev->mt76);

	ret = mt7921_dma_disable(dev, true);
	if (ret)
		return ret;

	ret = mt7921_wfsys_reset(dev);
	if (ret)
		return ret;

	/* init tx queue */
	ret = mt76_connac_init_tx_queues(dev->phy.mt76, MT7921_TXQ_BAND0,
					 MT7921_TX_RING_SIZE,
					 MT_TX_RING_BASE, 0);
	if (ret)
		return ret;

	mt76_wr(dev, MT_WFDMA0_TX_RING0_EXT_CTRL, 0x4);

	/* command to WM */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, MT7921_TXQ_MCU_WM,
				  MT7921_TX_MCU_RING_SIZE, MT_TX_RING_BASE);
	if (ret)
		return ret;

	/* firmware download */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL, MT7921_TXQ_FWDL,
				  MT7921_TX_FWDL_RING_SIZE, MT_TX_RING_BASE);
	if (ret)
		return ret;

	/* event from WM before firmware download */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU],
			       MT7921_RXQ_MCU_WM,
			       MT7921_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE, MT_RX_EVENT_RING_BASE);
	if (ret)
		return ret;

	/* Change mcu queue after firmware download */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA],
			       MT7921_RXQ_MCU_WM,
			       MT7921_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE, MT_WFDMA0(0x540));
	if (ret)
		return ret;

	/* rx data */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
			       MT7921_RXQ_BAND0, MT7921_RX_RING_SIZE,
			       MT_RX_BUF_SIZE, MT_RX_DATA_RING_BASE);
	if (ret)
		return ret;

	ret = mt76_init_queues(dev, mt7921_poll_rx);
	if (ret < 0)
		return ret;

	netif_napi_add_tx(&dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
			  mt7921_poll_tx);
	napi_enable(&dev->mt76.tx_napi);

	return mt7921_dma_enable(dev);
}

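/* Tear down DMA: stop the WFDMA engine, pulse the DMASHDL/logic reset and
 * free the mt76 DMA resources.
 */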
void mt7921_dma_cleanup(struct mt7921_dev *dev)
{
	/* disable */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG,
		   MT_WFDMA0_GLO_CFG_TX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_CSR_DISP_BASE_PTR_CHAIN_EN |
		   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	/* reset */
	mt76_clear(dev, MT_WFDMA0_RST,
		   MT_WFDMA0_RST_DMASHDL_ALL_RST |
		   MT_WFDMA0_RST_LOGIC_RST);

	mt76_set(dev, MT_WFDMA0_RST,
		 MT_WFDMA0_RST_DMASHDL_ALL_RST |
		 MT_WFDMA0_RST_LOGIC_RST);

	mt76_dma_cleanup(&dev->mt76);
}