1 // SPDX-License-Identifier: ISC
2 /* Copyright (C) 2020 MediaTek Inc. */
3
4 #include "mt76_connac.h"
5 #include "mt76_connac2_mac.h"
6
mt76_connac_pm_wake(struct mt76_phy * phy,struct mt76_connac_pm * pm)7 int mt76_connac_pm_wake(struct mt76_phy *phy, struct mt76_connac_pm *pm)
8 {
9 struct mt76_dev *dev = phy->dev;
10
11 if (mt76_is_usb(dev))
12 return 0;
13
14 cancel_delayed_work_sync(&pm->ps_work);
15 if (!test_bit(MT76_STATE_PM, &phy->state))
16 return 0;
17
18 if (pm->suspended)
19 return 0;
20
21 queue_work(dev->wq, &pm->wake_work);
22 if (!wait_event_timeout(pm->wait,
23 !test_bit(MT76_STATE_PM, &phy->state),
24 3 * HZ)) {
25 ieee80211_wake_queues(phy->hw);
26 return -ETIMEDOUT;
27 }
28
29 return 0;
30 }
31 EXPORT_SYMBOL_GPL(mt76_connac_pm_wake);
32
mt76_connac_power_save_sched(struct mt76_phy * phy,struct mt76_connac_pm * pm)33 void mt76_connac_power_save_sched(struct mt76_phy *phy,
34 struct mt76_connac_pm *pm)
35 {
36 struct mt76_dev *dev = phy->dev;
37
38 if (mt76_is_usb(dev))
39 return;
40
41 if (!pm->enable)
42 return;
43
44 if (pm->suspended)
45 return;
46
47 pm->last_activity = jiffies;
48
49 if (!test_bit(MT76_STATE_PM, &phy->state)) {
50 cancel_delayed_work(&phy->mac_work);
51 queue_delayed_work(dev->wq, &pm->ps_work, pm->idle_timeout);
52 }
53 }
54 EXPORT_SYMBOL_GPL(mt76_connac_power_save_sched);
55
mt76_connac_free_pending_tx_skbs(struct mt76_connac_pm * pm,struct mt76_wcid * wcid)56 void mt76_connac_free_pending_tx_skbs(struct mt76_connac_pm *pm,
57 struct mt76_wcid *wcid)
58 {
59 int i;
60
61 spin_lock_bh(&pm->txq_lock);
62 for (i = 0; i < IEEE80211_NUM_ACS; i++) {
63 if (wcid && pm->tx_q[i].wcid != wcid)
64 continue;
65
66 dev_kfree_skb(pm->tx_q[i].skb);
67 pm->tx_q[i].skb = NULL;
68 }
69 spin_unlock_bh(&pm->txq_lock);
70 }
71 EXPORT_SYMBOL_GPL(mt76_connac_free_pending_tx_skbs);
72
mt76_connac_pm_queue_skb(struct ieee80211_hw * hw,struct mt76_connac_pm * pm,struct mt76_wcid * wcid,struct sk_buff * skb)73 void mt76_connac_pm_queue_skb(struct ieee80211_hw *hw,
74 struct mt76_connac_pm *pm,
75 struct mt76_wcid *wcid,
76 struct sk_buff *skb)
77 {
78 int qid = skb_get_queue_mapping(skb);
79 struct mt76_phy *phy = hw->priv;
80
81 spin_lock_bh(&pm->txq_lock);
82 if (!pm->tx_q[qid].skb) {
83 ieee80211_stop_queues(hw);
84 pm->tx_q[qid].wcid = wcid;
85 pm->tx_q[qid].skb = skb;
86 queue_work(phy->dev->wq, &pm->wake_work);
87 } else {
88 dev_kfree_skb(skb);
89 }
90 spin_unlock_bh(&pm->txq_lock);
91 }
92 EXPORT_SYMBOL_GPL(mt76_connac_pm_queue_skb);
93
mt76_connac_pm_dequeue_skbs(struct mt76_phy * phy,struct mt76_connac_pm * pm)94 void mt76_connac_pm_dequeue_skbs(struct mt76_phy *phy,
95 struct mt76_connac_pm *pm)
96 {
97 int i;
98
99 spin_lock_bh(&pm->txq_lock);
100 for (i = 0; i < IEEE80211_NUM_ACS; i++) {
101 struct mt76_wcid *wcid = pm->tx_q[i].wcid;
102 struct ieee80211_sta *sta = NULL;
103
104 if (!pm->tx_q[i].skb)
105 continue;
106
107 if (wcid && wcid->sta)
108 sta = container_of((void *)wcid, struct ieee80211_sta,
109 drv_priv);
110
111 mt76_tx(phy, sta, wcid, pm->tx_q[i].skb);
112 pm->tx_q[i].skb = NULL;
113 }
114 spin_unlock_bh(&pm->txq_lock);
115
116 mt76_worker_schedule(&phy->dev->tx_worker);
117 }
118 EXPORT_SYMBOL_GPL(mt76_connac_pm_dequeue_skbs);
119
/* Build the MT_TX_RATE_IDX | MT_TX_RATE_MODE value used in TXD6 for
 * fixed-rate transmissions (beacons, multicast and management frames).
 */
static u16
mt76_connac2_mac_tx_rate_val(struct mt76_phy *mphy, struct ieee80211_vif *vif,
			     bool beacon, bool mcast)
{
	u8 mode = 0, band = mphy->chandef.chan->band;
	int rateidx = 0, mcast_rate;

	if (!vif)
		goto legacy;

	/* mt7921 always transmits at the lowest configured basic rate. */
	if (is_mt7921(mphy->dev)) {
		rateidx = ffs(vif->bss_conf.basic_rates) - 1;
		goto legacy;
	}

	if (beacon) {
		struct cfg80211_bitrate_mask *mask;

		/* Honor a user-configured beacon rate when exactly one
		 * rate is set in the mask; preference order is
		 * HE > VHT > HT > legacy.
		 */
		mask = &vif->bss_conf.beacon_tx_rate;
		if (hweight16(mask->control[band].he_mcs[0]) == 1) {
			rateidx = ffs(mask->control[band].he_mcs[0]) - 1;
			mode = MT_PHY_TYPE_HE_SU;
			goto out;
		} else if (hweight16(mask->control[band].vht_mcs[0]) == 1) {
			rateidx = ffs(mask->control[band].vht_mcs[0]) - 1;
			mode = MT_PHY_TYPE_VHT;
			goto out;
		} else if (hweight8(mask->control[band].ht_mcs[0]) == 1) {
			rateidx = ffs(mask->control[band].ht_mcs[0]) - 1;
			mode = MT_PHY_TYPE_HT;
			goto out;
		} else if (hweight32(mask->control[band].legacy) == 1) {
			rateidx = ffs(mask->control[band].legacy) - 1;
			goto legacy;
		}
	}

	/* Multicast frames may use the per-band mcast rate (1-based),
	 * otherwise fall back to the lowest basic rate.
	 */
	mcast_rate = vif->bss_conf.mcast_rate[band];
	if (mcast && mcast_rate > 0)
		rateidx = mcast_rate - 1;
	else
		rateidx = ffs(vif->bss_conf.basic_rates) - 1;

legacy:
	/* mt76_calculate_default_rate() packs the PHY mode in bits 15:8
	 * and the hardware rate index in bits 7:0; unpack both here.
	 */
	rateidx = mt76_calculate_default_rate(mphy, rateidx);
	mode = rateidx >> 8;
	rateidx &= GENMASK(7, 0);

out:
	return FIELD_PREP(MT_TX_RATE_IDX, rateidx) |
	       FIELD_PREP(MT_TX_RATE_MODE, mode);
}
172
173 static void
mt76_connac2_mac_write_txwi_8023(__le32 * txwi,struct sk_buff * skb,struct mt76_wcid * wcid)174 mt76_connac2_mac_write_txwi_8023(__le32 *txwi, struct sk_buff *skb,
175 struct mt76_wcid *wcid)
176 {
177 u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK;
178 u8 fc_type, fc_stype;
179 u16 ethertype;
180 bool wmm = false;
181 u32 val;
182
183 if (wcid->sta) {
184 struct ieee80211_sta *sta;
185
186 sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
187 wmm = sta->wme;
188 }
189
190 val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_3) |
191 FIELD_PREP(MT_TXD1_TID, tid);
192
193 ethertype = get_unaligned_be16(&skb->data[12]);
194 if (ethertype >= ETH_P_802_3_MIN)
195 val |= MT_TXD1_ETH_802_3;
196
197 txwi[1] |= cpu_to_le32(val);
198
199 fc_type = IEEE80211_FTYPE_DATA >> 2;
200 fc_stype = wmm ? IEEE80211_STYPE_QOS_DATA >> 4 : 0;
201
202 val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
203 FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype);
204
205 txwi[2] |= cpu_to_le32(val);
206
207 val = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
208 FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);
209
210 txwi[7] |= cpu_to_le32(val);
211 }
212
/* Fill the 802.11-specific fields of the tx descriptor: header format
 * and length, TID, frame type/subtype, BIP protection, fixed-rate flag
 * and injected-frame sequence handling.
 */
static void
mt76_connac2_mac_write_txwi_80211(struct mt76_dev *dev, __le32 *txwi,
				  struct sk_buff *skb,
				  struct ieee80211_key_conf *key)
{
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_mgmt *mgmt = (struct ieee80211_mgmt *)skb->data;
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	bool multicast = is_multicast_ether_addr(hdr->addr1);
	u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK;
	__le16 fc = hdr->frame_control;
	u8 fc_type, fc_stype;
	u32 val;

	/* ADDBA requests carry the TID inside the capability field and
	 * must set the ADD_BA descriptor flag for the hardware.
	 */
	if (ieee80211_is_action(fc) &&
	    mgmt->u.action.category == WLAN_CATEGORY_BACK &&
	    mgmt->u.action.u.addba_req.action_code == WLAN_ACTION_ADDBA_REQ) {
		u16 capab = le16_to_cpu(mgmt->u.action.u.addba_req.capab);

		txwi[5] |= cpu_to_le32(MT_TXD5_ADD_BA);
		tid = (capab >> 2) & IEEE80211_QOS_CTL_TID_MASK;
	} else if (ieee80211_is_back_req(hdr->frame_control)) {
		/* BAR frames encode the TID in the BAR control field. */
		struct ieee80211_bar *bar = (struct ieee80211_bar *)hdr;
		u16 control = le16_to_cpu(bar->control);

		tid = FIELD_GET(IEEE80211_BAR_CTRL_TID_INFO_MASK, control);
	}

	/* Header length is expressed in 16-bit words in TXD1. */
	val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_11) |
	      FIELD_PREP(MT_TXD1_HDR_INFO,
			 ieee80211_get_hdrlen_from_skb(skb) / 2) |
	      FIELD_PREP(MT_TXD1_TID, tid);

	txwi[1] |= cpu_to_le32(val);

	fc_type = (le16_to_cpu(fc) & IEEE80211_FCTL_FTYPE) >> 2;
	fc_stype = (le16_to_cpu(fc) & IEEE80211_FCTL_STYPE) >> 4;

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype) |
	      FIELD_PREP(MT_TXD2_MULTICAST, multicast);

	/* Multicast robust management frames are BIP-protected (CMAC);
	 * in that case the regular protect-frame bit must be cleared.
	 */
	if (key && multicast && ieee80211_is_robust_mgmt_frame(skb) &&
	    key->cipher == WLAN_CIPHER_SUITE_AES_CMAC) {
		val |= MT_TXD2_BIP;
		txwi[3] &= ~cpu_to_le32(MT_TXD3_PROTECT_FRAME);
	}

	/* Non-data, multicast and minrate-flagged frames go out at a
	 * fixed rate (filled in by the caller when this bit is set).
	 */
	if (!ieee80211_is_data(fc) || multicast ||
	    info->flags & IEEE80211_TX_CTL_USE_MINRATE)
		val |= MT_TXD2_FIX_RATE;

	txwi[2] |= cpu_to_le32(val);

	if (ieee80211_is_beacon(fc)) {
		txwi[3] &= ~cpu_to_le32(MT_TXD3_SW_POWER_MGMT);
		txwi[3] |= cpu_to_le32(MT_TXD3_REM_TX_COUNT);
		/* NOTE(review): 0x18 appears to be a spatial-extension
		 * index required for beacons on non-7921 chips — magic
		 * value taken as-is from vendor code.
		 */
		if (!is_mt7921(dev))
			txwi[7] |= cpu_to_le32(FIELD_PREP(MT_TXD7_SPE_IDX,
							  0x18));
	}

	/* Injected frames keep the caller-supplied sequence number
	 * instead of letting the hardware assign one; AMSDU must be
	 * disabled so the SN is preserved.
	 */
	if (info->flags & IEEE80211_TX_CTL_INJECTED) {
		u16 seqno = le16_to_cpu(hdr->seq_ctrl);

		if (ieee80211_is_back_req(hdr->frame_control)) {
			struct ieee80211_bar *bar;

			bar = (struct ieee80211_bar *)skb->data;
			seqno = le16_to_cpu(bar->start_seq_num);
		}

		val = MT_TXD3_SN_VALID |
		      FIELD_PREP(MT_TXD3_SEQ, IEEE80211_SEQ_TO_SN(seqno));
		txwi[3] |= cpu_to_le32(val);
		txwi[7] &= ~cpu_to_le32(MT_TXD7_HW_AMSDU);
	}

	/* mmio devices mirror type/subtype in TXD7, others in TXD8. */
	if (mt76_is_mmio(dev)) {
		val = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
		      FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);
		txwi[7] |= cpu_to_le32(val);
	} else {
		val = FIELD_PREP(MT_TXD8_L_TYPE, fc_type) |
		      FIELD_PREP(MT_TXD8_L_SUB_TYPE, fc_stype);
		txwi[8] |= cpu_to_le32(val);
	}
}
301
mt76_connac2_mac_write_txwi(struct mt76_dev * dev,__le32 * txwi,struct sk_buff * skb,struct mt76_wcid * wcid,struct ieee80211_key_conf * key,int pid,u32 changed)302 void mt76_connac2_mac_write_txwi(struct mt76_dev *dev, __le32 *txwi,
303 struct sk_buff *skb, struct mt76_wcid *wcid,
304 struct ieee80211_key_conf *key, int pid,
305 u32 changed)
306 {
307 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
308 bool ext_phy = info->hw_queue & MT_TX_HW_QUEUE_EXT_PHY;
309 struct ieee80211_vif *vif = info->control.vif;
310 struct mt76_phy *mphy = &dev->phy;
311 u8 p_fmt, q_idx, omac_idx = 0, wmm_idx = 0, band_idx = 0;
312 u32 val, sz_txd = mt76_is_mmio(dev) ? MT_TXD_SIZE : MT_SDIO_TXD_SIZE;
313 bool is_8023 = info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP;
314 bool beacon = !!(changed & (BSS_CHANGED_BEACON |
315 BSS_CHANGED_BEACON_ENABLED));
316 bool inband_disc = !!(changed & (BSS_CHANGED_UNSOL_BCAST_PROBE_RESP |
317 BSS_CHANGED_FILS_DISCOVERY));
318
319 if (vif) {
320 struct mt76_vif *mvif = (struct mt76_vif *)vif->drv_priv;
321
322 omac_idx = mvif->omac_idx;
323 wmm_idx = mvif->wmm_idx;
324 band_idx = mvif->band_idx;
325 }
326
327 if (ext_phy && dev->phy2)
328 mphy = dev->phy2;
329
330 if (inband_disc) {
331 p_fmt = MT_TX_TYPE_FW;
332 q_idx = MT_LMAC_ALTX0;
333 } else if (beacon) {
334 p_fmt = MT_TX_TYPE_FW;
335 q_idx = MT_LMAC_BCN0;
336 } else if (skb_get_queue_mapping(skb) >= MT_TXQ_PSD) {
337 p_fmt = mt76_is_mmio(dev) ? MT_TX_TYPE_CT : MT_TX_TYPE_SF;
338 q_idx = MT_LMAC_ALTX0;
339 } else {
340 p_fmt = mt76_is_mmio(dev) ? MT_TX_TYPE_CT : MT_TX_TYPE_SF;
341 q_idx = wmm_idx * MT76_CONNAC_MAX_WMM_SETS +
342 mt76_connac_lmac_mapping(skb_get_queue_mapping(skb));
343 }
344
345 val = FIELD_PREP(MT_TXD0_TX_BYTES, skb->len + sz_txd) |
346 FIELD_PREP(MT_TXD0_PKT_FMT, p_fmt) |
347 FIELD_PREP(MT_TXD0_Q_IDX, q_idx);
348 txwi[0] = cpu_to_le32(val);
349
350 val = MT_TXD1_LONG_FORMAT |
351 FIELD_PREP(MT_TXD1_WLAN_IDX, wcid->idx) |
352 FIELD_PREP(MT_TXD1_OWN_MAC, omac_idx);
353 if (!is_mt7921(dev))
354 val |= MT_TXD1_VTA;
355 if (ext_phy || band_idx)
356 val |= MT_TXD1_TGID;
357
358 txwi[1] = cpu_to_le32(val);
359 txwi[2] = 0;
360
361 val = FIELD_PREP(MT_TXD3_REM_TX_COUNT, 15);
362 if (!is_mt7921(dev))
363 val |= MT_TXD3_SW_POWER_MGMT;
364 if (key)
365 val |= MT_TXD3_PROTECT_FRAME;
366 if (info->flags & IEEE80211_TX_CTL_NO_ACK)
367 val |= MT_TXD3_NO_ACK;
368
369 txwi[3] = cpu_to_le32(val);
370 txwi[4] = 0;
371
372 val = FIELD_PREP(MT_TXD5_PID, pid);
373 if (pid >= MT_PACKET_ID_FIRST)
374 val |= MT_TXD5_TX_STATUS_HOST;
375
376 txwi[5] = cpu_to_le32(val);
377 txwi[6] = 0;
378 txwi[7] = wcid->amsdu ? cpu_to_le32(MT_TXD7_HW_AMSDU) : 0;
379
380 if (is_8023)
381 mt76_connac2_mac_write_txwi_8023(txwi, skb, wcid);
382 else
383 mt76_connac2_mac_write_txwi_80211(dev, txwi, skb, key);
384
385 if (txwi[2] & cpu_to_le32(MT_TXD2_FIX_RATE)) {
386 /* Fixed rata is available just for 802.11 txd */
387 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
388 bool multicast = is_multicast_ether_addr(hdr->addr1);
389 u16 rate = mt76_connac2_mac_tx_rate_val(mphy, vif, beacon,
390 multicast);
391 u32 val = MT_TXD6_FIXED_BW;
392
393 /* hardware won't add HTC for mgmt/ctrl frame */
394 txwi[2] |= cpu_to_le32(MT_TXD2_HTC_VLD);
395
396 val |= FIELD_PREP(MT_TXD6_TX_RATE, rate);
397 txwi[6] |= cpu_to_le32(val);
398 txwi[3] |= cpu_to_le32(MT_TXD3_BA_DISABLE);
399 }
400 }
401 EXPORT_SYMBOL_GPL(mt76_connac2_mac_write_txwi);
402