Lines Matching refs:lq_sta  (references to struct iwl_lq_sta in the iwlwifi mvm rate-scaling code, drivers/net/wireless/intel/iwlwifi/mvm/rs.c; short illustrative sketches follow a few of the functions below)

376 				  struct iwl_lq_sta *lq_sta,
380 struct iwl_lq_sta *lq_sta,
382 static void rs_stay_in_table(struct iwl_lq_sta *lq_sta, bool force_search);
583 u8 tid, struct iwl_lq_sta *lq_sta, in rs_tl_turn_on_agg() argument
603 (lq_sta->tx_agg_tid_en & BIT(tid)) && in rs_tl_turn_on_agg()
606 if (rs_tl_turn_on_agg_for_tid(mvm, lq_sta, tid, sta) == 0) in rs_tl_turn_on_agg()
701 struct iwl_lq_sta *lq_sta, in rs_collect_tpc_data() argument
757 struct lq_sta_pers *pers = &mvmsta->lq_sta.rs_drv.pers; in rs_collect_tlc_data()
957 static u16 rs_get_supported_rates(struct iwl_lq_sta *lq_sta, in rs_get_supported_rates() argument
961 return lq_sta->active_legacy_rate; in rs_get_supported_rates()
963 return lq_sta->active_siso_rate; in rs_get_supported_rates()
965 return lq_sta->active_mimo2_rate; in rs_get_supported_rates()
1027 static inline bool rs_rate_supported(struct iwl_lq_sta *lq_sta, in rs_rate_supported() argument
1030 return BIT(rate->index) & rs_get_supported_rates(lq_sta, rate); in rs_rate_supported()
1036 static bool rs_get_lower_rate_in_column(struct iwl_lq_sta *lq_sta, in rs_get_lower_rate_in_column() argument
1042 struct iwl_mvm *mvm = lq_sta->pers.drv; in rs_get_lower_rate_in_column()
1044 rate_mask = rs_get_supported_rates(lq_sta, rate); in rs_get_lower_rate_in_column()
1058 static void rs_get_lower_rate_down_column(struct iwl_lq_sta *lq_sta, in rs_get_lower_rate_down_column() argument
1061 struct iwl_mvm *mvm = lq_sta->pers.drv; in rs_get_lower_rate_down_column()
1068 if (lq_sta->band == NL80211_BAND_5GHZ) in rs_get_lower_rate_down_column()
1092 if (!rs_rate_supported(lq_sta, rate)) in rs_get_lower_rate_down_column()
1093 rs_get_lower_rate_in_column(lq_sta, rate); in rs_get_lower_rate_down_column()
1191 struct iwl_lq_sta *lq_sta) in rs_set_stay_in_table() argument
1194 lq_sta->rs_state = RS_STATE_STAY_IN_COLUMN; in rs_set_stay_in_table()
1196 lq_sta->table_count_limit = IWL_MVM_RS_LEGACY_TABLE_COUNT; in rs_set_stay_in_table()
1197 lq_sta->max_failure_limit = IWL_MVM_RS_LEGACY_FAILURE_LIMIT; in rs_set_stay_in_table()
1198 lq_sta->max_success_limit = IWL_MVM_RS_LEGACY_SUCCESS_LIMIT; in rs_set_stay_in_table()
1200 lq_sta->table_count_limit = IWL_MVM_RS_NON_LEGACY_TABLE_COUNT; in rs_set_stay_in_table()
1201 lq_sta->max_failure_limit = IWL_MVM_RS_NON_LEGACY_FAILURE_LIMIT; in rs_set_stay_in_table()
1202 lq_sta->max_success_limit = IWL_MVM_RS_NON_LEGACY_SUCCESS_LIMIT; in rs_set_stay_in_table()
1204 lq_sta->table_count = 0; in rs_set_stay_in_table()
1205 lq_sta->total_failed = 0; in rs_set_stay_in_table()
1206 lq_sta->total_success = 0; in rs_set_stay_in_table()
1207 lq_sta->flush_timer = jiffies; in rs_set_stay_in_table()
1208 lq_sta->visited_columns = 0; in rs_set_stay_in_table()
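
The run above is essentially the whole body of rs_set_stay_in_table(): entering the stay-in-column state selects limits depending on whether the current column is legacy, then clears the bookkeeping counters, stamps flush_timer and empties the visited-columns bitmap. Below is a minimal standalone C sketch of that reset; the struct is a simplified stand-in for struct iwl_lq_sta (only the fields shown above) and the numeric limits are placeholders for the IWL_MVM_RS_* constants.

#include <stdbool.h>
#include <stdint.h>
#include <time.h>

enum rs_state_sketch {
	RS_STATE_SEARCH_CYCLE_STARTED,
	RS_STATE_SEARCH_CYCLE_ENDED,
	RS_STATE_STAY_IN_COLUMN,
};

struct lq_sta_sketch {			/* simplified stand-in, not the real layout */
	enum rs_state_sketch rs_state;
	int table_count_limit, max_failure_limit, max_success_limit;
	int table_count, total_failed, total_success;
	unsigned long flush_timer;	/* jiffies in the driver; wall clock here */
	uint32_t visited_columns;	/* bitmap of columns tried in this cycle */
};

static void set_stay_in_table(struct lq_sta_sketch *lq, bool is_legacy)
{
	lq->rs_state = RS_STATE_STAY_IN_COLUMN;
	if (is_legacy) {	/* placeholder limits; the driver uses IWL_MVM_RS_LEGACY_* */
		lq->table_count_limit = 3;
		lq->max_failure_limit = 160;
		lq->max_success_limit = 480;
	} else {		/* placeholder limits; the driver uses IWL_MVM_RS_NON_LEGACY_* */
		lq->table_count_limit = 5;
		lq->max_failure_limit = 400;
		lq->max_success_limit = 4500;
	}
	lq->table_count = 0;
	lq->total_failed = 0;
	lq->total_success = 0;
	lq->flush_timer = (unsigned long)time(NULL);
	lq->visited_columns = 0;
}
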
1218 static int rs_get_max_allowed_rate(struct iwl_lq_sta *lq_sta, in rs_get_max_allowed_rate() argument
1223 return lq_sta->max_legacy_rate_idx; in rs_get_max_allowed_rate()
1225 return lq_sta->max_siso_rate_idx; in rs_get_max_allowed_rate()
1227 return lq_sta->max_mimo2_rate_idx; in rs_get_max_allowed_rate()
1232 return lq_sta->max_legacy_rate_idx; in rs_get_max_allowed_rate()
1235 static const u16 *rs_get_expected_tpt_table(struct iwl_lq_sta *lq_sta, in rs_get_expected_tpt_table() argument
1293 if (!column->sgi && !lq_sta->is_agg) /* Normal */ in rs_get_expected_tpt_table()
1295 else if (column->sgi && !lq_sta->is_agg) /* SGI */ in rs_get_expected_tpt_table()
1297 else if (!column->sgi && lq_sta->is_agg) /* AGG */ in rs_get_expected_tpt_table()
1303 static void rs_set_expected_tpt_table(struct iwl_lq_sta *lq_sta, in rs_set_expected_tpt_table() argument
1309 tbl->expected_tpt = rs_get_expected_tpt_table(lq_sta, column, rate->bw); in rs_set_expected_tpt_table()
1325 struct iwl_lq_sta *lq_sta, in rs_get_best_rate() argument
1330 &(lq_sta->lq_info[lq_sta->active_tbl]); in rs_get_best_rate()
1344 target_tpt = lq_sta->last_tpt; in rs_get_best_rate()
1411 static void rs_stay_in_table(struct iwl_lq_sta *lq_sta, bool force_search) in rs_stay_in_table() argument
1418 mvm = lq_sta->pers.drv; in rs_stay_in_table()
1419 active_tbl = lq_sta->active_tbl; in rs_stay_in_table()
1421 tbl = &(lq_sta->lq_info[active_tbl]); in rs_stay_in_table()
1424 if (lq_sta->rs_state == RS_STATE_STAY_IN_COLUMN) { in rs_stay_in_table()
1426 if (lq_sta->flush_timer) in rs_stay_in_table()
1429 (unsigned long)(lq_sta->flush_timer + in rs_stay_in_table()
1441 (lq_sta->total_failed > lq_sta->max_failure_limit) || in rs_stay_in_table()
1442 (lq_sta->total_success > lq_sta->max_success_limit) || in rs_stay_in_table()
1443 ((!lq_sta->search_better_tbl) && in rs_stay_in_table()
1444 (lq_sta->flush_timer) && (flush_interval_passed))) { in rs_stay_in_table()
1447 lq_sta->total_failed, in rs_stay_in_table()
1448 lq_sta->total_success, in rs_stay_in_table()
1452 lq_sta->rs_state = RS_STATE_SEARCH_CYCLE_STARTED; in rs_stay_in_table()
1455 lq_sta->total_failed = 0; in rs_stay_in_table()
1456 lq_sta->total_success = 0; in rs_stay_in_table()
1457 lq_sta->flush_timer = 0; in rs_stay_in_table()
1459 lq_sta->visited_columns = BIT(tbl->column); in rs_stay_in_table()
1467 lq_sta->table_count++; in rs_stay_in_table()
1468 if (lq_sta->table_count >= in rs_stay_in_table()
1469 lq_sta->table_count_limit) { in rs_stay_in_table()
1470 lq_sta->table_count = 0; in rs_stay_in_table()
1481 if (lq_sta->rs_state == RS_STATE_SEARCH_CYCLE_STARTED) { in rs_stay_in_table()
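
rs_stay_in_table() is where the stay-in-column state is left: a search cycle starts when the caller forces it, when accumulated failures or successes exceed the limits set above, or when the flush interval has elapsed without a pending better-table search; starting the cycle resets the counters and seeds visited_columns with the column the station is already using. The sketch below condenses that decision to the conditions visible in the listing; the flush-interval computation and the table_count bookkeeping of the non-search path are left out, and force_search is assumed to simply force the transition.

#include <stdbool.h>
#include <stdint.h>

#define RS_STATE_SEARCH_CYCLE_STARTED 1	/* placeholder value */

struct stay_in_table_sketch {		/* illustrative subset of struct iwl_lq_sta */
	int total_failed, max_failure_limit;
	int total_success, max_success_limit;
	bool search_better_tbl;
	unsigned long flush_timer;	/* 0 means no flush timer armed */
	int rs_state;			/* RS_STATE_* */
	uint32_t visited_columns;
};

static bool should_start_search(const struct stay_in_table_sketch *lq,
				bool force_search, bool flush_interval_passed)
{
	return force_search ||
	       lq->total_failed > lq->max_failure_limit ||
	       lq->total_success > lq->max_success_limit ||
	       (!lq->search_better_tbl && lq->flush_timer && flush_interval_passed);
}

static void start_search_cycle(struct stay_in_table_sketch *lq, int current_column)
{
	lq->rs_state = RS_STATE_SEARCH_CYCLE_STARTED;
	lq->total_failed = 0;
	lq->total_success = 0;
	lq->flush_timer = 0;
	lq->visited_columns = 1u << current_column;	/* BIT(tbl->column) */
}
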
1534 struct iwl_lq_sta *lq_sta, in rs_update_rate_tbl() argument
1537 rs_fill_lq_cmd(mvm, sta, lq_sta, &tbl->rate); in rs_update_rate_tbl()
1538 iwl_mvm_send_lq_cmd(mvm, &lq_sta->lq); in rs_update_rate_tbl()
1543 struct iwl_lq_sta *lq_sta, in rs_tweak_rate_tbl() argument
1581 rs_set_expected_tpt_table(lq_sta, tbl); in rs_tweak_rate_tbl()
1587 struct iwl_lq_sta *lq_sta, in rs_get_next_column() argument
1606 if (lq_sta->visited_columns & BIT(next_col_id)) { in rs_get_next_column()
1636 tpt = lq_sta->last_tpt / 100; in rs_get_next_column()
1637 expected_tpt_tbl = rs_get_expected_tpt_table(lq_sta, next_col, in rs_get_next_column()
1642 max_rate = rs_get_max_allowed_rate(lq_sta, next_col); in rs_get_next_column()
1671 struct iwl_lq_sta *lq_sta, in rs_switch_to_column() argument
1675 struct iwl_scale_tbl_info *tbl = &lq_sta->lq_info[lq_sta->active_tbl]; in rs_switch_to_column()
1677 &lq_sta->lq_info[rs_search_tbl(lq_sta->active_tbl)]; in rs_switch_to_column()
1690 if (lq_sta->band == NL80211_BAND_5GHZ) in rs_switch_to_column()
1697 rate_mask = lq_sta->active_legacy_rate; in rs_switch_to_column()
1699 rate->type = lq_sta->is_vht ? LQ_VHT_SISO : LQ_HT_SISO; in rs_switch_to_column()
1700 rate_mask = lq_sta->active_siso_rate; in rs_switch_to_column()
1702 rate->type = lq_sta->is_vht ? LQ_VHT_MIMO2 : LQ_HT_MIMO2; in rs_switch_to_column()
1703 rate_mask = lq_sta->active_mimo2_rate; in rs_switch_to_column()
1710 rate->ldpc = lq_sta->ldpc; in rs_switch_to_column()
1714 rs_set_expected_tpt_table(lq_sta, search_tbl); in rs_switch_to_column()
1716 lq_sta->visited_columns |= BIT(col_id); in rs_switch_to_column()
1722 rate_idx = rs_get_best_rate(mvm, lq_sta, search_tbl, in rs_switch_to_column()
1833 struct iwl_lq_sta *lq_sta) in rs_stbc_allow() argument
1838 if (!lq_sta->stbc_capable) in rs_stbc_allow()
1958 struct iwl_lq_sta *lq_sta, in rs_tpc_perform() argument
1969 u8 cur = lq_sta->lq.reduced_tpc; in rs_tpc_perform()
1975 if (lq_sta->pers.dbg_fixed_txp_reduction <= TPC_MAX_REDUCTION) { in rs_tpc_perform()
1977 lq_sta->pers.dbg_fixed_txp_reduction); in rs_tpc_perform()
1978 lq_sta->lq.reduced_tpc = lq_sta->pers.dbg_fixed_txp_reduction; in rs_tpc_perform()
1979 return cur != lq_sta->pers.dbg_fixed_txp_reduction; in rs_tpc_perform()
1994 lq_sta->lq.reduced_tpc = TPC_NO_REDUCTION; in rs_tpc_perform()
2030 lq_sta->lq.reduced_tpc = weak; in rs_tpc_perform()
2033 lq_sta->lq.reduced_tpc = strong; in rs_tpc_perform()
2036 lq_sta->lq.reduced_tpc = TPC_NO_REDUCTION; in rs_tpc_perform()
2050 struct iwl_lq_sta *lq_sta, in rs_rate_scale_perform() argument
2069 u8 prev_agg = lq_sta->is_agg; in rs_rate_scale_perform()
2073 lq_sta->is_agg = !!mvmsta->agg_tids; in rs_rate_scale_perform()
2080 if (!lq_sta->search_better_tbl) in rs_rate_scale_perform()
2081 active_tbl = lq_sta->active_tbl; in rs_rate_scale_perform()
2083 active_tbl = rs_search_tbl(lq_sta->active_tbl); in rs_rate_scale_perform()
2085 tbl = &(lq_sta->lq_info[active_tbl]); in rs_rate_scale_perform()
2088 if (prev_agg != lq_sta->is_agg) { in rs_rate_scale_perform()
2091 prev_agg, lq_sta->is_agg); in rs_rate_scale_perform()
2092 rs_set_expected_tpt_table(lq_sta, tbl); in rs_rate_scale_perform()
2100 rate_mask = rs_get_supported_rates(lq_sta, rate); in rs_rate_scale_perform()
2104 if (lq_sta->search_better_tbl) { in rs_rate_scale_perform()
2107 lq_sta->search_better_tbl = 0; in rs_rate_scale_perform()
2108 tbl = &(lq_sta->lq_info[lq_sta->active_tbl]); in rs_rate_scale_perform()
2109 rs_update_rate_tbl(mvm, sta, lq_sta, tbl); in rs_rate_scale_perform()
2143 rs_stay_in_table(lq_sta, false); in rs_rate_scale_perform()
2149 if (lq_sta->search_better_tbl) { in rs_rate_scale_perform()
2153 if (window->average_tpt > lq_sta->last_tpt) { in rs_rate_scale_perform()
2159 lq_sta->last_tpt); in rs_rate_scale_perform()
2162 lq_sta->active_tbl = active_tbl; in rs_rate_scale_perform()
2171 lq_sta->last_tpt); in rs_rate_scale_perform()
2177 active_tbl = lq_sta->active_tbl; in rs_rate_scale_perform()
2178 tbl = &(lq_sta->lq_info[active_tbl]); in rs_rate_scale_perform()
2182 current_tpt = lq_sta->last_tpt; in rs_rate_scale_perform()
2190 lq_sta->search_better_tbl = 0; in rs_rate_scale_perform()
2225 rs_stay_in_table(lq_sta, true); in rs_rate_scale_perform()
2254 if (lq_sta->rs_state == RS_STATE_STAY_IN_COLUMN) in rs_rate_scale_perform()
2255 update_lq = rs_tpc_perform(mvm, sta, lq_sta, tbl); in rs_rate_scale_perform()
2266 rs_tweak_rate_tbl(mvm, sta, lq_sta, tbl, scale_action); in rs_rate_scale_perform()
2268 rs_update_rate_tbl(mvm, sta, lq_sta, tbl); in rs_rate_scale_perform()
2271 rs_stay_in_table(lq_sta, false); in rs_rate_scale_perform()
2280 lq_sta->rs_state == RS_STATE_SEARCH_CYCLE_STARTED in rs_rate_scale_perform()
2285 lq_sta->last_tpt = current_tpt; in rs_rate_scale_perform()
2289 update_lq, done_search, lq_sta->rs_state, in rs_rate_scale_perform()
2292 next_column = rs_get_next_column(mvm, lq_sta, sta, tbl); in rs_rate_scale_perform()
2294 int ret = rs_switch_to_column(mvm, lq_sta, sta, in rs_rate_scale_perform()
2297 lq_sta->search_better_tbl = 1; in rs_rate_scale_perform()
2301 lq_sta->rs_state = RS_STATE_SEARCH_CYCLE_ENDED; in rs_rate_scale_perform()
2305 if (lq_sta->search_better_tbl) { in rs_rate_scale_perform()
2307 tbl = &lq_sta->lq_info[rs_search_tbl(lq_sta->active_tbl)]; in rs_rate_scale_perform()
2315 rs_update_rate_tbl(mvm, sta, lq_sta, tbl); in rs_rate_scale_perform()
2322 rs_tl_turn_on_agg(mvm, mvmsta, tid, lq_sta, sta); in rs_rate_scale_perform()
2324 if (done_search && lq_sta->rs_state == RS_STATE_SEARCH_CYCLE_ENDED) { in rs_rate_scale_perform()
2325 tbl1 = &(lq_sta->lq_info[lq_sta->active_tbl]); in rs_rate_scale_perform()
2326 rs_set_stay_in_table(mvm, is_legacy(&tbl1->rate), lq_sta); in rs_rate_scale_perform()
2406 struct iwl_lq_sta *lq_sta) in rs_init_optimal_rate() argument
2408 struct rs_rate *rate = &lq_sta->optimal_rate; in rs_init_optimal_rate()
2410 if (lq_sta->max_mimo2_rate_idx != IWL_RATE_INVALID) in rs_init_optimal_rate()
2411 rate->type = lq_sta->is_vht ? LQ_VHT_MIMO2 : LQ_HT_MIMO2; in rs_init_optimal_rate()
2412 else if (lq_sta->max_siso_rate_idx != IWL_RATE_INVALID) in rs_init_optimal_rate()
2413 rate->type = lq_sta->is_vht ? LQ_VHT_SISO : LQ_HT_SISO; in rs_init_optimal_rate()
2414 else if (lq_sta->band == NL80211_BAND_5GHZ) in rs_init_optimal_rate()
2425 lq_sta->optimal_rate_mask = lq_sta->active_mimo2_rate; in rs_init_optimal_rate()
2427 lq_sta->optimal_rate_mask = lq_sta->active_siso_rate; in rs_init_optimal_rate()
2429 lq_sta->optimal_rate_mask = lq_sta->active_legacy_rate; in rs_init_optimal_rate()
2431 if (lq_sta->band == NL80211_BAND_5GHZ) { in rs_init_optimal_rate()
2432 lq_sta->optimal_rates = rs_optimal_rates_5ghz_legacy; in rs_init_optimal_rate()
2433 lq_sta->optimal_nentries = in rs_init_optimal_rate()
2436 lq_sta->optimal_rates = rs_optimal_rates_24ghz_legacy; in rs_init_optimal_rate()
2437 lq_sta->optimal_nentries = in rs_init_optimal_rate()
2444 lq_sta->optimal_rates = rs_optimal_rates_vht_20mhz; in rs_init_optimal_rate()
2445 lq_sta->optimal_nentries = in rs_init_optimal_rate()
2448 lq_sta->optimal_rates = rs_optimal_rates_vht; in rs_init_optimal_rate()
2449 lq_sta->optimal_nentries = in rs_init_optimal_rate()
2453 lq_sta->optimal_rates = rs_optimal_rates_ht; in rs_init_optimal_rate()
2454 lq_sta->optimal_nentries = ARRAY_SIZE(rs_optimal_rates_ht); in rs_init_optimal_rate()
2460 struct iwl_lq_sta *lq_sta) in rs_get_optimal_rate() argument
2462 struct rs_rate *rate = &lq_sta->optimal_rate; in rs_get_optimal_rate()
2465 rate->index = find_first_bit(&lq_sta->optimal_rate_mask, in rs_get_optimal_rate()
2468 for (i = 0; i < lq_sta->optimal_nentries; i++) { in rs_get_optimal_rate()
2469 int rate_idx = lq_sta->optimal_rates[i].rate_idx; in rs_get_optimal_rate()
2471 if ((lq_sta->pers.last_rssi >= lq_sta->optimal_rates[i].rssi) && in rs_get_optimal_rate()
2472 (BIT(rate_idx) & lq_sta->optimal_rate_mask)) { in rs_get_optimal_rate()
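
rs_get_optimal_rate() drops the station onto the best rate it can justify from RSSI alone: it walks the per-mode (rate index, minimum RSSI) table chosen in rs_init_optimal_rate(), ordered from fastest to slowest, and takes the first entry whose threshold the last measured RSSI clears and whose bit is set in optimal_rate_mask, falling back to the lowest allowed rate otherwise. A self-contained sketch of that lookup; the example table and its values are invented, the real tables are the rs_optimal_rates_* arrays referenced above.

#include <stdint.h>

struct rssi_to_rate {
	int rate_idx;	/* index into the driver's rate table */
	int rssi;	/* minimum RSSI (dBm) at which this rate is attempted */
};

/* Invented example; the real tables (rs_optimal_rates_vht/_ht/_*_legacy)
 * are likewise ordered from the highest rate down. */
static const struct rssi_to_rate example_tbl[] = {
	{ .rate_idx = 11, .rssi = -60 },
	{ .rate_idx =  9, .rssi = -70 },
	{ .rate_idx =  6, .rssi = -80 },
};

static int pick_optimal_rate(const struct rssi_to_rate *tbl, int nentries,
			     int last_rssi, unsigned long rate_mask)
{
	/* fallback: lowest rate allowed by the mask (mask assumed non-empty) */
	int index = __builtin_ctzl(rate_mask);

	for (int i = 0; i < nentries; i++) {
		int rate_idx = tbl[i].rate_idx;

		if (last_rssi >= tbl[i].rssi && (rate_mask & (1ul << rate_idx))) {
			index = rate_idx;
			break;
		}
	}
	return index;
}

For example, with last_rssi = -65 and all three example rates allowed, the lookup skips the -60 dBm entry and returns rate_idx 9.
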
2486 struct iwl_lq_sta *lq_sta, in rs_get_initial_rate() argument
2498 for (i = 0; i < ARRAY_SIZE(lq_sta->pers.chain_signal); i++) { in rs_get_initial_rate()
2499 if (!(lq_sta->pers.chains & BIT(i))) in rs_get_initial_rate()
2502 if (lq_sta->pers.chain_signal[i] > best_rssi) { in rs_get_initial_rate()
2503 best_rssi = lq_sta->pers.chain_signal[i]; in rs_get_initial_rate()
2520 rate->index = find_first_bit(&lq_sta->active_legacy_rate, in rs_get_initial_rate()
2572 active_rate = lq_sta->active_siso_rate; in rs_get_initial_rate()
2579 active_rate = lq_sta->active_siso_rate; in rs_get_initial_rate()
2582 active_rate = lq_sta->active_legacy_rate; in rs_get_initial_rate()
2604 struct iwl_lq_sta *lq_sta = &mvmsta->lq_sta.rs_drv; in rs_update_last_rssi() local
2607 lq_sta->pers.chains = rx_status->chains; in rs_update_last_rssi()
2608 lq_sta->pers.chain_signal[0] = rx_status->chain_signal[0]; in rs_update_last_rssi()
2609 lq_sta->pers.chain_signal[1] = rx_status->chain_signal[1]; in rs_update_last_rssi()
2610 lq_sta->pers.last_rssi = S8_MIN; in rs_update_last_rssi()
2612 for (i = 0; i < ARRAY_SIZE(lq_sta->pers.chain_signal); i++) { in rs_update_last_rssi()
2613 if (!(lq_sta->pers.chains & BIT(i))) in rs_update_last_rssi()
2616 if (lq_sta->pers.chain_signal[i] > lq_sta->pers.last_rssi) in rs_update_last_rssi()
2617 lq_sta->pers.last_rssi = lq_sta->pers.chain_signal[i]; in rs_update_last_rssi()
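
rs_update_last_rssi() keeps pers.last_rssi equal to the strongest per-chain signal among the chains that actually received the last frame, starting from S8_MIN so an empty chain mask leaves the floor value. A standalone sketch of that fold; the two-chain array size matches the chain_signal indices visible above.

#include <stdint.h>

#define NUM_CHAINS 2	/* chain_signal[0] and [1] in the listing */

static int8_t last_rssi_from_chains(uint8_t chains,
				    const int8_t chain_signal[NUM_CHAINS])
{
	int8_t last_rssi = INT8_MIN;	/* S8_MIN in the kernel */

	for (int i = 0; i < NUM_CHAINS; i++) {
		if (!(chains & (1u << i)))
			continue;		/* chain did not receive this frame */
		if (chain_signal[i] > last_rssi)
			last_rssi = chain_signal[i];
	}
	return last_rssi;
}
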
2637 struct iwl_lq_sta *lq_sta, in rs_initialize_lq() argument
2644 if (!sta || !lq_sta) in rs_initialize_lq()
2647 if (!lq_sta->search_better_tbl) in rs_initialize_lq()
2648 active_tbl = lq_sta->active_tbl; in rs_initialize_lq()
2650 active_tbl = rs_search_tbl(lq_sta->active_tbl); in rs_initialize_lq()
2652 tbl = &(lq_sta->lq_info[active_tbl]); in rs_initialize_lq()
2655 rs_get_initial_rate(mvm, sta, lq_sta, band, rate); in rs_initialize_lq()
2656 rs_init_optimal_rate(mvm, sta, lq_sta); in rs_initialize_lq()
2660 rate->ant, lq_sta->pers.chains, mvm->fw->valid_tx_ant, in rs_initialize_lq()
2665 rs_set_expected_tpt_table(lq_sta, tbl); in rs_initialize_lq()
2666 rs_fill_lq_cmd(mvm, sta, lq_sta, rate); in rs_initialize_lq()
2668 iwl_mvm_send_lq_cmd(mvm, &lq_sta->lq); in rs_initialize_lq()
2679 struct iwl_lq_sta *lq_sta; in rs_drv_get_rate() local
2694 lq_sta = mvm_sta; in rs_drv_get_rate()
2695 iwl_mvm_hwrate_to_tx_rate_v1(lq_sta->last_rate_n_flags, in rs_drv_get_rate()
2702 if (lq_sta->rs_state != RS_STATE_STAY_IN_COLUMN) { in rs_drv_get_rate()
2703 optimal_rate = rs_get_optimal_rate(mvm, lq_sta); in rs_drv_get_rate()
2717 struct iwl_lq_sta *lq_sta = &mvmsta->lq_sta.rs_drv; in rs_drv_alloc_sta() local
2721 lq_sta->pers.drv = mvm; in rs_drv_alloc_sta()
2723 lq_sta->pers.dbg_fixed_rate = 0; in rs_drv_alloc_sta()
2724 lq_sta->pers.dbg_fixed_txp_reduction = TPC_INVALID; in rs_drv_alloc_sta()
2725 lq_sta->pers.ss_force = RS_SS_FORCE_NONE; in rs_drv_alloc_sta()
2727 lq_sta->pers.chains = 0; in rs_drv_alloc_sta()
2728 memset(lq_sta->pers.chain_signal, 0, sizeof(lq_sta->pers.chain_signal)); in rs_drv_alloc_sta()
2729 lq_sta->pers.last_rssi = S8_MIN; in rs_drv_alloc_sta()
2731 return lq_sta; in rs_drv_alloc_sta()
2754 struct iwl_lq_sta *lq_sta) in rs_vht_set_enabled_rates() argument
2769 lq_sta->active_siso_rate |= BIT(i); in rs_vht_set_enabled_rates()
2787 lq_sta->active_mimo2_rate |= BIT(i); in rs_vht_set_enabled_rates()
2794 struct iwl_lq_sta *lq_sta, in rs_ht_init() argument
2801 lq_sta->active_siso_rate = ht_cap->mcs.rx_mask[0] << 1; in rs_ht_init()
2802 lq_sta->active_siso_rate |= ht_cap->mcs.rx_mask[0] & 0x1; in rs_ht_init()
2803 lq_sta->active_siso_rate &= ~((u16)0x2); in rs_ht_init()
2804 lq_sta->active_siso_rate <<= IWL_FIRST_OFDM_RATE; in rs_ht_init()
2806 lq_sta->active_mimo2_rate = ht_cap->mcs.rx_mask[1] << 1; in rs_ht_init()
2807 lq_sta->active_mimo2_rate |= ht_cap->mcs.rx_mask[1] & 0x1; in rs_ht_init()
2808 lq_sta->active_mimo2_rate &= ~((u16)0x2); in rs_ht_init()
2809 lq_sta->active_mimo2_rate <<= IWL_FIRST_OFDM_RATE; in rs_ht_init()
2813 lq_sta->ldpc = true; in rs_ht_init()
2818 lq_sta->stbc_capable = true; in rs_ht_init()
2820 lq_sta->is_vht = false; in rs_ht_init()
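
The bit manipulation in rs_ht_init() converts an 8-bit HT MCS rx_mask (bit n = MCS n supported) into the driver's rate-index bitmap: MCS0 stays on the 6 Mbps slot, bit 1 is cleared because the 9 Mbps slot has no HT counterpart, MCS1..7 land on the 12..60 Mbps slots, and the final shift by IWL_FIRST_OFDM_RATE skips the four CCK entries. Below is a compilable sketch of the conversion; the slot layout is inferred from the driver's rate table and from the 0x1FD0 "6 - 60 MBits, no 9, no CCK" mask that rs_program_fix_rate() uses further down the listing, which is exactly what a full MCS0-7 mask produces here.

#include <stdint.h>

#define FIRST_OFDM_RATE 4	/* index of 6 Mbps, right after the four CCK rates
				 * (IWL_FIRST_OFDM_RATE in the driver) */

static uint16_t ht_mcs_to_rate_mask(uint8_t rx_mask)
{
	uint16_t rates = (uint16_t)rx_mask << 1;	/* MCS n -> bit n + 1 */

	rates |= rx_mask & 0x1;			/* put MCS0 back at bit 0 (6 Mbps) */
	rates &= ~(uint16_t)0x2;		/* leave bit 1 (9 Mbps slot) empty */
	return rates << FIRST_OFDM_RATE;	/* skip the CCK slots */
}

/* ht_mcs_to_rate_mask(0xff) == 0x1fd0, matching the fixed-rate mask noted above. */
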
2825 struct iwl_lq_sta *lq_sta, in rs_vht_init() argument
2828 rs_vht_set_enabled_rates(sta, vht_cap, lq_sta); in rs_vht_init()
2832 lq_sta->ldpc = true; in rs_vht_init()
2837 lq_sta->stbc_capable = true; in rs_vht_init()
2842 lq_sta->bfer_capable = true; in rs_vht_init()
2844 lq_sta->is_vht = true; in rs_vht_init()
2924 struct iwl_lq_sta *lq_sta = &mvmsta->lq_sta.rs_drv; in rs_drv_rate_init() local
2928 lockdep_assert_held(&mvmsta->lq_sta.rs_drv.pers.lock); in rs_drv_rate_init()
2931 memset(lq_sta, 0, offsetof(typeof(*lq_sta), pers)); in rs_drv_rate_init()
2935 lq_sta->lq.sta_id = mvmsta->sta_id; in rs_drv_rate_init()
2940 rs_rate_scale_clear_tbl_windows(mvm, &lq_sta->lq_info[j]); in rs_drv_rate_init()
2942 lq_sta->flush_timer = 0; in rs_drv_rate_init()
2943 lq_sta->last_tx = jiffies; in rs_drv_rate_init()
2953 lq_sta->missed_rate_counter = IWL_MVM_RS_MISSED_RATE_MAX; in rs_drv_rate_init()
2954 lq_sta->band = sband->band; in rs_drv_rate_init()
2959 lq_sta->active_legacy_rate = 0; in rs_drv_rate_init()
2961 lq_sta->active_legacy_rate |= BIT(sband->bitrates[i].hw_value); in rs_drv_rate_init()
2965 rs_ht_init(mvm, sta, lq_sta, ht_cap); in rs_drv_rate_init()
2967 rs_vht_init(mvm, sta, lq_sta, vht_cap); in rs_drv_rate_init()
2969 lq_sta->max_legacy_rate_idx = in rs_drv_rate_init()
2970 rs_get_max_rate_from_mask(lq_sta->active_legacy_rate); in rs_drv_rate_init()
2971 lq_sta->max_siso_rate_idx = in rs_drv_rate_init()
2972 rs_get_max_rate_from_mask(lq_sta->active_siso_rate); in rs_drv_rate_init()
2973 lq_sta->max_mimo2_rate_idx = in rs_drv_rate_init()
2974 rs_get_max_rate_from_mask(lq_sta->active_mimo2_rate); in rs_drv_rate_init()
2978 lq_sta->active_legacy_rate, in rs_drv_rate_init()
2979 lq_sta->active_siso_rate, in rs_drv_rate_init()
2980 lq_sta->active_mimo2_rate, in rs_drv_rate_init()
2981 lq_sta->is_vht, lq_sta->ldpc, lq_sta->stbc_capable, in rs_drv_rate_init()
2982 lq_sta->bfer_capable); in rs_drv_rate_init()
2984 lq_sta->max_legacy_rate_idx, in rs_drv_rate_init()
2985 lq_sta->max_siso_rate_idx, in rs_drv_rate_init()
2986 lq_sta->max_mimo2_rate_idx); in rs_drv_rate_init()
2989 lq_sta->lq.single_stream_ant_msk = in rs_drv_rate_init()
2991 lq_sta->lq.dual_stream_ant_msk = ANT_AB; in rs_drv_rate_init()
2994 lq_sta->tx_agg_tid_en = IWL_AGG_ALL_TID; in rs_drv_rate_init()
2995 lq_sta->is_agg = 0; in rs_drv_rate_init()
2999 rs_initialize_lq(mvm, sta, lq_sta, band); in rs_drv_rate_init()
3039 struct iwl_lq_sta *lq_sta = &mvmsta->lq_sta.rs_drv; in __iwl_mvm_rs_tx_status() local
3041 if (!lq_sta->pers.drv) { in __iwl_mvm_rs_tx_status()
3061 if (lq_sta->pers.dbg_fixed_rate) { in __iwl_mvm_rs_tx_status()
3080 lq_sta->pers.tx_stats[column][index].total += attempts; in __iwl_mvm_rs_tx_status()
3081 lq_sta->pers.tx_stats[column][index].success += success; in __iwl_mvm_rs_tx_status()
3090 (unsigned long)(lq_sta->last_tx + in __iwl_mvm_rs_tx_status()
3099 lq_sta->last_tx = jiffies; in __iwl_mvm_rs_tx_status()
3108 table = &lq_sta->lq; in __iwl_mvm_rs_tx_status()
3125 lq_sta->missed_rate_counter++; in __iwl_mvm_rs_tx_status()
3126 if (lq_sta->missed_rate_counter > IWL_MVM_RS_MISSED_RATE_MAX) { in __iwl_mvm_rs_tx_status()
3127 lq_sta->missed_rate_counter = 0; in __iwl_mvm_rs_tx_status()
3130 lq_sta->rs_state); in __iwl_mvm_rs_tx_status()
3131 iwl_mvm_send_lq_cmd(mvm, &lq_sta->lq); in __iwl_mvm_rs_tx_status()
3138 lq_sta->missed_rate_counter = 0; in __iwl_mvm_rs_tx_status()
3140 if (!lq_sta->search_better_tbl) { in __iwl_mvm_rs_tx_status()
3141 curr_tbl = &lq_sta->lq_info[lq_sta->active_tbl]; in __iwl_mvm_rs_tx_status()
3142 other_tbl = &lq_sta->lq_info[rs_search_tbl(lq_sta->active_tbl)]; in __iwl_mvm_rs_tx_status()
3144 curr_tbl = &lq_sta->lq_info[rs_search_tbl(lq_sta->active_tbl)]; in __iwl_mvm_rs_tx_status()
3145 other_tbl = &lq_sta->lq_info[lq_sta->active_tbl]; in __iwl_mvm_rs_tx_status()
3151 tmp_tbl = &lq_sta->lq_info[lq_sta->active_tbl]; in __iwl_mvm_rs_tx_status()
3153 tmp_tbl = &lq_sta->lq_info[rs_search_tbl(lq_sta->active_tbl)]; in __iwl_mvm_rs_tx_status()
3160 rs_stay_in_table(lq_sta, true); in __iwl_mvm_rs_tx_status()
3171 rs_collect_tpc_data(mvm, lq_sta, curr_tbl, tx_resp_rate.index, in __iwl_mvm_rs_tx_status()
3198 if (lq_sta->rs_state == RS_STATE_STAY_IN_COLUMN) { in __iwl_mvm_rs_tx_status()
3199 lq_sta->total_success += info->status.ampdu_ack_len; in __iwl_mvm_rs_tx_status()
3200 lq_sta->total_failed += (info->status.ampdu_len - in __iwl_mvm_rs_tx_status()
3231 rs_collect_tpc_data(mvm, lq_sta, tmp_tbl, in __iwl_mvm_rs_tx_status()
3241 if (lq_sta->rs_state == RS_STATE_STAY_IN_COLUMN) { in __iwl_mvm_rs_tx_status()
3242 lq_sta->total_success += legacy_success; in __iwl_mvm_rs_tx_status()
3243 lq_sta->total_failed += retries + (1 - legacy_success); in __iwl_mvm_rs_tx_status()
3247 lq_sta->last_rate_n_flags = lq_hwrate; in __iwl_mvm_rs_tx_status()
3252 rs_rate_scale_perform(mvm, sta, lq_sta, tid, ndp); in __iwl_mvm_rs_tx_status()
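
Before any of the feedback above is fed into the scaling windows, __iwl_mvm_rs_tx_status() checks that the rate reported by the firmware matches the first entry of the LQ table the driver believes is active; a mismatch bumps missed_rate_counter and, once it passes IWL_MVM_RS_MISSED_RATE_MAX, the LQ command is resent to re-sync the firmware, while a match clears the counter. The sketch below reduces the rate comparison to an integer equality check and uses a placeholder for the maximum; it illustrates only the counting pattern, not the driver's full rate matching.

#include <stdbool.h>

#define MISSED_RATE_MAX 15	/* placeholder for IWL_MVM_RS_MISSED_RATE_MAX */

struct rate_sync_sketch {
	int missed_rate_counter;
	unsigned int expected_rate;	/* first entry of the active LQ table */
};

/* Returns true when the TX feedback matches what was last programmed and is
 * therefore safe to feed into the rate-scale history; otherwise counts the
 * mismatch and tells the caller to resend the LQ command after too many. */
static bool tx_status_in_sync(struct rate_sync_sketch *s,
			      unsigned int reported_rate, bool *resend_lq_cmd)
{
	*resend_lq_cmd = false;

	if (reported_rate != s->expected_rate) {
		if (++s->missed_rate_counter > MISSED_RATE_MAX) {
			s->missed_rate_counter = 0;
			*resend_lq_cmd = true;	/* firmware and driver drifted apart */
		}
		return false;
	}
	s->missed_rate_counter = 0;
	return true;
}
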
3263 if (!spin_trylock(&mvmsta->lq_sta.rs_drv.pers.lock)) in iwl_mvm_rs_tx_status()
3267 spin_unlock(&mvmsta->lq_sta.rs_drv.pers.lock); in iwl_mvm_rs_tx_status()
3309 struct iwl_lq_sta *lq_sta, in rs_fill_rates_for_column() argument
3332 bottom_reached = rs_get_lower_rate_in_column(lq_sta, rate); in rs_fill_rates_for_column()
3365 struct iwl_lq_sta *lq_sta, in rs_build_rates_table() argument
3371 struct iwl_lq_cmd *lq_cmd = &lq_sta->lq; in rs_build_rates_table()
3381 rs_stbc_allow(mvm, sta, lq_sta)) in rs_build_rates_table()
3396 rs_fill_rates_for_column(mvm, lq_sta, &rate, lq_cmd->rs_table, &index, in rs_build_rates_table()
3400 rs_get_lower_rate_down_column(lq_sta, &rate); in rs_build_rates_table()
3415 rs_fill_rates_for_column(mvm, lq_sta, &rate, lq_cmd->rs_table, &index, in rs_build_rates_table()
3419 rs_get_lower_rate_down_column(lq_sta, &rate); in rs_build_rates_table()
3424 rs_fill_rates_for_column(mvm, lq_sta, &rate, lq_cmd->rs_table, &index, in rs_build_rates_table()
3443 struct iwl_lq_cmd *lq_cmd = &mvmsta->lq_sta.rs_drv.lq; in rs_bfer_active_iter()
3497 struct iwl_lq_sta *lq_sta, in rs_set_lq_ss_params() argument
3500 struct iwl_lq_cmd *lq_cmd = &lq_sta->lq; in rs_set_lq_ss_params()
3516 if (lq_sta->pers.ss_force == RS_SS_FORCE_STBC) in rs_set_lq_ss_params()
3518 else if (lq_sta->pers.ss_force == RS_SS_FORCE_BFER) in rs_set_lq_ss_params()
3521 if (lq_sta->pers.ss_force != RS_SS_FORCE_NONE) { in rs_set_lq_ss_params()
3523 lq_sta->pers.ss_force); in rs_set_lq_ss_params()
3528 if (lq_sta->stbc_capable) in rs_set_lq_ss_params()
3531 if (!lq_sta->bfer_capable) in rs_set_lq_ss_params()
3556 &bfer_mvmsta->lq_sta.rs_drv.lq; in rs_set_lq_ss_params()
3574 struct iwl_lq_sta *lq_sta, in rs_fill_lq_cmd() argument
3577 struct iwl_lq_cmd *lq_cmd = &lq_sta->lq; in rs_fill_lq_cmd()
3586 if (lq_sta->pers.dbg_fixed_rate) { in rs_fill_lq_cmd()
3588 lq_sta->band, in rs_fill_lq_cmd()
3589 lq_sta->pers.dbg_fixed_rate); in rs_fill_lq_cmd()
3596 rs_build_rates_table(mvm, sta, lq_sta, initial_rate); in rs_fill_lq_cmd()
3599 rs_set_lq_ss_params(mvm, sta, lq_sta, initial_rate); in rs_fill_lq_cmd()
3702 struct iwl_lq_sta *lq_sta) in rs_program_fix_rate() argument
3704 lq_sta->active_legacy_rate = 0x0FFF; /* 1 - 54 MBits, includes CCK */ in rs_program_fix_rate()
3705 lq_sta->active_siso_rate = 0x1FD0; /* 6 - 60 MBits, no 9, no CCK */ in rs_program_fix_rate()
3706 lq_sta->active_mimo2_rate = 0x1FD0; /* 6 - 60 MBits, no 9, no CCK */ in rs_program_fix_rate()
3709 lq_sta->lq.sta_id, lq_sta->pers.dbg_fixed_rate); in rs_program_fix_rate()
3711 if (lq_sta->pers.dbg_fixed_rate) { in rs_program_fix_rate()
3712 rs_fill_lq_cmd(mvm, NULL, lq_sta, NULL); in rs_program_fix_rate()
3713 iwl_mvm_send_lq_cmd(lq_sta->pers.drv, &lq_sta->lq); in rs_program_fix_rate()
3720 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_scale_table_write() local
3726 mvm = lq_sta->pers.drv; in rs_sta_dbgfs_scale_table_write()
3733 lq_sta->pers.dbg_fixed_rate = parsed_rate; in rs_sta_dbgfs_scale_table_write()
3735 lq_sta->pers.dbg_fixed_rate = 0; in rs_sta_dbgfs_scale_table_write()
3737 rs_program_fix_rate(mvm, lq_sta); in rs_sta_dbgfs_scale_table_write()
3751 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_scale_table_read() local
3753 container_of(lq_sta, struct iwl_mvm_sta, lq_sta.rs_drv); in rs_sta_dbgfs_scale_table_read()
3755 struct iwl_scale_tbl_info *tbl = &(lq_sta->lq_info[lq_sta->active_tbl]); in rs_sta_dbgfs_scale_table_read()
3759 mvm = lq_sta->pers.drv; in rs_sta_dbgfs_scale_table_read()
3765 "sta_id %d\n", lq_sta->lq.sta_id); in rs_sta_dbgfs_scale_table_read()
3768 lq_sta->total_failed, lq_sta->total_success, in rs_sta_dbgfs_scale_table_read()
3769 lq_sta->active_legacy_rate); in rs_sta_dbgfs_scale_table_read()
3771 lq_sta->pers.dbg_fixed_rate); in rs_sta_dbgfs_scale_table_read()
3789 (lq_sta->is_agg) ? "AGG on" : "", in rs_sta_dbgfs_scale_table_read()
3793 lq_sta->last_rate_n_flags); in rs_sta_dbgfs_scale_table_read()
3796 lq_sta->lq.flags, in rs_sta_dbgfs_scale_table_read()
3797 lq_sta->lq.mimo_delim, in rs_sta_dbgfs_scale_table_read()
3798 lq_sta->lq.single_stream_ant_msk, in rs_sta_dbgfs_scale_table_read()
3799 lq_sta->lq.dual_stream_ant_msk); in rs_sta_dbgfs_scale_table_read()
3803 le16_to_cpu(lq_sta->lq.agg_time_limit), in rs_sta_dbgfs_scale_table_read()
3804 lq_sta->lq.agg_disable_start_th, in rs_sta_dbgfs_scale_table_read()
3805 lq_sta->lq.agg_frame_cnt_limit); in rs_sta_dbgfs_scale_table_read()
3808 lq_sta->lq.reduced_tpc); in rs_sta_dbgfs_scale_table_read()
3809 ss_params = le32_to_cpu(lq_sta->lq.ss_params); in rs_sta_dbgfs_scale_table_read()
3822 lq_sta->lq.initial_rate_index[0], in rs_sta_dbgfs_scale_table_read()
3823 lq_sta->lq.initial_rate_index[1], in rs_sta_dbgfs_scale_table_read()
3824 lq_sta->lq.initial_rate_index[2], in rs_sta_dbgfs_scale_table_read()
3825 lq_sta->lq.initial_rate_index[3]); in rs_sta_dbgfs_scale_table_read()
3828 u32 r = le32_to_cpu(lq_sta->lq.rs_table[i]); in rs_sta_dbgfs_scale_table_read()
3857 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_stats_table_read() local
3864 tbl = &(lq_sta->lq_info[i]); in rs_sta_dbgfs_stats_table_read()
3869 lq_sta->active_tbl == i ? "*" : "x", in rs_sta_dbgfs_stats_table_read()
3934 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_drv_tx_stats_read() local
3955 stats = &(lq_sta->pers.tx_stats[col][rate]); in rs_sta_dbgfs_drv_tx_stats_read()
3973 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_drv_tx_stats_write() local
3974 memset(lq_sta->pers.tx_stats, 0, sizeof(lq_sta->pers.tx_stats)); in rs_sta_dbgfs_drv_tx_stats_write()
3990 struct iwl_lq_sta *lq_sta = file->private_data; in iwl_dbgfs_ss_force_read() local
4002 ss_force_name[lq_sta->pers.ss_force]); in iwl_dbgfs_ss_force_read()
4006 static ssize_t iwl_dbgfs_ss_force_write(struct iwl_lq_sta *lq_sta, char *buf, in iwl_dbgfs_ss_force_write() argument
4009 struct iwl_mvm *mvm = lq_sta->pers.drv; in iwl_dbgfs_ss_force_write()
4013 lq_sta->pers.ss_force = RS_SS_FORCE_NONE; in iwl_dbgfs_ss_force_write()
4015 lq_sta->pers.ss_force = RS_SS_FORCE_SISO; in iwl_dbgfs_ss_force_write()
4017 if (lq_sta->stbc_capable) { in iwl_dbgfs_ss_force_write()
4018 lq_sta->pers.ss_force = RS_SS_FORCE_STBC; in iwl_dbgfs_ss_force_write()
4025 if (lq_sta->bfer_capable) { in iwl_dbgfs_ss_force_write()
4026 lq_sta->pers.ss_force = RS_SS_FORCE_BFER; in iwl_dbgfs_ss_force_write()
4042 debugfs_create_file(#name, mode, parent, lq_sta, \
4051 struct iwl_lq_sta *lq_sta = priv_sta; in rs_drv_add_sta_debugfs() local
4054 mvmsta = container_of(lq_sta, struct iwl_mvm_sta, lq_sta.rs_drv); in rs_drv_add_sta_debugfs()
4060 lq_sta, &rs_sta_dbgfs_scale_table_ops); in rs_drv_add_sta_debugfs()
4062 lq_sta, &rs_sta_dbgfs_stats_table_ops); in rs_drv_add_sta_debugfs()
4064 lq_sta, &rs_sta_dbgfs_drv_tx_stats_ops); in rs_drv_add_sta_debugfs()
4066 &lq_sta->tx_agg_tid_en); in rs_drv_add_sta_debugfs()
4068 &lq_sta->pers.dbg_fixed_txp_reduction); in rs_drv_add_sta_debugfs()
4111 spin_lock(&mvmsta->lq_sta.rs_drv.pers.lock); in iwl_mvm_rs_rate_init()
4113 spin_unlock(&mvmsta->lq_sta.rs_drv.pers.lock); in iwl_mvm_rs_rate_init()
4130 struct iwl_lq_cmd *lq = &mvmsta->lq_sta.rs_drv.lq; in rs_drv_tx_protection()