Lines Matching refs:tc_info

543 if (hdev->tm_info.tc_info[pri_id].tc_sch_mode == HCLGE_SCH_MODE_DWRR) in hclge_tm_pri_schd_mode_cfg()
611 for (i = 0; i < kinfo->tc_info.num_tc; i++) { in hclge_tm_qs_shaper_cfg()
638 struct hnae3_tc_info *tc_info = &kinfo->tc_info; in hclge_vport_get_max_rss_size() local
643 if (!tc_info->mqprio_active) in hclge_vport_get_max_rss_size()
644 return vport->alloc_tqps / tc_info->num_tc; in hclge_vport_get_max_rss_size()
647 if (!(hdev->hw_tc_map & BIT(i)) || i >= tc_info->num_tc) in hclge_vport_get_max_rss_size()
649 if (max_rss_size < tc_info->tqp_count[i]) in hclge_vport_get_max_rss_size()
650 max_rss_size = tc_info->tqp_count[i]; in hclge_vport_get_max_rss_size()
659 struct hnae3_tc_info *tc_info = &kinfo->tc_info; in hclge_vport_get_tqp_num() local
664 if (!tc_info->mqprio_active) in hclge_vport_get_tqp_num()
665 return kinfo->rss_size * tc_info->num_tc; in hclge_vport_get_tqp_num()
668 if (hdev->hw_tc_map & BIT(i) && i < tc_info->num_tc) in hclge_vport_get_tqp_num()
669 sum += tc_info->tqp_count[i]; in hclge_vport_get_tqp_num()
686 kinfo->tc_info.max_tc = 1; in hclge_tm_update_kinfo_rss_size()
687 kinfo->tc_info.num_tc = 1; in hclge_tm_update_kinfo_rss_size()
692 kinfo->tc_info.max_tc = hdev->tc_max; in hclge_tm_update_kinfo_rss_size()
693 kinfo->tc_info.num_tc = in hclge_tm_update_kinfo_rss_size()
730 if (kinfo->tc_info.mqprio_active) in hclge_tm_vport_tc_info_update()
734 if (hdev->hw_tc_map & BIT(i) && i < kinfo->tc_info.num_tc) { in hclge_tm_vport_tc_info_update()
735 kinfo->tc_info.tqp_offset[i] = i * kinfo->rss_size; in hclge_tm_vport_tc_info_update()
736 kinfo->tc_info.tqp_count[i] = kinfo->rss_size; in hclge_tm_vport_tc_info_update()
739 kinfo->tc_info.tqp_offset[i] = 0; in hclge_tm_vport_tc_info_update()
740 kinfo->tc_info.tqp_count[i] = 1; in hclge_tm_vport_tc_info_update()
744 memcpy(kinfo->tc_info.prio_tc, hdev->tm_info.prio_tc, in hclge_tm_vport_tc_info_update()
774 hdev->tm_info.tc_info[i].tc_id = i; in hclge_tm_tc_info_init()
775 hdev->tm_info.tc_info[i].tc_sch_mode = tc_sch_mode; in hclge_tm_tc_info_init()
776 hdev->tm_info.tc_info[i].pgid = 0; in hclge_tm_tc_info_init()
777 hdev->tm_info.tc_info[i].bw_limit = bw_limit; in hclge_tm_tc_info_init()
953 struct hnae3_tc_info *tc_info = &kinfo->tc_info; in hclge_vport_q_to_qs_map() local
958 for (i = 0; i < tc_info->num_tc; i++) { in hclge_vport_q_to_qs_map()
959 for (j = 0; j < tc_info->tqp_count[i]; j++) { in hclge_vport_q_to_qs_map()
960 struct hnae3_queue *q = tqp[tc_info->tqp_offset[i] + j]; in hclge_vport_q_to_qs_map()
983 for (i = 0; i < kinfo->tc_info.max_tc; i++) { in hclge_tm_pri_q_qs_cfg_tc_base()
984 u8 pri = i < kinfo->tc_info.num_tc ? i : 0; in hclge_tm_pri_q_qs_cfg_tc_base()
985 bool link_vld = i < kinfo->tc_info.num_tc; in hclge_tm_pri_q_qs_cfg_tc_base()
1054 u32 rate = hdev->tm_info.tc_info[i].bw_limit; in hclge_tm_pri_tc_base_shaper_cfg()
1133 for (i = 0; i < kinfo->tc_info.num_tc; i++) { in hclge_tm_pri_vnet_base_shaper_qs_cfg()
1134 ret = hclge_shaper_para_calc(hdev->tm_info.tc_info[i].bw_limit, in hclge_tm_pri_vnet_base_shaper_qs_cfg()
1193 &hdev->tm_info.pg_info[hdev->tm_info.tc_info[i].pgid]; in hclge_tm_pri_tc_base_dwrr_cfg()
1203 if (i >= kinfo->tc_info.max_tc) in hclge_tm_pri_tc_base_dwrr_cfg()
1206 dwrr = i < kinfo->tc_info.num_tc ? vport[k].dwrr : 0; in hclge_tm_pri_tc_base_dwrr_cfg()
1232 pg_info = &hdev->tm_info.pg_info[hdev->tm_info.tc_info[i].pgid]; in hclge_tm_ets_tc_dwrr_cfg()
1254 for (i = 0; i < kinfo->tc_info.num_tc; i++) { in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1391 if (pri_id >= kinfo->tc_info.max_tc) in hclge_tm_schd_mode_tc_base_cfg()
1394 mode = pri_id < kinfo->tc_info.num_tc ? HCLGE_SCH_MODE_DWRR : in hclge_tm_schd_mode_tc_base_cfg()
1420 for (i = 0; i < kinfo->tc_info.num_tc; i++) { in hclge_tm_schd_mode_vnet_base_cfg()
1421 u8 sch_mode = hdev->tm_info.tc_info[i].tc_sch_mode; in hclge_tm_schd_mode_vnet_base_cfg()
1646 kinfo->tc_info.prio_tc[i] = prio_tc[i]; in hclge_tm_prio_tc_info_update()
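
For context, the matches in hclge_vport_get_max_rss_size() and hclge_vport_get_tqp_num() show the two accounting paths that depend on tc_info: when mqprio is not active the queue counts are derived from num_tc alone, and when it is active the per-TC tqp_count[] values are summed or compared for each TC set in hw_tc_map. The stand-alone sketch below mirrors that logic only for illustration; the struct layout, names and sample values are simplified stand-ins, not the driver's hnae3_tc_info definition.

/*
 * Minimal user-space sketch of the queue accounting visible in the
 * hclge_vport_get_max_rss_size()/hclge_vport_get_tqp_num() matches above.
 * The struct below is a simplified stand-in for hnae3_tc_info, not the
 * driver's real definition.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_TC 8	/* stand-in for HNAE3_MAX_TC */

struct tc_info_sketch {
	bool mqprio_active;		/* per-TC queue counts supplied by mqprio */
	uint8_t num_tc;			/* number of enabled TCs */
	uint16_t tqp_count[MAX_TC];	/* queues per TC when mqprio is active */
};

/* Largest per-TC queue count among TCs enabled in hw_tc_map (cf. lines 643-650). */
static uint16_t get_max_rss_size(const struct tc_info_sketch *ti,
				 uint8_t hw_tc_map, uint16_t alloc_tqps)
{
	uint16_t max_rss_size = 0;
	int i;

	if (!ti->mqprio_active)
		return alloc_tqps / ti->num_tc;

	for (i = 0; i < MAX_TC; i++) {
		if (!(hw_tc_map & (1U << i)) || i >= ti->num_tc)
			continue;
		if (max_rss_size < ti->tqp_count[i])
			max_rss_size = ti->tqp_count[i];
	}
	return max_rss_size;
}

/* Total queue pairs across enabled TCs (cf. lines 664-669). */
static uint16_t get_tqp_num(const struct tc_info_sketch *ti,
			    uint8_t hw_tc_map, uint16_t rss_size)
{
	uint16_t sum = 0;
	int i;

	if (!ti->mqprio_active)
		return rss_size * ti->num_tc;

	for (i = 0; i < MAX_TC; i++)
		if ((hw_tc_map & (1U << i)) && i < ti->num_tc)
			sum += ti->tqp_count[i];
	return sum;
}

int main(void)
{
	/* Sample values only; not taken from the driver. */
	struct tc_info_sketch ti = {
		.mqprio_active = true,
		.num_tc = 3,
		.tqp_count = { 4, 8, 2 },
	};
	uint8_t hw_tc_map = 0x07;	/* TCs 0-2 enabled in hardware */

	printf("max_rss_size = %u\n", (unsigned)get_max_rss_size(&ti, hw_tc_map, 16));
	printf("tqp_num      = %u\n", (unsigned)get_tqp_num(&ti, hw_tc_map, 4));
	return 0;
}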