Lines Matching refs:hdev
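
The matches below come from the HNS3 PF traffic-management helpers (the hclge_tm_* / hclge_pfc_* functions); each entry shows the source line, the code, the enclosing function, and whether hdev is an argument or a local there. A minimal sketch of the shared descriptor/command pattern these helpers use follows the listing.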

138 static void hclge_pfc_stats_get(struct hclge_dev *hdev, bool tx, u64 *stats)  in hclge_pfc_stats_get()  argument
149 stats[i] = HCLGE_STATS_READ(&hdev->mac_stats, offset[i]); in hclge_pfc_stats_get()
152 void hclge_pfc_rx_stats_get(struct hclge_dev *hdev, u64 *stats) in hclge_pfc_rx_stats_get() argument
154 hclge_pfc_stats_get(hdev, false, stats); in hclge_pfc_rx_stats_get()
157 void hclge_pfc_tx_stats_get(struct hclge_dev *hdev, u64 *stats) in hclge_pfc_tx_stats_get() argument
159 hclge_pfc_stats_get(hdev, true, stats); in hclge_pfc_tx_stats_get()
162 int hclge_mac_pause_en_cfg(struct hclge_dev *hdev, bool tx, bool rx) in hclge_mac_pause_en_cfg() argument
171 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_mac_pause_en_cfg()
174 int hclge_pfc_pause_en_cfg(struct hclge_dev *hdev, u8 tx_rx_bitmap, in hclge_pfc_pause_en_cfg() argument
185 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_pfc_pause_en_cfg()
188 static int hclge_pause_param_cfg(struct hclge_dev *hdev, const u8 *addr, in hclge_pause_param_cfg() argument
203 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_pause_param_cfg()
206 int hclge_pause_addr_cfg(struct hclge_dev *hdev, const u8 *mac_addr) in hclge_pause_addr_cfg() argument
218 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_pause_addr_cfg()
225 return hclge_pause_param_cfg(hdev, mac_addr, trans_gap, trans_time); in hclge_pause_addr_cfg()
228 static int hclge_fill_pri_array(struct hclge_dev *hdev, u8 *pri, u8 pri_id) in hclge_fill_pri_array() argument
232 tc = hdev->tm_info.prio_tc[pri_id]; in hclge_fill_pri_array()
234 if (tc >= hdev->tm_info.num_tc) in hclge_fill_pri_array()
251 static int hclge_up_to_tc_map(struct hclge_dev *hdev) in hclge_up_to_tc_map() argument
261 ret = hclge_fill_pri_array(hdev, pri, pri_id); in hclge_up_to_tc_map()
266 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_up_to_tc_map()
269 static int hclge_tm_pg_to_pri_map_cfg(struct hclge_dev *hdev, in hclge_tm_pg_to_pri_map_cfg() argument
282 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_to_pri_map_cfg()
285 static int hclge_tm_qs_to_pri_map_cfg(struct hclge_dev *hdev, in hclge_tm_qs_to_pri_map_cfg() argument
299 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_to_pri_map_cfg()
302 static int hclge_tm_q_to_qs_map_cfg(struct hclge_dev *hdev, in hclge_tm_q_to_qs_map_cfg() argument
333 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_q_to_qs_map_cfg()
336 static int hclge_tm_pg_weight_cfg(struct hclge_dev *hdev, u8 pg_id, in hclge_tm_pg_weight_cfg() argument
349 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_weight_cfg()
352 static int hclge_tm_pri_weight_cfg(struct hclge_dev *hdev, u8 pri_id, in hclge_tm_pri_weight_cfg() argument
365 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pri_weight_cfg()
368 static int hclge_tm_qs_weight_cfg(struct hclge_dev *hdev, u16 qs_id, in hclge_tm_qs_weight_cfg() argument
381 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_weight_cfg()
398 static int hclge_tm_pg_shapping_cfg(struct hclge_dev *hdev, in hclge_tm_pg_shapping_cfg() argument
420 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_shapping_cfg()
423 int hclge_tm_port_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_port_shaper_cfg() argument
431 ret = hclge_shaper_para_calc(hdev->hw.mac.speed, HCLGE_SHAPER_LVL_PORT, in hclge_tm_port_shaper_cfg()
433 hdev->ae_dev->dev_specs.max_tm_rate); in hclge_tm_port_shaper_cfg()
449 shap_cfg_cmd->port_rate = cpu_to_le32(hdev->hw.mac.speed); in hclge_tm_port_shaper_cfg()
451 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_port_shaper_cfg()
454 static int hclge_tm_pri_shapping_cfg(struct hclge_dev *hdev, in hclge_tm_pri_shapping_cfg() argument
477 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pri_shapping_cfg()
480 static int hclge_tm_pg_schd_mode_cfg(struct hclge_dev *hdev, u8 pg_id) in hclge_tm_pg_schd_mode_cfg() argument
486 if (hdev->tm_info.pg_info[pg_id].pg_sch_mode == HCLGE_SCH_MODE_DWRR) in hclge_tm_pg_schd_mode_cfg()
493 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_schd_mode_cfg()
496 static int hclge_tm_pri_schd_mode_cfg(struct hclge_dev *hdev, u8 pri_id) in hclge_tm_pri_schd_mode_cfg() argument
502 if (hdev->tm_info.tc_info[pri_id].tc_sch_mode == HCLGE_SCH_MODE_DWRR) in hclge_tm_pri_schd_mode_cfg()
509 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pri_schd_mode_cfg()
512 static int hclge_tm_qs_schd_mode_cfg(struct hclge_dev *hdev, u16 qs_id, u8 mode) in hclge_tm_qs_schd_mode_cfg() argument
525 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_schd_mode_cfg()
528 static int hclge_tm_qs_bp_cfg(struct hclge_dev *hdev, u8 tc, u8 grp_id, in hclge_tm_qs_bp_cfg() argument
543 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_bp_cfg()
551 struct hclge_dev *hdev = vport->back; in hclge_tm_qs_shaper_cfg() local
557 max_tx_rate = hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_qs_shaper_cfg()
561 hdev->ae_dev->dev_specs.max_tm_rate); in hclge_tm_qs_shaper_cfg()
581 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_shaper_cfg()
583 dev_err(&hdev->pdev->dev, in hclge_tm_qs_shaper_cfg()
598 struct hclge_dev *hdev = vport->back; in hclge_vport_get_max_rss_size() local
606 if (!(hdev->hw_tc_map & BIT(i)) || i >= tc_info->num_tc) in hclge_vport_get_max_rss_size()
619 struct hclge_dev *hdev = vport->back; in hclge_vport_get_tqp_num() local
627 if (hdev->hw_tc_map & BIT(i) && i < tc_info->num_tc) in hclge_vport_get_tqp_num()
637 struct hclge_dev *hdev = vport->back; in hclge_tm_update_kinfo_rss_size() local
648 vport_max_rss_size = hdev->vf_rss_size_max; in hclge_tm_update_kinfo_rss_size()
651 min_t(u16, vport->alloc_tqps, hdev->tm_info.num_tc); in hclge_tm_update_kinfo_rss_size()
653 vport_max_rss_size = hdev->pf_rss_size_max; in hclge_tm_update_kinfo_rss_size()
662 dev_info(&hdev->pdev->dev, "rss changes from %u to %u\n", in hclge_tm_update_kinfo_rss_size()
675 struct hclge_dev *hdev = vport->back; in hclge_tm_vport_tc_info_update() local
682 vport->bw_limit = hdev->tm_info.pg_info[0].bw_limit; in hclge_tm_vport_tc_info_update()
689 if (hdev->hw_tc_map & BIT(i) && i < kinfo->tc_info.num_tc) { in hclge_tm_vport_tc_info_update()
699 memcpy(kinfo->tc_info.prio_tc, hdev->tm_info.prio_tc, in hclge_tm_vport_tc_info_update()
703 static void hclge_tm_vport_info_update(struct hclge_dev *hdev) in hclge_tm_vport_info_update() argument
705 struct hclge_vport *vport = hdev->vport; in hclge_tm_vport_info_update()
708 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_vport_info_update()
715 static void hclge_tm_tc_info_init(struct hclge_dev *hdev) in hclge_tm_tc_info_init() argument
719 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_tc_info_init()
720 hdev->tm_info.tc_info[i].tc_id = i; in hclge_tm_tc_info_init()
721 hdev->tm_info.tc_info[i].tc_sch_mode = HCLGE_SCH_MODE_DWRR; in hclge_tm_tc_info_init()
722 hdev->tm_info.tc_info[i].pgid = 0; in hclge_tm_tc_info_init()
723 hdev->tm_info.tc_info[i].bw_limit = in hclge_tm_tc_info_init()
724 hdev->tm_info.pg_info[0].bw_limit; in hclge_tm_tc_info_init()
728 hdev->tm_info.prio_tc[i] = in hclge_tm_tc_info_init()
729 (i >= hdev->tm_info.num_tc) ? 0 : i; in hclge_tm_tc_info_init()
732 static void hclge_tm_pg_info_init(struct hclge_dev *hdev) in hclge_tm_pg_info_init() argument
739 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_info_init()
742 hdev->tm_info.pg_dwrr[i] = i ? 0 : BW_PERCENT; in hclge_tm_pg_info_init()
744 hdev->tm_info.pg_info[i].pg_id = i; in hclge_tm_pg_info_init()
745 hdev->tm_info.pg_info[i].pg_sch_mode = HCLGE_SCH_MODE_DWRR; in hclge_tm_pg_info_init()
747 hdev->tm_info.pg_info[i].bw_limit = in hclge_tm_pg_info_init()
748 hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_pg_info_init()
753 hdev->tm_info.pg_info[i].tc_bit_map = hdev->hw_tc_map; in hclge_tm_pg_info_init()
754 for (k = 0; k < hdev->tm_info.num_tc; k++) in hclge_tm_pg_info_init()
755 hdev->tm_info.pg_info[i].tc_dwrr[k] = BW_PERCENT; in hclge_tm_pg_info_init()
757 hdev->tm_info.pg_info[i].tc_dwrr[k] = DEFAULT_BW_WEIGHT; in hclge_tm_pg_info_init()
761 static void hclge_update_fc_mode_by_dcb_flag(struct hclge_dev *hdev) in hclge_update_fc_mode_by_dcb_flag() argument
763 if (hdev->tm_info.num_tc == 1 && !hdev->tm_info.pfc_en) { in hclge_update_fc_mode_by_dcb_flag()
764 if (hdev->fc_mode_last_time == HCLGE_FC_PFC) in hclge_update_fc_mode_by_dcb_flag()
765 dev_warn(&hdev->pdev->dev, in hclge_update_fc_mode_by_dcb_flag()
768 hdev->tm_info.fc_mode = hdev->fc_mode_last_time; in hclge_update_fc_mode_by_dcb_flag()
769 } else if (hdev->tm_info.fc_mode != HCLGE_FC_PFC) { in hclge_update_fc_mode_by_dcb_flag()
774 hdev->fc_mode_last_time = hdev->tm_info.fc_mode; in hclge_update_fc_mode_by_dcb_flag()
775 hdev->tm_info.fc_mode = HCLGE_FC_PFC; in hclge_update_fc_mode_by_dcb_flag()
779 static void hclge_update_fc_mode(struct hclge_dev *hdev) in hclge_update_fc_mode() argument
781 if (!hdev->tm_info.pfc_en) { in hclge_update_fc_mode()
782 hdev->tm_info.fc_mode = hdev->fc_mode_last_time; in hclge_update_fc_mode()
786 if (hdev->tm_info.fc_mode != HCLGE_FC_PFC) { in hclge_update_fc_mode()
787 hdev->fc_mode_last_time = hdev->tm_info.fc_mode; in hclge_update_fc_mode()
788 hdev->tm_info.fc_mode = HCLGE_FC_PFC; in hclge_update_fc_mode()
792 void hclge_tm_pfc_info_update(struct hclge_dev *hdev) in hclge_tm_pfc_info_update() argument
794 if (hdev->ae_dev->dev_version >= HNAE3_DEVICE_VERSION_V3) in hclge_tm_pfc_info_update()
795 hclge_update_fc_mode(hdev); in hclge_tm_pfc_info_update()
797 hclge_update_fc_mode_by_dcb_flag(hdev); in hclge_tm_pfc_info_update()
800 static void hclge_tm_schd_info_init(struct hclge_dev *hdev) in hclge_tm_schd_info_init() argument
802 hclge_tm_pg_info_init(hdev); in hclge_tm_schd_info_init()
804 hclge_tm_tc_info_init(hdev); in hclge_tm_schd_info_init()
806 hclge_tm_vport_info_update(hdev); in hclge_tm_schd_info_init()
808 hclge_tm_pfc_info_update(hdev); in hclge_tm_schd_info_init()
811 static int hclge_tm_pg_to_pri_map(struct hclge_dev *hdev) in hclge_tm_pg_to_pri_map() argument
816 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pg_to_pri_map()
819 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_to_pri_map()
822 hdev, i, hdev->tm_info.pg_info[i].tc_bit_map); in hclge_tm_pg_to_pri_map()
830 static int hclge_tm_pg_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pg_shaper_cfg() argument
832 u32 max_tm_rate = hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_pg_shaper_cfg()
839 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pg_shaper_cfg()
843 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_shaper_cfg()
844 u32 rate = hdev->tm_info.pg_info[i].bw_limit; in hclge_tm_pg_shaper_cfg()
855 ret = hclge_tm_pg_shapping_cfg(hdev, in hclge_tm_pg_shaper_cfg()
866 ret = hclge_tm_pg_shapping_cfg(hdev, in hclge_tm_pg_shaper_cfg()
876 static int hclge_tm_pg_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pg_dwrr_cfg() argument
882 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pg_dwrr_cfg()
886 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_dwrr_cfg()
888 ret = hclge_tm_pg_weight_cfg(hdev, i, hdev->tm_info.pg_dwrr[i]); in hclge_tm_pg_dwrr_cfg()
896 static int hclge_vport_q_to_qs_map(struct hclge_dev *hdev, in hclge_vport_q_to_qs_map() argument
909 ret = hclge_tm_q_to_qs_map_cfg(hdev, in hclge_vport_q_to_qs_map()
920 static int hclge_tm_pri_q_qs_cfg(struct hclge_dev *hdev) in hclge_tm_pri_q_qs_cfg() argument
922 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_q_qs_cfg()
926 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_pri_q_qs_cfg()
928 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_pri_q_qs_cfg()
934 hdev, vport[k].qs_offset + i, i); in hclge_tm_pri_q_qs_cfg()
939 } else if (hdev->tx_sch_mode == HCLGE_FLAG_VNET_BASE_SCH_MODE) { in hclge_tm_pri_q_qs_cfg()
941 for (k = 0; k < hdev->num_alloc_vport; k++) in hclge_tm_pri_q_qs_cfg()
944 hdev, vport[k].qs_offset + i, k); in hclge_tm_pri_q_qs_cfg()
953 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_pri_q_qs_cfg()
954 ret = hclge_vport_q_to_qs_map(hdev, vport); in hclge_tm_pri_q_qs_cfg()
964 static int hclge_tm_pri_tc_base_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pri_tc_base_shaper_cfg() argument
966 u32 max_tm_rate = hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_pri_tc_base_shaper_cfg()
972 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_pri_tc_base_shaper_cfg()
973 u32 rate = hdev->tm_info.tc_info[i].bw_limit; in hclge_tm_pri_tc_base_shaper_cfg()
983 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_C_BUCKET, i, in hclge_tm_pri_tc_base_shaper_cfg()
993 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_P_BUCKET, i, in hclge_tm_pri_tc_base_shaper_cfg()
1004 struct hclge_dev *hdev = vport->back; in hclge_tm_pri_vnet_base_shaper_pri_cfg() local
1011 hdev->ae_dev->dev_specs.max_tm_rate); in hclge_tm_pri_vnet_base_shaper_pri_cfg()
1018 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_C_BUCKET, in hclge_tm_pri_vnet_base_shaper_pri_cfg()
1028 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_P_BUCKET, in hclge_tm_pri_vnet_base_shaper_pri_cfg()
1040 struct hclge_dev *hdev = vport->back; in hclge_tm_pri_vnet_base_shaper_qs_cfg() local
1041 u32 max_tm_rate = hdev->ae_dev->dev_specs.max_tm_rate; in hclge_tm_pri_vnet_base_shaper_qs_cfg()
1047 ret = hclge_shaper_para_calc(hdev->tm_info.tc_info[i].bw_limit, in hclge_tm_pri_vnet_base_shaper_qs_cfg()
1057 static int hclge_tm_pri_vnet_base_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pri_vnet_base_shaper_cfg() argument
1059 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_vnet_base_shaper_cfg()
1064 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_pri_vnet_base_shaper_cfg()
1079 static int hclge_tm_pri_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pri_shaper_cfg() argument
1083 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_pri_shaper_cfg()
1084 ret = hclge_tm_pri_tc_base_shaper_cfg(hdev); in hclge_tm_pri_shaper_cfg()
1088 ret = hclge_tm_pri_vnet_base_shaper_cfg(hdev); in hclge_tm_pri_shaper_cfg()
1096 static int hclge_tm_pri_tc_base_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pri_tc_base_dwrr_cfg() argument
1098 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_tc_base_dwrr_cfg()
1104 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_pri_tc_base_dwrr_cfg()
1106 &hdev->tm_info.pg_info[hdev->tm_info.tc_info[i].pgid]; in hclge_tm_pri_tc_base_dwrr_cfg()
1109 ret = hclge_tm_pri_weight_cfg(hdev, i, dwrr); in hclge_tm_pri_tc_base_dwrr_cfg()
1113 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_pri_tc_base_dwrr_cfg()
1115 hdev, vport[k].qs_offset + i, in hclge_tm_pri_tc_base_dwrr_cfg()
1125 static int hclge_tm_ets_tc_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_ets_tc_dwrr_cfg() argument
1139 pg_info = &hdev->tm_info.pg_info[hdev->tm_info.tc_info[i].pgid]; in hclge_tm_ets_tc_dwrr_cfg()
1145 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_ets_tc_dwrr_cfg()
1151 struct hclge_dev *hdev = vport->back; in hclge_tm_pri_vnet_base_dwrr_pri_cfg() local
1156 ret = hclge_tm_pri_weight_cfg(hdev, vport->vport_id, vport->dwrr); in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1163 hdev, vport->qs_offset + i, in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1164 hdev->tm_info.pg_info[0].tc_dwrr[i]); in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1172 static int hclge_tm_pri_vnet_base_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pri_vnet_base_dwrr_cfg() argument
1174 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_vnet_base_dwrr_cfg()
1178 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_pri_vnet_base_dwrr_cfg()
1189 static int hclge_tm_pri_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pri_dwrr_cfg() argument
1193 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_pri_dwrr_cfg()
1194 ret = hclge_tm_pri_tc_base_dwrr_cfg(hdev); in hclge_tm_pri_dwrr_cfg()
1198 if (!hnae3_dev_dcb_supported(hdev)) in hclge_tm_pri_dwrr_cfg()
1201 ret = hclge_tm_ets_tc_dwrr_cfg(hdev); in hclge_tm_pri_dwrr_cfg()
1203 dev_warn(&hdev->pdev->dev, in hclge_tm_pri_dwrr_cfg()
1205 hdev->fw_version); in hclge_tm_pri_dwrr_cfg()
1211 ret = hclge_tm_pri_vnet_base_dwrr_cfg(hdev); in hclge_tm_pri_dwrr_cfg()
1219 static int hclge_tm_map_cfg(struct hclge_dev *hdev) in hclge_tm_map_cfg() argument
1223 ret = hclge_up_to_tc_map(hdev); in hclge_tm_map_cfg()
1227 ret = hclge_tm_pg_to_pri_map(hdev); in hclge_tm_map_cfg()
1231 return hclge_tm_pri_q_qs_cfg(hdev); in hclge_tm_map_cfg()
1234 static int hclge_tm_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_shaper_cfg() argument
1238 ret = hclge_tm_port_shaper_cfg(hdev); in hclge_tm_shaper_cfg()
1242 ret = hclge_tm_pg_shaper_cfg(hdev); in hclge_tm_shaper_cfg()
1246 return hclge_tm_pri_shaper_cfg(hdev); in hclge_tm_shaper_cfg()
1249 int hclge_tm_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_dwrr_cfg() argument
1253 ret = hclge_tm_pg_dwrr_cfg(hdev); in hclge_tm_dwrr_cfg()
1257 return hclge_tm_pri_dwrr_cfg(hdev); in hclge_tm_dwrr_cfg()
1260 static int hclge_tm_lvl2_schd_mode_cfg(struct hclge_dev *hdev) in hclge_tm_lvl2_schd_mode_cfg() argument
1266 if (hdev->tx_sch_mode == HCLGE_FLAG_VNET_BASE_SCH_MODE) in hclge_tm_lvl2_schd_mode_cfg()
1269 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_lvl2_schd_mode_cfg()
1270 ret = hclge_tm_pg_schd_mode_cfg(hdev, i); in hclge_tm_lvl2_schd_mode_cfg()
1281 struct hclge_dev *hdev = vport->back; in hclge_tm_schd_mode_vnet_base_cfg() local
1288 ret = hclge_tm_pri_schd_mode_cfg(hdev, vport->vport_id); in hclge_tm_schd_mode_vnet_base_cfg()
1293 u8 sch_mode = hdev->tm_info.tc_info[i].tc_sch_mode; in hclge_tm_schd_mode_vnet_base_cfg()
1295 ret = hclge_tm_qs_schd_mode_cfg(hdev, vport->qs_offset + i, in hclge_tm_schd_mode_vnet_base_cfg()
1304 static int hclge_tm_lvl34_schd_mode_cfg(struct hclge_dev *hdev) in hclge_tm_lvl34_schd_mode_cfg() argument
1306 struct hclge_vport *vport = hdev->vport; in hclge_tm_lvl34_schd_mode_cfg()
1310 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_lvl34_schd_mode_cfg()
1311 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_lvl34_schd_mode_cfg()
1312 ret = hclge_tm_pri_schd_mode_cfg(hdev, i); in hclge_tm_lvl34_schd_mode_cfg()
1316 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_lvl34_schd_mode_cfg()
1318 hdev, vport[k].qs_offset + i, in hclge_tm_lvl34_schd_mode_cfg()
1325 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_lvl34_schd_mode_cfg()
1337 static int hclge_tm_schd_mode_hw(struct hclge_dev *hdev) in hclge_tm_schd_mode_hw() argument
1341 ret = hclge_tm_lvl2_schd_mode_cfg(hdev); in hclge_tm_schd_mode_hw()
1345 return hclge_tm_lvl34_schd_mode_cfg(hdev); in hclge_tm_schd_mode_hw()
1348 int hclge_tm_schd_setup_hw(struct hclge_dev *hdev) in hclge_tm_schd_setup_hw() argument
1353 ret = hclge_tm_map_cfg(hdev); in hclge_tm_schd_setup_hw()
1358 ret = hclge_tm_shaper_cfg(hdev); in hclge_tm_schd_setup_hw()
1363 ret = hclge_tm_dwrr_cfg(hdev); in hclge_tm_schd_setup_hw()
1368 return hclge_tm_schd_mode_hw(hdev); in hclge_tm_schd_setup_hw()
1371 static int hclge_pause_param_setup_hw(struct hclge_dev *hdev) in hclge_pause_param_setup_hw() argument
1373 struct hclge_mac *mac = &hdev->hw.mac; in hclge_pause_param_setup_hw()
1375 return hclge_pause_param_cfg(hdev, mac->mac_addr, in hclge_pause_param_setup_hw()
1380 static int hclge_pfc_setup_hw(struct hclge_dev *hdev) in hclge_pfc_setup_hw() argument
1384 if (hdev->tm_info.fc_mode == HCLGE_FC_PFC) in hclge_pfc_setup_hw()
1388 return hclge_pfc_pause_en_cfg(hdev, enable_bitmap, in hclge_pfc_setup_hw()
1389 hdev->tm_info.pfc_en); in hclge_pfc_setup_hw()
1395 static int hclge_bp_setup_hw(struct hclge_dev *hdev, u8 tc) in hclge_bp_setup_hw() argument
1402 if (hdev->num_tqps > HCLGE_TQP_MAX_SIZE_DEV_V2) { in hclge_bp_setup_hw()
1412 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_bp_setup_hw()
1413 struct hclge_vport *vport = &hdev->vport[k]; in hclge_bp_setup_hw()
1424 ret = hclge_tm_qs_bp_cfg(hdev, tc, i, qs_bitmap); in hclge_bp_setup_hw()
1432 int hclge_mac_pause_setup_hw(struct hclge_dev *hdev) in hclge_mac_pause_setup_hw() argument
1436 switch (hdev->tm_info.fc_mode) { in hclge_mac_pause_setup_hw()
1462 return hclge_mac_pause_en_cfg(hdev, tx_en, rx_en); in hclge_mac_pause_setup_hw()
1465 static int hclge_tm_bp_setup(struct hclge_dev *hdev) in hclge_tm_bp_setup() argument
1470 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_bp_setup()
1471 ret = hclge_bp_setup_hw(hdev, i); in hclge_tm_bp_setup()
1479 int hclge_pause_setup_hw(struct hclge_dev *hdev, bool init) in hclge_pause_setup_hw() argument
1483 ret = hclge_pause_param_setup_hw(hdev); in hclge_pause_setup_hw()
1487 ret = hclge_mac_pause_setup_hw(hdev); in hclge_pause_setup_hw()
1492 if (!hnae3_dev_dcb_supported(hdev)) in hclge_pause_setup_hw()
1499 ret = hclge_pfc_setup_hw(hdev); in hclge_pause_setup_hw()
1501 dev_warn(&hdev->pdev->dev, "GE MAC does not support pfc\n"); in hclge_pause_setup_hw()
1503 dev_err(&hdev->pdev->dev, "config pfc failed! ret = %d\n", in hclge_pause_setup_hw()
1508 return hclge_tm_bp_setup(hdev); in hclge_pause_setup_hw()
1511 void hclge_tm_prio_tc_info_update(struct hclge_dev *hdev, u8 *prio_tc) in hclge_tm_prio_tc_info_update() argument
1513 struct hclge_vport *vport = hdev->vport; in hclge_tm_prio_tc_info_update()
1518 hdev->tm_info.prio_tc[i] = prio_tc[i]; in hclge_tm_prio_tc_info_update()
1520 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_prio_tc_info_update()
1527 void hclge_tm_schd_info_update(struct hclge_dev *hdev, u8 num_tc) in hclge_tm_schd_info_update() argument
1532 hdev->tm_info.num_tc = num_tc; in hclge_tm_schd_info_update()
1534 for (i = 0; i < hdev->tm_info.num_tc; i++) in hclge_tm_schd_info_update()
1539 hdev->tm_info.num_tc = 1; in hclge_tm_schd_info_update()
1542 hdev->hw_tc_map = bit_map; in hclge_tm_schd_info_update()
1544 hclge_tm_schd_info_init(hdev); in hclge_tm_schd_info_update()
1547 int hclge_tm_init_hw(struct hclge_dev *hdev, bool init) in hclge_tm_init_hw() argument
1551 if ((hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) && in hclge_tm_init_hw()
1552 (hdev->tx_sch_mode != HCLGE_FLAG_VNET_BASE_SCH_MODE)) in hclge_tm_init_hw()
1555 ret = hclge_tm_schd_setup_hw(hdev); in hclge_tm_init_hw()
1559 ret = hclge_pause_setup_hw(hdev, init); in hclge_tm_init_hw()
1566 int hclge_tm_schd_init(struct hclge_dev *hdev) in hclge_tm_schd_init() argument
1569 hdev->tm_info.fc_mode = HCLGE_FC_FULL; in hclge_tm_schd_init()
1570 hdev->fc_mode_last_time = hdev->tm_info.fc_mode; in hclge_tm_schd_init()
1572 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE && in hclge_tm_schd_init()
1573 hdev->tm_info.num_pg != 1) in hclge_tm_schd_init()
1576 hclge_tm_schd_info_init(hdev); in hclge_tm_schd_init()
1578 return hclge_tm_init_hw(hdev, true); in hclge_tm_schd_init()
1581 int hclge_tm_vport_map_update(struct hclge_dev *hdev) in hclge_tm_vport_map_update() argument
1583 struct hclge_vport *vport = hdev->vport; in hclge_tm_vport_map_update()
1588 ret = hclge_vport_q_to_qs_map(hdev, vport); in hclge_tm_vport_map_update()
1592 if (hdev->tm_info.num_tc == 1 && !hdev->tm_info.pfc_en) in hclge_tm_vport_map_update()
1595 return hclge_tm_bp_setup(hdev); in hclge_tm_vport_map_update()
1598 int hclge_tm_get_qset_num(struct hclge_dev *hdev, u16 *qset_num) in hclge_tm_get_qset_num() argument
1604 if (hdev->ae_dev->dev_version <= HNAE3_DEVICE_VERSION_V2) { in hclge_tm_get_qset_num()
1606 *qset_num = HCLGE_TM_PF_MAX_QSET_NUM + pci_num_vf(hdev->pdev); in hclge_tm_get_qset_num()
1611 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_num()
1613 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_num()
1623 int hclge_tm_get_pri_num(struct hclge_dev *hdev, u8 *pri_num) in hclge_tm_get_pri_num() argument
1629 if (hdev->ae_dev->dev_version <= HNAE3_DEVICE_VERSION_V2) { in hclge_tm_get_pri_num()
1635 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pri_num()
1637 dev_err(&hdev->pdev->dev, in hclge_tm_get_pri_num()
1647 int hclge_tm_get_qset_map_pri(struct hclge_dev *hdev, u16 qset_id, u8 *priority, in hclge_tm_get_qset_map_pri() argument
1657 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_map_pri()
1659 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_map_pri()
1669 int hclge_tm_get_qset_sch_mode(struct hclge_dev *hdev, u16 qset_id, u8 *mode) in hclge_tm_get_qset_sch_mode() argument
1678 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_sch_mode()
1680 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_sch_mode()
1689 int hclge_tm_get_qset_weight(struct hclge_dev *hdev, u16 qset_id, u8 *weight) in hclge_tm_get_qset_weight() argument
1698 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_weight()
1700 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_weight()
1709 int hclge_tm_get_qset_shaper(struct hclge_dev *hdev, u16 qset_id, in hclge_tm_get_qset_shaper() argument
1720 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_qset_shaper()
1722 dev_err(&hdev->pdev->dev, in hclge_tm_get_qset_shaper()
1739 int hclge_tm_get_pri_sch_mode(struct hclge_dev *hdev, u8 pri_id, u8 *mode) in hclge_tm_get_pri_sch_mode() argument
1748 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pri_sch_mode()
1750 dev_err(&hdev->pdev->dev, in hclge_tm_get_pri_sch_mode()
1759 int hclge_tm_get_pri_weight(struct hclge_dev *hdev, u8 pri_id, u8 *weight) in hclge_tm_get_pri_weight() argument
1768 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pri_weight()
1770 dev_err(&hdev->pdev->dev, in hclge_tm_get_pri_weight()
1779 int hclge_tm_get_pri_shaper(struct hclge_dev *hdev, u8 pri_id, in hclge_tm_get_pri_shaper() argument
1795 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pri_shaper()
1797 dev_err(&hdev->pdev->dev, in hclge_tm_get_pri_shaper()
1814 int hclge_tm_get_q_to_qs_map(struct hclge_dev *hdev, u16 q_id, u16 *qset_id) in hclge_tm_get_q_to_qs_map() argument
1825 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_q_to_qs_map()
1827 dev_err(&hdev->pdev->dev, in hclge_tm_get_q_to_qs_map()
1852 int hclge_tm_get_q_to_tc(struct hclge_dev *hdev, u16 q_id, u8 *tc_id) in hclge_tm_get_q_to_tc() argument
1863 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_q_to_tc()
1865 dev_err(&hdev->pdev->dev, in hclge_tm_get_q_to_tc()
1874 int hclge_tm_get_pg_to_pri_map(struct hclge_dev *hdev, u8 pg_id, in hclge_tm_get_pg_to_pri_map() argument
1884 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pg_to_pri_map()
1886 dev_err(&hdev->pdev->dev, in hclge_tm_get_pg_to_pri_map()
1895 int hclge_tm_get_pg_weight(struct hclge_dev *hdev, u8 pg_id, u8 *weight) in hclge_tm_get_pg_weight() argument
1904 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pg_weight()
1906 dev_err(&hdev->pdev->dev, in hclge_tm_get_pg_weight()
1915 int hclge_tm_get_pg_sch_mode(struct hclge_dev *hdev, u8 pg_id, u8 *mode) in hclge_tm_get_pg_sch_mode() argument
1922 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pg_sch_mode()
1924 dev_err(&hdev->pdev->dev, in hclge_tm_get_pg_sch_mode()
1933 int hclge_tm_get_pg_shaper(struct hclge_dev *hdev, u8 pg_id, in hclge_tm_get_pg_shaper() argument
1949 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_pg_shaper()
1951 dev_err(&hdev->pdev->dev, in hclge_tm_get_pg_shaper()
1968 int hclge_tm_get_port_shaper(struct hclge_dev *hdev, in hclge_tm_get_port_shaper() argument
1977 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_get_port_shaper()
1979 dev_err(&hdev->pdev->dev, in hclge_tm_get_port_shaper()
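
Nearly every configuration helper in the listing follows the same shape: fill a command descriptor for one opcode, post it to firmware with hclge_cmd_send(&hdev->hw, &desc, 1), and return the status to the caller. The stand-alone sketch below models that flow; the types, names, and the opcode value (fake_desc, fake_hw, fake_cmd_send, fake_pri_weight_cfg, 0x0810) are simplified placeholders, not the driver's real definitions, which live in the hclge command headers.

/* Minimal stand-alone sketch of the descriptor pattern used by the
 * hclge TM helpers above. All types and the opcode are illustrative
 * placeholders; the real structures are kernel-internal. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct fake_desc {             /* stands in for the hclge command descriptor */
	uint16_t opcode;
	uint8_t  data[24];     /* command payload */
};

struct fake_hw { int dummy; }; /* stands in for the hw handle (&hdev->hw) */

/* stands in for hclge_cmd_send(): would post the descriptor to firmware */
static int fake_cmd_send(struct fake_hw *hw, struct fake_desc *desc, int num)
{
	(void)hw; (void)num;
	printf("send opcode 0x%04x\n", (unsigned)desc->opcode);
	return 0;              /* firmware status would be returned here */
}

/* mirrors the shape of a helper like hclge_tm_pri_weight_cfg(): one
 * setting in, one descriptor out, status code back to the caller */
static int fake_pri_weight_cfg(struct fake_hw *hw, uint8_t pri_id, uint8_t dwrr)
{
	struct fake_desc desc;

	memset(&desc, 0, sizeof(desc));
	desc.opcode  = 0x0810; /* illustrative opcode value only */
	desc.data[0] = pri_id;
	desc.data[1] = dwrr;

	return fake_cmd_send(hw, &desc, 1);
}

int main(void)
{
	struct fake_hw hw = { 0 };

	return fake_pri_weight_cfg(&hw, 0, 100 /* BW_PERCENT */);
}

Because each helper returns the firmware status directly, the aggregate setup paths visible above (hclge_tm_map_cfg, hclge_tm_shaper_cfg, hclge_tm_schd_setup_hw, hclge_pause_setup_hw) can simply chain the helpers with early returns on the first nonzero ret.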