Lines matching refs: hdev

115 static int hclge_pfc_stats_get(struct hclge_dev *hdev,  in hclge_pfc_stats_get()  argument
132 ret = hclge_cmd_send(&hdev->hw, desc, HCLGE_TM_PFC_PKT_GET_CMD_NUM); in hclge_pfc_stats_get()
151 int hclge_pfc_rx_stats_get(struct hclge_dev *hdev, u64 *stats) in hclge_pfc_rx_stats_get() argument
153 return hclge_pfc_stats_get(hdev, HCLGE_OPC_QUERY_PFC_RX_PKT_CNT, stats); in hclge_pfc_rx_stats_get()
156 int hclge_pfc_tx_stats_get(struct hclge_dev *hdev, u64 *stats) in hclge_pfc_tx_stats_get() argument
158 return hclge_pfc_stats_get(hdev, HCLGE_OPC_QUERY_PFC_TX_PKT_CNT, stats); in hclge_pfc_tx_stats_get()
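The two exported getters differ only in the firmware opcode they hand down to hclge_pfc_stats_get(). A minimal caller sketch, assuming the stats buffer holds one u64 counter per user priority (the count of 8 is an assumption, not visible in this listing):

	static int example_dump_pfc_counters(struct hclge_dev *hdev)
	{
		u64 rx_stats[8], tx_stats[8];	/* one slot per priority (assumed) */
		int ret;
		u8 i;

		ret = hclge_pfc_rx_stats_get(hdev, rx_stats);
		if (ret)
			return ret;

		ret = hclge_pfc_tx_stats_get(hdev, tx_stats);
		if (ret)
			return ret;

		for (i = 0; i < 8; i++)
			dev_info(&hdev->pdev->dev, "prio %u: rx %llu tx %llu\n",
				 i, rx_stats[i], tx_stats[i]);
		return 0;
	}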
161 int hclge_mac_pause_en_cfg(struct hclge_dev *hdev, bool tx, bool rx) in hclge_mac_pause_en_cfg() argument
170 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_mac_pause_en_cfg()
173 static int hclge_pfc_pause_en_cfg(struct hclge_dev *hdev, u8 tx_rx_bitmap, in hclge_pfc_pause_en_cfg() argument
184 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_pfc_pause_en_cfg()
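hclge_mac_pause_en_cfg() switches link-level 802.3x pause per direction, while hclge_pfc_pause_en_cfg() takes a TX/RX direction bitmap plus the per-priority PFC bits, as the call at line 1243 shows. A hedged sketch of the two call shapes (the direction mask value 0x3 is an assumption; the real bit definitions live in the driver headers):

	static int example_enable_flow_control(struct hclge_dev *hdev, bool use_pfc)
	{
		if (!use_pfc)
			/* plain 802.3x pause in both directions */
			return hclge_mac_pause_en_cfg(hdev, true, true);

		/* PFC: direction bitmap plus per-priority enable bits */
		return hclge_pfc_pause_en_cfg(hdev, 0x3 /* assumed TX|RX bits */,
					      hdev->tm_info.pfc_en);
	}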
187 static int hclge_pause_param_cfg(struct hclge_dev *hdev, const u8 *addr, in hclge_pause_param_cfg() argument
202 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_pause_param_cfg()
205 int hclge_pause_addr_cfg(struct hclge_dev *hdev, const u8 *mac_addr) in hclge_pause_addr_cfg() argument
217 ret = hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_pause_addr_cfg()
224 return hclge_pause_param_cfg(hdev, mac_addr, trans_gap, trans_time); in hclge_pause_addr_cfg()
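hclge_pause_addr_cfg() first queries the current pause parameters (the send at line 217) and then rewrites them through hclge_pause_param_cfg() with the new source MAC, preserving trans_gap and trans_time. A hypothetical call site, mirroring hclge_pause_param_setup_hw() further down in this listing:

	static int example_refresh_pause_mac(struct hclge_dev *hdev)
	{
		struct hclge_mac *mac = &hdev->hw.mac;

		/* keep pause frames' source address in sync with the port MAC */
		return hclge_pause_addr_cfg(hdev, mac->mac_addr);
	}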
227 static int hclge_fill_pri_array(struct hclge_dev *hdev, u8 *pri, u8 pri_id) in hclge_fill_pri_array() argument
231 tc = hdev->tm_info.prio_tc[pri_id]; in hclge_fill_pri_array()
233 if (tc >= hdev->tm_info.num_tc) in hclge_fill_pri_array()
250 static int hclge_up_to_tc_map(struct hclge_dev *hdev) in hclge_up_to_tc_map() argument
260 ret = hclge_fill_pri_array(hdev, pri, pri_id); in hclge_up_to_tc_map()
265 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_up_to_tc_map()
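hclge_fill_pri_array() packs one TC per user priority into the descriptor and rejects any priority whose TC is out of range (line 233). A standalone sketch of that validation step:

	static int example_check_prio_tc(const u8 *prio_tc, u8 num_prio, u8 num_tc)
	{
		u8 i;

		for (i = 0; i < num_prio; i++)
			if (prio_tc[i] >= num_tc)	/* cf. the check at line 233 */
				return -EINVAL;
		return 0;
	}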
268 static int hclge_tm_pg_to_pri_map_cfg(struct hclge_dev *hdev, in hclge_tm_pg_to_pri_map_cfg() argument
281 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_to_pri_map_cfg()
284 static int hclge_tm_qs_to_pri_map_cfg(struct hclge_dev *hdev, in hclge_tm_qs_to_pri_map_cfg() argument
298 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_to_pri_map_cfg()
301 static int hclge_tm_q_to_qs_map_cfg(struct hclge_dev *hdev, in hclge_tm_q_to_qs_map_cfg() argument
314 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_q_to_qs_map_cfg()
317 static int hclge_tm_pg_weight_cfg(struct hclge_dev *hdev, u8 pg_id, in hclge_tm_pg_weight_cfg() argument
330 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_weight_cfg()
333 static int hclge_tm_pri_weight_cfg(struct hclge_dev *hdev, u8 pri_id, in hclge_tm_pri_weight_cfg() argument
346 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pri_weight_cfg()
349 static int hclge_tm_qs_weight_cfg(struct hclge_dev *hdev, u16 qs_id, in hclge_tm_qs_weight_cfg() argument
362 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_weight_cfg()
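Three near-identical helpers program DWRR weights at the port-group, priority, and queue-set levels; each builds a single descriptor and sends it. A sketch walking all three levels with a flat weight, assuming the queue-set id equals the TC id for vport 0 in TC-based mode (the qs_offset arithmetic at lines 789 and 963 supports this, but it is an inference):

	static int example_flat_dwrr(struct hclge_dev *hdev)
	{
		int ret;
		u8 i;

		for (i = 0; i < hdev->tm_info.num_pg; i++) {
			ret = hclge_tm_pg_weight_cfg(hdev, i, BW_PERCENT);
			if (ret)
				return ret;
		}

		for (i = 0; i < hdev->tm_info.num_tc; i++) {
			ret = hclge_tm_pri_weight_cfg(hdev, i, BW_PERCENT);
			if (ret)
				return ret;

			ret = hclge_tm_qs_weight_cfg(hdev, i, BW_PERCENT);
			if (ret)
				return ret;
		}
		return 0;
	}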
379 static int hclge_tm_pg_shapping_cfg(struct hclge_dev *hdev, in hclge_tm_pg_shapping_cfg() argument
397 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_shapping_cfg()
400 static int hclge_tm_port_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_port_shaper_cfg() argument
408 ret = hclge_shaper_para_calc(hdev->hw.mac.speed, in hclge_tm_port_shaper_cfg()
423 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_port_shaper_cfg()
426 static int hclge_tm_pri_shapping_cfg(struct hclge_dev *hdev, in hclge_tm_pri_shapping_cfg() argument
445 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pri_shapping_cfg()
448 static int hclge_tm_pg_schd_mode_cfg(struct hclge_dev *hdev, u8 pg_id) in hclge_tm_pg_schd_mode_cfg() argument
454 if (hdev->tm_info.pg_info[pg_id].pg_sch_mode == HCLGE_SCH_MODE_DWRR) in hclge_tm_pg_schd_mode_cfg()
461 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pg_schd_mode_cfg()
464 static int hclge_tm_pri_schd_mode_cfg(struct hclge_dev *hdev, u8 pri_id) in hclge_tm_pri_schd_mode_cfg() argument
470 if (hdev->tm_info.tc_info[pri_id].tc_sch_mode == HCLGE_SCH_MODE_DWRR) in hclge_tm_pri_schd_mode_cfg()
477 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_pri_schd_mode_cfg()
480 static int hclge_tm_qs_schd_mode_cfg(struct hclge_dev *hdev, u16 qs_id, u8 mode) in hclge_tm_qs_schd_mode_cfg() argument
493 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_schd_mode_cfg()
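The PG and priority mode helpers read the entity's own pg_sch_mode/tc_sch_mode (lines 454 and 470), but hclge_tm_qs_schd_mode_cfg() takes the mode explicitly; the vnet path at lines 1148-1150 derives it from the owning TC. A sketch of that derivation:

	static int example_qs_mode_from_tc(struct hclge_dev *hdev,
					   u16 qs_offset, u8 tc)
	{
		u8 sch_mode = hdev->tm_info.tc_info[tc].tc_sch_mode;

		return hclge_tm_qs_schd_mode_cfg(hdev, qs_offset + tc, sch_mode);
	}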
496 static int hclge_tm_qs_bp_cfg(struct hclge_dev *hdev, u8 tc, u8 grp_id, in hclge_tm_qs_bp_cfg() argument
511 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_qs_bp_cfg()
517 struct hclge_dev *hdev = vport->back; in hclge_tm_vport_tc_info_update() local
525 min_t(u16, vport->alloc_tqps, hdev->tm_info.num_tc); in hclge_tm_vport_tc_info_update()
526 vport->qs_offset = (vport->vport_id ? hdev->tm_info.num_tc : 0) + in hclge_tm_vport_tc_info_update()
529 max_rss_size = min_t(u16, hdev->rss_size_max, in hclge_tm_vport_tc_info_update()
535 dev_info(&hdev->pdev->dev, "rss changes from %d to %d\n", in hclge_tm_vport_tc_info_update()
546 (hdev->num_nic_msi - 1) / in hclge_tm_vport_tc_info_update()
556 vport->bw_limit = hdev->tm_info.pg_info[0].bw_limit; in hclge_tm_vport_tc_info_update()
559 if (hdev->hw_tc_map & BIT(i) && i < kinfo->num_tc) { in hclge_tm_vport_tc_info_update()
573 memcpy(kinfo->prio_tc, hdev->tm_info.prio_tc, in hclge_tm_vport_tc_info_update()
577 static void hclge_tm_vport_info_update(struct hclge_dev *hdev) in hclge_tm_vport_info_update() argument
579 struct hclge_vport *vport = hdev->vport; in hclge_tm_vport_info_update()
582 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_vport_info_update()
589 static void hclge_tm_tc_info_init(struct hclge_dev *hdev) in hclge_tm_tc_info_init() argument
593 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_tc_info_init()
594 hdev->tm_info.tc_info[i].tc_id = i; in hclge_tm_tc_info_init()
595 hdev->tm_info.tc_info[i].tc_sch_mode = HCLGE_SCH_MODE_DWRR; in hclge_tm_tc_info_init()
596 hdev->tm_info.tc_info[i].pgid = 0; in hclge_tm_tc_info_init()
597 hdev->tm_info.tc_info[i].bw_limit = in hclge_tm_tc_info_init()
598 hdev->tm_info.pg_info[0].bw_limit; in hclge_tm_tc_info_init()
602 hdev->tm_info.prio_tc[i] = in hclge_tm_tc_info_init()
603 (i >= hdev->tm_info.num_tc) ? 0 : i; in hclge_tm_tc_info_init()
608 if (hdev->tm_info.num_tc > 1 || hdev->tm_info.pfc_en) in hclge_tm_tc_info_init()
609 hdev->flag |= HCLGE_FLAG_DCB_ENABLE; in hclge_tm_tc_info_init()
611 hdev->flag &= ~HCLGE_FLAG_DCB_ENABLE; in hclge_tm_tc_info_init()
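The init loop defaults every TC to DWRR under PG 0 and gives priorities an identity mapping, folding out-of-range priorities back to TC 0 (lines 602-603). A standalone sketch of that default map:

	static void example_default_prio_tc(u8 *prio_tc, u8 num_prio, u8 num_tc)
	{
		u8 i;

		for (i = 0; i < num_prio; i++)
			prio_tc[i] = (i >= num_tc) ? 0 : i;
	}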
614 static void hclge_tm_pg_info_init(struct hclge_dev *hdev) in hclge_tm_pg_info_init() argument
620 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_info_init()
623 hdev->tm_info.pg_dwrr[i] = i ? 0 : BW_PERCENT; in hclge_tm_pg_info_init()
625 hdev->tm_info.pg_info[i].pg_id = i; in hclge_tm_pg_info_init()
626 hdev->tm_info.pg_info[i].pg_sch_mode = HCLGE_SCH_MODE_DWRR; in hclge_tm_pg_info_init()
628 hdev->tm_info.pg_info[i].bw_limit = HCLGE_ETHER_MAX_RATE; in hclge_tm_pg_info_init()
633 hdev->tm_info.pg_info[i].tc_bit_map = hdev->hw_tc_map; in hclge_tm_pg_info_init()
634 for (k = 0; k < hdev->tm_info.num_tc; k++) in hclge_tm_pg_info_init()
635 hdev->tm_info.pg_info[i].tc_dwrr[k] = BW_PERCENT; in hclge_tm_pg_info_init()
639 static void hclge_pfc_info_init(struct hclge_dev *hdev) in hclge_pfc_info_init() argument
641 if (!(hdev->flag & HCLGE_FLAG_DCB_ENABLE)) { in hclge_pfc_info_init()
642 if (hdev->fc_mode_last_time == HCLGE_FC_PFC) in hclge_pfc_info_init()
643 dev_warn(&hdev->pdev->dev, in hclge_pfc_info_init()
646 hdev->tm_info.fc_mode = hdev->fc_mode_last_time; in hclge_pfc_info_init()
647 } else if (hdev->tm_info.fc_mode != HCLGE_FC_PFC) { in hclge_pfc_info_init()
652 hdev->fc_mode_last_time = hdev->tm_info.fc_mode; in hclge_pfc_info_init()
653 hdev->tm_info.fc_mode = HCLGE_FC_PFC; in hclge_pfc_info_init()
657 static void hclge_tm_schd_info_init(struct hclge_dev *hdev) in hclge_tm_schd_info_init() argument
659 hclge_tm_pg_info_init(hdev); in hclge_tm_schd_info_init()
661 hclge_tm_tc_info_init(hdev); in hclge_tm_schd_info_init()
663 hclge_tm_vport_info_update(hdev); in hclge_tm_schd_info_init()
665 hclge_pfc_info_init(hdev); in hclge_tm_schd_info_init()
668 static int hclge_tm_pg_to_pri_map(struct hclge_dev *hdev) in hclge_tm_pg_to_pri_map() argument
673 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pg_to_pri_map()
676 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_to_pri_map()
679 hdev, i, hdev->tm_info.pg_info[i].tc_bit_map); in hclge_tm_pg_to_pri_map()
687 static int hclge_tm_pg_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pg_shaper_cfg() argument
695 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pg_shaper_cfg()
699 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_shaper_cfg()
702 hdev->tm_info.pg_info[i].bw_limit, in hclge_tm_pg_shaper_cfg()
711 ret = hclge_tm_pg_shapping_cfg(hdev, in hclge_tm_pg_shaper_cfg()
720 ret = hclge_tm_pg_shapping_cfg(hdev, in hclge_tm_pg_shaper_cfg()
730 static int hclge_tm_pg_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pg_dwrr_cfg() argument
736 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) in hclge_tm_pg_dwrr_cfg()
740 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_pg_dwrr_cfg()
742 ret = hclge_tm_pg_weight_cfg(hdev, i, hdev->tm_info.pg_dwrr[i]); in hclge_tm_pg_dwrr_cfg()
750 static int hclge_vport_q_to_qs_map(struct hclge_dev *hdev, in hclge_vport_q_to_qs_map() argument
764 ret = hclge_tm_q_to_qs_map_cfg(hdev, in hclge_vport_q_to_qs_map()
775 static int hclge_tm_pri_q_qs_cfg(struct hclge_dev *hdev) in hclge_tm_pri_q_qs_cfg() argument
777 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_q_qs_cfg()
781 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_pri_q_qs_cfg()
783 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_pri_q_qs_cfg()
789 hdev, vport[k].qs_offset + i, i); in hclge_tm_pri_q_qs_cfg()
794 } else if (hdev->tx_sch_mode == HCLGE_FLAG_VNET_BASE_SCH_MODE) { in hclge_tm_pri_q_qs_cfg()
796 for (k = 0; k < hdev->num_alloc_vport; k++) in hclge_tm_pri_q_qs_cfg()
799 hdev, vport[k].qs_offset + i, k); in hclge_tm_pri_q_qs_cfg()
808 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_pri_q_qs_cfg()
809 ret = hclge_vport_q_to_qs_map(hdev, vport); in hclge_tm_pri_q_qs_cfg()
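The two branches at lines 781-799 implement different queue-set-to-priority layouts: TC-based mode maps vport k's queue set for TC i to priority i, so all vports share the TC priorities, while vnet-based mode maps every queue set of vport k to priority k, giving each vport its own priority. Reduced to its index arithmetic:

	/* qs (vport k, tc i) maps to priority i in TC-based mode, k otherwise */
	static u16 example_qs_to_pri(bool tc_based, u16 k, u16 i)
	{
		return tc_based ? i : k;
	}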
819 static int hclge_tm_pri_tc_base_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pri_tc_base_shaper_cfg() argument
826 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_pri_tc_base_shaper_cfg()
828 hdev->tm_info.tc_info[i].bw_limit, in hclge_tm_pri_tc_base_shaper_cfg()
837 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_C_BUCKET, i, in hclge_tm_pri_tc_base_shaper_cfg()
845 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_P_BUCKET, i, in hclge_tm_pri_tc_base_shaper_cfg()
856 struct hclge_dev *hdev = vport->back; in hclge_tm_pri_vnet_base_shaper_pri_cfg() local
869 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_C_BUCKET, in hclge_tm_pri_vnet_base_shaper_pri_cfg()
877 ret = hclge_tm_pri_shapping_cfg(hdev, HCLGE_TM_SHAP_P_BUCKET, in hclge_tm_pri_vnet_base_shaper_pri_cfg()
888 struct hclge_dev *hdev = vport->back; in hclge_tm_pri_vnet_base_shaper_qs_cfg() local
895 hdev->tm_info.tc_info[i].bw_limit, in hclge_tm_pri_vnet_base_shaper_qs_cfg()
905 static int hclge_tm_pri_vnet_base_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pri_vnet_base_shaper_cfg() argument
907 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_vnet_base_shaper_cfg()
912 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_pri_vnet_base_shaper_cfg()
927 static int hclge_tm_pri_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_pri_shaper_cfg() argument
931 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_pri_shaper_cfg()
932 ret = hclge_tm_pri_tc_base_shaper_cfg(hdev); in hclge_tm_pri_shaper_cfg()
936 ret = hclge_tm_pri_vnet_base_shaper_cfg(hdev); in hclge_tm_pri_shaper_cfg()
944 static int hclge_tm_pri_tc_base_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pri_tc_base_dwrr_cfg() argument
946 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_tc_base_dwrr_cfg()
952 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_pri_tc_base_dwrr_cfg()
954 &hdev->tm_info.pg_info[hdev->tm_info.tc_info[i].pgid]; in hclge_tm_pri_tc_base_dwrr_cfg()
957 ret = hclge_tm_pri_weight_cfg(hdev, i, dwrr); in hclge_tm_pri_tc_base_dwrr_cfg()
961 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_pri_tc_base_dwrr_cfg()
963 hdev, vport[k].qs_offset + i, in hclge_tm_pri_tc_base_dwrr_cfg()
973 static int hclge_tm_ets_tc_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_ets_tc_dwrr_cfg() argument
990 if (!(hdev->hw_tc_map & BIT(i))) in hclge_tm_ets_tc_dwrr_cfg()
994 &hdev->tm_info.pg_info[hdev->tm_info.tc_info[i].pgid]; in hclge_tm_ets_tc_dwrr_cfg()
1000 return hclge_cmd_send(&hdev->hw, &desc, 1); in hclge_tm_ets_tc_dwrr_cfg()
1006 struct hclge_dev *hdev = vport->back; in hclge_tm_pri_vnet_base_dwrr_pri_cfg() local
1011 ret = hclge_tm_pri_weight_cfg(hdev, vport->vport_id, vport->dwrr); in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1018 hdev, vport->qs_offset + i, in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1019 hdev->tm_info.pg_info[0].tc_dwrr[i]); in hclge_tm_pri_vnet_base_dwrr_pri_cfg()
1027 static int hclge_tm_pri_vnet_base_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pri_vnet_base_dwrr_cfg() argument
1029 struct hclge_vport *vport = hdev->vport; in hclge_tm_pri_vnet_base_dwrr_cfg()
1033 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_pri_vnet_base_dwrr_cfg()
1044 static int hclge_tm_pri_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_pri_dwrr_cfg() argument
1048 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_pri_dwrr_cfg()
1049 ret = hclge_tm_pri_tc_base_dwrr_cfg(hdev); in hclge_tm_pri_dwrr_cfg()
1053 if (!hnae3_dev_dcb_supported(hdev)) in hclge_tm_pri_dwrr_cfg()
1056 ret = hclge_tm_ets_tc_dwrr_cfg(hdev); in hclge_tm_pri_dwrr_cfg()
1058 dev_warn(&hdev->pdev->dev, in hclge_tm_pri_dwrr_cfg()
1060 hdev->fw_version); in hclge_tm_pri_dwrr_cfg()
1066 ret = hclge_tm_pri_vnet_base_dwrr_cfg(hdev); in hclge_tm_pri_dwrr_cfg()
1074 static int hclge_tm_map_cfg(struct hclge_dev *hdev) in hclge_tm_map_cfg() argument
1078 ret = hclge_up_to_tc_map(hdev); in hclge_tm_map_cfg()
1082 ret = hclge_tm_pg_to_pri_map(hdev); in hclge_tm_map_cfg()
1086 return hclge_tm_pri_q_qs_cfg(hdev); in hclge_tm_map_cfg()
1089 static int hclge_tm_shaper_cfg(struct hclge_dev *hdev) in hclge_tm_shaper_cfg() argument
1093 ret = hclge_tm_port_shaper_cfg(hdev); in hclge_tm_shaper_cfg()
1097 ret = hclge_tm_pg_shaper_cfg(hdev); in hclge_tm_shaper_cfg()
1101 return hclge_tm_pri_shaper_cfg(hdev); in hclge_tm_shaper_cfg()
1104 int hclge_tm_dwrr_cfg(struct hclge_dev *hdev) in hclge_tm_dwrr_cfg() argument
1108 ret = hclge_tm_pg_dwrr_cfg(hdev); in hclge_tm_dwrr_cfg()
1112 return hclge_tm_pri_dwrr_cfg(hdev); in hclge_tm_dwrr_cfg()
1115 static int hclge_tm_lvl2_schd_mode_cfg(struct hclge_dev *hdev) in hclge_tm_lvl2_schd_mode_cfg() argument
1121 if (hdev->tx_sch_mode == HCLGE_FLAG_VNET_BASE_SCH_MODE) in hclge_tm_lvl2_schd_mode_cfg()
1124 for (i = 0; i < hdev->tm_info.num_pg; i++) { in hclge_tm_lvl2_schd_mode_cfg()
1125 ret = hclge_tm_pg_schd_mode_cfg(hdev, i); in hclge_tm_lvl2_schd_mode_cfg()
1136 struct hclge_dev *hdev = vport->back; in hclge_tm_schd_mode_vnet_base_cfg() local
1143 ret = hclge_tm_pri_schd_mode_cfg(hdev, vport->vport_id); in hclge_tm_schd_mode_vnet_base_cfg()
1148 u8 sch_mode = hdev->tm_info.tc_info[i].tc_sch_mode; in hclge_tm_schd_mode_vnet_base_cfg()
1150 ret = hclge_tm_qs_schd_mode_cfg(hdev, vport->qs_offset + i, in hclge_tm_schd_mode_vnet_base_cfg()
1159 static int hclge_tm_lvl34_schd_mode_cfg(struct hclge_dev *hdev) in hclge_tm_lvl34_schd_mode_cfg() argument
1161 struct hclge_vport *vport = hdev->vport; in hclge_tm_lvl34_schd_mode_cfg()
1165 if (hdev->tx_sch_mode == HCLGE_FLAG_TC_BASE_SCH_MODE) { in hclge_tm_lvl34_schd_mode_cfg()
1166 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_lvl34_schd_mode_cfg()
1167 ret = hclge_tm_pri_schd_mode_cfg(hdev, i); in hclge_tm_lvl34_schd_mode_cfg()
1171 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_lvl34_schd_mode_cfg()
1173 hdev, vport[k].qs_offset + i, in hclge_tm_lvl34_schd_mode_cfg()
1180 for (i = 0; i < hdev->num_alloc_vport; i++) { in hclge_tm_lvl34_schd_mode_cfg()
1192 static int hclge_tm_schd_mode_hw(struct hclge_dev *hdev) in hclge_tm_schd_mode_hw() argument
1196 ret = hclge_tm_lvl2_schd_mode_cfg(hdev); in hclge_tm_schd_mode_hw()
1200 return hclge_tm_lvl34_schd_mode_cfg(hdev); in hclge_tm_schd_mode_hw()
1203 int hclge_tm_schd_setup_hw(struct hclge_dev *hdev) in hclge_tm_schd_setup_hw() argument
1208 ret = hclge_tm_map_cfg(hdev); in hclge_tm_schd_setup_hw()
1213 ret = hclge_tm_shaper_cfg(hdev); in hclge_tm_schd_setup_hw()
1218 ret = hclge_tm_dwrr_cfg(hdev); in hclge_tm_schd_setup_hw()
1223 return hclge_tm_schd_mode_hw(hdev); in hclge_tm_schd_setup_hw()
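hclge_tm_schd_setup_hw() fixes the ordering map -> shaper -> DWRR -> scheduling mode, and hclge_tm_init_hw() (line 1416) runs it before pause setup. A hypothetical reconfiguration path mirroring that sequence:

	static int example_apply_tm(struct hclge_dev *hdev, bool init)
	{
		int ret;

		ret = hclge_tm_schd_setup_hw(hdev);
		if (ret)
			return ret;

		return hclge_pause_setup_hw(hdev, init);
	}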
1226 static int hclge_pause_param_setup_hw(struct hclge_dev *hdev) in hclge_pause_param_setup_hw() argument
1228 struct hclge_mac *mac = &hdev->hw.mac; in hclge_pause_param_setup_hw()
1230 return hclge_pause_param_cfg(hdev, mac->mac_addr, in hclge_pause_param_setup_hw()
1235 static int hclge_pfc_setup_hw(struct hclge_dev *hdev) in hclge_pfc_setup_hw() argument
1239 if (hdev->tm_info.fc_mode == HCLGE_FC_PFC) in hclge_pfc_setup_hw()
1243 return hclge_pfc_pause_en_cfg(hdev, enable_bitmap, in hclge_pfc_setup_hw()
1244 hdev->tm_info.pfc_en); in hclge_pfc_setup_hw()
1251 static int hclge_bp_setup_hw(struct hclge_dev *hdev, u8 tc) in hclge_bp_setup_hw() argument
1259 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_bp_setup_hw()
1260 struct hclge_vport *vport = &hdev->vport[k]; in hclge_bp_setup_hw()
1272 ret = hclge_tm_qs_bp_cfg(hdev, tc, i, qs_bitmap); in hclge_bp_setup_hw()
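hclge_bp_setup_hw() walks every vport, raises the queue-set bits belonging to the given TC, and programs one group at a time through hclge_tm_qs_bp_cfg(). A sketch of the index split, assuming 32 queue sets per backpressure group (the group width is not visible in this listing):

	static void example_qs_bp_bit(u16 qs_id, u8 *grp_id, u32 *bit)
	{
		*grp_id = qs_id / 32;		/* assumed group width */
		*bit = BIT(qs_id % 32);
	}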
1280 static int hclge_mac_pause_setup_hw(struct hclge_dev *hdev) in hclge_mac_pause_setup_hw() argument
1284 switch (hdev->tm_info.fc_mode) { in hclge_mac_pause_setup_hw()
1310 return hclge_mac_pause_en_cfg(hdev, tx_en, rx_en); in hclge_mac_pause_setup_hw()
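The switch at line 1284 reduces tm_info.fc_mode to a (tx_en, rx_en) pair for hclge_mac_pause_en_cfg(). A sketch of the assumed mapping; the HCLGE_FC_RX_PAUSE/TX_PAUSE names do not appear in this listing and are assumptions based on the modes that do (HCLGE_FC_FULL, HCLGE_FC_PFC):

	static void example_fc_mode_to_pause(u8 fc_mode, bool *tx_en, bool *rx_en)
	{
		switch (fc_mode) {
		case HCLGE_FC_RX_PAUSE:		/* assumed name */
			*tx_en = false;
			*rx_en = true;
			break;
		case HCLGE_FC_TX_PAUSE:		/* assumed name */
			*tx_en = true;
			*rx_en = false;
			break;
		case HCLGE_FC_FULL:
			*tx_en = true;
			*rx_en = true;
			break;
		case HCLGE_FC_PFC:		/* link pause off; PFC takes over */
		default:
			*tx_en = false;
			*rx_en = false;
			break;
		}
	}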
1313 static int hclge_tm_bp_setup(struct hclge_dev *hdev) in hclge_tm_bp_setup() argument
1318 for (i = 0; i < hdev->tm_info.num_tc; i++) { in hclge_tm_bp_setup()
1319 ret = hclge_bp_setup_hw(hdev, i); in hclge_tm_bp_setup()
1327 int hclge_pause_setup_hw(struct hclge_dev *hdev, bool init) in hclge_pause_setup_hw() argument
1331 ret = hclge_pause_param_setup_hw(hdev); in hclge_pause_setup_hw()
1335 ret = hclge_mac_pause_setup_hw(hdev); in hclge_pause_setup_hw()
1340 if (!hnae3_dev_dcb_supported(hdev)) in hclge_pause_setup_hw()
1347 ret = hclge_pfc_setup_hw(hdev); in hclge_pause_setup_hw()
1349 dev_warn(&hdev->pdev->dev, "GE MAC does not support pfc\n"); in hclge_pause_setup_hw()
1351 dev_err(&hdev->pdev->dev, "config pfc failed! ret = %d\n", in hclge_pause_setup_hw()
1356 return hclge_tm_bp_setup(hdev); in hclge_pause_setup_hw()
1359 void hclge_tm_prio_tc_info_update(struct hclge_dev *hdev, u8 *prio_tc) in hclge_tm_prio_tc_info_update() argument
1361 struct hclge_vport *vport = hdev->vport; in hclge_tm_prio_tc_info_update()
1366 hdev->tm_info.prio_tc[i] = prio_tc[i]; in hclge_tm_prio_tc_info_update()
1368 for (k = 0; k < hdev->num_alloc_vport; k++) { in hclge_tm_prio_tc_info_update()
1375 void hclge_tm_schd_info_update(struct hclge_dev *hdev, u8 num_tc) in hclge_tm_schd_info_update() argument
1380 hdev->tm_info.num_tc = num_tc; in hclge_tm_schd_info_update()
1382 for (i = 0; i < hdev->tm_info.num_tc; i++) in hclge_tm_schd_info_update()
1387 hdev->tm_info.num_tc = 1; in hclge_tm_schd_info_update()
1390 hdev->hw_tc_map = bit_map; in hclge_tm_schd_info_update()
1392 hclge_tm_schd_info_init(hdev); in hclge_tm_schd_info_update()
1395 void hclge_tm_pfc_info_update(struct hclge_dev *hdev) in hclge_tm_pfc_info_update() argument
1400 if (hdev->tm_info.num_tc > 1 || hdev->tm_info.pfc_en) in hclge_tm_pfc_info_update()
1401 hdev->flag |= HCLGE_FLAG_DCB_ENABLE; in hclge_tm_pfc_info_update()
1403 hdev->flag &= ~HCLGE_FLAG_DCB_ENABLE; in hclge_tm_pfc_info_update()
1405 hclge_pfc_info_init(hdev); in hclge_tm_pfc_info_update()
1408 int hclge_tm_init_hw(struct hclge_dev *hdev, bool init) in hclge_tm_init_hw() argument
1412 if ((hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE) && in hclge_tm_init_hw()
1413 (hdev->tx_sch_mode != HCLGE_FLAG_VNET_BASE_SCH_MODE)) in hclge_tm_init_hw()
1416 ret = hclge_tm_schd_setup_hw(hdev); in hclge_tm_init_hw()
1420 ret = hclge_pause_setup_hw(hdev, init); in hclge_tm_init_hw()
1427 int hclge_tm_schd_init(struct hclge_dev *hdev) in hclge_tm_schd_init() argument
1430 hdev->tm_info.fc_mode = HCLGE_FC_FULL; in hclge_tm_schd_init()
1431 hdev->fc_mode_last_time = hdev->tm_info.fc_mode; in hclge_tm_schd_init()
1433 if (hdev->tx_sch_mode != HCLGE_FLAG_TC_BASE_SCH_MODE && in hclge_tm_schd_init()
1434 hdev->tm_info.num_pg != 1) in hclge_tm_schd_init()
1437 hclge_tm_schd_info_init(hdev); in hclge_tm_schd_init()
1439 return hclge_tm_init_hw(hdev, true); in hclge_tm_schd_init()
1442 int hclge_tm_vport_map_update(struct hclge_dev *hdev) in hclge_tm_vport_map_update() argument
1444 struct hclge_vport *vport = hdev->vport; in hclge_tm_vport_map_update()
1449 ret = hclge_vport_q_to_qs_map(hdev, vport); in hclge_tm_vport_map_update()
1453 if (!(hdev->flag & HCLGE_FLAG_DCB_ENABLE)) in hclge_tm_vport_map_update()
1456 return hclge_tm_bp_setup(hdev); in hclge_tm_vport_map_update()