Lines matching full:gfx (all hits fall in the amdgpu GFX v9 driver, gfx_v9_0.c)

46 #include "ivsrcid/gfx/irqsrcs_gfx_9_0.h"
888 adev->gfx.kiq.pmf = &gfx_v9_0_kiq_pm4_funcs; in gfx_v9_0_set_kiq_pm4_funcs()
1078 release_firmware(adev->gfx.pfp_fw); in gfx_v9_0_free_microcode()
1079 adev->gfx.pfp_fw = NULL; in gfx_v9_0_free_microcode()
1080 release_firmware(adev->gfx.me_fw); in gfx_v9_0_free_microcode()
1081 adev->gfx.me_fw = NULL; in gfx_v9_0_free_microcode()
1082 release_firmware(adev->gfx.ce_fw); in gfx_v9_0_free_microcode()
1083 adev->gfx.ce_fw = NULL; in gfx_v9_0_free_microcode()
1084 release_firmware(adev->gfx.rlc_fw); in gfx_v9_0_free_microcode()
1085 adev->gfx.rlc_fw = NULL; in gfx_v9_0_free_microcode()
1086 release_firmware(adev->gfx.mec_fw); in gfx_v9_0_free_microcode()
1087 adev->gfx.mec_fw = NULL; in gfx_v9_0_free_microcode()
1088 release_firmware(adev->gfx.mec2_fw); in gfx_v9_0_free_microcode()
1089 adev->gfx.mec2_fw = NULL; in gfx_v9_0_free_microcode()
1091 kfree(adev->gfx.rlc.register_list_format); in gfx_v9_0_free_microcode()
1096 adev->gfx.me_fw_write_wait = false; in gfx_v9_0_check_fw_write_wait()
1097 adev->gfx.mec_fw_write_wait = false; in gfx_v9_0_check_fw_write_wait()
1100 ((adev->gfx.mec_fw_version < 0x000001a5) || in gfx_v9_0_check_fw_write_wait()
1101 (adev->gfx.mec_feature_version < 46) || in gfx_v9_0_check_fw_write_wait()
1102 (adev->gfx.pfp_fw_version < 0x000000b7) || in gfx_v9_0_check_fw_write_wait()
1103 (adev->gfx.pfp_feature_version < 46))) in gfx_v9_0_check_fw_write_wait()
1108 if ((adev->gfx.me_fw_version >= 0x0000009c) && in gfx_v9_0_check_fw_write_wait()
1109 (adev->gfx.me_feature_version >= 42) && in gfx_v9_0_check_fw_write_wait()
1110 (adev->gfx.pfp_fw_version >= 0x000000b1) && in gfx_v9_0_check_fw_write_wait()
1111 (adev->gfx.pfp_feature_version >= 42)) in gfx_v9_0_check_fw_write_wait()
1112 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1114 if ((adev->gfx.mec_fw_version >= 0x00000193) && in gfx_v9_0_check_fw_write_wait()
1115 (adev->gfx.mec_feature_version >= 42)) in gfx_v9_0_check_fw_write_wait()
1116 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1119 if ((adev->gfx.me_fw_version >= 0x0000009c) && in gfx_v9_0_check_fw_write_wait()
1120 (adev->gfx.me_feature_version >= 44) && in gfx_v9_0_check_fw_write_wait()
1121 (adev->gfx.pfp_fw_version >= 0x000000b2) && in gfx_v9_0_check_fw_write_wait()
1122 (adev->gfx.pfp_feature_version >= 44)) in gfx_v9_0_check_fw_write_wait()
1123 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1125 if ((adev->gfx.mec_fw_version >= 0x00000196) && in gfx_v9_0_check_fw_write_wait()
1126 (adev->gfx.mec_feature_version >= 44)) in gfx_v9_0_check_fw_write_wait()
1127 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1130 if ((adev->gfx.me_fw_version >= 0x0000009c) && in gfx_v9_0_check_fw_write_wait()
1131 (adev->gfx.me_feature_version >= 44) && in gfx_v9_0_check_fw_write_wait()
1132 (adev->gfx.pfp_fw_version >= 0x000000b2) && in gfx_v9_0_check_fw_write_wait()
1133 (adev->gfx.pfp_feature_version >= 44)) in gfx_v9_0_check_fw_write_wait()
1134 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1136 if ((adev->gfx.mec_fw_version >= 0x00000197) && in gfx_v9_0_check_fw_write_wait()
1137 (adev->gfx.mec_feature_version >= 44)) in gfx_v9_0_check_fw_write_wait()
1138 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1142 if ((adev->gfx.me_fw_version >= 0x0000009c) && in gfx_v9_0_check_fw_write_wait()
1143 (adev->gfx.me_feature_version >= 42) && in gfx_v9_0_check_fw_write_wait()
1144 (adev->gfx.pfp_fw_version >= 0x000000b1) && in gfx_v9_0_check_fw_write_wait()
1145 (adev->gfx.pfp_feature_version >= 42)) in gfx_v9_0_check_fw_write_wait()
1146 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1148 if ((adev->gfx.mec_fw_version >= 0x00000192) && in gfx_v9_0_check_fw_write_wait()
1149 (adev->gfx.mec_feature_version >= 42)) in gfx_v9_0_check_fw_write_wait()
1150 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1153 adev->gfx.me_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
1154 adev->gfx.mec_fw_write_wait = true; in gfx_v9_0_check_fw_write_wait()
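
The gfx_v9_0_check_fw_write_wait() hits above all follow the same pattern: the loaded ME/PFP/MEC firmware and feature versions are compared against per-ASIC minimums before the faster write-wait packet path is enabled. Below is a minimal standalone C sketch of that gating for one of the version buckets visible in the matches (0x0000009c/42 for ME, 0x000000b1/42 for PFP, 0x00000193/42 for MEC); the struct and helper names are illustrative, not the driver's types.

    #include <stdbool.h>
    #include <stdio.h>

    /* Illustrative stand-in for the firmware version fields kept in adev->gfx. */
    struct gfx_fw_versions {
        unsigned int me_fw_version, me_feature_version;
        unsigned int pfp_fw_version, pfp_feature_version;
        unsigned int mec_fw_version, mec_feature_version;
    };

    /* ME write-wait is only enabled when both ME and PFP meet their minimums. */
    static bool me_fw_write_wait(const struct gfx_fw_versions *fw)
    {
        return fw->me_fw_version  >= 0x0000009c && fw->me_feature_version  >= 42 &&
               fw->pfp_fw_version >= 0x000000b1 && fw->pfp_feature_version >= 42;
    }

    /* MEC write-wait only depends on the MEC firmware itself. */
    static bool mec_fw_write_wait(const struct gfx_fw_versions *fw)
    {
        return fw->mec_fw_version >= 0x00000193 && fw->mec_feature_version >= 42;
    }

    int main(void)
    {
        struct gfx_fw_versions fw = {
            .me_fw_version  = 0x9c,  .me_feature_version  = 42,
            .pfp_fw_version = 0xb1,  .pfp_feature_version = 42,
            .mec_fw_version = 0x193, .mec_feature_version = 42,
        };

        printf("me_fw_write_wait=%d mec_fw_write_wait=%d\n",
               me_fw_write_wait(&fw), mec_fw_write_wait(&fw));
        return 0;
    }
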
1207 (adev->gfx.me_fw_version >= 0x000000a5) && in check_if_enlarge_doorbell_range()
1208 (adev->gfx.me_feature_version >= 52)) in check_if_enlarge_doorbell_range()
1229 adev->gfx.rlc_fw_version < 531) || in gfx_v9_0_check_if_need_gfxoff()
1230 (adev->gfx.rlc_feature_version < 1) || in gfx_v9_0_check_if_need_gfxoff()
1231 !adev->gfx.rlc.is_rlc_v2_1)) in gfx_v9_0_check_if_need_gfxoff()
1257 err = request_firmware(&adev->gfx.pfp_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_gfx_microcode()
1260 err = amdgpu_ucode_validate(adev->gfx.pfp_fw); in gfx_v9_0_init_cp_gfx_microcode()
1266 err = request_firmware(&adev->gfx.me_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_gfx_microcode()
1269 err = amdgpu_ucode_validate(adev->gfx.me_fw); in gfx_v9_0_init_cp_gfx_microcode()
1275 err = request_firmware(&adev->gfx.ce_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_gfx_microcode()
1278 err = amdgpu_ucode_validate(adev->gfx.ce_fw); in gfx_v9_0_init_cp_gfx_microcode()
1288 release_firmware(adev->gfx.pfp_fw); in gfx_v9_0_init_cp_gfx_microcode()
1289 adev->gfx.pfp_fw = NULL; in gfx_v9_0_init_cp_gfx_microcode()
1290 release_firmware(adev->gfx.me_fw); in gfx_v9_0_init_cp_gfx_microcode()
1291 adev->gfx.me_fw = NULL; in gfx_v9_0_init_cp_gfx_microcode()
1292 release_firmware(adev->gfx.ce_fw); in gfx_v9_0_init_cp_gfx_microcode()
1293 adev->gfx.ce_fw = NULL; in gfx_v9_0_init_cp_gfx_microcode()
1328 err = request_firmware(&adev->gfx.rlc_fw, fw_name, adev->dev); in gfx_v9_0_init_rlc_microcode()
1331 err = amdgpu_ucode_validate(adev->gfx.rlc_fw); in gfx_v9_0_init_rlc_microcode()
1334 rlc_hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data; in gfx_v9_0_init_rlc_microcode()
1344 release_firmware(adev->gfx.rlc_fw); in gfx_v9_0_init_rlc_microcode()
1345 adev->gfx.rlc_fw = NULL; in gfx_v9_0_init_rlc_microcode()
1371 err = request_firmware(&adev->gfx.mec_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_compute_microcode()
1374 err = amdgpu_ucode_validate(adev->gfx.mec_fw); in gfx_v9_0_init_cp_compute_microcode()
1386 err = request_firmware(&adev->gfx.mec2_fw, fw_name, adev->dev); in gfx_v9_0_init_cp_compute_microcode()
1388 err = amdgpu_ucode_validate(adev->gfx.mec2_fw); in gfx_v9_0_init_cp_compute_microcode()
1395 adev->gfx.mec2_fw = NULL; in gfx_v9_0_init_cp_compute_microcode()
1398 adev->gfx.mec2_fw_version = adev->gfx.mec_fw_version; in gfx_v9_0_init_cp_compute_microcode()
1399 adev->gfx.mec2_feature_version = adev->gfx.mec_feature_version; in gfx_v9_0_init_cp_compute_microcode()
1409 release_firmware(adev->gfx.mec_fw); in gfx_v9_0_init_cp_compute_microcode()
1410 adev->gfx.mec_fw = NULL; in gfx_v9_0_init_cp_compute_microcode()
1411 release_firmware(adev->gfx.mec2_fw); in gfx_v9_0_init_cp_compute_microcode()
1412 adev->gfx.mec2_fw = NULL; in gfx_v9_0_init_cp_compute_microcode()
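
The microcode-init hits (gfx_v9_0_init_cp_gfx_microcode, gfx_v9_0_init_rlc_microcode, gfx_v9_0_init_cp_compute_microcode) share one error-handling discipline: request_firmware(), then amdgpu_ucode_validate(), and on any failure release every blob loaded so far and clear its pointer, mirroring what the gfx_v9_0_free_microcode() hits do at teardown. A standalone sketch of that acquire/validate/cleanup shape; fake_request(), fake_validate() and fake_release() are made-up stand-ins, not kernel APIs.

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Illustrative stand-in for struct firmware; not the kernel type. */
    struct fw_blob { size_t size; };

    /* Mock of request_firmware(): allocate a blob and pretend it was loaded. */
    static int fake_request(struct fw_blob **out, const char *name)
    {
        *out = calloc(1, sizeof(**out));
        if (!*out)
            return -1;
        (*out)->size = strlen(name);
        return 0;
    }

    /* Mock of amdgpu_ucode_validate(): a trivial sanity check. */
    static int fake_validate(const struct fw_blob *fw)
    {
        return (fw && fw->size) ? 0 : -1;
    }

    /* Mock of release_firmware() plus the pointer-clearing the driver does. */
    static void fake_release(struct fw_blob **fw)
    {
        free(*fw);
        *fw = NULL;
    }

    int main(void)
    {
        struct fw_blob *pfp_fw = NULL, *me_fw = NULL;
        int err;

        err = fake_request(&pfp_fw, "gfx9_pfp.bin");
        if (!err)
            err = fake_validate(pfp_fw);
        if (!err)
            err = fake_request(&me_fw, "gfx9_me.bin");
        if (!err)
            err = fake_validate(me_fw);

        if (err) {
            /* error path: drop everything loaded so far and clear the pointers */
            fake_release(&pfp_fw);
            fake_release(&me_fw);
        }
        printf("microcode load %s\n", err ? "failed" : "succeeded");

        /* normal teardown, as in gfx_v9_0_free_microcode() */
        fake_release(&pfp_fw);
        fake_release(&me_fw);
        return err ? 1 : 0;
    }
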
1460 if (adev->gfx.num_gfx_rings) { in gfx_v9_0_init_microcode()
1512 if (adev->gfx.rlc.cs_data == NULL) in gfx_v9_0_get_csb_buffer()
1524 for (sect = adev->gfx.rlc.cs_data; sect->section != NULL; ++sect) { in gfx_v9_0_get_csb_buffer()
1548 struct amdgpu_cu_info *cu_info = &adev->gfx.cu_info; in gfx_v9_0_init_always_on_cu_mask()
1562 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_init_always_on_cu_mask()
1563 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_init_always_on_cu_mask()
1569 for (k = 0; k < adev->gfx.config.max_cu_per_sh; k ++) { in gfx_v9_0_init_always_on_cu_mask()
1705 reg_access_ctrl = &adev->gfx.rlc.reg_access_ctrl; in gfx_v9_0_init_rlcg_reg_access_ctrl()
1713 adev->gfx.rlc.rlcg_reg_access_supported = true; in gfx_v9_0_init_rlcg_reg_access_ctrl()
1721 adev->gfx.rlc.cs_data = gfx9_cs_data; in gfx_v9_0_rlc_init()
1723 cs_data = adev->gfx.rlc.cs_data; in gfx_v9_0_rlc_init()
1734 adev->gfx.rlc.cp_table_size = ALIGN(96 * 5 * 4, 2048) + (64 * 1024); /* JT + GDS */ in gfx_v9_0_rlc_init()
1753 if (adev->gfx.rlc.funcs->update_spm_vmid) in gfx_v9_0_rlc_init()
1754 adev->gfx.rlc.funcs->update_spm_vmid(adev, 0xf); in gfx_v9_0_rlc_init()
1761 amdgpu_bo_free_kernel(&adev->gfx.mec.hpd_eop_obj, NULL, NULL); in gfx_v9_0_mec_fini()
1762 amdgpu_bo_free_kernel(&adev->gfx.mec.mec_fw_obj, NULL, NULL); in gfx_v9_0_mec_fini()
1776 bitmap_zero(adev->gfx.mec.queue_bitmap, AMDGPU_MAX_COMPUTE_QUEUES); in gfx_v9_0_mec_init()
1780 mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE; in gfx_v9_0_mec_init()
1784 &adev->gfx.mec.hpd_eop_obj, in gfx_v9_0_mec_init()
1785 &adev->gfx.mec.hpd_eop_gpu_addr, in gfx_v9_0_mec_init()
1795 amdgpu_bo_kunmap(adev->gfx.mec.hpd_eop_obj); in gfx_v9_0_mec_init()
1796 amdgpu_bo_unreserve(adev->gfx.mec.hpd_eop_obj); in gfx_v9_0_mec_init()
1799 mec_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v9_0_mec_init()
1802 (adev->gfx.mec_fw->data + in gfx_v9_0_mec_init()
1808 &adev->gfx.mec.mec_fw_obj, in gfx_v9_0_mec_init()
1809 &adev->gfx.mec.mec_fw_gpu_addr, in gfx_v9_0_mec_init()
1819 amdgpu_bo_kunmap(adev->gfx.mec.mec_fw_obj); in gfx_v9_0_mec_init()
1820 amdgpu_bo_unreserve(adev->gfx.mec.mec_fw_obj); in gfx_v9_0_mec_init()
1922 adev->gfx.funcs = &gfx_v9_0_gfx_funcs; in gfx_v9_0_gpu_early_init()
1926 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1927 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1928 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1929 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1930 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1934 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1935 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1936 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1937 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1938 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1940 DRM_INFO("fix gfx.config for vega12\n"); in gfx_v9_0_gpu_early_init()
1943 adev->gfx.ras = &gfx_v9_0_ras; in gfx_v9_0_gpu_early_init()
1944 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1945 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1946 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1947 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1948 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1959 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1960 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1961 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1962 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1963 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1970 adev->gfx.ras = &gfx_v9_4_ras; in gfx_v9_0_gpu_early_init()
1971 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1972 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1973 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1974 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1975 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1981 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1982 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1983 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1984 adev->gfx.config.sc_hiz_tile_fifo_size = 0x80; in gfx_v9_0_gpu_early_init()
1985 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1991 adev->gfx.ras = &gfx_v9_4_2_ras; in gfx_v9_0_gpu_early_init()
1992 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1993 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1994 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1995 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1996 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
2010 if (adev->gfx.ras) { in gfx_v9_0_gpu_early_init()
2011 err = amdgpu_ras_register_ras_block(adev, &adev->gfx.ras->ras_block); in gfx_v9_0_gpu_early_init()
2013 DRM_ERROR("Failed to register gfx ras block!\n"); in gfx_v9_0_gpu_early_init()
2017 strcpy(adev->gfx.ras->ras_block.ras_comm.name, "gfx"); in gfx_v9_0_gpu_early_init()
2018 adev->gfx.ras->ras_block.ras_comm.block = AMDGPU_RAS_BLOCK__GFX; in gfx_v9_0_gpu_early_init()
2019 adev->gfx.ras->ras_block.ras_comm.type = AMDGPU_RAS_ERROR__MULTI_UNCORRECTABLE; in gfx_v9_0_gpu_early_init()
2020 adev->gfx.ras_if = &adev->gfx.ras->ras_block.ras_comm; in gfx_v9_0_gpu_early_init()
2022 /* If not define special ras_late_init function, use gfx default ras_late_init */ in gfx_v9_0_gpu_early_init()
2023 if (!adev->gfx.ras->ras_block.ras_late_init) in gfx_v9_0_gpu_early_init()
2024 adev->gfx.ras->ras_block.ras_late_init = amdgpu_gfx_ras_late_init; in gfx_v9_0_gpu_early_init()
2027 if (!adev->gfx.ras->ras_block.ras_cb) in gfx_v9_0_gpu_early_init()
2028 adev->gfx.ras->ras_block.ras_cb = amdgpu_gfx_process_ras_data_cb; in gfx_v9_0_gpu_early_init()
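
The RAS hits in gfx_v9_0_gpu_early_init() register adev->gfx.ras->ras_block and then only fill in ras_late_init and ras_cb when the ASIC-specific table left them NULL, so the generic amdgpu_gfx_* defaults apply. A tiny sketch of that fill-in-defaults idiom; the ops structure and function names below are illustrative, not the driver's types.

    #include <stdio.h>

    /* Illustrative ops table; the driver's is struct amdgpu_ras_block_object. */
    struct ras_ops {
        int  (*late_init)(void);
        void (*error_cb)(void);
    };

    static int  default_late_init(void) { puts("default late_init"); return 0; }
    static void default_error_cb(void)  { puts("default error_cb"); }

    /* Mirror of the "only set it if the ASIC table did not" checks. */
    static void apply_default_ras_ops(struct ras_ops *ops)
    {
        if (!ops->late_init)
            ops->late_init = default_late_init;
        if (!ops->error_cb)
            ops->error_cb = default_error_cb;
    }

    int main(void)
    {
        struct ras_ops ops = { 0 };    /* the ASIC table provided no overrides */

        apply_default_ras_ops(&ops);
        ops.late_init();
        ops.error_cb();
        return 0;
    }
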
2031 adev->gfx.config.gb_addr_config = gb_addr_config; in gfx_v9_0_gpu_early_init()
2033 adev->gfx.config.gb_addr_config_fields.num_pipes = 1 << in gfx_v9_0_gpu_early_init()
2035 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2039 adev->gfx.config.max_tile_pipes = in gfx_v9_0_gpu_early_init()
2040 adev->gfx.config.gb_addr_config_fields.num_pipes; in gfx_v9_0_gpu_early_init()
2042 adev->gfx.config.gb_addr_config_fields.num_banks = 1 << in gfx_v9_0_gpu_early_init()
2044 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2047 adev->gfx.config.gb_addr_config_fields.max_compress_frags = 1 << in gfx_v9_0_gpu_early_init()
2049 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2052 adev->gfx.config.gb_addr_config_fields.num_rb_per_se = 1 << in gfx_v9_0_gpu_early_init()
2054 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2057 adev->gfx.config.gb_addr_config_fields.num_se = 1 << in gfx_v9_0_gpu_early_init()
2059 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2062 adev->gfx.config.gb_addr_config_fields.pipe_interleave_size = 1 << (8 + in gfx_v9_0_gpu_early_init()
2064 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
2075 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; in gfx_v9_0_compute_ring_init()
2078 ring = &adev->gfx.compute_ring[ring_id]; in gfx_v9_0_compute_ring_init()
2088 ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr in gfx_v9_0_compute_ring_init()
2093 + ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec) in gfx_v9_0_compute_ring_init()
2098 return amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type, in gfx_v9_0_compute_ring_init()
2118 adev->gfx.mec.num_mec = 2; in gfx_v9_0_sw_init()
2121 adev->gfx.mec.num_mec = 1; in gfx_v9_0_sw_init()
2125 adev->gfx.mec.num_pipe_per_mec = 4; in gfx_v9_0_sw_init()
2126 adev->gfx.mec.num_queue_per_pipe = 8; in gfx_v9_0_sw_init()
2129 …_irq_add_id(adev, SOC15_IH_CLIENTID_GRBM_CP, GFX_9_0__SRCID__CP_EOP_INTERRUPT, &adev->gfx.eop_irq); in gfx_v9_0_sw_init()
2135 &adev->gfx.priv_reg_irq); in gfx_v9_0_sw_init()
2141 &adev->gfx.priv_inst_irq); in gfx_v9_0_sw_init()
2147 &adev->gfx.cp_ecc_error_irq); in gfx_v9_0_sw_init()
2153 &adev->gfx.cp_ecc_error_irq); in gfx_v9_0_sw_init()
2157 adev->gfx.gfx_current_status = AMDGPU_GFX_NORMAL_MODE; in gfx_v9_0_sw_init()
2161 DRM_ERROR("Failed to load gfx firmware!\n"); in gfx_v9_0_sw_init()
2165 if (adev->gfx.rlc.funcs) { in gfx_v9_0_sw_init()
2166 if (adev->gfx.rlc.funcs->init) { in gfx_v9_0_sw_init()
2167 r = adev->gfx.rlc.funcs->init(adev); in gfx_v9_0_sw_init()
2181 /* set up the gfx ring */ in gfx_v9_0_sw_init()
2182 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in gfx_v9_0_sw_init()
2183 ring = &adev->gfx.gfx_ring[i]; in gfx_v9_0_sw_init()
2186 sprintf(ring->name, "gfx"); in gfx_v9_0_sw_init()
2191 r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, in gfx_v9_0_sw_init()
2200 for (i = 0; i < adev->gfx.mec.num_mec; ++i) { in gfx_v9_0_sw_init()
2201 for (j = 0; j < adev->gfx.mec.num_queue_per_pipe; j++) { in gfx_v9_0_sw_init()
2202 for (k = 0; k < adev->gfx.mec.num_pipe_per_mec; k++) { in gfx_v9_0_sw_init()
2223 kiq = &adev->gfx.kiq; in gfx_v9_0_sw_init()
2233 adev->gfx.ce_ram_size = 0x8000; in gfx_v9_0_sw_init()
2248 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v9_0_sw_fini()
2249 amdgpu_ring_fini(&adev->gfx.gfx_ring[i]); in gfx_v9_0_sw_fini()
2250 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_sw_fini()
2251 amdgpu_ring_fini(&adev->gfx.compute_ring[i]); in gfx_v9_0_sw_fini()
2254 amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq.ring); in gfx_v9_0_sw_fini()
2258 amdgpu_bo_free_kernel(&adev->gfx.rlc.clear_state_obj, in gfx_v9_0_sw_fini()
2259 &adev->gfx.rlc.clear_state_gpu_addr, in gfx_v9_0_sw_fini()
2260 (void **)&adev->gfx.rlc.cs_ptr); in gfx_v9_0_sw_fini()
2262 amdgpu_bo_free_kernel(&adev->gfx.rlc.cp_table_obj, in gfx_v9_0_sw_fini()
2263 &adev->gfx.rlc.cp_table_gpu_addr, in gfx_v9_0_sw_fini()
2264 (void **)&adev->gfx.rlc.cp_table_ptr); in gfx_v9_0_sw_fini()
2310 mask = amdgpu_gfx_create_bitmask(adev->gfx.config.max_backends_per_se / in gfx_v9_0_get_rb_active_bitmap()
2311 adev->gfx.config.max_sh_per_se); in gfx_v9_0_get_rb_active_bitmap()
2321 u32 rb_bitmap_width_per_sh = adev->gfx.config.max_backends_per_se / in gfx_v9_0_setup_rb()
2322 adev->gfx.config.max_sh_per_se; in gfx_v9_0_setup_rb()
2325 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_setup_rb()
2326 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_setup_rb()
2329 active_rbs |= data << ((i * adev->gfx.config.max_sh_per_se + j) * in gfx_v9_0_setup_rb()
2336 adev->gfx.config.backend_enable_mask = active_rbs; in gfx_v9_0_setup_rb()
2337 adev->gfx.config.num_rbs = hweight32(active_rbs); in gfx_v9_0_setup_rb()
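
The gfx_v9_0_setup_rb() hits show how config.backend_enable_mask is assembled: each SE/SH pair contributes rb_bitmap_width_per_sh bits shifted into one word, and num_rbs is the population count (hweight32) of the result. A small self-contained sketch of that accumulation, with per_sh_bitmap() as a made-up stand-in for the per-SH hardware read, __builtin_popcount() standing in for hweight32(), and the topology constants chosen only for illustration.

    #include <stdio.h>

    /* Illustrative topology; real values come from adev->gfx.config. */
    enum { MAX_SE = 4, MAX_SH_PER_SE = 1, MAX_BACKENDS_PER_SE = 4 };

    /* Equivalent of amdgpu_gfx_create_bitmask(): the low n bits set. */
    static unsigned int create_bitmask(unsigned int n)
    {
        return (1u << n) - 1;
    }

    /* Made-up stand-in for reading the per-SH active-RB bits from hardware. */
    static unsigned int per_sh_bitmap(int se, int sh, unsigned int mask)
    {
        (void)se; (void)sh;
        return mask;    /* pretend every backend is active */
    }

    int main(void)
    {
        unsigned int rb_width = MAX_BACKENDS_PER_SE / MAX_SH_PER_SE;
        unsigned int mask = create_bitmask(rb_width);
        unsigned int active_rbs = 0;

        for (int i = 0; i < MAX_SE; i++)
            for (int j = 0; j < MAX_SH_PER_SE; j++)
                active_rbs |= per_sh_bitmap(i, j, mask)
                    << ((i * MAX_SH_PER_SE + j) * rb_width);

        /* backend_enable_mask / num_rbs, as in gfx_v9_0_setup_rb() */
        printf("backend_enable_mask=0x%x num_rbs=%d\n",
               active_rbs, __builtin_popcount(active_rbs));
        return 0;
    }
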
2384 * Initialize all compute and user-gfx VMIDs to have no GDS, GWS, or OA in gfx_v9_0_init_gds_vmid()
2422 if (adev->gfx.num_gfx_rings) in gfx_v9_0_constants_init()
2424 gfx_v9_0_get_cu_info(adev, &adev->gfx.cu_info); in gfx_v9_0_constants_init()
2425 adev->gfx.config.db_debug2 = RREG32_SOC15(GC, 0, mmDB_DEBUG2); in gfx_v9_0_constants_init()
2468 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_wait_for_rlc_serdes()
2469 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_wait_for_rlc_serdes()
2512 if(adev->gfx.num_gfx_rings) in gfx_v9_0_enable_gui_idle_interrupt()
2520 adev->gfx.rlc.funcs->get_csb_buffer(adev, adev->gfx.rlc.cs_ptr); in gfx_v9_0_init_csb()
2523 adev->gfx.rlc.clear_state_gpu_addr >> 32); in gfx_v9_0_init_csb()
2525 adev->gfx.rlc.clear_state_gpu_addr & 0xfffffffc); in gfx_v9_0_init_csb()
2527 adev->gfx.rlc.clear_state_size); in gfx_v9_0_init_csb()
2580 kmemdup(adev->gfx.rlc.register_list_format, in gfx_v9_1_init_rlc_save_restore_list()
2581 adev->gfx.rlc.reg_list_format_size_bytes, GFP_KERNEL); in gfx_v9_1_init_rlc_save_restore_list()
2588 adev->gfx.rlc.reg_list_format_direct_reg_list_length, in gfx_v9_1_init_rlc_save_restore_list()
2589 adev->gfx.rlc.reg_list_format_size_bytes >> 2, in gfx_v9_1_init_rlc_save_restore_list()
2604 for (i = 0; i < adev->gfx.rlc.reg_list_size_bytes >> 2; i++) in gfx_v9_1_init_rlc_save_restore_list()
2606 adev->gfx.rlc.register_restore[i]); in gfx_v9_1_init_rlc_save_restore_list()
2610 adev->gfx.rlc.reg_list_format_start); in gfx_v9_1_init_rlc_save_restore_list()
2613 for (i = 0; i < adev->gfx.rlc.reg_list_format_direct_reg_list_length; i++) in gfx_v9_1_init_rlc_save_restore_list()
2618 while (i < (adev->gfx.rlc.reg_list_format_size_bytes >> 2)) { in gfx_v9_1_init_rlc_save_restore_list()
2640 list_size = adev->gfx.rlc.reg_list_size_bytes >> 2; in gfx_v9_1_init_rlc_save_restore_list()
2643 adev->gfx.rlc.reg_restore_list_size); in gfx_v9_1_init_rlc_save_restore_list()
2648 adev->gfx.rlc.starting_offsets_start); in gfx_v9_1_init_rlc_save_restore_list()
2811 /* read any GFX register to wake up GFX */ in gfx_v9_0_enable_gfx_pipeline_powergating()
2849 if (adev->gfx.rlc.is_rlc_v2_1) { in gfx_v9_0_init_pg()
2863 adev->gfx.rlc.cp_table_gpu_addr >> 8); in gfx_v9_0_init_pg()
2903 rlc_ucode_ver, adev->gfx.rlc_fw_version); in gfx_v9_0_rlc_start()
2921 if (!adev->gfx.rlc_fw) in gfx_v9_0_rlc_load_microcode()
2924 hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data; in gfx_v9_0_rlc_load_microcode()
2927 fw_data = (const __le32 *)(adev->gfx.rlc_fw->data + in gfx_v9_0_rlc_load_microcode()
2935 WREG32_SOC15(GC, 0, mmRLC_GPM_UCODE_ADDR, adev->gfx.rlc_fw_version); in gfx_v9_0_rlc_load_microcode()
2949 adev->gfx.rlc.funcs->stop(adev); in gfx_v9_0_rlc_resume()
2981 adev->gfx.rlc.funcs->start(adev); in gfx_v9_0_rlc_resume()
3005 if (!adev->gfx.me_fw || !adev->gfx.pfp_fw || !adev->gfx.ce_fw) in gfx_v9_0_cp_gfx_load_microcode()
3009 adev->gfx.pfp_fw->data; in gfx_v9_0_cp_gfx_load_microcode()
3011 adev->gfx.ce_fw->data; in gfx_v9_0_cp_gfx_load_microcode()
3013 adev->gfx.me_fw->data; in gfx_v9_0_cp_gfx_load_microcode()
3023 (adev->gfx.pfp_fw->data + in gfx_v9_0_cp_gfx_load_microcode()
3029 WREG32_SOC15(GC, 0, mmCP_PFP_UCODE_ADDR, adev->gfx.pfp_fw_version); in gfx_v9_0_cp_gfx_load_microcode()
3033 (adev->gfx.ce_fw->data + in gfx_v9_0_cp_gfx_load_microcode()
3039 WREG32_SOC15(GC, 0, mmCP_CE_UCODE_ADDR, adev->gfx.ce_fw_version); in gfx_v9_0_cp_gfx_load_microcode()
3043 (adev->gfx.me_fw->data + in gfx_v9_0_cp_gfx_load_microcode()
3049 WREG32_SOC15(GC, 0, mmCP_ME_RAM_WADDR, adev->gfx.me_fw_version); in gfx_v9_0_cp_gfx_load_microcode()
3056 struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_cp_gfx_start()
3062 WREG32_SOC15(GC, 0, mmCP_MAX_CONTEXT, adev->gfx.config.max_hw_contexts - 1); in gfx_v9_0_cp_gfx_start()
3130 ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_cp_gfx_resume()
3193 adev->gfx.kiq.ring.sched.ready = false; in gfx_v9_0_cp_compute_enable()
3205 if (!adev->gfx.mec_fw) in gfx_v9_0_cp_compute_load_microcode()
3210 mec_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v9_0_cp_compute_load_microcode()
3214 (adev->gfx.mec_fw->data + in gfx_v9_0_cp_compute_load_microcode()
3222 adev->gfx.mec.mec_fw_gpu_addr & 0xFFFFF000); in gfx_v9_0_cp_compute_load_microcode()
3224 upper_32_bits(adev->gfx.mec.mec_fw_gpu_addr)); in gfx_v9_0_cp_compute_load_microcode()
3234 adev->gfx.mec_fw_version); in gfx_v9_0_cp_compute_load_microcode()
3567 tmp_mqd = (struct v9_mqd *)adev->gfx.mec.mqd_backup[mqd_idx]; in gfx_v9_0_kiq_init_queue()
3570 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kiq_init_queue()
3571 memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx], sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kiq_init_queue()
3593 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kiq_init_queue()
3594 memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd, sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kiq_init_queue()
3604 int mqd_idx = ring - &adev->gfx.compute_ring[0]; in gfx_v9_0_kcq_init_queue()
3610 tmp_mqd = (struct v9_mqd *)adev->gfx.mec.mqd_backup[mqd_idx]; in gfx_v9_0_kcq_init_queue()
3623 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kcq_init_queue()
3624 memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd, sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kcq_init_queue()
3627 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kcq_init_queue()
3628 memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx], sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kcq_init_queue()
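
The gfx_v9_0_kiq_init_queue()/gfx_v9_0_kcq_init_queue() hits show the MQD backup rule: on first init the freshly programmed MQD is copied out to mec.mqd_backup[mqd_idx], while on the reset/resume path the saved copy is memcpy'd back instead of reprogramming from scratch. A compact sketch of that backup-or-restore decision; the struct and flag names are illustrative only, and the real struct v9_mqd is far larger.

    #include <stdbool.h>
    #include <stdio.h>
    #include <string.h>

    /* Illustrative miniature of an MQD; stands in for struct v9_mqd_allocation. */
    struct mqd { unsigned int cp_hqd_active; unsigned int rptr, wptr; };

    static void init_queue_mqd(struct mqd *mqd, struct mqd *backup, bool in_reset)
    {
        if (in_reset && backup) {
            /* reset path: restore the queue state saved at first init */
            memcpy(mqd, backup, sizeof(*mqd));
        } else {
            /* first init: program the MQD, then keep a backup copy */
            memset(mqd, 0, sizeof(*mqd));
            mqd->cp_hqd_active = 1;
            if (backup)
                memcpy(backup, mqd, sizeof(*mqd));
        }
    }

    int main(void)
    {
        struct mqd live = { 0 }, backup = { 0 };

        init_queue_mqd(&live, &backup, false);    /* first bring-up: save backup */
        live.rptr = 42;                           /* pretend the queue ran a while */
        init_queue_mqd(&live, &backup, true);     /* after reset: restore backup */
        printf("rptr after restore = %u\n", live.rptr);
        return 0;
    }
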
3646 ring = &adev->gfx.kiq.ring; in gfx_v9_0_kiq_resume()
3671 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_kcq_resume()
3672 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_kcq_resume()
3702 if (adev->gfx.num_gfx_rings) { in gfx_v9_0_cp_resume()
3718 if (adev->gfx.num_gfx_rings) { in gfx_v9_0_cp_resume()
3728 if (adev->gfx.num_gfx_rings) { in gfx_v9_0_cp_resume()
3729 ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_cp_resume()
3735 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_cp_resume()
3736 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_cp_resume()
3765 if (adev->gfx.num_gfx_rings) in gfx_v9_0_cp_enable()
3782 r = adev->gfx.rlc.funcs->resume(adev); in gfx_v9_0_hw_init()
3800 amdgpu_irq_put(adev, &adev->gfx.cp_ecc_error_irq, 0); in gfx_v9_0_hw_fini()
3801 amdgpu_irq_put(adev, &adev->gfx.priv_reg_irq, 0); in gfx_v9_0_hw_fini()
3802 amdgpu_irq_put(adev, &adev->gfx.priv_inst_irq, 0); in gfx_v9_0_hw_fini()
3825 soc15_grbm_select(adev, adev->gfx.kiq.ring.me, in gfx_v9_0_hw_fini()
3826 adev->gfx.kiq.ring.pipe, in gfx_v9_0_hw_fini()
3827 adev->gfx.kiq.ring.queue, 0); in gfx_v9_0_hw_fini()
3828 gfx_v9_0_kiq_fini_register(&adev->gfx.kiq.ring); in gfx_v9_0_hw_fini()
3835 /* Skip stopping RLC with A+A reset or when RLC controls GFX clock */ in gfx_v9_0_hw_fini()
3842 adev->gfx.rlc.funcs->stop(adev); in gfx_v9_0_hw_fini()
3914 adev->gfx.rlc.funcs->stop(adev); in gfx_v9_0_soft_reset()
3916 if (adev->gfx.num_gfx_rings) in gfx_v9_0_soft_reset()
3917 /* Disable GFX parsing/prefetching */ in gfx_v9_0_soft_reset()
3949 struct amdgpu_kiq *kiq = &adev->gfx.kiq; in gfx_v9_0_kiq_read_clock()
4039 mutex_lock(&adev->gfx.gpu_clock_mutex); in gfx_v9_0_get_gpu_clock_counter()
4047 mutex_unlock(&adev->gfx.gpu_clock_mutex); in gfx_v9_0_get_gpu_clock_counter()
4321 struct amdgpu_ring *ring = &adev->gfx.compute_ring[0]; in gfx_v9_0_do_edc_gds_workarounds()
4368 struct amdgpu_ring *ring = &adev->gfx.compute_ring[0]; in gfx_v9_0_do_edc_gpr_workarounds()
4375 int compute_dim_x = adev->gfx.config.max_shader_engines * in gfx_v9_0_do_edc_gpr_workarounds()
4376 adev->gfx.config.max_cu_per_sh * in gfx_v9_0_do_edc_gpr_workarounds()
4377 adev->gfx.config.max_sh_per_se; in gfx_v9_0_do_edc_gpr_workarounds()
4379 int gpr_reg_size = adev->gfx.config.max_shader_engines + 6; in gfx_v9_0_do_edc_gpr_workarounds()
4544 adev->gfx.num_gfx_rings = 0; in gfx_v9_0_early_init()
4546 adev->gfx.num_gfx_rings = GFX9_NUM_GFX_RINGS; in gfx_v9_0_early_init()
4547 adev->gfx.num_compute_rings = min(amdgpu_gfx_get_num_kcq(adev), in gfx_v9_0_early_init()
4588 if (adev->gfx.ras && in gfx_v9_0_ecc_late_init()
4589 adev->gfx.ras->enable_watchdog_timer) in gfx_v9_0_ecc_late_init()
4590 adev->gfx.ras->enable_watchdog_timer(adev); in gfx_v9_0_ecc_late_init()
4600 r = amdgpu_irq_get(adev, &adev->gfx.priv_reg_irq, 0); in gfx_v9_0_late_init()
4604 r = amdgpu_irq_get(adev, &adev->gfx.priv_inst_irq, 0); in gfx_v9_0_late_init()
4714 /* MGLS is a global flag to control all MGLS in GFX */ in gfx_v9_0_update_medium_grain_clock_gating()
4769 if (!adev->gfx.num_gfx_rings) in gfx_v9_0_update_3d_clock_gating()
4879 /* === CGCG /CGLS for GFX 3D Only === */ in gfx_v9_0_update_gfx_clock_gating()
4888 /* === CGCG /CGLS for GFX 3D Only === */ in gfx_v9_0_update_gfx_clock_gating()
4990 /* update gfx cgpg state */ in gfx_v9_0_set_powergating_state()
5395 /* set load_per_context_state & load_gfx_sh_regs for GFX */ in gfx_v9_ring_emit_cntxcntl()
5492 adev->gfx.me_fw_write_wait : adev->gfx.mec_fw_write_wait; in gfx_v9_0_ring_emit_reg_write_reg_wait()
5713 amdgpu_fence_process(&adev->gfx.gfx_ring[0]); in gfx_v9_0_eop_irq()
5717 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_eop_irq()
5718 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_eop_irq()
5743 drm_sched_fault(&adev->gfx.gfx_ring[0].sched); in gfx_v9_0_fault()
5747 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_fault()
5748 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_fault()
6235 DRM_ERROR("GFX Subblock %s, hardware do not support type 0x%x\n", in gfx_v9_0_ras_error_inject()
6243 DRM_ERROR("GFX Subblock %s, driver do not support type 0x%x\n", in gfx_v9_0_ras_error_inject()
6449 dev_info(adev->dev, "GFX SubBlock %s, " in gfx_v9_0_ras_error_count()
6461 dev_info(adev->dev, "GFX SubBlock %s, " in gfx_v9_0_ras_error_count()
6626 * number of gfx waves. Setting 5 bit will make sure gfx only gets in gfx_v9_0_emit_wave_limit()
6639 for (i = 0; i < adev->gfx.mec.num_pipe_per_mec; i++) { in gfx_v9_0_emit_wave_limit()
6790 adev->gfx.kiq.ring.funcs = &gfx_v9_0_ring_funcs_kiq; in gfx_v9_0_set_ring_funcs()
6792 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v9_0_set_ring_funcs()
6793 adev->gfx.gfx_ring[i].funcs = &gfx_v9_0_ring_funcs_gfx; in gfx_v9_0_set_ring_funcs()
6795 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_set_ring_funcs()
6796 adev->gfx.compute_ring[i].funcs = &gfx_v9_0_ring_funcs_compute; in gfx_v9_0_set_ring_funcs()
6822 adev->gfx.eop_irq.num_types = AMDGPU_CP_IRQ_LAST; in gfx_v9_0_set_irq_funcs()
6823 adev->gfx.eop_irq.funcs = &gfx_v9_0_eop_irq_funcs; in gfx_v9_0_set_irq_funcs()
6825 adev->gfx.priv_reg_irq.num_types = 1; in gfx_v9_0_set_irq_funcs()
6826 adev->gfx.priv_reg_irq.funcs = &gfx_v9_0_priv_reg_irq_funcs; in gfx_v9_0_set_irq_funcs()
6828 adev->gfx.priv_inst_irq.num_types = 1; in gfx_v9_0_set_irq_funcs()
6829 adev->gfx.priv_inst_irq.funcs = &gfx_v9_0_priv_inst_irq_funcs; in gfx_v9_0_set_irq_funcs()
6831 adev->gfx.cp_ecc_error_irq.num_types = 2; /*C5 ECC error and C9 FUE error*/ in gfx_v9_0_set_irq_funcs()
6832 adev->gfx.cp_ecc_error_irq.funcs = &gfx_v9_0_cp_ecc_error_irq_funcs; in gfx_v9_0_set_irq_funcs()
6846 adev->gfx.rlc.funcs = &gfx_v9_0_rlc_funcs; in gfx_v9_0_set_rlc_funcs()
6934 mask = amdgpu_gfx_create_bitmask(adev->gfx.config.max_cu_per_sh); in gfx_v9_0_get_cu_active_bitmap()
6952 if (adev->gfx.config.max_shader_engines * in gfx_v9_0_get_cu_info()
6953 adev->gfx.config.max_sh_per_se > 16) in gfx_v9_0_get_cu_info()
6957 adev->gfx.config.max_shader_engines, in gfx_v9_0_get_cu_info()
6958 adev->gfx.config.max_sh_per_se); in gfx_v9_0_get_cu_info()
6961 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_get_cu_info()
6962 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_get_cu_info()
6968 adev, disable_masks[i * adev->gfx.config.max_sh_per_se + j]); in gfx_v9_0_get_cu_info()
6985 for (k = 0; k < adev->gfx.config.max_cu_per_sh; k ++) { in gfx_v9_0_get_cu_info()
6987 if (counter < adev->gfx.config.max_cu_per_sh) in gfx_v9_0_get_cu_info()