Lines Matching full:gfx
42 #include "ivsrcid/gfx/irqsrcs_gfx_10_1.h"
3332 adev->gfx.kiq.pmf = &gfx_v10_0_kiq_pm4_funcs; in gfx_v10_0_set_kiq_pm4_funcs()
3407 adev->gfx.scratch.num_reg = 8; in gfx_v10_0_scratch_init()
3408 adev->gfx.scratch.reg_base = SOC15_REG_OFFSET(GC, 0, mmSCRATCH_REG0); in gfx_v10_0_scratch_init()
3409 adev->gfx.scratch.free_mask = (1u << adev->gfx.scratch.num_reg) - 1; in gfx_v10_0_scratch_init()
3548 release_firmware(adev->gfx.pfp_fw); in gfx_v10_0_free_microcode()
3549 adev->gfx.pfp_fw = NULL; in gfx_v10_0_free_microcode()
3550 release_firmware(adev->gfx.me_fw); in gfx_v10_0_free_microcode()
3551 adev->gfx.me_fw = NULL; in gfx_v10_0_free_microcode()
3552 release_firmware(adev->gfx.ce_fw); in gfx_v10_0_free_microcode()
3553 adev->gfx.ce_fw = NULL; in gfx_v10_0_free_microcode()
3554 release_firmware(adev->gfx.rlc_fw); in gfx_v10_0_free_microcode()
3555 adev->gfx.rlc_fw = NULL; in gfx_v10_0_free_microcode()
3556 release_firmware(adev->gfx.mec_fw); in gfx_v10_0_free_microcode()
3557 adev->gfx.mec_fw = NULL; in gfx_v10_0_free_microcode()
3558 release_firmware(adev->gfx.mec2_fw); in gfx_v10_0_free_microcode()
3559 adev->gfx.mec2_fw = NULL; in gfx_v10_0_free_microcode()
3561 kfree(adev->gfx.rlc.register_list_format); in gfx_v10_0_free_microcode()
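Note: the matches at 3548-3561 are gfx_v10_0_free_microcode() dropping every firmware reference taken during init and freeing the decoded RLC register list; release_firmware() and kfree() both accept NULL, so each pointer is simply released and cleared. A condensed sketch of that teardown pattern, assuming the driver's usual "amdgpu.h" header (only two of the six images are spelled out here):

    #include "amdgpu.h"

    /* Release firmware images and clear the stale pointers. */
    static void free_microcode_sketch(struct amdgpu_device *adev)
    {
        release_firmware(adev->gfx.pfp_fw);
        adev->gfx.pfp_fw = NULL;
        release_firmware(adev->gfx.mec2_fw);
        adev->gfx.mec2_fw = NULL;
        /* ... the same release-then-NULL pair for the me, ce, rlc and mec images ... */

        kfree(adev->gfx.rlc.register_list_format);
    }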
3566 adev->gfx.cp_fw_write_wait = false; in gfx_v10_0_check_fw_write_wait()
3572 if ((adev->gfx.me_fw_version >= 0x00000046) && in gfx_v10_0_check_fw_write_wait()
3573 (adev->gfx.me_feature_version >= 27) && in gfx_v10_0_check_fw_write_wait()
3574 (adev->gfx.pfp_fw_version >= 0x00000068) && in gfx_v10_0_check_fw_write_wait()
3575 (adev->gfx.pfp_feature_version >= 27) && in gfx_v10_0_check_fw_write_wait()
3576 (adev->gfx.mec_fw_version >= 0x0000005b) && in gfx_v10_0_check_fw_write_wait()
3577 (adev->gfx.mec_feature_version >= 27)) in gfx_v10_0_check_fw_write_wait()
3578 adev->gfx.cp_fw_write_wait = true; in gfx_v10_0_check_fw_write_wait()
3582 adev->gfx.cp_fw_write_wait = true; in gfx_v10_0_check_fw_write_wait()
3588 if (!adev->gfx.cp_fw_write_wait) in gfx_v10_0_check_fw_write_wait()
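Note: matches 3566-3588 show the version gate in gfx_v10_0_check_fw_write_wait(): cp_fw_write_wait is enabled only when the ME, PFP and MEC microcode and feature versions all meet the listed minimums, and that flag later decides whether the combined write-then-wait register packet can be used (see match 8178). A minimal sketch of the gating logic; the per-ASIC switch that surrounds it is omitted, the thresholds are copied from the lines above, and the fall-through warning is illustrative:

    #include "amdgpu.h"

    static void check_fw_write_wait_sketch(struct amdgpu_device *adev)
    {
        adev->gfx.cp_fw_write_wait = false;

        /* Only trust the combined reg-write/reg-wait path on new enough CP firmware. */
        if ((adev->gfx.me_fw_version >= 0x00000046) &&
            (adev->gfx.me_feature_version >= 27) &&
            (adev->gfx.pfp_fw_version >= 0x00000068) &&
            (adev->gfx.pfp_feature_version >= 27) &&
            (adev->gfx.mec_fw_version >= 0x0000005b) &&
            (adev->gfx.mec_feature_version >= 27))
            adev->gfx.cp_fw_write_wait = true;

        if (!adev->gfx.cp_fw_write_wait)
            DRM_WARN_ONCE("CP firmware version too old, please update!");
    }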
3597 rlc_hdr = (const struct rlc_firmware_header_v2_1 *)adev->gfx.rlc_fw->data; in gfx_v10_0_init_rlc_ext_microcode()
3598 adev->gfx.rlc_srlc_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_cntl_ucode_ver); in gfx_v10_0_init_rlc_ext_microcode()
3599 adev->gfx.rlc_srlc_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_cntl_feature_ver); in gfx_v10_0_init_rlc_ext_microcode()
3600 adev->gfx.rlc.save_restore_list_cntl_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_cntl_size_bytes); in gfx_v10_0_init_rlc_ext_microcode()
3601 adev->gfx.rlc.save_restore_list_cntl = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_cntl_offset_bytes); in gfx_v10_0_init_rlc_ext_microcode()
3602 adev->gfx.rlc_srlg_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_gpm_ucode_ver); in gfx_v10_0_init_rlc_ext_microcode()
3603 adev->gfx.rlc_srlg_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_gpm_feature_ver); in gfx_v10_0_init_rlc_ext_microcode()
3604 adev->gfx.rlc.save_restore_list_gpm_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_gpm_size_bytes); in gfx_v10_0_init_rlc_ext_microcode()
3605 adev->gfx.rlc.save_restore_list_gpm = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_gpm_offset_bytes); in gfx_v10_0_init_rlc_ext_microcode()
3606 adev->gfx.rlc_srls_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_srm_ucode_ver); in gfx_v10_0_init_rlc_ext_microcode()
3607 adev->gfx.rlc_srls_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_srm_feature_ver); in gfx_v10_0_init_rlc_ext_microcode()
3608 adev->gfx.rlc.save_restore_list_srm_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_srm_size_bytes); in gfx_v10_0_init_rlc_ext_microcode()
3609 adev->gfx.rlc.save_restore_list_srm = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_srm_offset_bytes); in gfx_v10_0_init_rlc_ext_microcode()
3610 adev->gfx.rlc.reg_list_format_direct_reg_list_length = in gfx_v10_0_init_rlc_ext_microcode()
3618 rlc_hdr = (const struct rlc_firmware_header_v2_2 *)adev->gfx.rlc_fw->data; in gfx_v10_0_init_rlc_iram_dram_microcode()
3619 adev->gfx.rlc.rlc_iram_ucode_size_bytes = le32_to_cpu(rlc_hdr->rlc_iram_ucode_size_bytes); in gfx_v10_0_init_rlc_iram_dram_microcode()
3620 adev->gfx.rlc.rlc_iram_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->rlc_iram_ucode_offset_bytes); in gfx_v10_0_init_rlc_iram_dram_microcode()
3621 adev->gfx.rlc.rlc_dram_ucode_size_bytes = le32_to_cpu(rlc_hdr->rlc_dram_ucode_size_bytes); in gfx_v10_0_init_rlc_iram_dram_microcode()
3622 adev->gfx.rlc.rlc_dram_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->rlc_dram_ucode_offset_bytes); in gfx_v10_0_init_rlc_iram_dram_microcode()
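Note: matches 3597-3622 illustrate how the RLC firmware blob is parsed: fw->data is cast to a versioned header, sizes and offsets are read through le32_to_cpu(), and each section pointer is computed as a byte offset from the header itself. A minimal reconstruction of that pattern for the v2.2 IRAM/DRAM sections (field names as they appear in the lines above):

    #include "amdgpu.h"

    static void init_rlc_iram_dram_sketch(struct amdgpu_device *adev)
    {
        const struct rlc_firmware_header_v2_2 *rlc_hdr;

        /* The loaded image starts with a little-endian header describing each section. */
        rlc_hdr = (const struct rlc_firmware_header_v2_2 *)adev->gfx.rlc_fw->data;

        adev->gfx.rlc.rlc_iram_ucode_size_bytes =
            le32_to_cpu(rlc_hdr->rlc_iram_ucode_size_bytes);
        adev->gfx.rlc.rlc_iram_ucode =
            (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->rlc_iram_ucode_offset_bytes);
        adev->gfx.rlc.rlc_dram_ucode_size_bytes =
            le32_to_cpu(rlc_hdr->rlc_dram_ucode_size_bytes);
        adev->gfx.rlc.rlc_dram_ucode =
            (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->rlc_dram_ucode_offset_bytes);
    }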
3699 err = request_firmware(&adev->gfx.pfp_fw, fw_name, adev->dev); in gfx_v10_0_init_microcode()
3702 err = amdgpu_ucode_validate(adev->gfx.pfp_fw); in gfx_v10_0_init_microcode()
3705 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.pfp_fw->data; in gfx_v10_0_init_microcode()
3706 adev->gfx.pfp_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v10_0_init_microcode()
3707 adev->gfx.pfp_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v10_0_init_microcode()
3710 err = request_firmware(&adev->gfx.me_fw, fw_name, adev->dev); in gfx_v10_0_init_microcode()
3713 err = amdgpu_ucode_validate(adev->gfx.me_fw); in gfx_v10_0_init_microcode()
3716 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.me_fw->data; in gfx_v10_0_init_microcode()
3717 adev->gfx.me_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v10_0_init_microcode()
3718 adev->gfx.me_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v10_0_init_microcode()
3721 err = request_firmware(&adev->gfx.ce_fw, fw_name, adev->dev); in gfx_v10_0_init_microcode()
3724 err = amdgpu_ucode_validate(adev->gfx.ce_fw); in gfx_v10_0_init_microcode()
3727 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.ce_fw->data; in gfx_v10_0_init_microcode()
3728 adev->gfx.ce_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v10_0_init_microcode()
3729 adev->gfx.ce_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v10_0_init_microcode()
3733 err = request_firmware(&adev->gfx.rlc_fw, fw_name, adev->dev); in gfx_v10_0_init_microcode()
3736 err = amdgpu_ucode_validate(adev->gfx.rlc_fw); in gfx_v10_0_init_microcode()
3737 rlc_hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data; in gfx_v10_0_init_microcode()
3741 adev->gfx.rlc_fw_version = le32_to_cpu(rlc_hdr->header.ucode_version); in gfx_v10_0_init_microcode()
3742 adev->gfx.rlc_feature_version = le32_to_cpu(rlc_hdr->ucode_feature_version); in gfx_v10_0_init_microcode()
3743 adev->gfx.rlc.save_and_restore_offset = in gfx_v10_0_init_microcode()
3745 adev->gfx.rlc.clear_state_descriptor_offset = in gfx_v10_0_init_microcode()
3747 adev->gfx.rlc.avail_scratch_ram_locations = in gfx_v10_0_init_microcode()
3749 adev->gfx.rlc.reg_restore_list_size = in gfx_v10_0_init_microcode()
3751 adev->gfx.rlc.reg_list_format_start = in gfx_v10_0_init_microcode()
3753 adev->gfx.rlc.reg_list_format_separate_start = in gfx_v10_0_init_microcode()
3755 adev->gfx.rlc.starting_offsets_start = in gfx_v10_0_init_microcode()
3757 adev->gfx.rlc.reg_list_format_size_bytes = in gfx_v10_0_init_microcode()
3759 adev->gfx.rlc.reg_list_size_bytes = in gfx_v10_0_init_microcode()
3761 adev->gfx.rlc.register_list_format = in gfx_v10_0_init_microcode()
3762 kmalloc(adev->gfx.rlc.reg_list_format_size_bytes + in gfx_v10_0_init_microcode()
3763 adev->gfx.rlc.reg_list_size_bytes, GFP_KERNEL); in gfx_v10_0_init_microcode()
3764 if (!adev->gfx.rlc.register_list_format) { in gfx_v10_0_init_microcode()
3772 adev->gfx.rlc.register_list_format[i] = le32_to_cpu(tmp[i]); in gfx_v10_0_init_microcode()
3774 adev->gfx.rlc.register_restore = adev->gfx.rlc.register_list_format + i; in gfx_v10_0_init_microcode()
3779 adev->gfx.rlc.register_restore[i] = le32_to_cpu(tmp[i]); in gfx_v10_0_init_microcode()
3790 err = request_firmware(&adev->gfx.mec_fw, fw_name, adev->dev); in gfx_v10_0_init_microcode()
3793 err = amdgpu_ucode_validate(adev->gfx.mec_fw); in gfx_v10_0_init_microcode()
3796 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v10_0_init_microcode()
3797 adev->gfx.mec_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v10_0_init_microcode()
3798 adev->gfx.mec_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v10_0_init_microcode()
3801 err = request_firmware(&adev->gfx.mec2_fw, fw_name, adev->dev); in gfx_v10_0_init_microcode()
3803 err = amdgpu_ucode_validate(adev->gfx.mec2_fw); in gfx_v10_0_init_microcode()
3807 adev->gfx.mec2_fw->data; in gfx_v10_0_init_microcode()
3808 adev->gfx.mec2_fw_version = in gfx_v10_0_init_microcode()
3810 adev->gfx.mec2_feature_version = in gfx_v10_0_init_microcode()
3814 adev->gfx.mec2_fw = NULL; in gfx_v10_0_init_microcode()
3820 info->fw = adev->gfx.pfp_fw; in gfx_v10_0_init_microcode()
3827 info->fw = adev->gfx.me_fw; in gfx_v10_0_init_microcode()
3834 info->fw = adev->gfx.ce_fw; in gfx_v10_0_init_microcode()
3841 info->fw = adev->gfx.rlc_fw; in gfx_v10_0_init_microcode()
3847 if (adev->gfx.rlc.save_restore_list_cntl_size_bytes && in gfx_v10_0_init_microcode()
3848 adev->gfx.rlc.save_restore_list_gpm_size_bytes && in gfx_v10_0_init_microcode()
3849 adev->gfx.rlc.save_restore_list_srm_size_bytes) { in gfx_v10_0_init_microcode()
3852 info->fw = adev->gfx.rlc_fw; in gfx_v10_0_init_microcode()
3854 ALIGN(adev->gfx.rlc.save_restore_list_cntl_size_bytes, PAGE_SIZE); in gfx_v10_0_init_microcode()
3858 info->fw = adev->gfx.rlc_fw; in gfx_v10_0_init_microcode()
3860 ALIGN(adev->gfx.rlc.save_restore_list_gpm_size_bytes, PAGE_SIZE); in gfx_v10_0_init_microcode()
3864 info->fw = adev->gfx.rlc_fw; in gfx_v10_0_init_microcode()
3866 ALIGN(adev->gfx.rlc.save_restore_list_srm_size_bytes, PAGE_SIZE); in gfx_v10_0_init_microcode()
3868 if (adev->gfx.rlc.rlc_iram_ucode_size_bytes && in gfx_v10_0_init_microcode()
3869 adev->gfx.rlc.rlc_dram_ucode_size_bytes) { in gfx_v10_0_init_microcode()
3872 info->fw = adev->gfx.rlc_fw; in gfx_v10_0_init_microcode()
3874 ALIGN(adev->gfx.rlc.rlc_iram_ucode_size_bytes, PAGE_SIZE); in gfx_v10_0_init_microcode()
3878 info->fw = adev->gfx.rlc_fw; in gfx_v10_0_init_microcode()
3880 ALIGN(adev->gfx.rlc.rlc_dram_ucode_size_bytes, PAGE_SIZE); in gfx_v10_0_init_microcode()
3886 info->fw = adev->gfx.mec_fw; in gfx_v10_0_init_microcode()
3895 info->fw = adev->gfx.mec_fw; in gfx_v10_0_init_microcode()
3899 if (adev->gfx.mec2_fw) { in gfx_v10_0_init_microcode()
3902 info->fw = adev->gfx.mec2_fw; in gfx_v10_0_init_microcode()
3911 info->fw = adev->gfx.mec2_fw; in gfx_v10_0_init_microcode()
3924 release_firmware(adev->gfx.pfp_fw); in gfx_v10_0_init_microcode()
3925 adev->gfx.pfp_fw = NULL; in gfx_v10_0_init_microcode()
3926 release_firmware(adev->gfx.me_fw); in gfx_v10_0_init_microcode()
3927 adev->gfx.me_fw = NULL; in gfx_v10_0_init_microcode()
3928 release_firmware(adev->gfx.ce_fw); in gfx_v10_0_init_microcode()
3929 adev->gfx.ce_fw = NULL; in gfx_v10_0_init_microcode()
3930 release_firmware(adev->gfx.rlc_fw); in gfx_v10_0_init_microcode()
3931 adev->gfx.rlc_fw = NULL; in gfx_v10_0_init_microcode()
3932 release_firmware(adev->gfx.mec_fw); in gfx_v10_0_init_microcode()
3933 adev->gfx.mec_fw = NULL; in gfx_v10_0_init_microcode()
3934 release_firmware(adev->gfx.mec2_fw); in gfx_v10_0_init_microcode()
3935 adev->gfx.mec2_fw = NULL; in gfx_v10_0_init_microcode()
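Note: matches 3699-3935 cover gfx_v10_0_init_microcode(): every CP/RLC/MEC image goes through request_firmware(), amdgpu_ucode_validate() and header parsing, the images are additionally registered with the common firmware-info entries (the info->fw assignments at 3820-3919), and any failure falls through to the block above that releases everything acquired so far; a missing mec2 image is treated as non-fatal (match 3814). A hedged sketch of the acquire/validate/parse flow for a single CP image; load_one_cp_fw() is a hypothetical helper, not a function in the driver:

    #include <linux/firmware.h>
    #include "amdgpu.h"

    /* Hypothetical helper: fetch one CP firmware image and record its versions. */
    static int load_one_cp_fw(struct amdgpu_device *adev, const char *fw_name,
                              const struct firmware **fw,
                              u32 *fw_version, u32 *feature_version)
    {
        const struct gfx_firmware_header_v1_0 *hdr;
        int err;

        err = request_firmware(fw, fw_name, adev->dev);
        if (err)
            return err;

        err = amdgpu_ucode_validate(*fw);
        if (err) {
            release_firmware(*fw);
            *fw = NULL;
            return err;
        }

        hdr = (const struct gfx_firmware_header_v1_0 *)(*fw)->data;
        *fw_version = le32_to_cpu(hdr->header.ucode_version);
        *feature_version = le32_to_cpu(hdr->ucode_feature_version);
        return 0;
    }

The real function repeats this sequence for the pfp, me, ce, rlc, mec and optional mec2 images before filling in the firmware-info entries.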
3981 if (adev->gfx.rlc.cs_data == NULL) in gfx_v10_0_get_csb_buffer()
3993 for (sect = adev->gfx.rlc.cs_data; sect->section != NULL; ++sect) { in gfx_v10_0_get_csb_buffer()
4012 buffer[count++] = cpu_to_le32(adev->gfx.config.pa_sc_tile_steering_override); in gfx_v10_0_get_csb_buffer()
4024 amdgpu_bo_free_kernel(&adev->gfx.rlc.clear_state_obj, in gfx_v10_0_rlc_fini()
4025 &adev->gfx.rlc.clear_state_gpu_addr, in gfx_v10_0_rlc_fini()
4026 (void **)&adev->gfx.rlc.cs_ptr); in gfx_v10_0_rlc_fini()
4029 amdgpu_bo_free_kernel(&adev->gfx.rlc.cp_table_obj, in gfx_v10_0_rlc_fini()
4030 &adev->gfx.rlc.cp_table_gpu_addr, in gfx_v10_0_rlc_fini()
4031 (void **)&adev->gfx.rlc.cp_table_ptr); in gfx_v10_0_rlc_fini()
4039 adev->gfx.rlc.cs_data = gfx10_cs_data; in gfx_v10_0_rlc_init()
4041 cs_data = adev->gfx.rlc.cs_data; in gfx_v10_0_rlc_init()
4051 if (adev->gfx.rlc.funcs->update_spm_vmid) in gfx_v10_0_rlc_init()
4052 adev->gfx.rlc.funcs->update_spm_vmid(adev, 0xf); in gfx_v10_0_rlc_init()
4059 amdgpu_bo_free_kernel(&adev->gfx.mec.hpd_eop_obj, NULL, NULL); in gfx_v10_0_mec_fini()
4060 amdgpu_bo_free_kernel(&adev->gfx.mec.mec_fw_obj, NULL, NULL); in gfx_v10_0_mec_fini()
4067 bitmap_zero(adev->gfx.me.queue_bitmap, AMDGPU_MAX_GFX_QUEUES); in gfx_v10_0_me_init()
4073 DRM_ERROR("Failed to load gfx firmware!\n"); in gfx_v10_0_me_init()
4089 bitmap_zero(adev->gfx.mec.queue_bitmap, AMDGPU_MAX_COMPUTE_QUEUES); in gfx_v10_0_mec_init()
4093 mec_hpd_size = adev->gfx.num_compute_rings * GFX10_MEC_HPD_SIZE; in gfx_v10_0_mec_init()
4098 &adev->gfx.mec.hpd_eop_obj, in gfx_v10_0_mec_init()
4099 &adev->gfx.mec.hpd_eop_gpu_addr, in gfx_v10_0_mec_init()
4109 amdgpu_bo_kunmap(adev->gfx.mec.hpd_eop_obj); in gfx_v10_0_mec_init()
4110 amdgpu_bo_unreserve(adev->gfx.mec.hpd_eop_obj); in gfx_v10_0_mec_init()
4114 mec_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v10_0_mec_init()
4116 fw_data = (const __le32 *) (adev->gfx.mec_fw->data + in gfx_v10_0_mec_init()
4122 &adev->gfx.mec.mec_fw_obj, in gfx_v10_0_mec_init()
4123 &adev->gfx.mec.mec_fw_gpu_addr, in gfx_v10_0_mec_init()
4133 amdgpu_bo_kunmap(adev->gfx.mec.mec_fw_obj); in gfx_v10_0_mec_init()
4134 amdgpu_bo_unreserve(adev->gfx.mec.mec_fw_obj); in gfx_v10_0_mec_init()
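Note: matches 4089-4134 show gfx_v10_0_mec_init() backing the compute HPD EOP area and the MEC firmware with GPU buffer objects: amdgpu_bo_create_reserved() returns the BO, its GPU address and a CPU mapping in one call, the payload is copied in, and the mapping is dropped with amdgpu_bo_kunmap()/amdgpu_bo_unreserve(). A minimal sketch of that sequence for the firmware copy; the alignment and memory domain here are illustrative assumptions, not taken from the listing:

    #include "amdgpu.h"

    static int copy_mec_fw_to_bo_sketch(struct amdgpu_device *adev)
    {
        const struct gfx_firmware_header_v1_0 *mec_hdr;
        const __le32 *fw_data;
        u32 fw_size;
        void *fw_ptr;
        int r;

        mec_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data;
        fw_data = (const __le32 *)(adev->gfx.mec_fw->data +
                                   le32_to_cpu(mec_hdr->header.ucode_array_offset_bytes));
        fw_size = le32_to_cpu(mec_hdr->header.ucode_size_bytes);

        /* Allocates a kernel BO, maps it for the CPU and leaves it reserved. */
        r = amdgpu_bo_create_reserved(adev, fw_size, PAGE_SIZE,
                                      AMDGPU_GEM_DOMAIN_GTT,
                                      &adev->gfx.mec.mec_fw_obj,
                                      &adev->gfx.mec.mec_fw_gpu_addr,
                                      &fw_ptr);
        if (r)
            return r;

        memcpy(fw_ptr, fw_data, fw_size);

        amdgpu_bo_kunmap(adev->gfx.mec.mec_fw_obj);
        amdgpu_bo_unreserve(adev->gfx.mec.mec_fw_obj);
        return 0;
    }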
4229 adev->gfx.funcs = &gfx_v10_0_gfx_funcs; in gfx_v10_0_gpu_early_init()
4235 adev->gfx.config.max_hw_contexts = 8; in gfx_v10_0_gpu_early_init()
4236 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v10_0_gpu_early_init()
4237 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v10_0_gpu_early_init()
4238 adev->gfx.config.sc_hiz_tile_fifo_size = 0; in gfx_v10_0_gpu_early_init()
4239 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v10_0_gpu_early_init()
4244 adev->gfx.config.max_hw_contexts = 8; in gfx_v10_0_gpu_early_init()
4245 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v10_0_gpu_early_init()
4246 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v10_0_gpu_early_init()
4247 adev->gfx.config.sc_hiz_tile_fifo_size = 0; in gfx_v10_0_gpu_early_init()
4248 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v10_0_gpu_early_init()
4250 adev->gfx.config.gb_addr_config_fields.num_pkrs = in gfx_v10_0_gpu_early_init()
4258 adev->gfx.config.gb_addr_config = gb_addr_config; in gfx_v10_0_gpu_early_init()
4260 adev->gfx.config.gb_addr_config_fields.num_pipes = 1 << in gfx_v10_0_gpu_early_init()
4261 REG_GET_FIELD(adev->gfx.config.gb_addr_config, in gfx_v10_0_gpu_early_init()
4264 adev->gfx.config.max_tile_pipes = in gfx_v10_0_gpu_early_init()
4265 adev->gfx.config.gb_addr_config_fields.num_pipes; in gfx_v10_0_gpu_early_init()
4267 adev->gfx.config.gb_addr_config_fields.max_compress_frags = 1 << in gfx_v10_0_gpu_early_init()
4268 REG_GET_FIELD(adev->gfx.config.gb_addr_config, in gfx_v10_0_gpu_early_init()
4270 adev->gfx.config.gb_addr_config_fields.num_rb_per_se = 1 << in gfx_v10_0_gpu_early_init()
4271 REG_GET_FIELD(adev->gfx.config.gb_addr_config, in gfx_v10_0_gpu_early_init()
4273 adev->gfx.config.gb_addr_config_fields.num_se = 1 << in gfx_v10_0_gpu_early_init()
4274 REG_GET_FIELD(adev->gfx.config.gb_addr_config, in gfx_v10_0_gpu_early_init()
4276 adev->gfx.config.gb_addr_config_fields.pipe_interleave_size = 1 << (8 + in gfx_v10_0_gpu_early_init()
4277 REG_GET_FIELD(adev->gfx.config.gb_addr_config, in gfx_v10_0_gpu_early_init()
4288 ring = &adev->gfx.gfx_ring[ring_id]; in gfx_v10_0_gfx_ring_init()
4305 &adev->gfx.eop_irq, irq_type, in gfx_v10_0_gfx_ring_init()
4320 ring = &adev->gfx.compute_ring[ring_id]; in gfx_v10_0_compute_ring_init()
4330 ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr in gfx_v10_0_compute_ring_init()
4335 + ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec) in gfx_v10_0_compute_ring_init()
4341 &adev->gfx.eop_irq, irq_type, hw_prio); in gfx_v10_0_compute_ring_init()
4358 adev->gfx.me.num_me = 1; in gfx_v10_0_sw_init()
4359 adev->gfx.me.num_pipe_per_me = 1; in gfx_v10_0_sw_init()
4360 adev->gfx.me.num_queue_per_pipe = 1; in gfx_v10_0_sw_init()
4361 adev->gfx.mec.num_mec = 2; in gfx_v10_0_sw_init()
4362 adev->gfx.mec.num_pipe_per_mec = 4; in gfx_v10_0_sw_init()
4363 adev->gfx.mec.num_queue_per_pipe = 8; in gfx_v10_0_sw_init()
4367 adev->gfx.me.num_me = 1; in gfx_v10_0_sw_init()
4368 adev->gfx.me.num_pipe_per_me = 1; in gfx_v10_0_sw_init()
4369 adev->gfx.me.num_queue_per_pipe = 1; in gfx_v10_0_sw_init()
4370 adev->gfx.mec.num_mec = 2; in gfx_v10_0_sw_init()
4371 adev->gfx.mec.num_pipe_per_mec = 4; in gfx_v10_0_sw_init()
4372 adev->gfx.mec.num_queue_per_pipe = 4; in gfx_v10_0_sw_init()
4375 adev->gfx.me.num_me = 1; in gfx_v10_0_sw_init()
4376 adev->gfx.me.num_pipe_per_me = 1; in gfx_v10_0_sw_init()
4377 adev->gfx.me.num_queue_per_pipe = 1; in gfx_v10_0_sw_init()
4378 adev->gfx.mec.num_mec = 1; in gfx_v10_0_sw_init()
4379 adev->gfx.mec.num_pipe_per_mec = 4; in gfx_v10_0_sw_init()
4380 adev->gfx.mec.num_queue_per_pipe = 8; in gfx_v10_0_sw_init()
4387 &adev->gfx.kiq.irq); in gfx_v10_0_sw_init()
4394 &adev->gfx.eop_irq); in gfx_v10_0_sw_init()
4400 &adev->gfx.priv_reg_irq); in gfx_v10_0_sw_init()
4406 &adev->gfx.priv_inst_irq); in gfx_v10_0_sw_init()
4410 adev->gfx.gfx_current_status = AMDGPU_GFX_NORMAL_MODE; in gfx_v10_0_sw_init()
4430 /* set up the gfx ring */ in gfx_v10_0_sw_init()
4431 for (i = 0; i < adev->gfx.me.num_me; i++) { in gfx_v10_0_sw_init()
4432 for (j = 0; j < adev->gfx.me.num_queue_per_pipe; j++) { in gfx_v10_0_sw_init()
4433 for (k = 0; k < adev->gfx.me.num_pipe_per_me; k++) { in gfx_v10_0_sw_init()
4448 for (i = 0; i < adev->gfx.mec.num_mec; ++i) { in gfx_v10_0_sw_init()
4449 for (j = 0; j < adev->gfx.mec.num_queue_per_pipe; j++) { in gfx_v10_0_sw_init()
4450 for (k = 0; k < adev->gfx.mec.num_pipe_per_mec; k++) { in gfx_v10_0_sw_init()
4471 kiq = &adev->gfx.kiq; in gfx_v10_0_sw_init()
4487 adev->gfx.ce_ram_size = F32_CE_PROGRAM_RAM_SIZE; in gfx_v10_0_sw_init()
4496 amdgpu_bo_free_kernel(&adev->gfx.pfp.pfp_fw_obj, in gfx_v10_0_pfp_fini()
4497 &adev->gfx.pfp.pfp_fw_gpu_addr, in gfx_v10_0_pfp_fini()
4498 (void **)&adev->gfx.pfp.pfp_fw_ptr); in gfx_v10_0_pfp_fini()
4503 amdgpu_bo_free_kernel(&adev->gfx.ce.ce_fw_obj, in gfx_v10_0_ce_fini()
4504 &adev->gfx.ce.ce_fw_gpu_addr, in gfx_v10_0_ce_fini()
4505 (void **)&adev->gfx.ce.ce_fw_ptr); in gfx_v10_0_ce_fini()
4510 amdgpu_bo_free_kernel(&adev->gfx.me.me_fw_obj, in gfx_v10_0_me_fini()
4511 &adev->gfx.me.me_fw_gpu_addr, in gfx_v10_0_me_fini()
4512 (void **)&adev->gfx.me.me_fw_ptr); in gfx_v10_0_me_fini()
4520 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v10_0_sw_fini()
4521 amdgpu_ring_fini(&adev->gfx.gfx_ring[i]); in gfx_v10_0_sw_fini()
4522 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v10_0_sw_fini()
4523 amdgpu_ring_fini(&adev->gfx.compute_ring[i]); in gfx_v10_0_sw_fini()
4526 amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq.ring); in gfx_v10_0_sw_fini()
4580 mask = amdgpu_gfx_create_bitmask(adev->gfx.config.max_backends_per_se / in gfx_v10_0_get_rb_active_bitmap()
4581 adev->gfx.config.max_sh_per_se); in gfx_v10_0_get_rb_active_bitmap()
4592 u32 rb_bitmap_width_per_sh = adev->gfx.config.max_backends_per_se / in gfx_v10_0_setup_rb()
4593 adev->gfx.config.max_sh_per_se; in gfx_v10_0_setup_rb()
4596 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v10_0_setup_rb()
4597 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v10_0_setup_rb()
4598 bitmap = i * adev->gfx.config.max_sh_per_se + j; in gfx_v10_0_setup_rb()
4604 active_rbs |= data << ((i * adev->gfx.config.max_sh_per_se + j) * in gfx_v10_0_setup_rb()
4611 adev->gfx.config.backend_enable_mask = active_rbs; in gfx_v10_0_setup_rb()
4612 adev->gfx.config.num_rbs = hweight32(active_rbs); in gfx_v10_0_setup_rb()
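Note: matches 4580-4612 show how gfx_v10_0_setup_rb() walks every shader engine and shader array, reads the per-SA render-backend bitmap, shifts it into one global mask, and stores both the enabled mask and its popcount in gfx.config. A condensed sketch of that accumulation; get_rb_active_bitmap() is a hypothetical stand-in for the driver's gfx_v10_0_get_rb_active_bitmap(), and the GRBM SE/SA selection plus the locking around the loop is omitted:

    #include "amdgpu.h"

    /* Hypothetical: returns the active-RB bits for the given SE/SA. */
    static u32 get_rb_active_bitmap(struct amdgpu_device *adev, int se, int sh);

    static void setup_rb_sketch(struct amdgpu_device *adev)
    {
        int i, j;
        u32 data, active_rbs = 0;
        u32 rb_bitmap_width_per_sh = adev->gfx.config.max_backends_per_se /
                                     adev->gfx.config.max_sh_per_se;

        for (i = 0; i < adev->gfx.config.max_shader_engines; i++) {
            for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) {
                data = get_rb_active_bitmap(adev, i, j);
                active_rbs |= data << ((i * adev->gfx.config.max_sh_per_se + j) *
                                       rb_bitmap_width_per_sh);
            }
        }

        adev->gfx.config.backend_enable_mask = active_rbs;
        adev->gfx.config.num_rbs = hweight32(active_rbs);
    }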
4624 /* for ASICs that integrates GFX v10.3 in gfx_v10_0_init_pa_sc_tile_steering_override()
4631 num_sc = adev->gfx.config.max_shader_engines * adev->gfx.config.max_sh_per_se * in gfx_v10_0_init_pa_sc_tile_steering_override()
4632 adev->gfx.config.num_sc_per_sh; in gfx_v10_0_init_pa_sc_tile_steering_override()
4636 num_rb_per_sc = enabled_rb_per_sh / adev->gfx.config.num_sc_per_sh; in gfx_v10_0_init_pa_sc_tile_steering_override()
4638 num_packer_per_sc = adev->gfx.config.num_packer_per_sc; in gfx_v10_0_init_pa_sc_tile_steering_override()
4694 * Initialize all compute and user-gfx VMIDs to have no GDS, GWS, or OA in gfx_v10_0_init_gds_vmid()
4711 int max_wgp_per_sh = adev->gfx.config.max_cu_per_sh >> 1; in gfx_v10_0_tcp_harvest()
4739 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v10_0_tcp_harvest()
4740 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v10_0_tcp_harvest()
4784 adev->gfx.config.tcc_disabled_mask = in gfx_v10_0_get_tcc_info()
4797 gfx_v10_0_get_cu_info(adev, &adev->gfx.cu_info); in gfx_v10_0_constants_init()
4799 adev->gfx.config.pa_sc_tile_steering_override = in gfx_v10_0_constants_init()
4850 adev->gfx.rlc.funcs->get_csb_buffer(adev, adev->gfx.rlc.cs_ptr); in gfx_v10_0_init_csb()
4855 adev->gfx.rlc.clear_state_gpu_addr >> 32); in gfx_v10_0_init_csb()
4857 adev->gfx.rlc.clear_state_gpu_addr & 0xfffffffc); in gfx_v10_0_init_csb()
4858 WREG32_SOC15_RLC(GC, 0, mmRLC_CSIB_LENGTH, adev->gfx.rlc.clear_state_size); in gfx_v10_0_init_csb()
4861 adev->gfx.rlc.clear_state_gpu_addr >> 32); in gfx_v10_0_init_csb()
4863 adev->gfx.rlc.clear_state_gpu_addr & 0xfffffffc); in gfx_v10_0_init_csb()
4864 WREG32_SOC15(GC, 0, mmRLC_CSIB_LENGTH, adev->gfx.rlc.clear_state_size); in gfx_v10_0_init_csb()
4935 if (!adev->gfx.rlc_fw) in gfx_v10_0_rlc_load_microcode()
4938 hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data; in gfx_v10_0_rlc_load_microcode()
4941 fw_data = (const __le32 *)(adev->gfx.rlc_fw->data + in gfx_v10_0_rlc_load_microcode()
4952 WREG32_SOC15(GC, 0, mmRLC_GPM_UCODE_ADDR, adev->gfx.rlc_fw_version); in gfx_v10_0_rlc_load_microcode()
4977 adev->gfx.rlc.funcs->stop(adev); in gfx_v10_0_rlc_resume()
4999 adev->gfx.rlc.funcs->start(adev); in gfx_v10_0_rlc_resume()
5023 &adev->gfx.rlc.rlc_toc_bo, in gfx_v10_0_parse_rlc_toc()
5024 &adev->gfx.rlc.rlc_toc_gpu_addr, in gfx_v10_0_parse_rlc_toc()
5025 (void **)&adev->gfx.rlc.rlc_toc_buf); in gfx_v10_0_parse_rlc_toc()
5032 memcpy(adev->gfx.rlc.rlc_toc_buf, adev->psp.toc_start_addr, adev->psp.toc_bin_size); in gfx_v10_0_parse_rlc_toc()
5034 rlc_toc = (RLC_TABLE_OF_CONTENT *)adev->gfx.rlc.rlc_toc_buf; in gfx_v10_0_parse_rlc_toc()
5085 &adev->gfx.rlc.rlc_autoload_bo, in gfx_v10_0_rlc_backdoor_autoload_buffer_init()
5086 &adev->gfx.rlc.rlc_autoload_gpu_addr, in gfx_v10_0_rlc_backdoor_autoload_buffer_init()
5087 (void **)&adev->gfx.rlc.rlc_autoload_ptr); in gfx_v10_0_rlc_backdoor_autoload_buffer_init()
5098 amdgpu_bo_free_kernel(&adev->gfx.rlc.rlc_toc_bo, in gfx_v10_0_rlc_backdoor_autoload_buffer_fini()
5099 &adev->gfx.rlc.rlc_toc_gpu_addr, in gfx_v10_0_rlc_backdoor_autoload_buffer_fini()
5100 (void **)&adev->gfx.rlc.rlc_toc_buf); in gfx_v10_0_rlc_backdoor_autoload_buffer_fini()
5101 amdgpu_bo_free_kernel(&adev->gfx.rlc.rlc_autoload_bo, in gfx_v10_0_rlc_backdoor_autoload_buffer_fini()
5102 &adev->gfx.rlc.rlc_autoload_gpu_addr, in gfx_v10_0_rlc_backdoor_autoload_buffer_fini()
5103 (void **)&adev->gfx.rlc.rlc_autoload_ptr); in gfx_v10_0_rlc_backdoor_autoload_buffer_fini()
5113 char *ptr = adev->gfx.rlc.rlc_autoload_ptr; in gfx_v10_0_rlc_backdoor_autoload_copy_ucode()
5138 data = adev->gfx.rlc.rlc_toc_buf; in gfx_v10_0_rlc_backdoor_autoload_copy_toc_ucode()
5155 adev->gfx.pfp_fw->data; in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5156 fw_data = (const __le32 *)(adev->gfx.pfp_fw->data + in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5165 adev->gfx.ce_fw->data; in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5166 fw_data = (const __le32 *)(adev->gfx.ce_fw->data + in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5175 adev->gfx.me_fw->data; in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5176 fw_data = (const __le32 *)(adev->gfx.me_fw->data + in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5185 adev->gfx.rlc_fw->data; in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5186 fw_data = (const __le32 *)(adev->gfx.rlc_fw->data + in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5195 adev->gfx.mec_fw->data; in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5196 fw_data = (const __le32 *) (adev->gfx.mec_fw->data + in gfx_v10_0_rlc_backdoor_autoload_copy_gfx_ucode()
5252 gpu_addr = adev->gfx.rlc.rlc_autoload_gpu_addr + rlc_g_offset; in gfx_v10_0_rlc_backdoor_autoload_enable()
5301 addr = adev->gfx.rlc.rlc_autoload_gpu_addr + in gfx_v10_0_rlc_backdoor_autoload_config_me_cache()
5338 addr = adev->gfx.rlc.rlc_autoload_gpu_addr + in gfx_v10_0_rlc_backdoor_autoload_config_ce_cache()
5375 addr = adev->gfx.rlc.rlc_autoload_gpu_addr + in gfx_v10_0_rlc_backdoor_autoload_config_pfp_cache()
5412 addr = adev->gfx.rlc.rlc_autoload_gpu_addr + in gfx_v10_0_rlc_backdoor_autoload_config_mec_cache()
5487 DRM_ERROR("failed to %s cp gfx\n", enable ? "unhalt" : "halt"); in gfx_v10_0_cp_gfx_enable()
5502 adev->gfx.pfp_fw->data; in gfx_v10_0_cp_gfx_load_pfp_microcode()
5506 fw_data = (const __le32 *)(adev->gfx.pfp_fw->data + in gfx_v10_0_cp_gfx_load_pfp_microcode()
5512 &adev->gfx.pfp.pfp_fw_obj, in gfx_v10_0_cp_gfx_load_pfp_microcode()
5513 &adev->gfx.pfp.pfp_fw_gpu_addr, in gfx_v10_0_cp_gfx_load_pfp_microcode()
5514 (void **)&adev->gfx.pfp.pfp_fw_ptr); in gfx_v10_0_cp_gfx_load_pfp_microcode()
5521 memcpy(adev->gfx.pfp.pfp_fw_ptr, fw_data, fw_size); in gfx_v10_0_cp_gfx_load_pfp_microcode()
5523 amdgpu_bo_kunmap(adev->gfx.pfp.pfp_fw_obj); in gfx_v10_0_cp_gfx_load_pfp_microcode()
5524 amdgpu_bo_unreserve(adev->gfx.pfp.pfp_fw_obj); in gfx_v10_0_cp_gfx_load_pfp_microcode()
5555 adev->gfx.pfp.pfp_fw_gpu_addr & 0xFFFFF000); in gfx_v10_0_cp_gfx_load_pfp_microcode()
5557 upper_32_bits(adev->gfx.pfp.pfp_fw_gpu_addr)); in gfx_v10_0_cp_gfx_load_pfp_microcode()
5565 WREG32_SOC15(GC, 0, mmCP_HYP_PFP_UCODE_ADDR, adev->gfx.pfp_fw_version); in gfx_v10_0_cp_gfx_load_pfp_microcode()
5580 adev->gfx.ce_fw->data; in gfx_v10_0_cp_gfx_load_ce_microcode()
5584 fw_data = (const __le32 *)(adev->gfx.ce_fw->data + in gfx_v10_0_cp_gfx_load_ce_microcode()
5590 &adev->gfx.ce.ce_fw_obj, in gfx_v10_0_cp_gfx_load_ce_microcode()
5591 &adev->gfx.ce.ce_fw_gpu_addr, in gfx_v10_0_cp_gfx_load_ce_microcode()
5592 (void **)&adev->gfx.ce.ce_fw_ptr); in gfx_v10_0_cp_gfx_load_ce_microcode()
5599 memcpy(adev->gfx.ce.ce_fw_ptr, fw_data, fw_size); in gfx_v10_0_cp_gfx_load_ce_microcode()
5601 amdgpu_bo_kunmap(adev->gfx.ce.ce_fw_obj); in gfx_v10_0_cp_gfx_load_ce_microcode()
5602 amdgpu_bo_unreserve(adev->gfx.ce.ce_fw_obj); in gfx_v10_0_cp_gfx_load_ce_microcode()
5632 adev->gfx.ce.ce_fw_gpu_addr & 0xFFFFF000); in gfx_v10_0_cp_gfx_load_ce_microcode()
5634 upper_32_bits(adev->gfx.ce.ce_fw_gpu_addr)); in gfx_v10_0_cp_gfx_load_ce_microcode()
5642 WREG32_SOC15(GC, 0, mmCP_HYP_CE_UCODE_ADDR, adev->gfx.ce_fw_version); in gfx_v10_0_cp_gfx_load_ce_microcode()
5657 adev->gfx.me_fw->data; in gfx_v10_0_cp_gfx_load_me_microcode()
5661 fw_data = (const __le32 *)(adev->gfx.me_fw->data + in gfx_v10_0_cp_gfx_load_me_microcode()
5667 &adev->gfx.me.me_fw_obj, in gfx_v10_0_cp_gfx_load_me_microcode()
5668 &adev->gfx.me.me_fw_gpu_addr, in gfx_v10_0_cp_gfx_load_me_microcode()
5669 (void **)&adev->gfx.me.me_fw_ptr); in gfx_v10_0_cp_gfx_load_me_microcode()
5676 memcpy(adev->gfx.me.me_fw_ptr, fw_data, fw_size); in gfx_v10_0_cp_gfx_load_me_microcode()
5678 amdgpu_bo_kunmap(adev->gfx.me.me_fw_obj); in gfx_v10_0_cp_gfx_load_me_microcode()
5679 amdgpu_bo_unreserve(adev->gfx.me.me_fw_obj); in gfx_v10_0_cp_gfx_load_me_microcode()
5709 adev->gfx.me.me_fw_gpu_addr & 0xFFFFF000); in gfx_v10_0_cp_gfx_load_me_microcode()
5711 upper_32_bits(adev->gfx.me.me_fw_gpu_addr)); in gfx_v10_0_cp_gfx_load_me_microcode()
5719 WREG32_SOC15(GC, 0, mmCP_HYP_ME_UCODE_ADDR, adev->gfx.me_fw_version); in gfx_v10_0_cp_gfx_load_me_microcode()
5728 if (!adev->gfx.me_fw || !adev->gfx.pfp_fw || !adev->gfx.ce_fw) in gfx_v10_0_cp_gfx_load_microcode()
5764 adev->gfx.config.max_hw_contexts - 1); in gfx_v10_0_cp_gfx_start()
5769 ring = &adev->gfx.gfx_ring[0]; in gfx_v10_0_cp_gfx_start()
5801 amdgpu_ring_write(ring, adev->gfx.config.pa_sc_tile_steering_override); in gfx_v10_0_cp_gfx_start()
5817 if (adev->gfx.num_gfx_rings > 1) { in gfx_v10_0_cp_gfx_start()
5818 /* maximum supported gfx ring is 2 */ in gfx_v10_0_cp_gfx_start()
5819 ring = &adev->gfx.gfx_ring[1]; in gfx_v10_0_cp_gfx_start()
5896 /* Init gfx ring 0 for pipe 0 */ in gfx_v10_0_cp_gfx_resume()
5901 ring = &adev->gfx.gfx_ring[0]; in gfx_v10_0_cp_gfx_resume()
5939 /* Init gfx ring 1 for pipe 1 */ in gfx_v10_0_cp_gfx_resume()
5940 if (adev->gfx.num_gfx_rings > 1) { in gfx_v10_0_cp_gfx_resume()
5943 /* maximum supported gfx ring is 2 */ in gfx_v10_0_cp_gfx_resume()
5944 ring = &adev->gfx.gfx_ring[1]; in gfx_v10_0_cp_gfx_resume()
5983 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in gfx_v10_0_cp_gfx_resume()
5984 ring = &adev->gfx.gfx_ring[i]; in gfx_v10_0_cp_gfx_resume()
6017 adev->gfx.kiq.ring.sched.ready = false; in gfx_v10_0_cp_compute_enable()
6030 if (!adev->gfx.mec_fw) in gfx_v10_0_cp_compute_load_microcode()
6035 mec_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v10_0_cp_compute_load_microcode()
6039 (adev->gfx.mec_fw->data + in gfx_v10_0_cp_compute_load_microcode()
6070 WREG32_SOC15(GC, 0, mmCP_CPC_IC_BASE_LO, adev->gfx.mec.mec_fw_gpu_addr & in gfx_v10_0_cp_compute_load_microcode()
6073 upper_32_bits(adev->gfx.mec.mec_fw_gpu_addr)); in gfx_v10_0_cp_compute_load_microcode()
6082 WREG32_SOC15(GC, 0, mmCP_MEC_ME1_UCODE_ADDR, adev->gfx.mec_fw_version); in gfx_v10_0_cp_compute_load_microcode()
6127 /* set up gfx hqd wptr */ in gfx_v10_0_gfx_mqd_init()
6158 /* set up gfx hqd base. this is similar as CP_RB_BASE */ in gfx_v10_0_gfx_mqd_init()
6259 int mqd_idx = ring - &adev->gfx.gfx_ring[0]; in gfx_v10_0_gfx_init_queue()
6271 if (adev->gfx.me.mqd_backup[mqd_idx]) in gfx_v10_0_gfx_init_queue()
6272 memcpy(adev->gfx.me.mqd_backup[mqd_idx], mqd, sizeof(*mqd)); in gfx_v10_0_gfx_init_queue()
6275 if (adev->gfx.me.mqd_backup[mqd_idx]) in gfx_v10_0_gfx_init_queue()
6276 memcpy(mqd, adev->gfx.me.mqd_backup[mqd_idx], sizeof(*mqd)); in gfx_v10_0_gfx_init_queue()
6298 struct amdgpu_kiq *kiq = &adev->gfx.kiq; in gfx_v10_0_kiq_enable_kgq()
6299 struct amdgpu_ring *kiq_ring = &adev->gfx.kiq.ring; in gfx_v10_0_kiq_enable_kgq()
6306 adev->gfx.num_gfx_rings); in gfx_v10_0_kiq_enable_kgq()
6312 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v10_0_kiq_enable_kgq()
6313 kiq->pmf->kiq_map_queues(kiq_ring, &adev->gfx.gfx_ring[i]); in gfx_v10_0_kiq_enable_kgq()
6324 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in gfx_v10_0_cp_async_gfx_ring_resume()
6325 ring = &adev->gfx.gfx_ring[i]; in gfx_v10_0_cp_async_gfx_ring_resume()
6350 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in gfx_v10_0_cp_async_gfx_ring_resume()
6351 ring = &adev->gfx.gfx_ring[i]; in gfx_v10_0_cp_async_gfx_ring_resume()
6628 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v10_0_kiq_init_queue()
6629 memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx], sizeof(*mqd)); in gfx_v10_0_kiq_init_queue()
6649 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v10_0_kiq_init_queue()
6650 memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd, sizeof(*mqd)); in gfx_v10_0_kiq_init_queue()
6660 int mqd_idx = ring - &adev->gfx.compute_ring[0]; in gfx_v10_0_kcq_init_queue()
6670 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v10_0_kcq_init_queue()
6671 memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd, sizeof(*mqd)); in gfx_v10_0_kcq_init_queue()
6674 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v10_0_kcq_init_queue()
6675 memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx], sizeof(*mqd)); in gfx_v10_0_kcq_init_queue()
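Note: matches 6628-6675 show the MQD handling shared by the KIQ and KCQ init paths: on first-time init the freshly programmed MQD is copied into a per-queue backup, while on the reset/resume path the backup is copied back over the MQD so the queue can be re-mapped without reprogramming it from scratch. A sketch of that save-or-restore decision for the KCQ case, assuming mqd points at the ring's MQD and restore mirrors the driver's in-reset/in-suspend check:

    #include "amdgpu.h"

    /* Save the MQD after first-time init, or restore it when coming back from reset. */
    static void backup_or_restore_compute_mqd(struct amdgpu_device *adev,
                                              struct amdgpu_ring *ring,
                                              void *mqd, size_t mqd_size,
                                              bool restore)
    {
        int mqd_idx = ring - &adev->gfx.compute_ring[0];

        if (!adev->gfx.mec.mqd_backup[mqd_idx])
            return;

        if (restore)
            memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx], mqd_size);
        else
            memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd, mqd_size);
    }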
6693 ring = &adev->gfx.kiq.ring; in gfx_v10_0_kiq_resume()
6718 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_kcq_resume()
6719 ring = &adev->gfx.compute_ring[i]; in gfx_v10_0_kcq_resume()
6777 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in gfx_v10_0_cp_resume()
6778 ring = &adev->gfx.gfx_ring[i]; in gfx_v10_0_cp_resume()
6784 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_cp_resume()
6785 ring = &adev->gfx.compute_ring[i]; in gfx_v10_0_cp_resume()
6973 * For gfx 10, rlc firmware loading relies on smu firmware is in gfx_v10_0_hw_init()
7019 struct amdgpu_kiq *kiq = &adev->gfx.kiq; in gfx_v10_0_kiq_disable_kgq()
7027 adev->gfx.num_gfx_rings)) in gfx_v10_0_kiq_disable_kgq()
7030 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v10_0_kiq_disable_kgq()
7031 kiq->pmf->kiq_unmap_queues(kiq_ring, &adev->gfx.gfx_ring[i], in gfx_v10_0_kiq_disable_kgq()
7044 amdgpu_irq_put(adev, &adev->gfx.priv_reg_irq, 0); in gfx_v10_0_hw_fini()
7045 amdgpu_irq_put(adev, &adev->gfx.priv_inst_irq, 0); in gfx_v10_0_hw_fini()
7164 /* Disable GFX parsing/prefetching */ in gfx_v10_0_soft_reset()
7195 mutex_lock(&adev->gfx.gpu_clock_mutex); in gfx_v10_0_get_gpu_clock_counter()
7198 mutex_unlock(&adev->gfx.gpu_clock_mutex); in gfx_v10_0_get_gpu_clock_counter()
7240 adev->gfx.num_gfx_rings = GFX10_NUM_GFX_RINGS_NV1X; in gfx_v10_0_early_init()
7244 adev->gfx.num_gfx_rings = GFX10_NUM_GFX_RINGS_Sienna_Cichlid; in gfx_v10_0_early_init()
7250 adev->gfx.num_compute_rings = amdgpu_num_kcq; in gfx_v10_0_early_init()
7266 r = amdgpu_irq_get(adev, &adev->gfx.priv_reg_irq, 0); in gfx_v10_0_late_init()
7270 r = amdgpu_irq_get(adev, &adev->gfx.priv_inst_irq, 0); in gfx_v10_0_late_init()
7360 /* MGLS is a global flag to control all MGLS in GFX */ in gfx_v10_0_update_medium_grain_clock_gating()
7499 /* === CGCG /CGLS for GFX 3D Only === */ in gfx_v10_0_update_gfx_clock_gating()
7508 /* === CGCG /CGLS for GFX 3D Only === */ in gfx_v10_0_update_gfx_clock_gating()
7958 /* set load_per_context_state & load_gfx_sh_regs for GFX */ in gfx_v10_0_ring_emit_cntxcntl()
8008 struct amdgpu_kiq *kiq = &adev->gfx.kiq; in gfx_v10_0_ring_preempt_ib()
8178 fw_version_ok = adev->gfx.cp_fw_write_wait; in gfx_v10_0_ring_emit_reg_write_reg_wait()
8354 amdgpu_fence_process(&adev->gfx.gfx_ring[0]); in gfx_v10_0_eop_irq()
8356 amdgpu_fence_process(&adev->gfx.gfx_ring[1]); in gfx_v10_0_eop_irq()
8360 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_eop_irq()
8361 ring = &adev->gfx.compute_ring[i]; in gfx_v10_0_eop_irq()
8423 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in gfx_v10_0_handle_priv_fault()
8424 ring = &adev->gfx.gfx_ring[i]; in gfx_v10_0_handle_priv_fault()
8425 /* we only enabled 1 gfx queue per pipe for now */ in gfx_v10_0_handle_priv_fault()
8432 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_handle_priv_fault()
8433 ring = &adev->gfx.compute_ring[i]; in gfx_v10_0_handle_priv_fault()
8468 struct amdgpu_ring *ring = &(adev->gfx.kiq.ring); in gfx_v10_0_kiq_set_interrupt_state()
8512 struct amdgpu_ring *ring = &(adev->gfx.kiq.ring); in gfx_v10_0_kiq_irq()
8691 adev->gfx.kiq.ring.funcs = &gfx_v10_0_ring_funcs_kiq; in gfx_v10_0_set_ring_funcs()
8693 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v10_0_set_ring_funcs()
8694 adev->gfx.gfx_ring[i].funcs = &gfx_v10_0_ring_funcs_gfx; in gfx_v10_0_set_ring_funcs()
8696 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v10_0_set_ring_funcs()
8697 adev->gfx.compute_ring[i].funcs = &gfx_v10_0_ring_funcs_compute; in gfx_v10_0_set_ring_funcs()
8722 adev->gfx.eop_irq.num_types = AMDGPU_CP_IRQ_LAST; in gfx_v10_0_set_irq_funcs()
8723 adev->gfx.eop_irq.funcs = &gfx_v10_0_eop_irq_funcs; in gfx_v10_0_set_irq_funcs()
8725 adev->gfx.kiq.irq.num_types = AMDGPU_CP_KIQ_IRQ_LAST; in gfx_v10_0_set_irq_funcs()
8726 adev->gfx.kiq.irq.funcs = &gfx_v10_0_kiq_irq_funcs; in gfx_v10_0_set_irq_funcs()
8728 adev->gfx.priv_reg_irq.num_types = 1; in gfx_v10_0_set_irq_funcs()
8729 adev->gfx.priv_reg_irq.funcs = &gfx_v10_0_priv_reg_irq_funcs; in gfx_v10_0_set_irq_funcs()
8731 adev->gfx.priv_inst_irq.num_types = 1; in gfx_v10_0_set_irq_funcs()
8732 adev->gfx.priv_inst_irq.funcs = &gfx_v10_0_priv_inst_irq_funcs; in gfx_v10_0_set_irq_funcs()
8742 adev->gfx.rlc.funcs = &gfx_v10_0_rlc_funcs; in gfx_v10_0_set_rlc_funcs()
8745 adev->gfx.rlc.funcs = &gfx_v10_0_rlc_funcs_sriov; in gfx_v10_0_set_rlc_funcs()
8754 unsigned total_cu = adev->gfx.config.max_cu_per_sh * in gfx_v10_0_set_gds_init()
8755 adev->gfx.config.max_sh_per_se * in gfx_v10_0_set_gds_init()
8756 adev->gfx.config.max_shader_engines; in gfx_v10_0_set_gds_init()
8788 amdgpu_gfx_create_bitmask(adev->gfx.config.max_cu_per_sh >> 1); in gfx_v10_0_get_wgp_active_bitmap_per_sh()
8824 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v10_0_get_cu_info()
8825 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v10_0_get_cu_info()
8826 bitmap = i * adev->gfx.config.max_sh_per_se + j; in gfx_v10_0_get_cu_info()
8840 for (k = 0; k < adev->gfx.config.max_cu_per_sh; k++) { in gfx_v10_0_get_cu_info()
8842 if (counter < adev->gfx.config.max_cu_per_sh) in gfx_v10_0_get_cu_info()
8876 max_sa_mask = amdgpu_gfx_create_bitmask(adev->gfx.config.max_sh_per_se * in gfx_v10_3_get_disabled_sa()
8877 adev->gfx.config.max_shader_engines); in gfx_v10_3_get_disabled_sa()
8891 max_sa_per_se = adev->gfx.config.max_sh_per_se; in gfx_v10_3_program_pbb_mode()
8893 max_shader_engines = adev->gfx.config.max_shader_engines; in gfx_v10_3_program_pbb_mode()