Lines matching refs:adev in drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
244 static void gfx_v9_0_set_ring_funcs(struct amdgpu_device *adev);
245 static void gfx_v9_0_set_irq_funcs(struct amdgpu_device *adev);
246 static void gfx_v9_0_set_gds_init(struct amdgpu_device *adev);
247 static void gfx_v9_0_set_rlc_funcs(struct amdgpu_device *adev);
248 static int gfx_v9_0_get_cu_info(struct amdgpu_device *adev,
250 static uint64_t gfx_v9_0_get_gpu_clock_counter(struct amdgpu_device *adev);
251 static void gfx_v9_0_select_se_sh(struct amdgpu_device *adev, u32 se_num, u32 sh_num, u32 instance);
254 static void gfx_v9_0_init_golden_registers(struct amdgpu_device *adev) in gfx_v9_0_init_golden_registers() argument
256 switch (adev->asic_type) { in gfx_v9_0_init_golden_registers()
258 soc15_program_register_sequence(adev, in gfx_v9_0_init_golden_registers()
261 soc15_program_register_sequence(adev, in gfx_v9_0_init_golden_registers()
266 soc15_program_register_sequence(adev, in gfx_v9_0_init_golden_registers()
269 soc15_program_register_sequence(adev, in gfx_v9_0_init_golden_registers()
274 soc15_program_register_sequence(adev, in gfx_v9_0_init_golden_registers()
277 soc15_program_register_sequence(adev, in gfx_v9_0_init_golden_registers()
282 soc15_program_register_sequence(adev, in gfx_v9_0_init_golden_registers()
285 soc15_program_register_sequence(adev, in gfx_v9_0_init_golden_registers()
293 soc15_program_register_sequence(adev, golden_settings_gc_9_x_common, in gfx_v9_0_init_golden_registers()
297 static void gfx_v9_0_scratch_init(struct amdgpu_device *adev) in gfx_v9_0_scratch_init() argument
299 adev->gfx.scratch.num_reg = 8; in gfx_v9_0_scratch_init()
300 adev->gfx.scratch.reg_base = SOC15_REG_OFFSET(GC, 0, mmSCRATCH_REG0); in gfx_v9_0_scratch_init()
301 adev->gfx.scratch.free_mask = (1u << adev->gfx.scratch.num_reg) - 1; in gfx_v9_0_scratch_init()
340 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_test_ring() local
346 r = amdgpu_gfx_scratch_get(adev, &scratch); in gfx_v9_0_ring_test_ring()
356 amdgpu_gfx_scratch_free(adev, scratch); in gfx_v9_0_ring_test_ring()
364 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v9_0_ring_test_ring()
370 if (i < adev->usec_timeout) { in gfx_v9_0_ring_test_ring()
378 amdgpu_gfx_scratch_free(adev, scratch); in gfx_v9_0_ring_test_ring()
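The ring-test hits above reduce to one scratch-register round trip: the CPU poisons a scratch register with 0xCAFEDEAD, the CP overwrites it with 0xDEADBEEF through the ring, and the CPU polls it back. A minimal sketch of that pattern (not the verbatim gfx_v9_0_ring_test_ring; amdgpu helpers and PACKET3 macros from this tree assumed):

	/* sketch only: mirrors the scratch round trip visible at lines 340-378 */
	static int ring_test_sketch(struct amdgpu_ring *ring)
	{
		struct amdgpu_device *adev = ring->adev;
		uint32_t scratch, tmp = 0;
		unsigned i;
		int r;

		r = amdgpu_gfx_scratch_get(adev, &scratch);	/* reserve one SCRATCH_REGn */
		if (r)
			return r;
		WREG32(scratch, 0xCAFEDEAD);			/* poison value */

		r = amdgpu_ring_alloc(ring, 3);
		if (r)
			goto out;
		amdgpu_ring_write(ring, PACKET3(PACKET3_SET_UCONFIG_REG, 1));
		amdgpu_ring_write(ring, scratch - PACKET3_SET_UCONFIG_REG_START);
		amdgpu_ring_write(ring, 0xDEADBEEF);		/* CP overwrites the poison */
		amdgpu_ring_commit(ring);

		for (i = 0; i < adev->usec_timeout; i++) {	/* poll for the CP write */
			tmp = RREG32(scratch);
			if (tmp == 0xDEADBEEF)
				break;
			udelay(1);
		}
		r = (i < adev->usec_timeout) ? 0 : -ETIMEDOUT;
	out:
		amdgpu_gfx_scratch_free(adev, scratch);
		return r;
	}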
384 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_test_ib() local
393 r = amdgpu_device_wb_get(adev, &index); in gfx_v9_0_ring_test_ib()
395 dev_err(adev->dev, "(%ld) failed to allocate wb slot\n", r); in gfx_v9_0_ring_test_ib()
399 gpu_addr = adev->wb.gpu_addr + (index * 4); in gfx_v9_0_ring_test_ib()
400 adev->wb.wb[index] = cpu_to_le32(0xCAFEDEAD); in gfx_v9_0_ring_test_ib()
402 r = amdgpu_ib_get(adev, NULL, 16, &ib); in gfx_v9_0_ring_test_ib()
428 tmp = adev->wb.wb[index]; in gfx_v9_0_ring_test_ib()
438 amdgpu_ib_free(adev, &ib, NULL); in gfx_v9_0_ring_test_ib()
441 amdgpu_device_wb_free(adev, index); in gfx_v9_0_ring_test_ib()
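The IB test applies the same idea through a writeback slot: pre-fill the WB dword with 0xCAFEDEAD, submit an indirect buffer whose WRITE_DATA packet stores 0xDEADBEEF there, then fence-wait and compare. A hedged sketch (error handling abbreviated; helper names as in the listing):

	/* sketch only: mirrors the WB/IB round trip visible at lines 384-441 */
	static int ib_test_sketch(struct amdgpu_ring *ring)
	{
		struct amdgpu_device *adev = ring->adev;
		struct amdgpu_ib ib;
		struct dma_fence *f = NULL;
		uint32_t index, tmp;
		uint64_t gpu_addr;
		int r;

		r = amdgpu_device_wb_get(adev, &index);		/* one writeback dword */
		if (r)
			return r;
		gpu_addr = adev->wb.gpu_addr + (index * 4);
		adev->wb.wb[index] = cpu_to_le32(0xCAFEDEAD);

		memset(&ib, 0, sizeof(ib));
		r = amdgpu_ib_get(adev, NULL, 16, &ib);
		if (r)
			goto err_wb;
		ib.ptr[0] = PACKET3(PACKET3_WRITE_DATA, 3);
		ib.ptr[1] = WRITE_DATA_DST_SEL(5) | WR_CONFIRM;	/* dst = memory */
		ib.ptr[2] = lower_32_bits(gpu_addr);
		ib.ptr[3] = upper_32_bits(gpu_addr);
		ib.ptr[4] = 0xDEADBEEF;
		ib.length_dw = 5;

		r = amdgpu_ib_schedule(ring, 1, &ib, NULL, &f);
		if (r)
			goto err_ib;
		dma_fence_wait_timeout(f, false, msecs_to_jiffies(100));
		tmp = adev->wb.wb[index];
		r = (tmp == 0xDEADBEEF) ? 0 : -EINVAL;
		dma_fence_put(f);
	err_ib:
		amdgpu_ib_free(adev, &ib, NULL);
	err_wb:
		amdgpu_device_wb_free(adev, index);
		return r;
	}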
446 static void gfx_v9_0_free_microcode(struct amdgpu_device *adev) in gfx_v9_0_free_microcode() argument
448 release_firmware(adev->gfx.pfp_fw); in gfx_v9_0_free_microcode()
449 adev->gfx.pfp_fw = NULL; in gfx_v9_0_free_microcode()
450 release_firmware(adev->gfx.me_fw); in gfx_v9_0_free_microcode()
451 adev->gfx.me_fw = NULL; in gfx_v9_0_free_microcode()
452 release_firmware(adev->gfx.ce_fw); in gfx_v9_0_free_microcode()
453 adev->gfx.ce_fw = NULL; in gfx_v9_0_free_microcode()
454 release_firmware(adev->gfx.rlc_fw); in gfx_v9_0_free_microcode()
455 adev->gfx.rlc_fw = NULL; in gfx_v9_0_free_microcode()
456 release_firmware(adev->gfx.mec_fw); in gfx_v9_0_free_microcode()
457 adev->gfx.mec_fw = NULL; in gfx_v9_0_free_microcode()
458 release_firmware(adev->gfx.mec2_fw); in gfx_v9_0_free_microcode()
459 adev->gfx.mec2_fw = NULL; in gfx_v9_0_free_microcode()
461 kfree(adev->gfx.rlc.register_list_format); in gfx_v9_0_free_microcode()
464 static void gfx_v9_0_init_rlc_ext_microcode(struct amdgpu_device *adev) in gfx_v9_0_init_rlc_ext_microcode() argument
468 rlc_hdr = (const struct rlc_firmware_header_v2_1 *)adev->gfx.rlc_fw->data; in gfx_v9_0_init_rlc_ext_microcode()
469 adev->gfx.rlc_srlc_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_cntl_ucode_ver); in gfx_v9_0_init_rlc_ext_microcode()
470 adev->gfx.rlc_srlc_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_cntl_feature_ver); in gfx_v9_0_init_rlc_ext_microcode()
471 adev->gfx.rlc.save_restore_list_cntl_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_cntl_size_bytes); in gfx_v9_0_init_rlc_ext_microcode()
472 adev->gfx.rlc.save_restore_list_cntl = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_cntl_offset_bytes); in gfx_v9_0_init_rlc_ext_microcode()
473 adev->gfx.rlc_srlg_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_gpm_ucode_ver); in gfx_v9_0_init_rlc_ext_microcode()
474 adev->gfx.rlc_srlg_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_gpm_feature_ver); in gfx_v9_0_init_rlc_ext_microcode()
475 adev->gfx.rlc.save_restore_list_gpm_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_gpm_size_bytes); in gfx_v9_0_init_rlc_ext_microcode()
476 adev->gfx.rlc.save_restore_list_gpm = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_gpm_offset_bytes); in gfx_v9_0_init_rlc_ext_microcode()
477 adev->gfx.rlc_srls_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_srm_ucode_ver); in gfx_v9_0_init_rlc_ext_microcode()
478 adev->gfx.rlc_srls_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_srm_feature_ver); in gfx_v9_0_init_rlc_ext_microcode()
479 adev->gfx.rlc.save_restore_list_srm_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_srm_size_bytes); in gfx_v9_0_init_rlc_ext_microcode()
480 adev->gfx.rlc.save_restore_list_srm = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_srm_offset_bytes); in gfx_v9_0_init_rlc_ext_microcode()
481 adev->gfx.rlc.reg_list_format_direct_reg_list_length = in gfx_v9_0_init_rlc_ext_microcode()
485 static int gfx_v9_0_init_microcode(struct amdgpu_device *adev) in gfx_v9_0_init_microcode() argument
501 switch (adev->asic_type) { in gfx_v9_0_init_microcode()
519 err = request_firmware(&adev->gfx.pfp_fw, fw_name, adev->dev); in gfx_v9_0_init_microcode()
522 err = amdgpu_ucode_validate(adev->gfx.pfp_fw); in gfx_v9_0_init_microcode()
525 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.pfp_fw->data; in gfx_v9_0_init_microcode()
526 adev->gfx.pfp_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v9_0_init_microcode()
527 adev->gfx.pfp_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v9_0_init_microcode()
530 err = request_firmware(&adev->gfx.me_fw, fw_name, adev->dev); in gfx_v9_0_init_microcode()
533 err = amdgpu_ucode_validate(adev->gfx.me_fw); in gfx_v9_0_init_microcode()
536 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.me_fw->data; in gfx_v9_0_init_microcode()
537 adev->gfx.me_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v9_0_init_microcode()
538 adev->gfx.me_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v9_0_init_microcode()
541 err = request_firmware(&adev->gfx.ce_fw, fw_name, adev->dev); in gfx_v9_0_init_microcode()
544 err = amdgpu_ucode_validate(adev->gfx.ce_fw); in gfx_v9_0_init_microcode()
547 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.ce_fw->data; in gfx_v9_0_init_microcode()
548 adev->gfx.ce_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v9_0_init_microcode()
549 adev->gfx.ce_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v9_0_init_microcode()
552 err = request_firmware(&adev->gfx.rlc_fw, fw_name, adev->dev); in gfx_v9_0_init_microcode()
555 err = amdgpu_ucode_validate(adev->gfx.rlc_fw); in gfx_v9_0_init_microcode()
556 rlc_hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data; in gfx_v9_0_init_microcode()
561 adev->gfx.rlc.is_rlc_v2_1 = true; in gfx_v9_0_init_microcode()
563 adev->gfx.rlc_fw_version = le32_to_cpu(rlc_hdr->header.ucode_version); in gfx_v9_0_init_microcode()
564 adev->gfx.rlc_feature_version = le32_to_cpu(rlc_hdr->ucode_feature_version); in gfx_v9_0_init_microcode()
565 adev->gfx.rlc.save_and_restore_offset = in gfx_v9_0_init_microcode()
567 adev->gfx.rlc.clear_state_descriptor_offset = in gfx_v9_0_init_microcode()
569 adev->gfx.rlc.avail_scratch_ram_locations = in gfx_v9_0_init_microcode()
571 adev->gfx.rlc.reg_restore_list_size = in gfx_v9_0_init_microcode()
573 adev->gfx.rlc.reg_list_format_start = in gfx_v9_0_init_microcode()
575 adev->gfx.rlc.reg_list_format_separate_start = in gfx_v9_0_init_microcode()
577 adev->gfx.rlc.starting_offsets_start = in gfx_v9_0_init_microcode()
579 adev->gfx.rlc.reg_list_format_size_bytes = in gfx_v9_0_init_microcode()
581 adev->gfx.rlc.reg_list_size_bytes = in gfx_v9_0_init_microcode()
583 adev->gfx.rlc.register_list_format = in gfx_v9_0_init_microcode()
584 kmalloc(adev->gfx.rlc.reg_list_format_size_bytes + in gfx_v9_0_init_microcode()
585 adev->gfx.rlc.reg_list_size_bytes, GFP_KERNEL); in gfx_v9_0_init_microcode()
586 if (!adev->gfx.rlc.register_list_format) { in gfx_v9_0_init_microcode()
594 adev->gfx.rlc.register_list_format[i] = le32_to_cpu(tmp[i]); in gfx_v9_0_init_microcode()
596 adev->gfx.rlc.register_restore = adev->gfx.rlc.register_list_format + i; in gfx_v9_0_init_microcode()
601 adev->gfx.rlc.register_restore[i] = le32_to_cpu(tmp[i]); in gfx_v9_0_init_microcode()
603 if (adev->gfx.rlc.is_rlc_v2_1) in gfx_v9_0_init_microcode()
604 gfx_v9_0_init_rlc_ext_microcode(adev); in gfx_v9_0_init_microcode()
607 err = request_firmware(&adev->gfx.mec_fw, fw_name, adev->dev); in gfx_v9_0_init_microcode()
610 err = amdgpu_ucode_validate(adev->gfx.mec_fw); in gfx_v9_0_init_microcode()
613 cp_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v9_0_init_microcode()
614 adev->gfx.mec_fw_version = le32_to_cpu(cp_hdr->header.ucode_version); in gfx_v9_0_init_microcode()
615 adev->gfx.mec_feature_version = le32_to_cpu(cp_hdr->ucode_feature_version); in gfx_v9_0_init_microcode()
619 err = request_firmware(&adev->gfx.mec2_fw, fw_name, adev->dev); in gfx_v9_0_init_microcode()
621 err = amdgpu_ucode_validate(adev->gfx.mec2_fw); in gfx_v9_0_init_microcode()
625 adev->gfx.mec2_fw->data; in gfx_v9_0_init_microcode()
626 adev->gfx.mec2_fw_version = in gfx_v9_0_init_microcode()
628 adev->gfx.mec2_feature_version = in gfx_v9_0_init_microcode()
632 adev->gfx.mec2_fw = NULL; in gfx_v9_0_init_microcode()
635 if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) { in gfx_v9_0_init_microcode()
636 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_CP_PFP]; in gfx_v9_0_init_microcode()
638 info->fw = adev->gfx.pfp_fw; in gfx_v9_0_init_microcode()
640 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
643 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_CP_ME]; in gfx_v9_0_init_microcode()
645 info->fw = adev->gfx.me_fw; in gfx_v9_0_init_microcode()
647 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
650 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_CP_CE]; in gfx_v9_0_init_microcode()
652 info->fw = adev->gfx.ce_fw; in gfx_v9_0_init_microcode()
654 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
657 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_G]; in gfx_v9_0_init_microcode()
659 info->fw = adev->gfx.rlc_fw; in gfx_v9_0_init_microcode()
661 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
664 if (adev->gfx.rlc.is_rlc_v2_1 && in gfx_v9_0_init_microcode()
665 adev->gfx.rlc.save_restore_list_cntl_size_bytes && in gfx_v9_0_init_microcode()
666 adev->gfx.rlc.save_restore_list_gpm_size_bytes && in gfx_v9_0_init_microcode()
667 adev->gfx.rlc.save_restore_list_srm_size_bytes) { in gfx_v9_0_init_microcode()
668 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_RESTORE_LIST_CNTL]; in gfx_v9_0_init_microcode()
670 info->fw = adev->gfx.rlc_fw; in gfx_v9_0_init_microcode()
671 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
672 ALIGN(adev->gfx.rlc.save_restore_list_cntl_size_bytes, PAGE_SIZE); in gfx_v9_0_init_microcode()
674 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_RESTORE_LIST_GPM_MEM]; in gfx_v9_0_init_microcode()
676 info->fw = adev->gfx.rlc_fw; in gfx_v9_0_init_microcode()
677 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
678 ALIGN(adev->gfx.rlc.save_restore_list_gpm_size_bytes, PAGE_SIZE); in gfx_v9_0_init_microcode()
680 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_RESTORE_LIST_SRM_MEM]; in gfx_v9_0_init_microcode()
682 info->fw = adev->gfx.rlc_fw; in gfx_v9_0_init_microcode()
683 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
684 ALIGN(adev->gfx.rlc.save_restore_list_srm_size_bytes, PAGE_SIZE); in gfx_v9_0_init_microcode()
687 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_CP_MEC1]; in gfx_v9_0_init_microcode()
689 info->fw = adev->gfx.mec_fw; in gfx_v9_0_init_microcode()
692 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
695 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_CP_MEC1_JT]; in gfx_v9_0_init_microcode()
697 info->fw = adev->gfx.mec_fw; in gfx_v9_0_init_microcode()
698 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
701 if (adev->gfx.mec2_fw) { in gfx_v9_0_init_microcode()
702 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_CP_MEC2]; in gfx_v9_0_init_microcode()
704 info->fw = adev->gfx.mec2_fw; in gfx_v9_0_init_microcode()
707 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
709 info = &adev->firmware.ucode[AMDGPU_UCODE_ID_CP_MEC2_JT]; in gfx_v9_0_init_microcode()
711 info->fw = adev->gfx.mec2_fw; in gfx_v9_0_init_microcode()
712 adev->firmware.fw_size += in gfx_v9_0_init_microcode()
720 dev_err(adev->dev, in gfx_v9_0_init_microcode()
723 release_firmware(adev->gfx.pfp_fw); in gfx_v9_0_init_microcode()
724 adev->gfx.pfp_fw = NULL; in gfx_v9_0_init_microcode()
725 release_firmware(adev->gfx.me_fw); in gfx_v9_0_init_microcode()
726 adev->gfx.me_fw = NULL; in gfx_v9_0_init_microcode()
727 release_firmware(adev->gfx.ce_fw); in gfx_v9_0_init_microcode()
728 adev->gfx.ce_fw = NULL; in gfx_v9_0_init_microcode()
729 release_firmware(adev->gfx.rlc_fw); in gfx_v9_0_init_microcode()
730 adev->gfx.rlc_fw = NULL; in gfx_v9_0_init_microcode()
731 release_firmware(adev->gfx.mec_fw); in gfx_v9_0_init_microcode()
732 adev->gfx.mec_fw = NULL; in gfx_v9_0_init_microcode()
733 release_firmware(adev->gfx.mec2_fw); in gfx_v9_0_init_microcode()
734 adev->gfx.mec2_fw = NULL; in gfx_v9_0_init_microcode()
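Every CP block above repeats one fetch pattern: request the blob, validate its size against the common header, then lift the ucode and feature versions out. A minimal sketch of that pattern (gfx9_fetch_fw is a hypothetical name; on-error release is abbreviated, the real code unwinds through the 723-734 tail above):

	static int gfx9_fetch_fw(struct amdgpu_device *adev,
				 const struct firmware **fw, const char *chip,
				 const char *block, u32 *fw_ver, u32 *feat_ver)
	{
		const struct gfx_firmware_header_v1_0 *hdr;
		char fw_name[30];
		int err;

		snprintf(fw_name, sizeof(fw_name), "amdgpu/%s_%s.bin", chip, block);
		err = request_firmware(fw, fw_name, adev->dev);
		if (err)
			return err;
		err = amdgpu_ucode_validate(*fw);	/* size vs. header sanity check */
		if (err)
			return err;

		hdr = (const struct gfx_firmware_header_v1_0 *)(*fw)->data;
		*fw_ver = le32_to_cpu(hdr->header.ucode_version);
		*feat_ver = le32_to_cpu(hdr->ucode_feature_version);
		return 0;
	}

Usage would look like gfx9_fetch_fw(adev, &adev->gfx.pfp_fw, chip_name, "pfp", &adev->gfx.pfp_fw_version, &adev->gfx.pfp_feature_version), repeated for me/ce/rlc/mec.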
739 static u32 gfx_v9_0_get_csb_size(struct amdgpu_device *adev) in gfx_v9_0_get_csb_size() argument
767 static void gfx_v9_0_get_csb_buffer(struct amdgpu_device *adev, in gfx_v9_0_get_csb_buffer() argument
774 if (adev->gfx.rlc.cs_data == NULL) in gfx_v9_0_get_csb_buffer()
786 for (sect = adev->gfx.rlc.cs_data; sect->section != NULL; ++sect) { in gfx_v9_0_get_csb_buffer()
808 static void gfx_v9_0_init_lbpw(struct amdgpu_device *adev) in gfx_v9_0_init_lbpw() argument
824 mutex_lock(&adev->grbm_idx_mutex); in gfx_v9_0_init_lbpw()
826 gfx_v9_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff); in gfx_v9_0_init_lbpw()
850 mutex_unlock(&adev->grbm_idx_mutex); in gfx_v9_0_init_lbpw()
853 static void gfx_v9_0_enable_lbpw(struct amdgpu_device *adev, bool enable) in gfx_v9_0_enable_lbpw() argument
858 static void rv_init_cp_jump_table(struct amdgpu_device *adev) in rv_init_cp_jump_table() argument
867 dst_ptr = adev->gfx.rlc.cp_table_ptr; in rv_init_cp_jump_table()
871 (const struct gfx_firmware_header_v1_0 *)adev->gfx.ce_fw->data; in rv_init_cp_jump_table()
873 (adev->gfx.ce_fw->data + in rv_init_cp_jump_table()
879 (const struct gfx_firmware_header_v1_0 *)adev->gfx.pfp_fw->data; in rv_init_cp_jump_table()
881 (adev->gfx.pfp_fw->data + in rv_init_cp_jump_table()
887 (const struct gfx_firmware_header_v1_0 *)adev->gfx.me_fw->data; in rv_init_cp_jump_table()
889 (adev->gfx.me_fw->data + in rv_init_cp_jump_table()
895 (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in rv_init_cp_jump_table()
897 (adev->gfx.mec_fw->data + in rv_init_cp_jump_table()
903 (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec2_fw->data; in rv_init_cp_jump_table()
905 (adev->gfx.mec2_fw->data + in rv_init_cp_jump_table()
920 static void gfx_v9_0_rlc_fini(struct amdgpu_device *adev) in gfx_v9_0_rlc_fini() argument
923 amdgpu_bo_free_kernel(&adev->gfx.rlc.clear_state_obj, in gfx_v9_0_rlc_fini()
924 &adev->gfx.rlc.clear_state_gpu_addr, in gfx_v9_0_rlc_fini()
925 (void **)&adev->gfx.rlc.cs_ptr); in gfx_v9_0_rlc_fini()
928 amdgpu_bo_free_kernel(&adev->gfx.rlc.cp_table_obj, in gfx_v9_0_rlc_fini()
929 &adev->gfx.rlc.cp_table_gpu_addr, in gfx_v9_0_rlc_fini()
930 (void **)&adev->gfx.rlc.cp_table_ptr); in gfx_v9_0_rlc_fini()
933 static int gfx_v9_0_rlc_init(struct amdgpu_device *adev) in gfx_v9_0_rlc_init() argument
940 adev->gfx.rlc.cs_data = gfx9_cs_data; in gfx_v9_0_rlc_init()
942 cs_data = adev->gfx.rlc.cs_data; in gfx_v9_0_rlc_init()
946 adev->gfx.rlc.clear_state_size = dws = gfx_v9_0_get_csb_size(adev); in gfx_v9_0_rlc_init()
947 r = amdgpu_bo_create_reserved(adev, dws * 4, PAGE_SIZE, in gfx_v9_0_rlc_init()
949 &adev->gfx.rlc.clear_state_obj, in gfx_v9_0_rlc_init()
950 &adev->gfx.rlc.clear_state_gpu_addr, in gfx_v9_0_rlc_init()
951 (void **)&adev->gfx.rlc.cs_ptr); in gfx_v9_0_rlc_init()
953 dev_err(adev->dev, "(%d) failed to create rlc csb bo\n", in gfx_v9_0_rlc_init()
955 gfx_v9_0_rlc_fini(adev); in gfx_v9_0_rlc_init()
959 dst_ptr = adev->gfx.rlc.cs_ptr; in gfx_v9_0_rlc_init()
960 gfx_v9_0_get_csb_buffer(adev, dst_ptr); in gfx_v9_0_rlc_init()
961 amdgpu_bo_kunmap(adev->gfx.rlc.clear_state_obj); in gfx_v9_0_rlc_init()
962 amdgpu_bo_unpin(adev->gfx.rlc.clear_state_obj); in gfx_v9_0_rlc_init()
963 amdgpu_bo_unreserve(adev->gfx.rlc.clear_state_obj); in gfx_v9_0_rlc_init()
966 if (adev->asic_type == CHIP_RAVEN) { in gfx_v9_0_rlc_init()
968 adev->gfx.rlc.cp_table_size = ALIGN(96 * 5 * 4, 2048) + (64 * 1024); /* JT + GDS */ in gfx_v9_0_rlc_init()
969 r = amdgpu_bo_create_reserved(adev, adev->gfx.rlc.cp_table_size, in gfx_v9_0_rlc_init()
971 &adev->gfx.rlc.cp_table_obj, in gfx_v9_0_rlc_init()
972 &adev->gfx.rlc.cp_table_gpu_addr, in gfx_v9_0_rlc_init()
973 (void **)&adev->gfx.rlc.cp_table_ptr); in gfx_v9_0_rlc_init()
975 dev_err(adev->dev, in gfx_v9_0_rlc_init()
977 gfx_v9_0_rlc_fini(adev); in gfx_v9_0_rlc_init()
981 rv_init_cp_jump_table(adev); in gfx_v9_0_rlc_init()
982 amdgpu_bo_kunmap(adev->gfx.rlc.cp_table_obj); in gfx_v9_0_rlc_init()
983 amdgpu_bo_unreserve(adev->gfx.rlc.cp_table_obj); in gfx_v9_0_rlc_init()
985 gfx_v9_0_init_lbpw(adev); in gfx_v9_0_rlc_init()
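The CSB setup above leans on amdgpu_bo_create_reserved() handing back a reserved, pinned, CPU-mapped BO that the caller fills and immediately releases. The sequence, reassembled from the 946-963 fragments (hypothetical wrapper name; gfx_v9_0_get_csb_buffer as listed above):

	static int gfx9_csb_alloc_sketch(struct amdgpu_device *adev, u32 dws)
	{
		volatile u32 *dst_ptr;
		int r;

		r = amdgpu_bo_create_reserved(adev, dws * 4, PAGE_SIZE,
					      AMDGPU_GEM_DOMAIN_VRAM,
					      &adev->gfx.rlc.clear_state_obj,
					      &adev->gfx.rlc.clear_state_gpu_addr,
					      (void **)&adev->gfx.rlc.cs_ptr);
		if (r) {
			dev_err(adev->dev, "(%d) failed to create rlc csb bo\n", r);
			return r;
		}

		/* BO arrives reserved, pinned and kmapped: fill, then release all three */
		dst_ptr = adev->gfx.rlc.cs_ptr;
		gfx_v9_0_get_csb_buffer(adev, dst_ptr);
		amdgpu_bo_kunmap(adev->gfx.rlc.clear_state_obj);
		amdgpu_bo_unpin(adev->gfx.rlc.clear_state_obj);
		amdgpu_bo_unreserve(adev->gfx.rlc.clear_state_obj);
		return 0;
	}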
991 static int gfx_v9_0_csb_vram_pin(struct amdgpu_device *adev) in gfx_v9_0_csb_vram_pin() argument
995 r = amdgpu_bo_reserve(adev->gfx.rlc.clear_state_obj, false); in gfx_v9_0_csb_vram_pin()
999 r = amdgpu_bo_pin(adev->gfx.rlc.clear_state_obj, in gfx_v9_0_csb_vram_pin()
1002 adev->gfx.rlc.clear_state_gpu_addr = in gfx_v9_0_csb_vram_pin()
1003 amdgpu_bo_gpu_offset(adev->gfx.rlc.clear_state_obj); in gfx_v9_0_csb_vram_pin()
1005 amdgpu_bo_unreserve(adev->gfx.rlc.clear_state_obj); in gfx_v9_0_csb_vram_pin()
1010 static void gfx_v9_0_csb_vram_unpin(struct amdgpu_device *adev) in gfx_v9_0_csb_vram_unpin() argument
1014 if (!adev->gfx.rlc.clear_state_obj) in gfx_v9_0_csb_vram_unpin()
1017 r = amdgpu_bo_reserve(adev->gfx.rlc.clear_state_obj, true); in gfx_v9_0_csb_vram_unpin()
1019 amdgpu_bo_unpin(adev->gfx.rlc.clear_state_obj); in gfx_v9_0_csb_vram_unpin()
1020 amdgpu_bo_unreserve(adev->gfx.rlc.clear_state_obj); in gfx_v9_0_csb_vram_unpin()
1024 static void gfx_v9_0_mec_fini(struct amdgpu_device *adev) in gfx_v9_0_mec_fini() argument
1026 amdgpu_bo_free_kernel(&adev->gfx.mec.hpd_eop_obj, NULL, NULL); in gfx_v9_0_mec_fini()
1027 amdgpu_bo_free_kernel(&adev->gfx.mec.mec_fw_obj, NULL, NULL); in gfx_v9_0_mec_fini()
1030 static int gfx_v9_0_mec_init(struct amdgpu_device *adev) in gfx_v9_0_mec_init() argument
1041 bitmap_zero(adev->gfx.mec.queue_bitmap, AMDGPU_MAX_COMPUTE_QUEUES); in gfx_v9_0_mec_init()
1044 amdgpu_gfx_compute_queue_acquire(adev); in gfx_v9_0_mec_init()
1045 mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE; in gfx_v9_0_mec_init()
1047 r = amdgpu_bo_create_reserved(adev, mec_hpd_size, PAGE_SIZE, in gfx_v9_0_mec_init()
1049 &adev->gfx.mec.hpd_eop_obj, in gfx_v9_0_mec_init()
1050 &adev->gfx.mec.hpd_eop_gpu_addr, in gfx_v9_0_mec_init()
1053 dev_warn(adev->dev, "(%d) create HDP EOP bo failed\n", r); in gfx_v9_0_mec_init()
1054 gfx_v9_0_mec_fini(adev); in gfx_v9_0_mec_init()
1058 memset(hpd, 0, adev->gfx.mec.hpd_eop_obj->tbo.mem.size); in gfx_v9_0_mec_init()
1060 amdgpu_bo_kunmap(adev->gfx.mec.hpd_eop_obj); in gfx_v9_0_mec_init()
1061 amdgpu_bo_unreserve(adev->gfx.mec.hpd_eop_obj); in gfx_v9_0_mec_init()
1063 mec_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v9_0_mec_init()
1066 (adev->gfx.mec_fw->data + in gfx_v9_0_mec_init()
1070 r = amdgpu_bo_create_reserved(adev, mec_hdr->header.ucode_size_bytes, in gfx_v9_0_mec_init()
1072 &adev->gfx.mec.mec_fw_obj, in gfx_v9_0_mec_init()
1073 &adev->gfx.mec.mec_fw_gpu_addr, in gfx_v9_0_mec_init()
1076 dev_warn(adev->dev, "(%d) create mec firmware bo failed\n", r); in gfx_v9_0_mec_init()
1077 gfx_v9_0_mec_fini(adev); in gfx_v9_0_mec_init()
1083 amdgpu_bo_kunmap(adev->gfx.mec.mec_fw_obj); in gfx_v9_0_mec_init()
1084 amdgpu_bo_unreserve(adev->gfx.mec.mec_fw_obj); in gfx_v9_0_mec_init()
1089 static uint32_t wave_read_ind(struct amdgpu_device *adev, uint32_t simd, uint32_t wave, uint32_t address) in wave_read_ind() argument
1099 static void wave_read_regs(struct amdgpu_device *adev, uint32_t simd, in wave_read_regs() argument
1114 static void gfx_v9_0_read_wave_data(struct amdgpu_device *adev, uint32_t simd, uint32_t wave, uint32_t *dst, int *no_fields) in gfx_v9_0_read_wave_data() argument
1118 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_STATUS); in gfx_v9_0_read_wave_data()
1119 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_LO); in gfx_v9_0_read_wave_data()
1120 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_HI); in gfx_v9_0_read_wave_data()
1121 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_LO); in gfx_v9_0_read_wave_data()
1122 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_HI); in gfx_v9_0_read_wave_data()
1123 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_HW_ID); in gfx_v9_0_read_wave_data()
1124 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW0); in gfx_v9_0_read_wave_data()
1125 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW1); in gfx_v9_0_read_wave_data()
1126 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_GPR_ALLOC); in gfx_v9_0_read_wave_data()
1127 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_LDS_ALLOC); in gfx_v9_0_read_wave_data()
1128 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TRAPSTS); in gfx_v9_0_read_wave_data()
1129 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_STS); in gfx_v9_0_read_wave_data()
1130 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_DBG0); in gfx_v9_0_read_wave_data()
1131 dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_M0); in gfx_v9_0_read_wave_data()
1134 static void gfx_v9_0_read_wave_sgprs(struct amdgpu_device *adev, uint32_t simd, in gfx_v9_0_read_wave_sgprs() argument
1139 adev, simd, wave, 0, in gfx_v9_0_read_wave_sgprs()
1143 static void gfx_v9_0_read_wave_vgprs(struct amdgpu_device *adev, uint32_t simd, in gfx_v9_0_read_wave_vgprs() argument
1149 adev, simd, wave, thread, in gfx_v9_0_read_wave_vgprs()
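All the wave-debug reads above funnel through the SQ indirect interface: program SQ_IND_INDEX with the wave, SIMD and register index, then read SQ_IND_DATA back. A sketch of wave_read_ind() on that basis (SOC15 macros and GC 9.0 shift/mask names assumed):

	static uint32_t wave_read_ind_sketch(struct amdgpu_device *adev,
					     uint32_t simd, uint32_t wave,
					     uint32_t address)
	{
		/* select the wave slot and the indirect register to sample */
		WREG32_SOC15(GC, 0, mmSQ_IND_INDEX,
			     (wave << SQ_IND_INDEX__WAVE_ID__SHIFT) |
			     (simd << SQ_IND_INDEX__SIMD_ID__SHIFT) |
			     (address << SQ_IND_INDEX__INDEX__SHIFT) |
			     (SQ_IND_INDEX__FORCE_READ_MASK));
		return RREG32_SOC15(GC, 0, mmSQ_IND_DATA);
	}

wave_read_regs() extends the same setup with the auto-increment bit and a count, for the bulk SGPR/VGPR dumps above.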
1153 static void gfx_v9_0_select_me_pipe_q(struct amdgpu_device *adev, in gfx_v9_0_select_me_pipe_q() argument
1156 soc15_grbm_select(adev, me, pipe, q, 0); in gfx_v9_0_select_me_pipe_q()
1168 static int gfx_v9_0_gpu_early_init(struct amdgpu_device *adev) in gfx_v9_0_gpu_early_init() argument
1173 adev->gfx.funcs = &gfx_v9_0_gfx_funcs; in gfx_v9_0_gpu_early_init()
1175 switch (adev->asic_type) { in gfx_v9_0_gpu_early_init()
1177 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1178 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1179 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1180 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1181 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1185 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1186 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1187 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1188 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1189 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1194 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1195 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1196 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1197 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1198 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1203 err = amdgpu_atomfirmware_get_gfx_info(adev); in gfx_v9_0_gpu_early_init()
1208 adev->gfx.config.max_hw_contexts = 8; in gfx_v9_0_gpu_early_init()
1209 adev->gfx.config.sc_prim_fifo_size_frontend = 0x20; in gfx_v9_0_gpu_early_init()
1210 adev->gfx.config.sc_prim_fifo_size_backend = 0x100; in gfx_v9_0_gpu_early_init()
1211 adev->gfx.config.sc_hiz_tile_fifo_size = 0x30; in gfx_v9_0_gpu_early_init()
1212 adev->gfx.config.sc_earlyz_tile_fifo_size = 0x4C0; in gfx_v9_0_gpu_early_init()
1220 adev->gfx.config.gb_addr_config = gb_addr_config; in gfx_v9_0_gpu_early_init()
1222 adev->gfx.config.gb_addr_config_fields.num_pipes = 1 << in gfx_v9_0_gpu_early_init()
1224 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
1228 adev->gfx.config.max_tile_pipes = in gfx_v9_0_gpu_early_init()
1229 adev->gfx.config.gb_addr_config_fields.num_pipes; in gfx_v9_0_gpu_early_init()
1231 adev->gfx.config.gb_addr_config_fields.num_banks = 1 << in gfx_v9_0_gpu_early_init()
1233 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
1236 adev->gfx.config.gb_addr_config_fields.max_compress_frags = 1 << in gfx_v9_0_gpu_early_init()
1238 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
1241 adev->gfx.config.gb_addr_config_fields.num_rb_per_se = 1 << in gfx_v9_0_gpu_early_init()
1243 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
1246 adev->gfx.config.gb_addr_config_fields.num_se = 1 << in gfx_v9_0_gpu_early_init()
1248 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
1251 adev->gfx.config.gb_addr_config_fields.pipe_interleave_size = 1 << (8 + in gfx_v9_0_gpu_early_init()
1253 adev->gfx.config.gb_addr_config, in gfx_v9_0_gpu_early_init()
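The 1222-1253 block unpacks GB_ADDR_CONFIG into gb_addr_config_fields; the register stores log2-encoded values, so each decode is a REG_GET_FIELD() followed by 1 << n. Two representative decodes, reassembled from the fragments above:

	adev->gfx.config.gb_addr_config_fields.num_pipes = 1 <<
		REG_GET_FIELD(adev->gfx.config.gb_addr_config,
			      GB_ADDR_CONFIG, NUM_PIPES);
	/* pipe interleave carries a +8 bias: 1 << (8 + field) bytes, i.e. 256B minimum */
	adev->gfx.config.gb_addr_config_fields.pipe_interleave_size = 1 << (8 +
		REG_GET_FIELD(adev->gfx.config.gb_addr_config,
			      GB_ADDR_CONFIG, PIPE_INTERLEAVE_SIZE));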
1260 static int gfx_v9_0_ngg_create_buf(struct amdgpu_device *adev, in gfx_v9_0_ngg_create_buf() argument
1268 dev_err(adev->dev, "Buffer size is invalid: %d\n", size_se); in gfx_v9_0_ngg_create_buf()
1273 ngg_buf->size = size_se * adev->gfx.config.max_shader_engines; in gfx_v9_0_ngg_create_buf()
1274 r = amdgpu_bo_create_kernel(adev, ngg_buf->size, in gfx_v9_0_ngg_create_buf()
1280 dev_err(adev->dev, "(%d) failed to create NGG buffer\n", r); in gfx_v9_0_ngg_create_buf()
1288 static int gfx_v9_0_ngg_fini(struct amdgpu_device *adev) in gfx_v9_0_ngg_fini() argument
1293 amdgpu_bo_free_kernel(&adev->gfx.ngg.buf[i].bo, in gfx_v9_0_ngg_fini()
1294 &adev->gfx.ngg.buf[i].gpu_addr, in gfx_v9_0_ngg_fini()
1297 memset(&adev->gfx.ngg.buf[0], 0, in gfx_v9_0_ngg_fini()
1300 adev->gfx.ngg.init = false; in gfx_v9_0_ngg_fini()
1305 static int gfx_v9_0_ngg_init(struct amdgpu_device *adev) in gfx_v9_0_ngg_init() argument
1309 if (!amdgpu_ngg || adev->gfx.ngg.init == true) in gfx_v9_0_ngg_init()
1313 adev->gfx.ngg.gds_reserve_size = ALIGN(5 * 4, 0x40); in gfx_v9_0_ngg_init()
1314 adev->gds.mem.total_size -= adev->gfx.ngg.gds_reserve_size; in gfx_v9_0_ngg_init()
1315 adev->gds.mem.gfx_partition_size -= adev->gfx.ngg.gds_reserve_size; in gfx_v9_0_ngg_init()
1316 adev->gfx.ngg.gds_reserve_addr = RREG32_SOC15(GC, 0, mmGDS_VMID0_BASE); in gfx_v9_0_ngg_init()
1317 adev->gfx.ngg.gds_reserve_addr += RREG32_SOC15(GC, 0, mmGDS_VMID0_SIZE); in gfx_v9_0_ngg_init()
1320 r = gfx_v9_0_ngg_create_buf(adev, &adev->gfx.ngg.buf[NGG_PRIM], in gfx_v9_0_ngg_init()
1324 dev_err(adev->dev, "Failed to create Primitive Buffer\n"); in gfx_v9_0_ngg_init()
1329 r = gfx_v9_0_ngg_create_buf(adev, &adev->gfx.ngg.buf[NGG_POS], in gfx_v9_0_ngg_init()
1333 dev_err(adev->dev, "Failed to create Position Buffer\n"); in gfx_v9_0_ngg_init()
1338 r = gfx_v9_0_ngg_create_buf(adev, &adev->gfx.ngg.buf[NGG_CNTL], in gfx_v9_0_ngg_init()
1342 dev_err(adev->dev, "Failed to create Control Sideband Buffer\n"); in gfx_v9_0_ngg_init()
1350 r = gfx_v9_0_ngg_create_buf(adev, &adev->gfx.ngg.buf[NGG_PARAM], in gfx_v9_0_ngg_init()
1354 dev_err(adev->dev, "Failed to create Parameter Cache\n"); in gfx_v9_0_ngg_init()
1359 adev->gfx.ngg.init = true; in gfx_v9_0_ngg_init()
1362 gfx_v9_0_ngg_fini(adev); in gfx_v9_0_ngg_init()
1366 static int gfx_v9_0_ngg_en(struct amdgpu_device *adev) in gfx_v9_0_ngg_en() argument
1368 struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_ngg_en()
1377 adev->gfx.ngg.buf[NGG_PRIM].size >> 8); in gfx_v9_0_ngg_en()
1379 adev->gfx.ngg.buf[NGG_POS].size >> 8); in gfx_v9_0_ngg_en()
1383 adev->gfx.ngg.buf[NGG_CNTL].size >> 8); in gfx_v9_0_ngg_en()
1385 adev->gfx.ngg.buf[NGG_PARAM].size >> 10); in gfx_v9_0_ngg_en()
1389 base = lower_32_bits(adev->gfx.ngg.buf[NGG_PRIM].gpu_addr); in gfx_v9_0_ngg_en()
1393 base = upper_32_bits(adev->gfx.ngg.buf[NGG_PRIM].gpu_addr); in gfx_v9_0_ngg_en()
1397 base = lower_32_bits(adev->gfx.ngg.buf[NGG_POS].gpu_addr); in gfx_v9_0_ngg_en()
1401 base = upper_32_bits(adev->gfx.ngg.buf[NGG_POS].gpu_addr); in gfx_v9_0_ngg_en()
1405 base = lower_32_bits(adev->gfx.ngg.buf[NGG_CNTL].gpu_addr); in gfx_v9_0_ngg_en()
1409 base = upper_32_bits(adev->gfx.ngg.buf[NGG_CNTL].gpu_addr); in gfx_v9_0_ngg_en()
1423 (adev->gds.mem.total_size + in gfx_v9_0_ngg_en()
1424 adev->gfx.ngg.gds_reserve_size) >> in gfx_v9_0_ngg_en()
1433 amdgpu_ring_write(ring, adev->gfx.ngg.gds_reserve_addr); in gfx_v9_0_ngg_en()
1436 adev->gfx.ngg.gds_reserve_size); in gfx_v9_0_ngg_en()
1446 static int gfx_v9_0_compute_ring_init(struct amdgpu_device *adev, int ring_id, in gfx_v9_0_compute_ring_init() argument
1451 struct amdgpu_ring *ring = &adev->gfx.compute_ring[ring_id]; in gfx_v9_0_compute_ring_init()
1453 ring = &adev->gfx.compute_ring[ring_id]; in gfx_v9_0_compute_ring_init()
1463 ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr in gfx_v9_0_compute_ring_init()
1468 + ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec) in gfx_v9_0_compute_ring_init()
1472 r = amdgpu_ring_init(adev, ring, 1024, in gfx_v9_0_compute_ring_init()
1473 &adev->gfx.eop_irq, irq_type); in gfx_v9_0_compute_ring_init()
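gfx_v9_0_compute_ring_init() gives every compute ring its own GFX9_MEC_HPD_SIZE slice of the shared EOP BO and a per-pipe EOP interrupt source. The two computations, reassembled from the 1463-1473 fragments (declarations added; this sits inside the function):

	unsigned irq_type;
	int r;

	ring->eop_gpu_addr = adev->gfx.mec.hpd_eop_gpu_addr
			     + (ring_id * GFX9_MEC_HPD_SIZE);	/* per-ring HPD slice */
	irq_type = AMDGPU_CP_IRQ_COMPUTE_MEC1_PIPE0_EOP
		   + ((ring->me - 1) * adev->gfx.mec.num_pipe_per_mec)
		   + ring->pipe;				/* one EOP IRQ per pipe */
	r = amdgpu_ring_init(adev, ring, 1024, &adev->gfx.eop_irq, irq_type);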
1486 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_sw_init() local
1488 switch (adev->asic_type) { in gfx_v9_0_sw_init()
1493 adev->gfx.mec.num_mec = 2; in gfx_v9_0_sw_init()
1496 adev->gfx.mec.num_mec = 1; in gfx_v9_0_sw_init()
1500 adev->gfx.mec.num_pipe_per_mec = 4; in gfx_v9_0_sw_init()
1501 adev->gfx.mec.num_queue_per_pipe = 8; in gfx_v9_0_sw_init()
1504 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_GRBM_CP, GFX_9_0__SRCID__CP_IB2_INTERRUPT_PKT, &adev->gfx.kiq.irq); in gfx_v9_0_sw_init()
1509 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_GRBM_CP, GFX_9_0__SRCID__CP_EOP_INTERRUPT, &adev->gfx.eop_irq); in gfx_v9_0_sw_init()
1514 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_GRBM_CP, GFX_9_0__SRCID__CP_PRIV_REG_FAULT, in gfx_v9_0_sw_init()
1515 &adev->gfx.priv_reg_irq); in gfx_v9_0_sw_init()
1520 r = amdgpu_irq_add_id(adev, SOC15_IH_CLIENTID_GRBM_CP, GFX_9_0__SRCID__CP_PRIV_INSTR_FAULT, in gfx_v9_0_sw_init()
1521 &adev->gfx.priv_inst_irq); in gfx_v9_0_sw_init()
1525 adev->gfx.gfx_current_status = AMDGPU_GFX_NORMAL_MODE; in gfx_v9_0_sw_init()
1527 gfx_v9_0_scratch_init(adev); in gfx_v9_0_sw_init()
1529 r = gfx_v9_0_init_microcode(adev); in gfx_v9_0_sw_init()
1535 r = gfx_v9_0_rlc_init(adev); in gfx_v9_0_sw_init()
1541 r = gfx_v9_0_mec_init(adev); in gfx_v9_0_sw_init()
1548 for (i = 0; i < adev->gfx.num_gfx_rings; i++) { in gfx_v9_0_sw_init()
1549 ring = &adev->gfx.gfx_ring[i]; in gfx_v9_0_sw_init()
1557 r = amdgpu_ring_init(adev, ring, 1024, in gfx_v9_0_sw_init()
1558 &adev->gfx.eop_irq, AMDGPU_CP_IRQ_GFX_EOP); in gfx_v9_0_sw_init()
1565 for (i = 0; i < adev->gfx.mec.num_mec; ++i) { in gfx_v9_0_sw_init()
1566 for (j = 0; j < adev->gfx.mec.num_queue_per_pipe; j++) { in gfx_v9_0_sw_init()
1567 for (k = 0; k < adev->gfx.mec.num_pipe_per_mec; k++) { in gfx_v9_0_sw_init()
1568 if (!amdgpu_gfx_is_mec_queue_enabled(adev, i, k, j)) in gfx_v9_0_sw_init()
1571 r = gfx_v9_0_compute_ring_init(adev, in gfx_v9_0_sw_init()
1582 r = amdgpu_gfx_kiq_init(adev, GFX9_MEC_HPD_SIZE); in gfx_v9_0_sw_init()
1588 kiq = &adev->gfx.kiq; in gfx_v9_0_sw_init()
1589 r = amdgpu_gfx_kiq_init_ring(adev, &kiq->ring, &kiq->irq); in gfx_v9_0_sw_init()
1594 r = amdgpu_gfx_compute_mqd_sw_init(adev, sizeof(struct v9_mqd_allocation)); in gfx_v9_0_sw_init()
1599 r = amdgpu_bo_create_kernel(adev, adev->gds.mem.gfx_partition_size, in gfx_v9_0_sw_init()
1601 &adev->gds.gds_gfx_bo, NULL, NULL); in gfx_v9_0_sw_init()
1605 r = amdgpu_bo_create_kernel(adev, adev->gds.gws.gfx_partition_size, in gfx_v9_0_sw_init()
1607 &adev->gds.gws_gfx_bo, NULL, NULL); in gfx_v9_0_sw_init()
1611 r = amdgpu_bo_create_kernel(adev, adev->gds.oa.gfx_partition_size, in gfx_v9_0_sw_init()
1613 &adev->gds.oa_gfx_bo, NULL, NULL); in gfx_v9_0_sw_init()
1617 adev->gfx.ce_ram_size = 0x8000; in gfx_v9_0_sw_init()
1619 r = gfx_v9_0_gpu_early_init(adev); in gfx_v9_0_sw_init()
1623 r = gfx_v9_0_ngg_init(adev); in gfx_v9_0_sw_init()
1634 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_sw_fini() local
1636 amdgpu_bo_free_kernel(&adev->gds.oa_gfx_bo, NULL, NULL); in gfx_v9_0_sw_fini()
1637 amdgpu_bo_free_kernel(&adev->gds.gws_gfx_bo, NULL, NULL); in gfx_v9_0_sw_fini()
1638 amdgpu_bo_free_kernel(&adev->gds.gds_gfx_bo, NULL, NULL); in gfx_v9_0_sw_fini()
1640 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v9_0_sw_fini()
1641 amdgpu_ring_fini(&adev->gfx.gfx_ring[i]); in gfx_v9_0_sw_fini()
1642 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_sw_fini()
1643 amdgpu_ring_fini(&adev->gfx.compute_ring[i]); in gfx_v9_0_sw_fini()
1645 amdgpu_gfx_compute_mqd_sw_fini(adev); in gfx_v9_0_sw_fini()
1646 amdgpu_gfx_kiq_free_ring(&adev->gfx.kiq.ring, &adev->gfx.kiq.irq); in gfx_v9_0_sw_fini()
1647 amdgpu_gfx_kiq_fini(adev); in gfx_v9_0_sw_fini()
1649 gfx_v9_0_mec_fini(adev); in gfx_v9_0_sw_fini()
1650 gfx_v9_0_ngg_fini(adev); in gfx_v9_0_sw_fini()
1651 amdgpu_bo_free_kernel(&adev->gfx.rlc.clear_state_obj, in gfx_v9_0_sw_fini()
1652 &adev->gfx.rlc.clear_state_gpu_addr, in gfx_v9_0_sw_fini()
1653 (void **)&adev->gfx.rlc.cs_ptr); in gfx_v9_0_sw_fini()
1654 if (adev->asic_type == CHIP_RAVEN) { in gfx_v9_0_sw_fini()
1655 amdgpu_bo_free_kernel(&adev->gfx.rlc.cp_table_obj, in gfx_v9_0_sw_fini()
1656 &adev->gfx.rlc.cp_table_gpu_addr, in gfx_v9_0_sw_fini()
1657 (void **)&adev->gfx.rlc.cp_table_ptr); in gfx_v9_0_sw_fini()
1659 gfx_v9_0_free_microcode(adev); in gfx_v9_0_sw_fini()
1665 static void gfx_v9_0_tiling_mode_table_init(struct amdgpu_device *adev) in gfx_v9_0_tiling_mode_table_init() argument
1670 static void gfx_v9_0_select_se_sh(struct amdgpu_device *adev, u32 se_num, u32 sh_num, u32 instance) in gfx_v9_0_select_se_sh() argument
1692 static u32 gfx_v9_0_get_rb_active_bitmap(struct amdgpu_device *adev) in gfx_v9_0_get_rb_active_bitmap() argument
1702 mask = amdgpu_gfx_create_bitmask(adev->gfx.config.max_backends_per_se / in gfx_v9_0_get_rb_active_bitmap()
1703 adev->gfx.config.max_sh_per_se); in gfx_v9_0_get_rb_active_bitmap()
1708 static void gfx_v9_0_setup_rb(struct amdgpu_device *adev) in gfx_v9_0_setup_rb() argument
1713 u32 rb_bitmap_width_per_sh = adev->gfx.config.max_backends_per_se / in gfx_v9_0_setup_rb()
1714 adev->gfx.config.max_sh_per_se; in gfx_v9_0_setup_rb()
1716 mutex_lock(&adev->grbm_idx_mutex); in gfx_v9_0_setup_rb()
1717 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_setup_rb()
1718 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_setup_rb()
1719 gfx_v9_0_select_se_sh(adev, i, j, 0xffffffff); in gfx_v9_0_setup_rb()
1720 data = gfx_v9_0_get_rb_active_bitmap(adev); in gfx_v9_0_setup_rb()
1721 active_rbs |= data << ((i * adev->gfx.config.max_sh_per_se + j) * in gfx_v9_0_setup_rb()
1725 gfx_v9_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff); in gfx_v9_0_setup_rb()
1726 mutex_unlock(&adev->grbm_idx_mutex); in gfx_v9_0_setup_rb()
1728 adev->gfx.config.backend_enable_mask = active_rbs; in gfx_v9_0_setup_rb()
1729 adev->gfx.config.num_rbs = hweight32(active_rbs); in gfx_v9_0_setup_rb()
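gfx_v9_0_setup_rb() derives the global backend mask by selecting each SE/SH, reading its RB bitmap, and packing the per-SH bitmaps side by side; the loop, reassembled from the 1716-1729 fragments (variables as declared in the function):

	mutex_lock(&adev->grbm_idx_mutex);
	for (i = 0; i < adev->gfx.config.max_shader_engines; i++) {
		for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) {
			gfx_v9_0_select_se_sh(adev, i, j, 0xffffffff);
			data = gfx_v9_0_get_rb_active_bitmap(adev);
			active_rbs |= data << ((i * adev->gfx.config.max_sh_per_se + j) *
					       rb_bitmap_width_per_sh);
		}
	}
	gfx_v9_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);	/* back to broadcast */
	mutex_unlock(&adev->grbm_idx_mutex);

	adev->gfx.config.backend_enable_mask = active_rbs;
	adev->gfx.config.num_rbs = hweight32(active_rbs);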
1735 static void gfx_v9_0_init_compute_vmid(struct amdgpu_device *adev) in gfx_v9_0_init_compute_vmid() argument
1753 mutex_lock(&adev->srbm_mutex); in gfx_v9_0_init_compute_vmid()
1755 soc15_grbm_select(adev, 0, 0, 0, i); in gfx_v9_0_init_compute_vmid()
1760 soc15_grbm_select(adev, 0, 0, 0, 0); in gfx_v9_0_init_compute_vmid()
1761 mutex_unlock(&adev->srbm_mutex); in gfx_v9_0_init_compute_vmid()
1764 static void gfx_v9_0_gpu_init(struct amdgpu_device *adev) in gfx_v9_0_gpu_init() argument
1771 gfx_v9_0_tiling_mode_table_init(adev); in gfx_v9_0_gpu_init()
1773 gfx_v9_0_setup_rb(adev); in gfx_v9_0_gpu_init()
1774 gfx_v9_0_get_cu_info(adev, &adev->gfx.cu_info); in gfx_v9_0_gpu_init()
1775 adev->gfx.config.db_debug2 = RREG32_SOC15(GC, 0, mmDB_DEBUG2); in gfx_v9_0_gpu_init()
1779 mutex_lock(&adev->srbm_mutex); in gfx_v9_0_gpu_init()
1780 for (i = 0; i < adev->vm_manager.id_mgr[AMDGPU_GFXHUB].num_ids; i++) { in gfx_v9_0_gpu_init()
1781 soc15_grbm_select(adev, 0, 0, 0, i); in gfx_v9_0_gpu_init()
1793 (adev->gmc.private_aperture_start >> 48)); in gfx_v9_0_gpu_init()
1795 (adev->gmc.shared_aperture_start >> 48)); in gfx_v9_0_gpu_init()
1799 soc15_grbm_select(adev, 0, 0, 0, 0); in gfx_v9_0_gpu_init()
1801 mutex_unlock(&adev->srbm_mutex); in gfx_v9_0_gpu_init()
1803 gfx_v9_0_init_compute_vmid(adev); in gfx_v9_0_gpu_init()
1805 mutex_lock(&adev->grbm_idx_mutex); in gfx_v9_0_gpu_init()
1810 gfx_v9_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff); in gfx_v9_0_gpu_init()
1813 (adev->gfx.config.sc_prim_fifo_size_frontend << in gfx_v9_0_gpu_init()
1815 (adev->gfx.config.sc_prim_fifo_size_backend << in gfx_v9_0_gpu_init()
1817 (adev->gfx.config.sc_hiz_tile_fifo_size << in gfx_v9_0_gpu_init()
1819 (adev->gfx.config.sc_earlyz_tile_fifo_size << in gfx_v9_0_gpu_init()
1821 mutex_unlock(&adev->grbm_idx_mutex); in gfx_v9_0_gpu_init()
1825 static void gfx_v9_0_wait_for_rlc_serdes(struct amdgpu_device *adev) in gfx_v9_0_wait_for_rlc_serdes() argument
1830 mutex_lock(&adev->grbm_idx_mutex); in gfx_v9_0_wait_for_rlc_serdes()
1831 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_wait_for_rlc_serdes()
1832 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_wait_for_rlc_serdes()
1833 gfx_v9_0_select_se_sh(adev, i, j, 0xffffffff); in gfx_v9_0_wait_for_rlc_serdes()
1834 for (k = 0; k < adev->usec_timeout; k++) { in gfx_v9_0_wait_for_rlc_serdes()
1839 if (k == adev->usec_timeout) { in gfx_v9_0_wait_for_rlc_serdes()
1840 gfx_v9_0_select_se_sh(adev, 0xffffffff, in gfx_v9_0_wait_for_rlc_serdes()
1842 mutex_unlock(&adev->grbm_idx_mutex); in gfx_v9_0_wait_for_rlc_serdes()
1849 gfx_v9_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff); in gfx_v9_0_wait_for_rlc_serdes()
1850 mutex_unlock(&adev->grbm_idx_mutex); in gfx_v9_0_wait_for_rlc_serdes()
1856 for (k = 0; k < adev->usec_timeout; k++) { in gfx_v9_0_wait_for_rlc_serdes()
1863 static void gfx_v9_0_enable_gui_idle_interrupt(struct amdgpu_device *adev, in gfx_v9_0_enable_gui_idle_interrupt() argument
1876 static void gfx_v9_0_init_csb(struct amdgpu_device *adev) in gfx_v9_0_init_csb() argument
1880 adev->gfx.rlc.clear_state_gpu_addr >> 32); in gfx_v9_0_init_csb()
1882 adev->gfx.rlc.clear_state_gpu_addr & 0xfffffffc); in gfx_v9_0_init_csb()
1884 adev->gfx.rlc.clear_state_size); in gfx_v9_0_init_csb()
1924 static int gfx_v9_1_init_rlc_save_restore_list(struct amdgpu_device *adev) in gfx_v9_1_init_rlc_save_restore_list() argument
1937 kmalloc(adev->gfx.rlc.reg_list_format_size_bytes, GFP_KERNEL); in gfx_v9_1_init_rlc_save_restore_list()
1940 memcpy(register_list_format, adev->gfx.rlc.register_list_format, in gfx_v9_1_init_rlc_save_restore_list()
1941 adev->gfx.rlc.reg_list_format_size_bytes); in gfx_v9_1_init_rlc_save_restore_list()
1946 adev->gfx.rlc.reg_list_format_direct_reg_list_length, in gfx_v9_1_init_rlc_save_restore_list()
1947 adev->gfx.rlc.reg_list_format_size_bytes >> 2, in gfx_v9_1_init_rlc_save_restore_list()
1962 for (i = 0; i < adev->gfx.rlc.reg_list_size_bytes >> 2; i++) in gfx_v9_1_init_rlc_save_restore_list()
1964 adev->gfx.rlc.register_restore[i]); in gfx_v9_1_init_rlc_save_restore_list()
1968 adev->gfx.rlc.reg_list_format_start); in gfx_v9_1_init_rlc_save_restore_list()
1971 for (i = 0; i < adev->gfx.rlc.reg_list_format_direct_reg_list_length; i++) in gfx_v9_1_init_rlc_save_restore_list()
1976 while (i < (adev->gfx.rlc.reg_list_format_size_bytes >> 2)) { in gfx_v9_1_init_rlc_save_restore_list()
1998 list_size = adev->gfx.rlc.reg_list_size_bytes >> 2; in gfx_v9_1_init_rlc_save_restore_list()
2001 adev->gfx.rlc.reg_restore_list_size); in gfx_v9_1_init_rlc_save_restore_list()
2006 adev->gfx.rlc.starting_offsets_start); in gfx_v9_1_init_rlc_save_restore_list()
2028 static void gfx_v9_0_enable_save_restore_machine(struct amdgpu_device *adev) in gfx_v9_0_enable_save_restore_machine() argument
2033 static void pwr_10_0_gfxip_control_over_cgpg(struct amdgpu_device *adev, in pwr_10_0_gfxip_control_over_cgpg() argument
2059 static void gfx_v9_0_init_gfx_power_gating(struct amdgpu_device *adev) in gfx_v9_0_init_gfx_power_gating() argument
2063 if (adev->pg_flags & (AMD_PG_SUPPORT_GFX_PG | in gfx_v9_0_init_gfx_power_gating()
2097 pwr_10_0_gfxip_control_over_cgpg(adev, true); in gfx_v9_0_init_gfx_power_gating()
2101 static void gfx_v9_0_enable_sck_slow_down_on_power_up(struct amdgpu_device *adev, in gfx_v9_0_enable_sck_slow_down_on_power_up() argument
2115 static void gfx_v9_0_enable_sck_slow_down_on_power_down(struct amdgpu_device *adev, in gfx_v9_0_enable_sck_slow_down_on_power_down() argument
2129 static void gfx_v9_0_enable_cp_power_gating(struct amdgpu_device *adev, in gfx_v9_0_enable_cp_power_gating() argument
2143 static void gfx_v9_0_enable_gfx_cg_power_gating(struct amdgpu_device *adev, in gfx_v9_0_enable_gfx_cg_power_gating() argument
2156 static void gfx_v9_0_enable_gfx_pipeline_powergating(struct amdgpu_device *adev, in gfx_v9_0_enable_gfx_pipeline_powergating() argument
2173 static void gfx_v9_0_enable_gfx_static_mg_power_gating(struct amdgpu_device *adev, in gfx_v9_0_enable_gfx_static_mg_power_gating() argument
2186 static void gfx_v9_0_enable_gfx_dynamic_mg_power_gating(struct amdgpu_device *adev, in gfx_v9_0_enable_gfx_dynamic_mg_power_gating() argument
2199 static void gfx_v9_0_init_pg(struct amdgpu_device *adev) in gfx_v9_0_init_pg() argument
2201 gfx_v9_0_init_csb(adev); in gfx_v9_0_init_pg()
2207 if (adev->gfx.rlc.is_rlc_v2_1) { in gfx_v9_0_init_pg()
2208 gfx_v9_1_init_rlc_save_restore_list(adev); in gfx_v9_0_init_pg()
2209 gfx_v9_0_enable_save_restore_machine(adev); in gfx_v9_0_init_pg()
2212 if (adev->pg_flags & (AMD_PG_SUPPORT_GFX_PG | in gfx_v9_0_init_pg()
2219 adev->gfx.rlc.cp_table_gpu_addr >> 8); in gfx_v9_0_init_pg()
2220 gfx_v9_0_init_gfx_power_gating(adev); in gfx_v9_0_init_pg()
2224 void gfx_v9_0_rlc_stop(struct amdgpu_device *adev) in gfx_v9_0_rlc_stop() argument
2227 gfx_v9_0_enable_gui_idle_interrupt(adev, false); in gfx_v9_0_rlc_stop()
2228 gfx_v9_0_wait_for_rlc_serdes(adev); in gfx_v9_0_rlc_stop()
2231 static void gfx_v9_0_rlc_reset(struct amdgpu_device *adev) in gfx_v9_0_rlc_reset() argument
2239 static void gfx_v9_0_rlc_start(struct amdgpu_device *adev) in gfx_v9_0_rlc_start() argument
2248 if (!(adev->flags & AMD_IS_APU)) in gfx_v9_0_rlc_start()
2249 gfx_v9_0_enable_gui_idle_interrupt(adev, true); in gfx_v9_0_rlc_start()
2258 rlc_ucode_ver, adev->gfx.rlc_fw_version); in gfx_v9_0_rlc_start()
2270 static int gfx_v9_0_rlc_load_microcode(struct amdgpu_device *adev) in gfx_v9_0_rlc_load_microcode() argument
2276 if (!adev->gfx.rlc_fw) in gfx_v9_0_rlc_load_microcode()
2279 hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data; in gfx_v9_0_rlc_load_microcode()
2282 fw_data = (const __le32 *)(adev->gfx.rlc_fw->data + in gfx_v9_0_rlc_load_microcode()
2290 WREG32_SOC15(GC, 0, mmRLC_GPM_UCODE_ADDR, adev->gfx.rlc_fw_version); in gfx_v9_0_rlc_load_microcode()
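When load_type is not AMDGPU_FW_LOAD_PSP, gfx_v9_0_rlc_load_microcode() streams the blob through a register pair: point RLC_GPM_UCODE_ADDR at the load start, write each dword to RLC_GPM_UCODE_DATA, then park ADDR on the fw version. Sketch around lines 2279-2290 (RLCG_UCODE_LOADING_START_ADDRESS as defined in this file; SOC15 macros assumed):

	const struct rlc_firmware_header_v2_0 *hdr =
		(const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data;
	const __le32 *fw_data = (const __le32 *)(adev->gfx.rlc_fw->data +
		le32_to_cpu(hdr->header.ucode_array_offset_bytes));
	unsigned i, fw_size = le32_to_cpu(hdr->header.ucode_size_bytes) / 4;

	WREG32_SOC15(GC, 0, mmRLC_GPM_UCODE_ADDR, RLCG_UCODE_LOADING_START_ADDRESS);
	for (i = 0; i < fw_size; i++)				/* stream the ucode dwords */
		WREG32_SOC15(GC, 0, mmRLC_GPM_UCODE_DATA, le32_to_cpup(fw_data++));
	WREG32_SOC15(GC, 0, mmRLC_GPM_UCODE_ADDR, adev->gfx.rlc_fw_version);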
2295 static int gfx_v9_0_rlc_resume(struct amdgpu_device *adev) in gfx_v9_0_rlc_resume() argument
2299 if (amdgpu_sriov_vf(adev)) { in gfx_v9_0_rlc_resume()
2300 gfx_v9_0_init_csb(adev); in gfx_v9_0_rlc_resume()
2304 gfx_v9_0_rlc_stop(adev); in gfx_v9_0_rlc_resume()
2309 gfx_v9_0_rlc_reset(adev); in gfx_v9_0_rlc_resume()
2311 gfx_v9_0_init_pg(adev); in gfx_v9_0_rlc_resume()
2313 if (adev->firmware.load_type != AMDGPU_FW_LOAD_PSP) { in gfx_v9_0_rlc_resume()
2315 r = gfx_v9_0_rlc_load_microcode(adev); in gfx_v9_0_rlc_resume()
2320 if (adev->asic_type == CHIP_RAVEN) { in gfx_v9_0_rlc_resume()
2322 gfx_v9_0_enable_lbpw(adev, true); in gfx_v9_0_rlc_resume()
2324 gfx_v9_0_enable_lbpw(adev, false); in gfx_v9_0_rlc_resume()
2327 gfx_v9_0_rlc_start(adev); in gfx_v9_0_rlc_resume()
2332 static void gfx_v9_0_cp_gfx_enable(struct amdgpu_device *adev, bool enable) in gfx_v9_0_cp_gfx_enable() argument
2341 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v9_0_cp_gfx_enable()
2342 adev->gfx.gfx_ring[i].ready = false; in gfx_v9_0_cp_gfx_enable()
2348 static int gfx_v9_0_cp_gfx_load_microcode(struct amdgpu_device *adev) in gfx_v9_0_cp_gfx_load_microcode() argument
2356 if (!adev->gfx.me_fw || !adev->gfx.pfp_fw || !adev->gfx.ce_fw) in gfx_v9_0_cp_gfx_load_microcode()
2360 adev->gfx.pfp_fw->data; in gfx_v9_0_cp_gfx_load_microcode()
2362 adev->gfx.ce_fw->data; in gfx_v9_0_cp_gfx_load_microcode()
2364 adev->gfx.me_fw->data; in gfx_v9_0_cp_gfx_load_microcode()
2370 gfx_v9_0_cp_gfx_enable(adev, false); in gfx_v9_0_cp_gfx_load_microcode()
2374 (adev->gfx.pfp_fw->data + in gfx_v9_0_cp_gfx_load_microcode()
2380 WREG32_SOC15(GC, 0, mmCP_PFP_UCODE_ADDR, adev->gfx.pfp_fw_version); in gfx_v9_0_cp_gfx_load_microcode()
2384 (adev->gfx.ce_fw->data + in gfx_v9_0_cp_gfx_load_microcode()
2390 WREG32_SOC15(GC, 0, mmCP_CE_UCODE_ADDR, adev->gfx.ce_fw_version); in gfx_v9_0_cp_gfx_load_microcode()
2394 (adev->gfx.me_fw->data + in gfx_v9_0_cp_gfx_load_microcode()
2400 WREG32_SOC15(GC, 0, mmCP_ME_RAM_WADDR, adev->gfx.me_fw_version); in gfx_v9_0_cp_gfx_load_microcode()
2405 static int gfx_v9_0_cp_gfx_start(struct amdgpu_device *adev) in gfx_v9_0_cp_gfx_start() argument
2407 struct amdgpu_ring *ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_cp_gfx_start()
2413 WREG32_SOC15(GC, 0, mmCP_MAX_CONTEXT, adev->gfx.config.max_hw_contexts - 1); in gfx_v9_0_cp_gfx_start()
2416 gfx_v9_0_cp_gfx_enable(adev, true); in gfx_v9_0_cp_gfx_start()
2418 r = amdgpu_ring_alloc(ring, gfx_v9_0_get_csb_size(adev) + 4 + 3); in gfx_v9_0_cp_gfx_start()
2467 static int gfx_v9_0_cp_gfx_resume(struct amdgpu_device *adev) in gfx_v9_0_cp_gfx_resume() argument
2481 ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_cp_gfx_resume()
2496 rptr_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4); in gfx_v9_0_cp_gfx_resume()
2500 wptr_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in gfx_v9_0_cp_gfx_resume()
2531 gfx_v9_0_cp_gfx_start(adev); in gfx_v9_0_cp_gfx_resume()
2537 static void gfx_v9_0_cp_compute_enable(struct amdgpu_device *adev, bool enable) in gfx_v9_0_cp_compute_enable() argument
2546 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_cp_compute_enable()
2547 adev->gfx.compute_ring[i].ready = false; in gfx_v9_0_cp_compute_enable()
2548 adev->gfx.kiq.ring.ready = false; in gfx_v9_0_cp_compute_enable()
2553 static int gfx_v9_0_cp_compute_load_microcode(struct amdgpu_device *adev) in gfx_v9_0_cp_compute_load_microcode() argument
2560 if (!adev->gfx.mec_fw) in gfx_v9_0_cp_compute_load_microcode()
2563 gfx_v9_0_cp_compute_enable(adev, false); in gfx_v9_0_cp_compute_load_microcode()
2565 mec_hdr = (const struct gfx_firmware_header_v1_0 *)adev->gfx.mec_fw->data; in gfx_v9_0_cp_compute_load_microcode()
2569 (adev->gfx.mec_fw->data + in gfx_v9_0_cp_compute_load_microcode()
2577 adev->gfx.mec.mec_fw_gpu_addr & 0xFFFFF000); in gfx_v9_0_cp_compute_load_microcode()
2579 upper_32_bits(adev->gfx.mec.mec_fw_gpu_addr)); in gfx_v9_0_cp_compute_load_microcode()
2589 adev->gfx.mec_fw_version); in gfx_v9_0_cp_compute_load_microcode()
2599 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_kiq_setting() local
2610 static int gfx_v9_0_kiq_kcq_enable(struct amdgpu_device *adev) in gfx_v9_0_kiq_kcq_enable() argument
2612 struct amdgpu_ring *kiq_ring = &adev->gfx.kiq.ring; in gfx_v9_0_kiq_kcq_enable()
2618 if (!test_bit(i, adev->gfx.mec.queue_bitmap)) in gfx_v9_0_kiq_kcq_enable()
2632 r = amdgpu_gfx_scratch_get(adev, &scratch); in gfx_v9_0_kiq_kcq_enable()
2639 r = amdgpu_ring_alloc(kiq_ring, (7 * adev->gfx.num_compute_rings) + 11); in gfx_v9_0_kiq_kcq_enable()
2642 amdgpu_gfx_scratch_free(adev, scratch); in gfx_v9_0_kiq_kcq_enable()
2656 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_kiq_kcq_enable()
2657 struct amdgpu_ring *ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_kiq_kcq_enable()
2659 uint64_t wptr_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in gfx_v9_0_kiq_kcq_enable()
2685 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v9_0_kiq_kcq_enable()
2691 if (i >= adev->usec_timeout) { in gfx_v9_0_kiq_kcq_enable()
2696 amdgpu_gfx_scratch_free(adev, scratch); in gfx_v9_0_kiq_kcq_enable()
2703 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_mqd_init() local
2790 wb_gpu_addr = adev->wb.gpu_addr + (ring->rptr_offs * 4); in gfx_v9_0_mqd_init()
2796 wb_gpu_addr = adev->wb.gpu_addr + (ring->wptr_offs * 4); in gfx_v9_0_mqd_init()
2841 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_kiq_init_register() local
2864 for (j = 0; j < adev->usec_timeout; j++) { in gfx_v9_0_kiq_init_register()
2946 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_kiq_fini_register() local
2954 for (j = 0; j < adev->usec_timeout; j++) { in gfx_v9_0_kiq_fini_register()
2985 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_kiq_init_queue() local
2991 if (adev->in_gpu_reset) { /* for GPU_RESET case */ in gfx_v9_0_kiq_init_queue()
2993 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kiq_init_queue()
2994 memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx], sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kiq_init_queue()
3000 mutex_lock(&adev->srbm_mutex); in gfx_v9_0_kiq_init_queue()
3001 soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0); in gfx_v9_0_kiq_init_queue()
3003 soc15_grbm_select(adev, 0, 0, 0, 0); in gfx_v9_0_kiq_init_queue()
3004 mutex_unlock(&adev->srbm_mutex); in gfx_v9_0_kiq_init_queue()
3009 mutex_lock(&adev->srbm_mutex); in gfx_v9_0_kiq_init_queue()
3010 soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0); in gfx_v9_0_kiq_init_queue()
3013 soc15_grbm_select(adev, 0, 0, 0, 0); in gfx_v9_0_kiq_init_queue()
3014 mutex_unlock(&adev->srbm_mutex); in gfx_v9_0_kiq_init_queue()
3016 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kiq_init_queue()
3017 memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd, sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kiq_init_queue()
3025 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_kcq_init_queue() local
3027 int mqd_idx = ring - &adev->gfx.compute_ring[0]; in gfx_v9_0_kcq_init_queue()
3029 if (!adev->in_gpu_reset && !adev->gfx.in_suspend) { in gfx_v9_0_kcq_init_queue()
3033 mutex_lock(&adev->srbm_mutex); in gfx_v9_0_kcq_init_queue()
3034 soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0); in gfx_v9_0_kcq_init_queue()
3036 soc15_grbm_select(adev, 0, 0, 0, 0); in gfx_v9_0_kcq_init_queue()
3037 mutex_unlock(&adev->srbm_mutex); in gfx_v9_0_kcq_init_queue()
3039 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kcq_init_queue()
3040 memcpy(adev->gfx.mec.mqd_backup[mqd_idx], mqd, sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kcq_init_queue()
3041 } else if (adev->in_gpu_reset) { /* for GPU_RESET case */ in gfx_v9_0_kcq_init_queue()
3043 if (adev->gfx.mec.mqd_backup[mqd_idx]) in gfx_v9_0_kcq_init_queue()
3044 memcpy(mqd, adev->gfx.mec.mqd_backup[mqd_idx], sizeof(struct v9_mqd_allocation)); in gfx_v9_0_kcq_init_queue()
3056 static int gfx_v9_0_kiq_resume(struct amdgpu_device *adev) in gfx_v9_0_kiq_resume() argument
3061 gfx_v9_0_cp_compute_enable(adev, true); in gfx_v9_0_kiq_resume()
3063 ring = &adev->gfx.kiq.ring; in gfx_v9_0_kiq_resume()
3079 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_kiq_resume()
3080 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_kiq_resume()
3096 r = gfx_v9_0_kiq_kcq_enable(adev); in gfx_v9_0_kiq_resume()
3101 static int gfx_v9_0_cp_resume(struct amdgpu_device *adev) in gfx_v9_0_cp_resume() argument
3106 if (!(adev->flags & AMD_IS_APU)) in gfx_v9_0_cp_resume()
3107 gfx_v9_0_enable_gui_idle_interrupt(adev, false); in gfx_v9_0_cp_resume()
3109 if (adev->firmware.load_type != AMDGPU_FW_LOAD_PSP) { in gfx_v9_0_cp_resume()
3111 r = gfx_v9_0_cp_gfx_load_microcode(adev); in gfx_v9_0_cp_resume()
3115 r = gfx_v9_0_cp_compute_load_microcode(adev); in gfx_v9_0_cp_resume()
3120 r = gfx_v9_0_cp_gfx_resume(adev); in gfx_v9_0_cp_resume()
3124 r = gfx_v9_0_kiq_resume(adev); in gfx_v9_0_cp_resume()
3128 ring = &adev->gfx.gfx_ring[0]; in gfx_v9_0_cp_resume()
3135 ring = &adev->gfx.kiq.ring; in gfx_v9_0_cp_resume()
3141 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_cp_resume()
3142 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_cp_resume()
3150 gfx_v9_0_enable_gui_idle_interrupt(adev, true); in gfx_v9_0_cp_resume()
3155 static void gfx_v9_0_cp_enable(struct amdgpu_device *adev, bool enable) in gfx_v9_0_cp_enable() argument
3157 gfx_v9_0_cp_gfx_enable(adev, enable); in gfx_v9_0_cp_enable()
3158 gfx_v9_0_cp_compute_enable(adev, enable); in gfx_v9_0_cp_enable()
3164 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_hw_init() local
3166 gfx_v9_0_init_golden_registers(adev); in gfx_v9_0_hw_init()
3168 gfx_v9_0_gpu_init(adev); in gfx_v9_0_hw_init()
3170 r = gfx_v9_0_csb_vram_pin(adev); in gfx_v9_0_hw_init()
3174 r = gfx_v9_0_rlc_resume(adev); in gfx_v9_0_hw_init()
3178 r = gfx_v9_0_cp_resume(adev); in gfx_v9_0_hw_init()
3182 r = gfx_v9_0_ngg_en(adev); in gfx_v9_0_hw_init()
3191 struct amdgpu_device *adev = kiq_ring->adev; in gfx_v9_0_kcq_disable() local
3195 r = amdgpu_gfx_scratch_get(adev, &scratch); in gfx_v9_0_kcq_disable()
3205 amdgpu_gfx_scratch_free(adev, scratch); in gfx_v9_0_kcq_disable()
3226 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v9_0_kcq_disable()
3232 if (i >= adev->usec_timeout) { in gfx_v9_0_kcq_disable()
3236 amdgpu_gfx_scratch_free(adev, scratch); in gfx_v9_0_kcq_disable()
3242 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_hw_fini() local
3245 amdgpu_device_ip_set_powergating_state(adev, AMD_IP_BLOCK_TYPE_GFX, in gfx_v9_0_hw_fini()
3248 amdgpu_irq_put(adev, &adev->gfx.priv_reg_irq, 0); in gfx_v9_0_hw_fini()
3249 amdgpu_irq_put(adev, &adev->gfx.priv_inst_irq, 0); in gfx_v9_0_hw_fini()
3252 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_hw_fini()
3253 gfx_v9_0_kcq_disable(&adev->gfx.kiq.ring, &adev->gfx.compute_ring[i]); in gfx_v9_0_hw_fini()
3255 if (amdgpu_sriov_vf(adev)) { in gfx_v9_0_hw_fini()
3256 gfx_v9_0_cp_gfx_enable(adev, false); in gfx_v9_0_hw_fini()
3269 if (!adev->in_gpu_reset && !adev->gfx.in_suspend) { in gfx_v9_0_hw_fini()
3270 mutex_lock(&adev->srbm_mutex); in gfx_v9_0_hw_fini()
3271 soc15_grbm_select(adev, adev->gfx.kiq.ring.me, in gfx_v9_0_hw_fini()
3272 adev->gfx.kiq.ring.pipe, in gfx_v9_0_hw_fini()
3273 adev->gfx.kiq.ring.queue, 0); in gfx_v9_0_hw_fini()
3274 gfx_v9_0_kiq_fini_register(&adev->gfx.kiq.ring); in gfx_v9_0_hw_fini()
3275 soc15_grbm_select(adev, 0, 0, 0, 0); in gfx_v9_0_hw_fini()
3276 mutex_unlock(&adev->srbm_mutex); in gfx_v9_0_hw_fini()
3279 gfx_v9_0_cp_enable(adev, false); in gfx_v9_0_hw_fini()
3280 gfx_v9_0_rlc_stop(adev); in gfx_v9_0_hw_fini()
3282 gfx_v9_0_csb_vram_unpin(adev); in gfx_v9_0_hw_fini()
3289 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_suspend() local
3291 adev->gfx.in_suspend = true; in gfx_v9_0_suspend()
3292 return gfx_v9_0_hw_fini(adev); in gfx_v9_0_suspend()
3297 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_resume() local
3300 r = gfx_v9_0_hw_init(adev); in gfx_v9_0_resume()
3301 adev->gfx.in_suspend = false; in gfx_v9_0_resume()
3307 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_is_idle() local
3319 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_wait_for_idle() local
3321 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v9_0_wait_for_idle()
3333 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_soft_reset() local
3363 gfx_v9_0_rlc_stop(adev); in gfx_v9_0_soft_reset()
3366 gfx_v9_0_cp_gfx_enable(adev, false); in gfx_v9_0_soft_reset()
3369 gfx_v9_0_cp_compute_enable(adev, false); in gfx_v9_0_soft_reset()
3374 dev_info(adev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); in gfx_v9_0_soft_reset()
3391 static uint64_t gfx_v9_0_get_gpu_clock_counter(struct amdgpu_device *adev) in gfx_v9_0_get_gpu_clock_counter() argument
3395 mutex_lock(&adev->gfx.gpu_clock_mutex); in gfx_v9_0_get_gpu_clock_counter()
3399 mutex_unlock(&adev->gfx.gpu_clock_mutex); in gfx_v9_0_get_gpu_clock_counter()
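The clock counter is a 64-bit value read as two 32-bit halves, so the mutex is what keeps the MSB from rolling over between reads. A sketch assuming the SOC15 RLC capture registers (mmRLC_CAPTURE_GPU_CLOCK_COUNT, mmRLC_GPU_CLOCK_COUNT_LSB/MSB; names assumed, not quoted from the listing):

static uint64_t gpu_clock_counter_sketch(struct amdgpu_device *adev)
{
	uint64_t clock;

	mutex_lock(&adev->gfx.gpu_clock_mutex);
	/* Latch the counter, then read the two halves. */
	WREG32_SOC15(GC, 0, mmRLC_CAPTURE_GPU_CLOCK_COUNT, 1);
	clock = (uint64_t)RREG32_SOC15(GC, 0, mmRLC_GPU_CLOCK_COUNT_LSB) |
		((uint64_t)RREG32_SOC15(GC, 0, mmRLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
	mutex_unlock(&adev->gfx.gpu_clock_mutex);
	return clock;
}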
3409 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_emit_gds_switch() local
3443 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_early_init() local
3445 adev->gfx.num_gfx_rings = GFX9_NUM_GFX_RINGS; in gfx_v9_0_early_init()
3446 adev->gfx.num_compute_rings = AMDGPU_MAX_COMPUTE_RINGS; in gfx_v9_0_early_init()
3447 gfx_v9_0_set_ring_funcs(adev); in gfx_v9_0_early_init()
3448 gfx_v9_0_set_irq_funcs(adev); in gfx_v9_0_early_init()
3449 gfx_v9_0_set_gds_init(adev); in gfx_v9_0_early_init()
3450 gfx_v9_0_set_rlc_funcs(adev); in gfx_v9_0_early_init()
3457 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_late_init() local
3460 r = amdgpu_irq_get(adev, &adev->gfx.priv_reg_irq, 0); in gfx_v9_0_late_init()
3464 r = amdgpu_irq_get(adev, &adev->gfx.priv_inst_irq, 0); in gfx_v9_0_late_init()
3471 static void gfx_v9_0_enter_rlc_safe_mode(struct amdgpu_device *adev) in gfx_v9_0_enter_rlc_safe_mode() argument
3476 if (adev->gfx.rlc.in_safe_mode) in gfx_v9_0_enter_rlc_safe_mode()
3484 if (adev->cg_flags & in gfx_v9_0_enter_rlc_safe_mode()
3492 for (i = 0; i < adev->usec_timeout; i++) { in gfx_v9_0_enter_rlc_safe_mode()
3497 adev->gfx.rlc.in_safe_mode = true; in gfx_v9_0_enter_rlc_safe_mode()
3501 static void gfx_v9_0_exit_rlc_safe_mode(struct amdgpu_device *adev) in gfx_v9_0_exit_rlc_safe_mode() argument
3505 if (!adev->gfx.rlc.in_safe_mode) in gfx_v9_0_exit_rlc_safe_mode()
3513 if (adev->cg_flags & in gfx_v9_0_exit_rlc_safe_mode()
3521 adev->gfx.rlc.in_safe_mode = false; in gfx_v9_0_exit_rlc_safe_mode()
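The safe-mode pair brackets any register work that the RLC's clock-gating state machine could race with. A sketch of the enter side, assuming the RLC_SAFE_MODE register and its CMD/MESSAGE fields from the SOC15 headers (the in_safe_mode guard and the cg_flags check are taken from the fragments):

static void enter_rlc_safe_mode_sketch(struct amdgpu_device *adev)
{
	uint32_t data;
	unsigned i;

	if (adev->gfx.rlc.in_safe_mode)
		return;

	/* Only needed when some form of gfx clock gating is active. */
	if (adev->cg_flags & (AMD_CG_SUPPORT_GFX_CGCG |
			      AMD_CG_SUPPORT_GFX_MGCG |
			      AMD_CG_SUPPORT_GFX_3D_CGCG)) {
		/* Ask the RLC to park, then wait for the CMD ack. */
		data = RLC_SAFE_MODE__CMD_MASK |
		       (1 << RLC_SAFE_MODE__MESSAGE__SHIFT);
		WREG32_SOC15(GC, 0, mmRLC_SAFE_MODE, data);

		for (i = 0; i < adev->usec_timeout; i++) {
			if (!REG_GET_FIELD(RREG32_SOC15(GC, 0, mmRLC_SAFE_MODE),
					   RLC_SAFE_MODE, CMD))
				break;
			udelay(1);
		}
		adev->gfx.rlc.in_safe_mode = true;
	}
}

The exit side appears to mirror this without polling: under the same cg_flags check it writes CMD alone and clears in_safe_mode.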
3525 static void gfx_v9_0_update_gfx_cg_power_gating(struct amdgpu_device *adev, in gfx_v9_0_update_gfx_cg_power_gating() argument
3528 gfx_v9_0_enter_rlc_safe_mode(adev); in gfx_v9_0_update_gfx_cg_power_gating()
3530 if ((adev->pg_flags & AMD_PG_SUPPORT_GFX_PG) && enable) { in gfx_v9_0_update_gfx_cg_power_gating()
3531 gfx_v9_0_enable_gfx_cg_power_gating(adev, true); in gfx_v9_0_update_gfx_cg_power_gating()
3532 if (adev->pg_flags & AMD_PG_SUPPORT_GFX_PIPELINE) in gfx_v9_0_update_gfx_cg_power_gating()
3533 gfx_v9_0_enable_gfx_pipeline_powergating(adev, true); in gfx_v9_0_update_gfx_cg_power_gating()
3535 gfx_v9_0_enable_gfx_cg_power_gating(adev, false); in gfx_v9_0_update_gfx_cg_power_gating()
3536 gfx_v9_0_enable_gfx_pipeline_powergating(adev, false); in gfx_v9_0_update_gfx_cg_power_gating()
3539 gfx_v9_0_exit_rlc_safe_mode(adev); in gfx_v9_0_update_gfx_cg_power_gating()
3542 static void gfx_v9_0_update_gfx_mg_power_gating(struct amdgpu_device *adev, in gfx_v9_0_update_gfx_mg_power_gating() argument
3548 if ((adev->pg_flags & AMD_PG_SUPPORT_GFX_SMG) && enable) in gfx_v9_0_update_gfx_mg_power_gating()
3549 gfx_v9_0_enable_gfx_static_mg_power_gating(adev, true); in gfx_v9_0_update_gfx_mg_power_gating()
3551 gfx_v9_0_enable_gfx_static_mg_power_gating(adev, false); in gfx_v9_0_update_gfx_mg_power_gating()
3553 if ((adev->pg_flags & AMD_PG_SUPPORT_GFX_DMG) && enable) in gfx_v9_0_update_gfx_mg_power_gating()
3554 gfx_v9_0_enable_gfx_dynamic_mg_power_gating(adev, true); in gfx_v9_0_update_gfx_mg_power_gating()
3556 gfx_v9_0_enable_gfx_dynamic_mg_power_gating(adev, false); in gfx_v9_0_update_gfx_mg_power_gating()
3561 static void gfx_v9_0_update_medium_grain_clock_gating(struct amdgpu_device *adev, in gfx_v9_0_update_medium_grain_clock_gating() argument
3567 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_GFX_MGCG)) { in gfx_v9_0_update_medium_grain_clock_gating()
3571 if (adev->asic_type != CHIP_VEGA12) in gfx_v9_0_update_medium_grain_clock_gating()
3585 if (adev->cg_flags & AMD_CG_SUPPORT_GFX_MGLS) { in gfx_v9_0_update_medium_grain_clock_gating()
3587 if (adev->cg_flags & AMD_CG_SUPPORT_GFX_RLC_LS) { in gfx_v9_0_update_medium_grain_clock_gating()
3594 if (adev->cg_flags & AMD_CG_SUPPORT_GFX_CP_LS) { in gfx_v9_0_update_medium_grain_clock_gating()
3605 if (adev->asic_type != CHIP_VEGA12) in gfx_v9_0_update_medium_grain_clock_gating()
3632 static void gfx_v9_0_update_3d_clock_gating(struct amdgpu_device *adev, in gfx_v9_0_update_3d_clock_gating() argument
3637 adev->gfx.rlc.funcs->enter_safe_mode(adev); in gfx_v9_0_update_3d_clock_gating()
3640 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_GFX_3D_CGCG)) { in gfx_v9_0_update_3d_clock_gating()
3654 if (adev->cg_flags & AMD_CG_SUPPORT_GFX_3D_CGLS) in gfx_v9_0_update_3d_clock_gating()
3677 adev->gfx.rlc.funcs->exit_safe_mode(adev); in gfx_v9_0_update_3d_clock_gating()
3680 static void gfx_v9_0_update_coarse_grain_clock_gating(struct amdgpu_device *adev, in gfx_v9_0_update_coarse_grain_clock_gating() argument
3685 adev->gfx.rlc.funcs->enter_safe_mode(adev); in gfx_v9_0_update_coarse_grain_clock_gating()
3687 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_GFX_CGCG)) { in gfx_v9_0_update_coarse_grain_clock_gating()
3691 if (adev->cg_flags & AMD_CG_SUPPORT_GFX_CGLS) in gfx_v9_0_update_coarse_grain_clock_gating()
3704 if (adev->cg_flags & AMD_CG_SUPPORT_GFX_CGLS) in gfx_v9_0_update_coarse_grain_clock_gating()
3725 adev->gfx.rlc.funcs->exit_safe_mode(adev); in gfx_v9_0_update_coarse_grain_clock_gating()
3728 static int gfx_v9_0_update_gfx_clock_gating(struct amdgpu_device *adev, in gfx_v9_0_update_gfx_clock_gating() argument
3735 gfx_v9_0_update_medium_grain_clock_gating(adev, enable); in gfx_v9_0_update_gfx_clock_gating()
3737 gfx_v9_0_update_3d_clock_gating(adev, enable); in gfx_v9_0_update_gfx_clock_gating()
3739 gfx_v9_0_update_coarse_grain_clock_gating(adev, enable); in gfx_v9_0_update_gfx_clock_gating()
3744 gfx_v9_0_update_coarse_grain_clock_gating(adev, enable); in gfx_v9_0_update_gfx_clock_gating()
3746 gfx_v9_0_update_3d_clock_gating(adev, enable); in gfx_v9_0_update_gfx_clock_gating()
3748 gfx_v9_0_update_medium_grain_clock_gating(adev, enable); in gfx_v9_0_update_gfx_clock_gating()
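The paired call sequences above encode an ordering rule: coarse-grain gating (CGCG/CGLS) is layered on top of medium-grain gating, so it is enabled last and disabled first. Reconstructed directly from the fragments:

static int update_gfx_clock_gating_sketch(struct amdgpu_device *adev,
					  bool enable)
{
	if (enable) {
		/* CGCG/CGLS must be enabled after MGCG/MGLS. */
		gfx_v9_0_update_medium_grain_clock_gating(adev, true);
		gfx_v9_0_update_3d_clock_gating(adev, true);
		gfx_v9_0_update_coarse_grain_clock_gating(adev, true);
	} else {
		/* ...and disabled before them: strict reverse order. */
		gfx_v9_0_update_coarse_grain_clock_gating(adev, false);
		gfx_v9_0_update_3d_clock_gating(adev, false);
		gfx_v9_0_update_medium_grain_clock_gating(adev, false);
	}
	return 0;
}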
3761 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_set_powergating_state() local
3764 switch (adev->asic_type) { in gfx_v9_0_set_powergating_state()
3766 if (adev->pg_flags & AMD_PG_SUPPORT_RLC_SMU_HS) { in gfx_v9_0_set_powergating_state()
3767 gfx_v9_0_enable_sck_slow_down_on_power_up(adev, true); in gfx_v9_0_set_powergating_state()
3768 gfx_v9_0_enable_sck_slow_down_on_power_down(adev, true); in gfx_v9_0_set_powergating_state()
3770 gfx_v9_0_enable_sck_slow_down_on_power_up(adev, false); in gfx_v9_0_set_powergating_state()
3771 gfx_v9_0_enable_sck_slow_down_on_power_down(adev, false); in gfx_v9_0_set_powergating_state()
3774 if (adev->pg_flags & AMD_PG_SUPPORT_CP) in gfx_v9_0_set_powergating_state()
3775 gfx_v9_0_enable_cp_power_gating(adev, true); in gfx_v9_0_set_powergating_state()
3777 gfx_v9_0_enable_cp_power_gating(adev, false); in gfx_v9_0_set_powergating_state()
3780 gfx_v9_0_update_gfx_cg_power_gating(adev, enable); in gfx_v9_0_set_powergating_state()
3783 gfx_v9_0_update_gfx_mg_power_gating(adev, enable); in gfx_v9_0_set_powergating_state()
3786 if (enable && adev->powerplay.pp_funcs->set_powergating_by_smu) in gfx_v9_0_set_powergating_state()
3787 amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_GFX, true); in gfx_v9_0_set_powergating_state()
3791 if (enable && adev->powerplay.pp_funcs->set_powergating_by_smu) in gfx_v9_0_set_powergating_state()
3792 amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_GFX, true); in gfx_v9_0_set_powergating_state()
3804 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_set_clockgating_state() local
3806 if (amdgpu_sriov_vf(adev)) in gfx_v9_0_set_clockgating_state()
3809 switch (adev->asic_type) { in gfx_v9_0_set_clockgating_state()
3814 gfx_v9_0_update_gfx_clock_gating(adev, in gfx_v9_0_set_clockgating_state()
3825 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in gfx_v9_0_get_clockgating_state() local
3828 if (amdgpu_sriov_vf(adev)) in gfx_v9_0_get_clockgating_state()
3867 return ring->adev->wb.wb[ring->rptr_offs]; /* gfx9 is 32bit rptr */ in gfx_v9_0_ring_get_rptr_gfx()
3872 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_get_wptr_gfx() local
3877 wptr = atomic64_read((atomic64_t *)&adev->wb.wb[ring->wptr_offs]); in gfx_v9_0_ring_get_wptr_gfx()
3888 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_set_wptr_gfx() local
3892 atomic64_set((atomic64_t *)&adev->wb.wb[ring->wptr_offs], ring->wptr); in gfx_v9_0_ring_set_wptr_gfx()
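The atomic64_set publishes the new wptr in the writeback slot; on gfx9 this is normally followed by a doorbell write, with MMIO as the fallback. A sketch (the doorbell/MMIO split and the mmCP_RB0_WPTR register names are assumed, only the atomic store is quoted):

static void ring_set_wptr_gfx_sketch(struct amdgpu_ring *ring)
{
	struct amdgpu_device *adev = ring->adev;

	if (ring->use_doorbell) {
		/* Publish the 64-bit wptr whole, then kick the doorbell. */
		atomic64_set((atomic64_t *)&adev->wb.wb[ring->wptr_offs],
			     ring->wptr);
		WDOORBELL64(ring->doorbell_index, ring->wptr);
	} else {
		WREG32_SOC15(GC, 0, mmCP_RB0_WPTR,
			     lower_32_bits(ring->wptr));
		WREG32_SOC15(GC, 0, mmCP_RB0_WPTR_HI,
			     upper_32_bits(ring->wptr));
	}
}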
3902 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_emit_hdp_flush() local
3904 const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio_funcs->hdp_flush_reg; in gfx_v9_0_ring_emit_hdp_flush()
3924 adev->nbio_funcs->get_hdp_flush_req_offset(adev), in gfx_v9_0_ring_emit_hdp_flush()
3925 adev->nbio_funcs->get_hdp_flush_done_offset(adev), in gfx_v9_0_ring_emit_hdp_flush()
3942 if (amdgpu_sriov_vf(ring->adev) && (ib->flags & AMDGPU_IB_FLAG_PREEMPT)) { in gfx_v9_0_ring_emit_ib_gfx()
4037 return ring->adev->wb.wb[ring->rptr_offs]; /* gfx9 hardware is 32bit rptr */ in gfx_v9_0_ring_get_rptr_compute()
4046 wptr = atomic64_read((atomic64_t *)&ring->adev->wb.wb[ring->wptr_offs]); in gfx_v9_0_ring_get_wptr_compute()
4055 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_set_pipe_percent() local
4059 pipe_num = ring->me * adev->gfx.mec.num_pipe_per_mec + ring->pipe; in gfx_v9_0_ring_set_pipe_percent()
4071 static void gfx_v9_0_pipe_reserve_resources(struct amdgpu_device *adev, in gfx_v9_0_pipe_reserve_resources() argument
4079 mutex_lock(&adev->gfx.pipe_reserve_mutex); in gfx_v9_0_pipe_reserve_resources()
4080 pipe = amdgpu_gfx_queue_to_bit(adev, ring->me, ring->pipe, 0); in gfx_v9_0_pipe_reserve_resources()
4082 set_bit(pipe, adev->gfx.pipe_reserve_bitmap); in gfx_v9_0_pipe_reserve_resources()
4084 clear_bit(pipe, adev->gfx.pipe_reserve_bitmap); in gfx_v9_0_pipe_reserve_resources()
4086 if (!bitmap_weight(adev->gfx.pipe_reserve_bitmap, AMDGPU_MAX_COMPUTE_QUEUES)) { in gfx_v9_0_pipe_reserve_resources()
4088 for (i = 0; i < adev->gfx.num_gfx_rings; ++i) in gfx_v9_0_pipe_reserve_resources()
4089 gfx_v9_0_ring_set_pipe_percent(&adev->gfx.gfx_ring[i], in gfx_v9_0_pipe_reserve_resources()
4092 for (i = 0; i < adev->gfx.num_compute_rings; ++i) in gfx_v9_0_pipe_reserve_resources()
4093 gfx_v9_0_ring_set_pipe_percent(&adev->gfx.compute_ring[i], in gfx_v9_0_pipe_reserve_resources()
4097 for (i = 0; i < adev->gfx.num_gfx_rings; ++i) { in gfx_v9_0_pipe_reserve_resources()
4098 iring = &adev->gfx.gfx_ring[i]; in gfx_v9_0_pipe_reserve_resources()
4099 pipe = amdgpu_gfx_queue_to_bit(adev, in gfx_v9_0_pipe_reserve_resources()
4103 reserve = test_bit(pipe, adev->gfx.pipe_reserve_bitmap); in gfx_v9_0_pipe_reserve_resources()
4107 for (i = 0; i < adev->gfx.num_compute_rings; ++i) { in gfx_v9_0_pipe_reserve_resources()
4108 iring = &adev->gfx.compute_ring[i]; in gfx_v9_0_pipe_reserve_resources()
4109 pipe = amdgpu_gfx_queue_to_bit(adev, in gfx_v9_0_pipe_reserve_resources()
4113 reserve = test_bit(pipe, adev->gfx.pipe_reserve_bitmap); in gfx_v9_0_pipe_reserve_resources()
4118 mutex_unlock(&adev->gfx.pipe_reserve_mutex); in gfx_v9_0_pipe_reserve_resources()
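The reserve logic is a refcount-by-bitmap: each acquire sets the bit for the requesting ring's hardware pipe, and whenever any bit is set, every ring whose pipe is not reserved gets throttled. A sketch assembled from the fragments (the set_pipe_percent arguments in the empty-bitmap branch are assumed to mean "full speed"):

static void pipe_reserve_resources_sketch(struct amdgpu_device *adev,
					  struct amdgpu_ring *ring,
					  bool acquire)
{
	int i, pipe;
	bool reserve;
	struct amdgpu_ring *iring;

	mutex_lock(&adev->gfx.pipe_reserve_mutex);
	pipe = amdgpu_gfx_queue_to_bit(adev, ring->me, ring->pipe, 0);
	if (acquire)
		set_bit(pipe, adev->gfx.pipe_reserve_bitmap);
	else
		clear_bit(pipe, adev->gfx.pipe_reserve_bitmap);

	if (!bitmap_weight(adev->gfx.pipe_reserve_bitmap,
			   AMDGPU_MAX_COMPUTE_QUEUES)) {
		/* No reservations left: everyone back to full speed. */
		for (i = 0; i < adev->gfx.num_gfx_rings; ++i)
			gfx_v9_0_ring_set_pipe_percent(&adev->gfx.gfx_ring[i],
						       true);
		for (i = 0; i < adev->gfx.num_compute_rings; ++i)
			gfx_v9_0_ring_set_pipe_percent(&adev->gfx.compute_ring[i],
						       true);
	} else {
		/* Throttle every pipe without a current reservation. */
		for (i = 0; i < adev->gfx.num_gfx_rings; ++i) {
			iring = &adev->gfx.gfx_ring[i];
			pipe = amdgpu_gfx_queue_to_bit(adev, iring->me,
						       iring->pipe, 0);
			reserve = test_bit(pipe, adev->gfx.pipe_reserve_bitmap);
			gfx_v9_0_ring_set_pipe_percent(iring, reserve);
		}
		for (i = 0; i < adev->gfx.num_compute_rings; ++i) {
			iring = &adev->gfx.compute_ring[i];
			pipe = amdgpu_gfx_queue_to_bit(adev, iring->me,
						       iring->pipe, 0);
			reserve = test_bit(pipe, adev->gfx.pipe_reserve_bitmap);
			gfx_v9_0_ring_set_pipe_percent(iring, reserve);
		}
	}
	mutex_unlock(&adev->gfx.pipe_reserve_mutex);
}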
4121 static void gfx_v9_0_hqd_set_priority(struct amdgpu_device *adev, in gfx_v9_0_hqd_set_priority() argument
4128 mutex_lock(&adev->srbm_mutex); in gfx_v9_0_hqd_set_priority()
4129 soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0); in gfx_v9_0_hqd_set_priority()
4134 soc15_grbm_select(adev, 0, 0, 0, 0); in gfx_v9_0_hqd_set_priority()
4135 mutex_unlock(&adev->srbm_mutex); in gfx_v9_0_hqd_set_priority()
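The srbm_mutex plus soc15_grbm_select pair is the standard pattern for touching banked HQD registers: select the target (me, pipe, queue), write, then restore (0, 0, 0, 0). A sketch with assumed priority values and CP_HQD register names:

static void hqd_set_priority_sketch(struct amdgpu_device *adev,
				    struct amdgpu_ring *ring, bool acquire)
{
	uint32_t pipe_priority = acquire ? 0x2 : 0x0;   /* high vs normal */
	uint32_t queue_priority = acquire ? 0xf : 0x0;

	mutex_lock(&adev->srbm_mutex);
	soc15_grbm_select(adev, ring->me, ring->pipe, ring->queue, 0);

	WREG32_SOC15(GC, 0, mmCP_HQD_PIPE_PRIORITY, pipe_priority);
	WREG32_SOC15(GC, 0, mmCP_HQD_QUEUE_PRIORITY, queue_priority);

	soc15_grbm_select(adev, 0, 0, 0, 0);
	mutex_unlock(&adev->srbm_mutex);
}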
4141 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_set_priority_compute() local
4147 gfx_v9_0_hqd_set_priority(adev, ring, acquire); in gfx_v9_0_ring_set_priority_compute()
4148 gfx_v9_0_pipe_reserve_resources(adev, ring, acquire); in gfx_v9_0_ring_set_priority_compute()
4153 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_set_wptr_compute() local
4157 atomic64_set((atomic64_t *)&adev->wb.wb[ring->wptr_offs], ring->wptr); in gfx_v9_0_ring_set_wptr_compute()
4167 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_emit_fence_kiq() local
4204 csa_addr = amdgpu_csa_vaddr(ring->adev); in gfx_v9_0_ring_emit_ce_meta()
4222 csa_addr = amdgpu_csa_vaddr(ring->adev); in gfx_v9_0_ring_emit_de_meta()
4248 if (amdgpu_sriov_vf(ring->adev)) in gfx_v9_ring_emit_cntxcntl()
4305 struct amdgpu_device *adev = ring->adev; in gfx_v9_0_ring_emit_rreg() local
4313 amdgpu_ring_write(ring, lower_32_bits(adev->wb.gpu_addr + in gfx_v9_0_ring_emit_rreg()
4314 adev->virt.reg_val_offs * 4)); in gfx_v9_0_ring_emit_rreg()
4315 amdgpu_ring_write(ring, upper_32_bits(adev->wb.gpu_addr + in gfx_v9_0_ring_emit_rreg()
4316 adev->virt.reg_val_offs * 4)); in gfx_v9_0_ring_emit_rreg()
4354 if (amdgpu_sriov_vf(ring->adev)) in gfx_v9_0_ring_emit_reg_write_reg_wait()
4362 static void gfx_v9_0_set_gfx_eop_interrupt_state(struct amdgpu_device *adev, in gfx_v9_0_set_gfx_eop_interrupt_state() argument
4377 static void gfx_v9_0_set_compute_eop_interrupt_state(struct amdgpu_device *adev, in gfx_v9_0_set_compute_eop_interrupt_state() argument
4430 static int gfx_v9_0_set_priv_reg_fault_state(struct amdgpu_device *adev, in gfx_v9_0_set_priv_reg_fault_state() argument
4449 static int gfx_v9_0_set_priv_inst_fault_state(struct amdgpu_device *adev, in gfx_v9_0_set_priv_inst_fault_state() argument
4467 static int gfx_v9_0_set_eop_interrupt_state(struct amdgpu_device *adev, in gfx_v9_0_set_eop_interrupt_state() argument
4474 gfx_v9_0_set_gfx_eop_interrupt_state(adev, state); in gfx_v9_0_set_eop_interrupt_state()
4477 gfx_v9_0_set_compute_eop_interrupt_state(adev, 1, 0, state); in gfx_v9_0_set_eop_interrupt_state()
4480 gfx_v9_0_set_compute_eop_interrupt_state(adev, 1, 1, state); in gfx_v9_0_set_eop_interrupt_state()
4483 gfx_v9_0_set_compute_eop_interrupt_state(adev, 1, 2, state); in gfx_v9_0_set_eop_interrupt_state()
4486 gfx_v9_0_set_compute_eop_interrupt_state(adev, 1, 3, state); in gfx_v9_0_set_eop_interrupt_state()
4489 gfx_v9_0_set_compute_eop_interrupt_state(adev, 2, 0, state); in gfx_v9_0_set_eop_interrupt_state()
4492 gfx_v9_0_set_compute_eop_interrupt_state(adev, 2, 1, state); in gfx_v9_0_set_eop_interrupt_state()
4495 gfx_v9_0_set_compute_eop_interrupt_state(adev, 2, 2, state); in gfx_v9_0_set_eop_interrupt_state()
4498 gfx_v9_0_set_compute_eop_interrupt_state(adev, 2, 3, state); in gfx_v9_0_set_eop_interrupt_state()
4506 static int gfx_v9_0_eop_irq(struct amdgpu_device *adev, in gfx_v9_0_eop_irq() argument
4521 amdgpu_fence_process(&adev->gfx.gfx_ring[0]); in gfx_v9_0_eop_irq()
4525 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_eop_irq()
4526 ring = &adev->gfx.compute_ring[i]; in gfx_v9_0_eop_irq()
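The EOP handler fans a single interrupt vector out to the owning ring: ME 0 is the lone gfx ring, while MEC interrupts are matched against every compute ring's (me, pipe, queue). A sketch; the bit layout of entry->ring_id is an assumption:

static int eop_irq_sketch(struct amdgpu_device *adev,
			  struct amdgpu_irq_src *source,
			  struct amdgpu_iv_entry *entry)
{
	u8 me_id = (entry->ring_id & 0x0c) >> 2;
	u8 pipe_id = (entry->ring_id & 0x03) >> 0;
	u8 queue_id = (entry->ring_id & 0x70) >> 4;
	struct amdgpu_ring *ring;
	int i;

	switch (me_id) {
	case 0:
		amdgpu_fence_process(&adev->gfx.gfx_ring[0]);
		break;
	case 1:
	case 2:
		/* Only the ring that owns this (me, pipe, queue) gets
		 * its fences processed. */
		for (i = 0; i < adev->gfx.num_compute_rings; i++) {
			ring = &adev->gfx.compute_ring[i];
			if (ring->me == me_id && ring->pipe == pipe_id &&
			    ring->queue == queue_id)
				amdgpu_fence_process(ring);
		}
		break;
	}
	return 0;
}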
4538 static int gfx_v9_0_priv_reg_irq(struct amdgpu_device *adev, in gfx_v9_0_priv_reg_irq() argument
4543 schedule_work(&adev->reset_work); in gfx_v9_0_priv_reg_irq()
4547 static int gfx_v9_0_priv_inst_irq(struct amdgpu_device *adev, in gfx_v9_0_priv_inst_irq() argument
4552 schedule_work(&adev->reset_work); in gfx_v9_0_priv_inst_irq()
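Both privileged-fault handlers reduce to the same move: the fault is unrecoverable from the ring's point of view, so they queue the device's reset work. A sketch (the error message wording is assumed):

static int priv_fault_irq_sketch(struct amdgpu_device *adev,
				 struct amdgpu_irq_src *source,
				 struct amdgpu_iv_entry *entry)
{
	DRM_ERROR("Illegal register access in command stream\n");
	schedule_work(&adev->reset_work);
	return 0;
}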
4556 static int gfx_v9_0_kiq_set_interrupt_state(struct amdgpu_device *adev, in gfx_v9_0_kiq_set_interrupt_state() argument
4562 struct amdgpu_ring *ring = &(adev->gfx.kiq.ring); in gfx_v9_0_kiq_set_interrupt_state()
4601 static int gfx_v9_0_kiq_irq(struct amdgpu_device *adev, in gfx_v9_0_kiq_irq() argument
4606 struct amdgpu_ring *ring = &(adev->gfx.kiq.ring); in gfx_v9_0_kiq_irq()
4752 static void gfx_v9_0_set_ring_funcs(struct amdgpu_device *adev) in gfx_v9_0_set_ring_funcs() argument
4756 adev->gfx.kiq.ring.funcs = &gfx_v9_0_ring_funcs_kiq; in gfx_v9_0_set_ring_funcs()
4758 for (i = 0; i < adev->gfx.num_gfx_rings; i++) in gfx_v9_0_set_ring_funcs()
4759 adev->gfx.gfx_ring[i].funcs = &gfx_v9_0_ring_funcs_gfx; in gfx_v9_0_set_ring_funcs()
4761 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_set_ring_funcs()
4762 adev->gfx.compute_ring[i].funcs = &gfx_v9_0_ring_funcs_compute; in gfx_v9_0_set_ring_funcs()
4785 static void gfx_v9_0_set_irq_funcs(struct amdgpu_device *adev) in gfx_v9_0_set_irq_funcs() argument
4787 adev->gfx.eop_irq.num_types = AMDGPU_CP_IRQ_LAST; in gfx_v9_0_set_irq_funcs()
4788 adev->gfx.eop_irq.funcs = &gfx_v9_0_eop_irq_funcs; in gfx_v9_0_set_irq_funcs()
4790 adev->gfx.priv_reg_irq.num_types = 1; in gfx_v9_0_set_irq_funcs()
4791 adev->gfx.priv_reg_irq.funcs = &gfx_v9_0_priv_reg_irq_funcs; in gfx_v9_0_set_irq_funcs()
4793 adev->gfx.priv_inst_irq.num_types = 1; in gfx_v9_0_set_irq_funcs()
4794 adev->gfx.priv_inst_irq.funcs = &gfx_v9_0_priv_inst_irq_funcs; in gfx_v9_0_set_irq_funcs()
4796 adev->gfx.kiq.irq.num_types = AMDGPU_CP_KIQ_IRQ_LAST; in gfx_v9_0_set_irq_funcs()
4797 adev->gfx.kiq.irq.funcs = &gfx_v9_0_kiq_irq_funcs; in gfx_v9_0_set_irq_funcs()
4800 static void gfx_v9_0_set_rlc_funcs(struct amdgpu_device *adev) in gfx_v9_0_set_rlc_funcs() argument
4802 switch (adev->asic_type) { in gfx_v9_0_set_rlc_funcs()
4807 adev->gfx.rlc.funcs = &gfx_v9_0_rlc_funcs; in gfx_v9_0_set_rlc_funcs()
4814 static void gfx_v9_0_set_gds_init(struct amdgpu_device *adev) in gfx_v9_0_set_gds_init() argument
4817 adev->gds.mem.total_size = RREG32_SOC15(GC, 0, mmGDS_VMID0_SIZE); in gfx_v9_0_set_gds_init()
4818 adev->gds.gws.total_size = 64; in gfx_v9_0_set_gds_init()
4819 adev->gds.oa.total_size = 16; in gfx_v9_0_set_gds_init()
4821 if (adev->gds.mem.total_size == 64 * 1024) { in gfx_v9_0_set_gds_init()
4822 adev->gds.mem.gfx_partition_size = 4096; in gfx_v9_0_set_gds_init()
4823 adev->gds.mem.cs_partition_size = 4096; in gfx_v9_0_set_gds_init()
4825 adev->gds.gws.gfx_partition_size = 4; in gfx_v9_0_set_gds_init()
4826 adev->gds.gws.cs_partition_size = 4; in gfx_v9_0_set_gds_init()
4828 adev->gds.oa.gfx_partition_size = 4; in gfx_v9_0_set_gds_init()
4829 adev->gds.oa.cs_partition_size = 1; in gfx_v9_0_set_gds_init()
4831 adev->gds.mem.gfx_partition_size = 1024; in gfx_v9_0_set_gds_init()
4832 adev->gds.mem.cs_partition_size = 1024; in gfx_v9_0_set_gds_init()
4834 adev->gds.gws.gfx_partition_size = 16; in gfx_v9_0_set_gds_init()
4835 adev->gds.gws.cs_partition_size = 16; in gfx_v9_0_set_gds_init()
4837 adev->gds.oa.gfx_partition_size = 4; in gfx_v9_0_set_gds_init()
4838 adev->gds.oa.cs_partition_size = 4; in gfx_v9_0_set_gds_init()
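For scale: mmGDS_VMID0_SIZE reports the total on-chip GDS memory, and the mem partition sizes above follow from dividing that total across the 16 VMIDs, so 64 KiB / 16 = 4 KiB per gfx and per CS partition, while the fallback branch hands out 1 KiB shares (16 * 1 KiB = 16 KiB). The GWS (64) and OA (16) totals count hardware resource slots rather than bytes.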
4842 static void gfx_v9_0_set_user_cu_inactive_bitmap(struct amdgpu_device *adev, in gfx_v9_0_set_user_cu_inactive_bitmap() argument
4856 static u32 gfx_v9_0_get_cu_active_bitmap(struct amdgpu_device *adev) in gfx_v9_0_get_cu_active_bitmap() argument
4866 mask = amdgpu_gfx_create_bitmask(adev->gfx.config.max_cu_per_sh); in gfx_v9_0_get_cu_active_bitmap()
4871 static int gfx_v9_0_get_cu_info(struct amdgpu_device *adev, in gfx_v9_0_get_cu_info() argument
4878 if (!adev || !cu_info) in gfx_v9_0_get_cu_info()
4883 mutex_lock(&adev->grbm_idx_mutex); in gfx_v9_0_get_cu_info()
4884 for (i = 0; i < adev->gfx.config.max_shader_engines; i++) { in gfx_v9_0_get_cu_info()
4885 for (j = 0; j < adev->gfx.config.max_sh_per_se; j++) { in gfx_v9_0_get_cu_info()
4889 gfx_v9_0_select_se_sh(adev, i, j, 0xffffffff); in gfx_v9_0_get_cu_info()
4892 adev, disable_masks[i * 2 + j]); in gfx_v9_0_get_cu_info()
4893 bitmap = gfx_v9_0_get_cu_active_bitmap(adev); in gfx_v9_0_get_cu_info()
4896 for (k = 0; k < adev->gfx.config.max_cu_per_sh; k++) { in gfx_v9_0_get_cu_info()
4898 if (counter < adev->gfx.config.max_cu_per_sh) in gfx_v9_0_get_cu_info()
4910 gfx_v9_0_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff); in gfx_v9_0_get_cu_info()
4911 mutex_unlock(&adev->grbm_idx_mutex); in gfx_v9_0_get_cu_info()
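Inside the SE/SH double loop (run under grbm_idx_mutex with the target SE/SH selected), the per-SH bitmap is walked one CU at a time to count active CUs and accumulate the always-on mask. A sketch of that inner loop, reconstructed around the lines visible above; the ao_bitmap accumulation is an assumption:

u32 mask = 1, bitmap, ao_bitmap = 0, counter = 0;
int k;

bitmap = gfx_v9_0_get_cu_active_bitmap(adev);
for (k = 0; k < adev->gfx.config.max_cu_per_sh; k++) {
	if (bitmap & mask) {
		if (counter < adev->gfx.config.max_cu_per_sh)
			ao_bitmap |= mask;
		counter++;
	}
	mask <<= 1;
}
/* counter feeds the running active-CU total; ao_bitmap records
 * which CUs in this SH are treated as always-on. */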