Lines matching references to adev in the amdgpu aqua_vanjaram code (each entry: source line number, matching code, enclosing function)
37 void aqua_vanjaram_doorbell_index_init(struct amdgpu_device *adev) in aqua_vanjaram_doorbell_index_init() argument
41 adev->doorbell_index.kiq = AMDGPU_DOORBELL_LAYOUT1_KIQ_START; in aqua_vanjaram_doorbell_index_init()
43 adev->doorbell_index.mec_ring0 = AMDGPU_DOORBELL_LAYOUT1_MEC_RING_START; in aqua_vanjaram_doorbell_index_init()
45 adev->doorbell_index.userqueue_start = AMDGPU_DOORBELL_LAYOUT1_USERQUEUE_START; in aqua_vanjaram_doorbell_index_init()
46 adev->doorbell_index.userqueue_end = AMDGPU_DOORBELL_LAYOUT1_USERQUEUE_END; in aqua_vanjaram_doorbell_index_init()
47 adev->doorbell_index.xcc_doorbell_range = AMDGPU_DOORBELL_LAYOUT1_XCC_RANGE; in aqua_vanjaram_doorbell_index_init()
49 adev->doorbell_index.sdma_doorbell_range = 20; in aqua_vanjaram_doorbell_index_init()
50 for (i = 0; i < adev->sdma.num_instances; i++) in aqua_vanjaram_doorbell_index_init()
51 adev->doorbell_index.sdma_engine[i] = in aqua_vanjaram_doorbell_index_init()
53 i * (adev->doorbell_index.sdma_doorbell_range >> 1); in aqua_vanjaram_doorbell_index_init()
55 adev->doorbell_index.ih = AMDGPU_DOORBELL_LAYOUT1_IH; in aqua_vanjaram_doorbell_index_init()
56 adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_DOORBELL_LAYOUT1_VCN_START; in aqua_vanjaram_doorbell_index_init()
58 adev->doorbell_index.first_non_cp = AMDGPU_DOORBELL_LAYOUT1_FIRST_NON_CP; in aqua_vanjaram_doorbell_index_init()
59 adev->doorbell_index.last_non_cp = AMDGPU_DOORBELL_LAYOUT1_LAST_NON_CP; in aqua_vanjaram_doorbell_index_init()
61 adev->doorbell_index.max_assignment = AMDGPU_DOORBELL_LAYOUT1_MAX_ASSIGNMENT << 1; in aqua_vanjaram_doorbell_index_init()
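Lines 49-53 above space each SDMA engine's doorbell base by half of the 20-entry sdma_doorbell_range. A minimal sketch of that arithmetic, assuming a hypothetical base constant (the real AMDGPU_DOORBELL_LAYOUT1_* start value sits on a line not included in this listing) and an illustrative instance count:

#include <stdio.h>

/* Both values below are assumptions for the sketch; only the range of 20
 * (line 49) and the i * (range >> 1) spacing (line 53) come from the listing. */
#define SDMA_DOORBELL_START	0x100	/* hypothetical base index */
#define SDMA_DOORBELL_RANGE	20	/* matches line 49 */

int main(void)
{
	int num_instances = 8;	/* illustrative SDMA instance count */
	int i;

	for (i = 0; i < num_instances; i++)
		/* each engine is offset by half the range, i.e. 10 slots */
		printf("sdma_engine[%d] doorbell index = %#x\n",
		       i, (unsigned)(SDMA_DOORBELL_START +
				     i * (SDMA_DOORBELL_RANGE >> 1)));
	return 0;
}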
64 static void aqua_vanjaram_set_xcp_id(struct amdgpu_device *adev, in aqua_vanjaram_set_xcp_id() argument
72 if (adev->xcp_mgr->mode == AMDGPU_XCP_MODE_NONE) in aqua_vanjaram_set_xcp_id()
89 if (adev->xcp_mgr->mode == AMDGPU_CPX_PARTITION_MODE) in aqua_vanjaram_set_xcp_id()
97 for (xcp_id = 0; xcp_id < adev->xcp_mgr->num_xcps; xcp_id++) { in aqua_vanjaram_set_xcp_id()
98 if (adev->xcp_mgr->xcp[xcp_id].ip[ip_blk].inst_mask & inst_mask) { in aqua_vanjaram_set_xcp_id()
106 struct amdgpu_device *adev, in aqua_vanjaram_xcp_gpu_sched_update() argument
112 num_gpu_sched = &adev->xcp_mgr->xcp[sel_xcp_id] in aqua_vanjaram_xcp_gpu_sched_update()
114 adev->xcp_mgr->xcp[sel_xcp_id].gpu_sched[ring->funcs->type][ring->hw_prio] in aqua_vanjaram_xcp_gpu_sched_update()
122 struct amdgpu_device *adev) in aqua_vanjaram_xcp_sched_list_update() argument
128 atomic_set(&adev->xcp_mgr->xcp[i].ref_cnt, 0); in aqua_vanjaram_xcp_sched_list_update()
129 memset(adev->xcp_mgr->xcp[i].gpu_sched, 0, sizeof(adev->xcp_mgr->xcp->gpu_sched)); in aqua_vanjaram_xcp_sched_list_update()
132 if (adev->xcp_mgr->mode == AMDGPU_XCP_MODE_NONE) in aqua_vanjaram_xcp_sched_list_update()
136 ring = adev->rings[i]; in aqua_vanjaram_xcp_sched_list_update()
140 aqua_vanjaram_xcp_gpu_sched_update(adev, ring, ring->xcp_id); in aqua_vanjaram_xcp_sched_list_update()
145 adev->xcp_mgr->mode == AMDGPU_CPX_PARTITION_MODE) in aqua_vanjaram_xcp_sched_list_update()
146 aqua_vanjaram_xcp_gpu_sched_update(adev, ring, ring->xcp_id + 1); in aqua_vanjaram_xcp_sched_list_update()
152 static int aqua_vanjaram_update_partition_sched_list(struct amdgpu_device *adev) in aqua_vanjaram_update_partition_sched_list() argument
156 for (i = 0; i < adev->num_rings; i++) { in aqua_vanjaram_update_partition_sched_list()
157 struct amdgpu_ring *ring = adev->rings[i]; in aqua_vanjaram_update_partition_sched_list()
161 aqua_vanjaram_set_xcp_id(adev, ring->xcc_id, ring); in aqua_vanjaram_update_partition_sched_list()
163 aqua_vanjaram_set_xcp_id(adev, ring->me, ring); in aqua_vanjaram_update_partition_sched_list()
166 return aqua_vanjaram_xcp_sched_list_update(adev); in aqua_vanjaram_update_partition_sched_list()
170 struct amdgpu_device *adev, in aqua_vanjaram_select_scheds() argument
184 for (i = 0; i < adev->xcp_mgr->num_xcps; i++) { in aqua_vanjaram_select_scheds()
187 total_ref_cnt = atomic_read(&adev->xcp_mgr->xcp[i].ref_cnt); in aqua_vanjaram_select_scheds()
196 if (adev->xcp_mgr->xcp[sel_xcp_id].gpu_sched[hw_ip][hw_prio].num_scheds) { in aqua_vanjaram_select_scheds()
197 *num_scheds = adev->xcp_mgr->xcp[fpriv->xcp_id].gpu_sched[hw_ip][hw_prio].num_scheds; in aqua_vanjaram_select_scheds()
198 *scheds = adev->xcp_mgr->xcp[fpriv->xcp_id].gpu_sched[hw_ip][hw_prio].sched; in aqua_vanjaram_select_scheds()
199 atomic_inc(&adev->xcp_mgr->xcp[sel_xcp_id].ref_cnt); in aqua_vanjaram_select_scheds()
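Lines 184-199 above show the reference-count reads and the final scheduler hand-out in aqua_vanjaram_select_scheds(). The comparison that actually picks sel_xcp_id falls on lines not included in this listing, so the least-referenced-partition choice in this sketch is an assumption about how those ref_cnt reads are used; the types and names are local to the sketch.

#include <stdio.h>

#define NUM_XCPS 4

struct xcp { unsigned int ref_cnt; };	/* stand-in for the driver's atomic_t */

/* Pick the partition with the fewest outstanding references and take one. */
static int select_xcp(struct xcp xcps[NUM_XCPS])
{
	unsigned int least_ref_cnt = ~0u;
	int i, sel_xcp_id = 0;

	for (i = 0; i < NUM_XCPS; i++) {
		unsigned int total_ref_cnt = xcps[i].ref_cnt;	/* line 187 */

		if (total_ref_cnt < least_ref_cnt) {	/* assumed criterion */
			sel_xcp_id = i;
			least_ref_cnt = total_ref_cnt;
		}
	}
	xcps[sel_xcp_id].ref_cnt++;	/* line 199: reference the chosen XCP */
	return sel_xcp_id;
}

int main(void)
{
	struct xcp xcps[NUM_XCPS] = { {3}, {1}, {2}, {1} };

	printf("selected xcp %d\n", select_xcp(xcps));	/* picks xcp 1 */
	return 0;
}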
209 static int8_t aqua_vanjaram_logical_to_dev_inst(struct amdgpu_device *adev, in aqua_vanjaram_logical_to_dev_inst() argument
220 dev_inst = adev->ip_map.dev_inst[block][inst]; in aqua_vanjaram_logical_to_dev_inst()
232 static uint32_t aqua_vanjaram_logical_to_dev_mask(struct amdgpu_device *adev, in aqua_vanjaram_logical_to_dev_mask() argument
241 dev_inst = aqua_vanjaram_logical_to_dev_inst(adev, block, log_inst); in aqua_vanjaram_logical_to_dev_mask()
249 static void aqua_vanjaram_populate_ip_map(struct amdgpu_device *adev, in aqua_vanjaram_populate_ip_map() argument
257 adev->ip_map.dev_inst[ip_block][l++] = i; in aqua_vanjaram_populate_ip_map()
261 adev->ip_map.dev_inst[ip_block][l] = -1; in aqua_vanjaram_populate_ip_map()
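The populate routine at lines 249-261 builds the logical-to-device instance table: walk the set bits of an instance mask, record each device instance at the next logical slot, and mark unused slots with -1. A self-contained sketch, with the array size and names local to the sketch:

#include <stdio.h>
#include <stdint.h>

#define HWIP_MAX_INSTANCE 8	/* assumption for the sketch */

static void populate_ip_map(int8_t dev_inst[HWIP_MAX_INSTANCE], uint32_t inst_mask)
{
	int i, l = 0;

	for (i = 0; i < HWIP_MAX_INSTANCE; i++)
		if (inst_mask & (1U << i))
			dev_inst[l++] = (int8_t)i;	/* logical l -> device i (line 257) */

	for (; l < HWIP_MAX_INSTANCE; l++)
		dev_inst[l] = -1;	/* unused entries marked invalid (line 261) */
}

int main(void)
{
	int8_t map[HWIP_MAX_INSTANCE];
	int i;

	populate_ip_map(map, 0x2d);	/* device instances 0, 2, 3, 5 present */
	for (i = 0; i < HWIP_MAX_INSTANCE && map[i] != -1; i++)
		printf("logical %d -> device %d\n", i, map[i]);
	return 0;
}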
264 void aqua_vanjaram_ip_map_init(struct amdgpu_device *adev) in aqua_vanjaram_ip_map_init() argument
267 { GC_HWIP, adev->gfx.xcc_mask }, in aqua_vanjaram_ip_map_init()
268 { SDMA0_HWIP, adev->sdma.sdma_mask }, in aqua_vanjaram_ip_map_init()
269 { VCN_HWIP, adev->vcn.inst_mask }, in aqua_vanjaram_ip_map_init()
274 aqua_vanjaram_populate_ip_map(adev, ip_map[i][0], ip_map[i][1]); in aqua_vanjaram_ip_map_init()
276 adev->ip_map.logical_to_dev_inst = aqua_vanjaram_logical_to_dev_inst; in aqua_vanjaram_ip_map_init()
277 adev->ip_map.logical_to_dev_mask = aqua_vanjaram_logical_to_dev_mask; in aqua_vanjaram_ip_map_init()
302 struct amdgpu_device *adev = xcp_mgr->adev; in aqua_vanjaram_query_partition_mode() local
304 if (adev->nbio.funcs->get_compute_partition_mode) in aqua_vanjaram_query_partition_mode()
305 mode = adev->nbio.funcs->get_compute_partition_mode(adev); in aqua_vanjaram_query_partition_mode()
314 num_xcc = NUM_XCC(xcp_mgr->adev->gfx.xcc_mask); in __aqua_vanjaram_get_xcc_per_xcp()
341 struct amdgpu_device *adev = xcp_mgr->adev; in __aqua_vanjaram_get_xcp_ip_info() local
345 num_sdma = adev->sdma.num_instances; in __aqua_vanjaram_get_xcp_ip_info()
346 num_vcn = adev->vcn.num_vcn_inst; in __aqua_vanjaram_get_xcp_ip_info()
373 num_xcc_xcp = adev->gfx.num_xcc_per_xcp; in __aqua_vanjaram_get_xcp_ip_info()
404 struct amdgpu_device *adev = xcp_mgr->adev; in __aqua_vanjaram_get_auto_mode() local
407 num_xcc = NUM_XCC(xcp_mgr->adev->gfx.xcc_mask); in __aqua_vanjaram_get_auto_mode()
409 if (adev->gmc.num_mem_partitions == 1) in __aqua_vanjaram_get_auto_mode()
412 if (adev->gmc.num_mem_partitions == num_xcc) in __aqua_vanjaram_get_auto_mode()
415 if (adev->gmc.num_mem_partitions == num_xcc / 2) in __aqua_vanjaram_get_auto_mode()
416 return (adev->flags & AMD_IS_APU) ? AMDGPU_TPX_PARTITION_MODE : in __aqua_vanjaram_get_auto_mode()
419 if (adev->gmc.num_mem_partitions == 2 && !(adev->flags & AMD_IS_APU)) in __aqua_vanjaram_get_auto_mode()
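Lines 404-419 derive the default (auto) compute partition mode from how the number of memory partitions relates to the XCC count. Only the TPX return at line 416 is visible here; the mode returned by each other branch is an assumption patterned on the surrounding structure, and the enum below is local to the sketch rather than the driver's amdgpu_gfx_partition values.

#include <stdbool.h>
#include <stdio.h>

enum partition_mode { SPX, DPX, TPX, QPX, CPX, UNKNOWN };

static enum partition_mode get_auto_mode(int num_mem_partitions, int num_xcc,
					 bool is_apu)
{
	if (num_mem_partitions == 1)
		return SPX;			/* single memory partition (line 409) */
	if (num_mem_partitions == num_xcc)
		return CPX;			/* one XCC per partition (line 412) */
	if (num_mem_partitions == num_xcc / 2)
		return is_apu ? TPX : QPX;	/* lines 415-416 */
	if (num_mem_partitions == 2 && !is_apu)
		return DPX;			/* line 419 */
	return UNKNOWN;
}

int main(void)
{
	static const char * const names[] = { "SPX", "DPX", "TPX", "QPX", "CPX", "UNKNOWN" };

	printf("8 XCCs, 4 memory partitions, dGPU -> %s\n",
	       names[get_auto_mode(4, 8, false)]);	/* prints QPX */
	return 0;
}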
428 struct amdgpu_device *adev = xcp_mgr->adev; in __aqua_vanjaram_is_valid_mode() local
431 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in __aqua_vanjaram_is_valid_mode()
434 return adev->gmc.num_mem_partitions == 1 && num_xcc > 0; in __aqua_vanjaram_is_valid_mode()
436 return adev->gmc.num_mem_partitions != 8 && (num_xcc % 4) == 0; in __aqua_vanjaram_is_valid_mode()
438 return (adev->gmc.num_mem_partitions == 1 || in __aqua_vanjaram_is_valid_mode()
439 adev->gmc.num_mem_partitions == 3) && in __aqua_vanjaram_is_valid_mode()
443 return (adev->gmc.num_mem_partitions == 1 || in __aqua_vanjaram_is_valid_mode()
444 adev->gmc.num_mem_partitions == 4) && in __aqua_vanjaram_is_valid_mode()
448 (adev->gmc.num_mem_partitions == 1 || adev->gmc.num_mem_partitions == 4) && in __aqua_vanjaram_is_valid_mode()
449 (num_xcc % adev->gmc.num_mem_partitions) == 0); in __aqua_vanjaram_is_valid_mode()
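The validity checks at lines 428-449 pair each requested partition mode with constraints on the memory partition count and the XCC count. The switch labels and a few trailing clauses (the XCC-divisibility checks for the three- and four-way splits, and the num_xcc > 1 guard before line 448) sit on lines not included in this listing, so the mode-to-check pairing below is an assumption; the conditions themselves mirror the lines shown above.

#include <stdbool.h>
#include <stdio.h>

enum partition_mode { SPX, DPX, TPX, QPX, CPX };

static bool is_valid_mode(enum partition_mode mode, int num_mem_partitions,
			  int num_xcc)
{
	switch (mode) {
	case SPX:
		return num_mem_partitions == 1 && num_xcc > 0;		/* line 434 */
	case DPX:
		return num_mem_partitions != 8 && (num_xcc % 4) == 0;	/* line 436 */
	case TPX:
		return (num_mem_partitions == 1 || num_mem_partitions == 3) &&
		       (num_xcc % 3) == 0;	/* lines 438-439; divisibility assumed */
	case QPX:
		return (num_mem_partitions == 1 || num_mem_partitions == 4) &&
		       (num_xcc / 4) >= 2;	/* lines 443-444; size check assumed */
	case CPX:
		return num_xcc > 1 &&		/* guard assumed */
		       (num_mem_partitions == 1 || num_mem_partitions == 4) &&
		       (num_xcc % num_mem_partitions) == 0;	/* lines 448-449 */
	}
	return false;
}

int main(void)
{
	printf("DPX on 8 XCCs / 2 memory partitions: %s\n",
	       is_valid_mode(DPX, 2, 8) ? "valid" : "invalid");
	return 0;
}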
464 amdgpu_amdkfd_device_fini_sw(xcp_mgr->adev); in __aqua_vanjaram_pre_partition_switch()
474 amdgpu_amdkfd_device_probe(xcp_mgr->adev); in __aqua_vanjaram_post_partition_switch()
475 amdgpu_amdkfd_device_init(xcp_mgr->adev); in __aqua_vanjaram_post_partition_switch()
477 if (!xcp_mgr->adev->kfd.init_complete) in __aqua_vanjaram_post_partition_switch()
488 struct amdgpu_device *adev; in aqua_vanjaram_switch_partition_mode() local
491 adev = xcp_mgr->adev; in aqua_vanjaram_switch_partition_mode()
492 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in aqua_vanjaram_switch_partition_mode()
497 dev_err(adev->dev, in aqua_vanjaram_switch_partition_mode()
499 amdgpu_gfx_compute_mode_desc(mode), adev->gmc.num_mem_partitions); in aqua_vanjaram_switch_partition_mode()
503 if (adev->kfd.init_complete) in aqua_vanjaram_switch_partition_mode()
507 ret = amdgpu_amdkfd_check_and_lock_kfd(adev); in aqua_vanjaram_switch_partition_mode()
517 if (adev->gfx.funcs->switch_partition_mode) in aqua_vanjaram_switch_partition_mode()
518 adev->gfx.funcs->switch_partition_mode(xcp_mgr->adev, in aqua_vanjaram_switch_partition_mode()
528 amdgpu_amdkfd_unlock_kfd(adev); in aqua_vanjaram_switch_partition_mode()
533 static int __aqua_vanjaram_get_xcp_mem_id(struct amdgpu_device *adev, in __aqua_vanjaram_get_xcp_mem_id() argument
537 *mem_id = xcc_id / adev->gfx.num_xcc_per_xcp; in __aqua_vanjaram_get_xcp_mem_id()
538 *mem_id /= adev->xcp_mgr->num_xcp_per_mem_partition; in __aqua_vanjaram_get_xcp_mem_id()
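Lines 537-538 derive a memory partition id from an XCC id with two integer divisions: first map the XCC to its XCP, then map the XCP to its memory partition. A short worked sketch, with both divisors chosen as one illustrative configuration rather than queried from hardware:

#include <stdio.h>

int main(void)
{
	int num_xcc_per_xcp = 2;		/* assumption: e.g. a four-way split of 8 XCCs */
	int num_xcp_per_mem_partition = 1;	/* assumption */
	int xcc_id;

	for (xcc_id = 0; xcc_id < 8; xcc_id++) {
		int mem_id = xcc_id / num_xcc_per_xcp;	/* line 537 */

		mem_id /= num_xcp_per_mem_partition;	/* line 538 */
		printf("xcc %d -> mem partition %d\n", xcc_id, mem_id);
	}
	return 0;
}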
547 struct amdgpu_device *adev; in aqua_vanjaram_get_xcp_mem_id() local
551 adev = xcp_mgr->adev; in aqua_vanjaram_get_xcp_mem_id()
559 if (adev->gmc.num_mem_partitions == 1) { in aqua_vanjaram_get_xcp_mem_id()
570 if (!adev->gmc.is_app_apu) in aqua_vanjaram_get_xcp_mem_id()
571 return __aqua_vanjaram_get_xcp_mem_id(adev, xcc_id, mem_id); in aqua_vanjaram_get_xcp_mem_id()
573 r = amdgpu_acpi_get_mem_info(adev, xcc_id, &numa_info); in aqua_vanjaram_get_xcp_mem_id()
579 for (i = 0; i < adev->gmc.num_mem_partitions; ++i) { in aqua_vanjaram_get_xcp_mem_id()
580 if (adev->gmc.mem_partitions[i].numa.node == numa_info.nid) { in aqua_vanjaram_get_xcp_mem_id()
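On the app-APU path (lines 570-580) the partition id is not computed from the XCC id; instead the NUMA node backing the XCC is queried via amdgpu_acpi_get_mem_info() (line 573) and matched against the driver's memory partition table. A sketch of that table scan, with the structures reduced to what the comparison on line 580 needs:

#include <stdio.h>

struct mem_partition { int numa_node; };	/* stand-in for the driver's entry */

/* Return the index of the partition living on the given NUMA node, or -1. */
static int find_mem_id(const struct mem_partition *parts, int num_parts, int nid)
{
	int i;

	for (i = 0; i < num_parts; i++)
		if (parts[i].numa_node == nid)	/* line 580 */
			return i;
	return -1;
}

int main(void)
{
	struct mem_partition parts[] = { {0}, {1}, {2}, {3} };	/* illustrative */

	printf("NUMA node 2 -> mem_id %d\n", find_mem_id(parts, 4, 2));
	return 0;
}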
609 static int aqua_vanjaram_xcp_mgr_init(struct amdgpu_device *adev) in aqua_vanjaram_xcp_mgr_init() argument
613 ret = amdgpu_xcp_mgr_init(adev, AMDGPU_UNKNOWN_COMPUTE_PARTITION_MODE, 1, in aqua_vanjaram_xcp_mgr_init()
623 int aqua_vanjaram_init_soc_config(struct amdgpu_device *adev) in aqua_vanjaram_init_soc_config() argument
625 u32 mask, inst_mask = adev->sdma.sdma_mask; in aqua_vanjaram_init_soc_config()
629 adev->sdma.num_inst_per_aid = 4; in aqua_vanjaram_init_soc_config()
630 adev->sdma.num_instances = NUM_SDMA(adev->sdma.sdma_mask); in aqua_vanjaram_init_soc_config()
632 adev->aid_mask = i = 1; in aqua_vanjaram_init_soc_config()
633 inst_mask >>= adev->sdma.num_inst_per_aid; in aqua_vanjaram_init_soc_config()
635 for (mask = (1 << adev->sdma.num_inst_per_aid) - 1; inst_mask; in aqua_vanjaram_init_soc_config()
636 inst_mask >>= adev->sdma.num_inst_per_aid, ++i) { in aqua_vanjaram_init_soc_config()
638 adev->aid_mask |= (1 << i); in aqua_vanjaram_init_soc_config()
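Lines 625-638 derive the AID (die) presence mask from the SDMA instance mask: AID 0 is always marked present (line 632), and each further group of num_inst_per_aid instances found in sdma_mask marks another AID. The test applied to each group sits on an elided line, so treating "all four instances of the group present" as the condition is an assumption in this sketch:

#include <stdio.h>

int main(void)
{
	unsigned int sdma_mask = 0x0f0f;	/* assumption: AIDs 0 and 2 fully populated */
	int num_inst_per_aid = 4;		/* line 629 */
	unsigned int inst_mask = sdma_mask;
	unsigned int group = (1U << num_inst_per_aid) - 1;	/* line 635 */
	unsigned int aid_mask = 1;		/* line 632: AID 0 always present */
	int i = 1;

	/* shift the next group of SDMA bits down and test it (lines 633-638) */
	for (inst_mask >>= num_inst_per_aid; inst_mask;
	     inst_mask >>= num_inst_per_aid, ++i)
		if ((inst_mask & group) == group)	/* assumed per-group test */
			aid_mask |= (1U << i);

	printf("sdma_mask %#06x -> aid_mask %#x\n", sdma_mask, aid_mask);
	return 0;
}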
644 adev->vcn.harvest_config = 0; in aqua_vanjaram_init_soc_config()
645 adev->vcn.num_inst_per_aid = 1; in aqua_vanjaram_init_soc_config()
646 adev->vcn.num_vcn_inst = hweight32(adev->vcn.inst_mask); in aqua_vanjaram_init_soc_config()
647 adev->jpeg.harvest_config = 0; in aqua_vanjaram_init_soc_config()
648 adev->jpeg.num_inst_per_aid = 1; in aqua_vanjaram_init_soc_config()
649 adev->jpeg.num_jpeg_inst = hweight32(adev->jpeg.inst_mask); in aqua_vanjaram_init_soc_config()
651 ret = aqua_vanjaram_xcp_mgr_init(adev); in aqua_vanjaram_init_soc_config()
655 aqua_vanjaram_ip_map_init(adev); in aqua_vanjaram_init_soc_config()