
Searched refs: num_vcn_inst (Results 1 – 25 of 25), sorted by relevance

/linux/drivers/gpu/drm/amd/amdgpu/
vcn_v5_0_1.c
63 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) in vcn_v5_0_1_early_init()
71 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v5_0_1_early_init()
121 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v5_0_1_sw_init()
184 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v5_0_1_sw_fini()
198 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v5_0_1_sw_fini()
204 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v5_0_1_sw_fini()
235 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v5_0_1_hw_init()
245 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v5_0_1_hw_init()
279 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v5_0_1_hw_fini()
309 for (i = 0; i < adev->vcn.num_vcn_inst; in vcn_v5_0_1_suspend()
[all...]
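
Nearly every hit in this listing has the same shape: a for loop bounded by adev->vcn.num_vcn_inst, the count of populated VCN instances (a uint8_t member, per the amdgpu_vcn.h entry further down). A minimal, self-contained sketch of that pattern follows; vcn_state, vcn_inst_state and MAX_VCN_INSTANCES are illustrative stand-ins, not the driver's real definitions.

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_VCN_INSTANCES 4   /* illustrative bound, not the real kernel macro value */

    struct vcn_inst_state {
        int configured;
    };

    struct vcn_state {
        uint8_t num_vcn_inst;     /* count of populated instances, as in amdgpu_vcn.h */
        struct vcn_inst_state inst[MAX_VCN_INSTANCES];
    };

    int main(void)
    {
        struct vcn_state vcn = { .num_vcn_inst = 2 };

        /* The loop shape shared by the early_init/sw_init/hw_init/hw_fini hits above:
         * touch each populated instance exactly once, never index past the count. */
        for (unsigned int i = 0; i < vcn.num_vcn_inst; ++i) {
            vcn.inst[i].configured = 1;
            printf("VCN instance %u initialised\n", i);
        }
        return 0;
    }
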
vcn_v4_0_3.c
118 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) in vcn_v4_0_3_early_init()
126 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v4_0_3_early_init()
176 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_3_sw_init()
235 ptr = kcalloc(adev->vcn.num_vcn_inst * reg_count, sizeof(uint32_t), GFP_KERNEL); in vcn_v4_0_3_sw_init()
263 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_3_sw_fini()
276 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_3_sw_fini()
284 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_3_sw_fini()
339 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v4_0_3_hw_init()
352 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v4_0_3_hw_init()
385 for (i = 0; i < adev->vcn.num_vcn_inst; in vcn_v4_0_3_hw_fini()
[all...]
vcn_v5_0_0.c
100 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) in vcn_v5_0_0_early_init()
107 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v5_0_0_early_init()
124 ptr = kcalloc(adev->vcn.num_vcn_inst * reg_count, sizeof(uint32_t), GFP_KERNEL); in vcn_v5_0_0_alloc_ip_dump()
146 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v5_0_0_sw_init()
226 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v5_0_0_sw_fini()
240 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v5_0_0_sw_fini()
248 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v5_0_0_sw_fini()
272 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v5_0_0_hw_init()
301 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v5_0_0_hw_fini()
337 for (i = 0; i < adev->vcn.num_vcn_inst; in vcn_v5_0_0_suspend()
[all...]
vcn_v4_0_5.c
121 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) in vcn_v4_0_5_early_init()
127 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v4_0_5_early_init()
154 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_5_sw_init()
237 ptr = kcalloc(adev->vcn.num_vcn_inst * reg_count, sizeof(uint32_t), GFP_KERNEL); in vcn_v4_0_5_sw_init()
260 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_5_sw_fini()
277 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_5_sw_fini()
305 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v4_0_5_hw_init()
334 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v4_0_5_hw_fini()
370 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_5_suspend()
391 for (i = 0; i < adev->vcn.num_vcn_inst; in vcn_v4_0_5_resume()
[all...]
vcn_v4_0.c
122 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v4_0_early_init()
130 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) in vcn_v4_0_early_init()
138 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v4_0_early_init()
189 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_sw_init()
259 ptr = kcalloc(adev->vcn.num_vcn_inst * reg_count, sizeof(uint32_t), GFP_KERNEL); in vcn_v4_0_sw_init()
287 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_sw_fini()
304 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_sw_fini()
312 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v4_0_sw_fini()
341 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v4_0_hw_init()
352 for (i = 0; i < adev->vcn.num_vcn_inst; in vcn_v4_0_hw_init()
[all...]
vcn_v2_5.c
121 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v2_5_idle_work_handler()
253 adev->vcn.num_vcn_inst = 2; in vcn_v2_5_early_init()
255 for (i = 0; i < adev->vcn.num_vcn_inst; i++) in vcn_v2_5_early_init()
261 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v2_5_early_init()
278 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v2_5_early_init()
304 for (j = 0; j < adev->vcn.num_vcn_inst; j++) { in vcn_v2_5_sw_init()
427 ptr = kcalloc(adev->vcn.num_vcn_inst * reg_count, sizeof(uint32_t), GFP_KERNEL); in vcn_v2_5_sw_init()
456 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v2_5_sw_fini()
471 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v2_5_sw_fini()
501 for (j = 0; j < adev->vcn.num_vcn_inst; in vcn_v2_5_hw_init()
[all...]
vcn_v3_0.c
132 adev->vcn.num_vcn_inst = VCN_INSTANCES_SIENNA_CICHLID; in vcn_v3_0_early_init()
134 for (i = 0; i < adev->vcn.num_vcn_inst; i++) in vcn_v3_0_early_init()
143 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v3_0_early_init()
156 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v3_0_early_init()
195 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v3_0_sw_init()
308 ptr = kcalloc(adev->vcn.num_vcn_inst * reg_count, sizeof(uint32_t), GFP_KERNEL); in vcn_v3_0_sw_init()
336 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v3_0_sw_fini()
354 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v3_0_sw_fini()
387 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_v3_0_hw_init()
418 for (i = 0; i < adev->vcn.num_vcn_inst; in vcn_v3_0_hw_init()
[all...]
amdgpu_vcn.c
339 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in amdgpu_vcn_save_vcpu_bo()
1270 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in amdgpu_vcn_ras_late_init()
1348 if (adev->vcn.num_vcn_inst) { in amdgpu_vcn_sysfs_reset_mask_init()
1360 if (adev->vcn.num_vcn_inst) in amdgpu_vcn_sysfs_reset_mask_fini()
1380 mask = (1ULL << adev->vcn.num_vcn_inst) - 1; in amdgpu_debugfs_vcn_sched_mask_set()
1383 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in amdgpu_debugfs_vcn_sched_mask_set()
1404 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in amdgpu_debugfs_vcn_sched_mask_get()
1425 if (adev->vcn.num_vcn_inst <= 1 || !adev->vcn.inst[0].using_unified_queue) in amdgpu_debugfs_vcn_sched_mask_init()
1447 for (i = 0; i < adev->vcn.num_vcn_inst; ++i) { in vcn_set_powergating_state()
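
Besides the usual per-instance loops, the amdgpu_vcn.c hits use num_vcn_inst to size bitmasks: the debugfs scheduling-mask setter builds (1ULL << num_vcn_inst) - 1 as the set of valid bits and then walks the instances. A rough userspace sketch of that check, assuming a hypothetical set_sched_mask() helper rather than the driver's actual function:

    #include <stdint.h>
    #include <stdio.h>

    /* Accept only bits that correspond to existing instances, mirroring the
     * mask = (1ULL << num_vcn_inst) - 1 computation shown above. */
    static int set_sched_mask(unsigned int num_vcn_inst, uint64_t requested)
    {
        uint64_t valid = (1ULL << num_vcn_inst) - 1;   /* assumes num_vcn_inst < 64 */

        if (!requested || requested > valid)
            return -1;   /* the driver would reject this with -EINVAL */

        for (unsigned int i = 0; i < num_vcn_inst; ++i)
            printf("instance %u: %s\n", i,
                   (requested & (1ULL << i)) ? "schedulable" : "masked off");
        return 0;
    }

    int main(void)
    {
        return set_sched_mask(4, 0x5);   /* 4 instances; enable 0 and 2 */
    }
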
amdgpu_discovery.c
1393 if (adev->vcn.num_vcn_inst < in amdgpu_discovery_reg_base_init()
1395 adev->vcn.inst[adev->vcn.num_vcn_inst].vcn_config = in amdgpu_discovery_reg_base_init()
1397 adev->vcn.num_vcn_inst++; in amdgpu_discovery_reg_base_init()
1404 adev->vcn.num_vcn_inst + 1, in amdgpu_discovery_reg_base_init()
1555 if (vcn_harvest_count == adev->vcn.num_vcn_inst) { in amdgpu_discovery_harvest_ip()
1750 /* num_vcn_inst is currently limited to AMDGPU_MAX_VCN_INSTANCES in amdgpu_discovery_get_vcn_info()
1755 if (adev->vcn.num_vcn_inst > VCN_INFO_TABLE_MAX_NUM_INSTANCES) { in amdgpu_discovery_get_vcn_info()
1770 /* num_vcn_inst is currently limited to AMDGPU_MAX_VCN_INSTANCES in amdgpu_discovery_get_vcn_info()
1773 for (v = 0; v < adev->vcn.num_vcn_inst; v++) { in amdgpu_discovery_get_vcn_info()
2619 adev->vcn.num_vcn_inst in amdgpu_discovery_set_ip_blocks()
[all...]
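
In amdgpu_discovery.c the counter is built up rather than consumed: each VCN IP found in the discovery table is appended to the instance array only while num_vcn_inst is still below a compile-time limit, and later code refuses counts above the VCN info table's maximum. A hedged sketch of that bounded-append shape, with MAX_VCN_INSTANCES and add_discovered_vcn() as illustrative names:

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_VCN_INSTANCES 4   /* stand-in for the driver's compile-time limit */

    struct vcn_instance {
        uint32_t vcn_config;
    };

    struct vcn_state {
        uint8_t num_vcn_inst;
        struct vcn_instance inst[MAX_VCN_INSTANCES];
    };

    /* Record one discovered instance; warn and drop anything past the array bound. */
    static void add_discovered_vcn(struct vcn_state *v, uint32_t config)
    {
        if (v->num_vcn_inst < MAX_VCN_INSTANCES) {
            v->inst[v->num_vcn_inst].vcn_config = config;
            v->num_vcn_inst++;
        } else {
            fprintf(stderr, "VCN instance %d exceeds limit %d, ignoring\n",
                    v->num_vcn_inst + 1, MAX_VCN_INSTANCES);
        }
    }

    int main(void)
    {
        struct vcn_state v = { 0 };

        for (uint32_t cfg = 0; cfg < 6; cfg++)   /* pretend discovery reported 6 instances */
            add_discovered_vcn(&v, cfg);
        printf("num_vcn_inst = %d\n", v.num_vcn_inst);
        return 0;
    }
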
aqua_vanjaram.c
176 num_vcn = adev->vcn.num_vcn_inst; in __aqua_vanjaram_get_xcp_ip_info()
286 max_res[AMDGPU_XCP_RES_DEC] = adev->vcn.num_vcn_inst; in aqua_vanjaram_get_xcp_res_info()
560 adev->vcn.num_vcn_inst = hweight32(adev->vcn.inst_mask); in aqua_vanjaram_init_soc_config()
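
aqua_vanjaram.c derives the count itself: num_vcn_inst = hweight32(inst_mask), i.e. a population count of a per-instance presence mask (the soc24.c/soc21.c/nv.c hits below compare the count against a popcount of harvest_config in the same spirit). A portable sketch of that derivation; popcount32() here stands in for the kernel's hweight32():

    #include <stdint.h>
    #include <stdio.h>

    /* Count set bits the portable way (Kernighan's trick). */
    static unsigned int popcount32(uint32_t x)
    {
        unsigned int n = 0;

        for (; x; x &= x - 1)   /* clear the lowest set bit each iteration */
            n++;
        return n;
    }

    int main(void)
    {
        uint32_t inst_mask = 0x0000000d;   /* instances 0, 2 and 3 present */
        uint8_t num_vcn_inst = (uint8_t)popcount32(inst_mask);

        printf("inst_mask=0x%08x -> num_vcn_inst=%d\n",
               (unsigned int)inst_mask, num_vcn_inst);
        return 0;
    }
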
vcn_v1_0.c
205 ptr = kcalloc(adev->vcn.num_vcn_inst * reg_count, sizeof(uint32_t), GFP_KERNEL); in vcn_v1_0_sw_init()
1980 drm_printf(p, "num_instances:%d\n", adev->vcn.num_vcn_inst); in vcn_v1_0_print_ip_state()
1981 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v1_0_print_ip_state()
2013 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v1_0_dump_ip_state()
amdgpu_vcn.h
343 uint8_t num_vcn_inst; member
vcn_v2_0.c
236 ptr = kcalloc(adev->vcn.num_vcn_inst * reg_count, sizeof(uint32_t), GFP_KERNEL); in vcn_v2_0_sw_init()
2108 drm_printf(p, "num_instances:%d\n", adev->vcn.num_vcn_inst); in vcn_v2_0_print_ip_state()
2109 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v2_0_print_ip_state()
2141 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in vcn_v2_0_dump_ip_state()
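
The recurring ptr = kcalloc(adev->vcn.num_vcn_inst * reg_count, sizeof(uint32_t), ...) hits, together with the num_instances printouts in the print/dump_ip_state helpers, point at a flat register-dump buffer sized by instance count. The sketch below shows that sizing with calloc() standing in for kcalloc(); the row-major indexing is an assumption for illustration, not something shown in the listing:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    int main(void)
    {
        unsigned int num_vcn_inst = 2, reg_count = 5;

        /* One flat, zeroed allocation covering every instance's register snapshot. */
        uint32_t *dump = calloc((size_t)num_vcn_inst * reg_count, sizeof(uint32_t));
        if (!dump)
            return 1;

        /* Hypothetical layout: instance i owns dump[i * reg_count .. i * reg_count + reg_count - 1]. */
        for (unsigned int i = 0; i < num_vcn_inst; i++)
            for (unsigned int r = 0; r < reg_count; r++)
                dump[i * reg_count + r] = 0xdead0000u | (i << 8) | r;

        printf("num_instances:%u\n", num_vcn_inst);
        for (unsigned int i = 0; i < num_vcn_inst; i++)
            printf("instance %u, first register: 0x%08x\n", i, dump[i * reg_count]);

        free(dump);
        return 0;
    }
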
soc24.c
76 if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config)) in soc24_query_video_codecs()
amdgpu_kms.c
492 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in amdgpu_hw_ip_info()
505 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in amdgpu_hw_ip_info()
719 count = adev->vcn.num_vcn_inst; in amdgpu_info_ioctl()
soc21.c
147 if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config)) in soc21_query_video_codecs()
nv.c
213 if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config)) in nv_query_video_codecs()
amdgpu_xcp.c
588 (adev->xcp_mgr->num_xcps > adev->vcn.num_vcn_inst)) in amdgpu_xcp_sched_list_update()
amdgpu_debugfs.c
2085 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in amdgpu_debugfs_init()
amdgpu_ras.c
383 mask = GENMASK(adev->vcn.num_vcn_inst - 1, 0); in amdgpu_ras_instance_mask_check()
/linux/drivers/gpu/drm/amd/pm/swsmu/
amdgpu_smu.c
834 for (i = 0; i < adev->vcn.num_vcn_inst; i++) in smu_set_default_dpm_table()
841 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in smu_set_default_dpm_table()
863 for (i = 0; i < adev->vcn.num_vcn_inst; i++) in smu_set_default_dpm_table()
1334 for (i = 0; i < adev->vcn.num_vcn_inst; i++) in smu_sw_init()
1896 for (i = 0; i < adev->vcn.num_vcn_inst; i++) in smu_hw_init()
2101 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in smu_hw_fini()
3150 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in smu_read_sensor()
/linux/drivers/gpu/drm/amd/pm/swsmu/smu14/
smu_v14_0.c
1376 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in smu_v14_0_set_performance_level()
1392 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in smu_v14_0_set_performance_level()
/linux/drivers/gpu/drm/amd/pm/swsmu/smu13/
smu_v13_0.c
1726 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in smu_v13_0_set_performance_level()
1742 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in smu_v13_0_set_performance_level()
/linux/drivers/gpu/drm/amd/pm/swsmu/smu11/
sienna_cichlid_ppt.c
1036 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in sienna_cichlid_set_default_dpm_table()
1059 for (i = 0; i < adev->vcn.num_vcn_inst; i++) { in sienna_cichlid_set_default_dpm_table()
/linux/drivers/gpu/drm/amd/pm/
amdgpu_pm.c
2019 gc_ver == IP_VERSION(11, 0, 3)) && adev->vcn.num_vcn_inst >= 2)) in pp_dpm_clk_default_attr_update()
2042 gc_ver == IP_VERSION(11, 0, 3)) && adev->vcn.num_vcn_inst >= 2)) in pp_dpm_clk_default_attr_update()