Lines matching 0x00000100 in drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c

82 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
83 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
84 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
85 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
86 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
87 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
88 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
89 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
90 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
91 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
96 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
97 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
102 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
103 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
104 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
105 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
106 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
107 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
108 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
109 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
114 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
115 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
120 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
121 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
122 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
123 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
124 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
125 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
126 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
127 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
128 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
129 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
134 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
135 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
136 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
137 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
138 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
139 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
140 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
141 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
142 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
143 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
148 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
149 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
150 mmSDMA0_GFX_IB_CNTL, 0x00000100, 0x00000100,
151 mmSDMA0_POWER_CNTL, 0x00000800, 0x0003c800,
152 mmSDMA0_RLC0_IB_CNTL, 0x00000100, 0x00000100,
153 mmSDMA0_RLC1_IB_CNTL, 0x00000100, 0x00000100,
154 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
155 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
156 mmSDMA1_GFX_IB_CNTL, 0x00000100, 0x00000100,
157 mmSDMA1_POWER_CNTL, 0x00000800, 0x0003c800,
158 mmSDMA1_RLC0_IB_CNTL, 0x00000100, 0x00000100,
159 mmSDMA1_RLC1_IB_CNTL, 0x00000100, 0x00000100,
164 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
165 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
170 mmSDMA0_GFX_IB_CNTL, 0x00000100, 0x00000100,
171 mmSDMA0_POWER_CNTL, 0x00000800, 0x0003c800,
172 mmSDMA0_RLC0_IB_CNTL, 0x00000100, 0x00000100,
173 mmSDMA0_RLC1_IB_CNTL, 0x00000100, 0x00000100,
178 mmSDMA0_CLK_CTRL, 0xffffffff, 0x00000100,
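
The blocks above (file lines 82-178) are entries from the per-ASIC golden-register tables at the top of sdma_v3_0.c. Each entry is a (register, AND-mask, OR-value) triple: the driver reads the register, clears the masked bits, and ORs in the value, so mmSDMA0_CLK_CTRL with mask 0xff000fff and value 0x00000000 force-clears those bits, and the 0x00000100 values this search matched each set a single control bit. The short CLK_CTRL-only pairs (lines 96-97, 114-115, 164-165) are, by all appearances, the clock-gating init tables applied alongside the golden settings. A minimal sketch of how such a table is applied, modeled on amdgpu's register-sequence helper (the body and the full-mask special case are reconstructed from memory, not quoted from this file):

/* Sketch: walk a flat array of (reg, and_mask, or_val) triples.
 * Assumes RREG32/WREG32 as in amdgpu; an all-ones AND-mask means
 * "overwrite the whole register" instead of read-modify-write. */
static void program_register_sequence(struct amdgpu_device *adev,
				      const u32 *regs, u32 array_size)
{
	u32 tmp, reg, and_mask, or_mask;
	u32 i;

	for (i = 0; i < array_size; i += 3) {
		reg      = regs[i + 0];
		and_mask = regs[i + 1];
		or_mask  = regs[i + 2];

		if (and_mask == 0xffffffff) {
			tmp = or_mask;			/* full overwrite */
		} else {
			tmp = RREG32(reg);		/* read-modify-write */
			tmp &= ~and_mask;
			tmp |= (or_mask & and_mask);
		}
		WREG32(reg, tmp);
	}
}
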
253 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_free_microcode()
266 * Returns 0 on success, error on failure.
272 int err = 0, i; in sdma_v3_0_init_microcode()
307 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
308 if (i == 0) in sdma_v3_0_init_microcode()
335 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
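
File lines 253-335 come from the microcode teardown/setup paths, which loop over adev->sdma.num_instances and fetch one firmware blob per engine; the i == 0 test at line 308 picks the instance-specific file name. A sketch of that request loop (the "_sdma.bin"/"_sdma1.bin" naming follows the usual VI convention, and the error label is hypothetical):

/* Sketch: one firmware image per SDMA instance. */
for (i = 0; i < adev->sdma.num_instances; i++) {
	if (i == 0)
		snprintf(fw_name, sizeof(fw_name),
			 "amdgpu/%s_sdma.bin", chip_name);
	else
		snprintf(fw_name, sizeof(fw_name),
			 "amdgpu/%s_sdma1.bin", chip_name);
	err = request_firmware(&adev->sdma.instance[i].fw,
			       fw_name, adev->dev);
	if (err)
		goto out;	/* hypothetical cleanup label */
}
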
408 for (i = 0; i < count; i++) in sdma_v3_0_ring_insert_nop()
409 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_insert_nop()
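
File lines 408-409 show the burst-NOP trick used when padding the ring: if the engine supports it, the first NOP packet encodes count-1 so a single packet header covers the whole pad. The full loop, sketched (SDMA_PKT_NOP_HEADER_COUNT comes from the SDMA packet headers; the helper that looks up the instance is an assumption):

/* Sketch: pad the ring with `count` NOP dwords. */
static void sdma_ring_insert_nop(struct amdgpu_ring *ring, uint32_t count)
{
	struct amdgpu_sdma_instance *sdma =
		amdgpu_sdma_get_instance_from_ring(ring);
	int i;

	for (i = 0; i < count; i++)
		if (sdma && sdma->burst_nop && (i == 0))
			/* one header that swallows the remaining NOPs */
			amdgpu_ring_write(ring, ring->funcs->nop |
				SDMA_PKT_NOP_HEADER_COUNT(count - 1));
		else
			amdgpu_ring_write(ring, ring->funcs->nop);
}

The same pattern reappears at file lines 1024-1025 for IB padding.
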
435 SDMA_PKT_INDIRECT_HEADER_VMID(vmid & 0xf)); in sdma_v3_0_ring_emit_ib()
437 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v3_0_ring_emit_ib()
440 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_ib()
441 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_ib()
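
File lines 435-441 are the INDIRECT_BUFFER packet body: the VMID is a 4-bit header field (hence the & 0xf), the IB base must be 32-byte aligned (hence & 0xffffffe0 on the low dword), and the two trailing zero writes fill dwords the gfx queue does not use. In packet order (a sketch; dword roles are inferred from the VI SDMA packet format where not visible above):

/* Sketch: SDMA v3 INDIRECT_BUFFER packet, one dword per write. */
amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_INDIRECT) |
		  SDMA_PKT_INDIRECT_HEADER_VMID(vmid & 0xf));
amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); /* base lo */
amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));              /* base hi */
amdgpu_ring_write(ring, ib->length_dw);                            /* size, dwords */
amdgpu_ring_write(ring, 0);                                        /* unused */
amdgpu_ring_write(ring, 0);                                        /* unused */
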
454 u32 ref_and_mask = 0; in sdma_v3_0_ring_emit_hdp_flush()
456 if (ring->me == 0) in sdma_v3_0_ring_emit_hdp_flush()
468 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_hdp_flush()
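
File lines 454-468 implement the HDP flush as a POLL_REGMEM packet: ring->me selects which per-engine done bit in GPU_HDP_FLUSH_DONE to wait on, and RETRY_COUNT(0xfff) makes the engine keep re-polling instead of failing fast. Compressed into one sketch (the FUNC(3) "equal" encoding and the mask macro names are assumptions from the VI headers):

/* Sketch: request an HDP flush, then poll until our bit is set. */
u32 ref_and_mask = (ring->me == 0) ?
	GPU_HDP_FLUSH_DONE__SDMA0_MASK : GPU_HDP_FLUSH_DONE__SDMA1_MASK;

amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
		  SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(1) |
		  SDMA_PKT_POLL_REGMEM_HEADER_FUNC(3)); /* 3 = equal */
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_DONE << 2);
amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_REQ << 2);
amdgpu_ring_write(ring, ref_and_mask);	/* reference */
amdgpu_ring_write(ring, ref_and_mask);	/* mask */
amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
		  SDMA_PKT_POLL_REGMEM_DW5_INTERVAL(10));

The pipeline-sync and VM-flush paths (file lines 1048-1081) reuse the same POLL_REGMEM packet, the former waiting for a fence value in memory, the latter with FUNC(0) ("always"), apparently to order a register read behind the invalidate request.
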
503 amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0)); in sdma_v3_0_ring_emit_fence()
515 struct amdgpu_ring *sdma0 = &adev->sdma.instance[0].ring; in sdma_v3_0_gfx_stop()
524 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_stop()
526 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0); in sdma_v3_0_gfx_stop()
529 ib_cntl = REG_SET_FIELD(ib_cntl, SDMA0_GFX_IB_CNTL, IB_ENABLE, 0); in sdma_v3_0_gfx_stop()
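
File lines 524-529 stop the gfx queues with the REG_SET_FIELD read-modify-write pattern: read the control register, clear the enable field, write it back. Expanded by hand, REG_SET_FIELD(v, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0) is plain masked insertion (mask/shift macro names per the VI register headers):

/* Sketch: what the REG_SET_FIELD() calls above boil down to. */
rb_cntl = RREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i]);
rb_cntl &= ~SDMA0_GFX_RB_CNTL__RB_ENABLE_MASK;		/* clear field */
rb_cntl |= (0 << SDMA0_GFX_RB_CNTL__RB_ENABLE__SHIFT) &	/* insert 0 */
	   SDMA0_GFX_RB_CNTL__RB_ENABLE_MASK;
WREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i], rb_cntl);
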
556 u32 f32_cntl, phase_quantum = 0; in sdma_v3_0_ctx_switch_enable()
561 unsigned unit = 0; in sdma_v3_0_ctx_switch_enable()
583 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_ctx_switch_enable()
598 AUTO_CTXSW_ENABLE, 0); in sdma_v3_0_ctx_switch_enable()
625 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_enable()
628 f32_cntl = REG_SET_FIELD(f32_cntl, SDMA0_F32_CNTL, HALT, 0); in sdma_v3_0_enable()
641 * Returns 0 for success, error for failure.
653 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
659 for (j = 0; j < 16; j++) { in sdma_v3_0_gfx_resume()
660 vi_srbm_select(adev, 0, 0, 0, j); in sdma_v3_0_gfx_resume()
662 WREG32(mmSDMA0_GFX_VIRTUAL_ADDR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
663 WREG32(mmSDMA0_GFX_APE1_CNTL + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
665 vi_srbm_select(adev, 0, 0, 0, 0); in sdma_v3_0_gfx_resume()
669 adev->gfx.config.gb_addr_config & 0x70); in sdma_v3_0_gfx_resume()
671 WREG32(mmSDMA0_SEM_WAIT_FAIL_TIMER_CNTL + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
685 ring->wptr = 0; in sdma_v3_0_gfx_resume()
686 WREG32(mmSDMA0_GFX_RB_RPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
688 WREG32(mmSDMA0_GFX_IB_RPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
689 WREG32(mmSDMA0_GFX_IB_OFFSET + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
693 upper_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
695 lower_32_bits(adev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
709 doorbell = REG_SET_FIELD(doorbell, SDMA0_GFX_DOORBELL, ENABLE, 0); in sdma_v3_0_gfx_resume()
723 WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
730 ENABLE, 0); in sdma_v3_0_gfx_resume()
754 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
764 return 0; in sdma_v3_0_gfx_resume()
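
File lines 653-764 walk the gfx-queue bring-up in a fixed order: select each instance through SRBM and zero its virtual-address/APE1 state, program the ring-buffer base and size, reset rptr/wptr/IB offsets to 0, point the rptr write-back at the adev->wb slot, configure the doorbell, and only then re-enable the ring. The size programming is worth spelling out, since RB_SIZE is a log2 of the dword count (a sketch using the encoding common across amdgpu ring code):

/* Sketch: ring_size is in bytes; RB_SIZE wants log2(dwords). */
rb_cntl = RREG32(mmSDMA0_GFX_RB_CNTL + sdma_offsets[i]);
rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_SIZE,
			order_base_2(ring->ring_size / 4));
WREG32(mmSDMA0_GFX_RB_BASE + sdma_offsets[i], ring->gpu_addr >> 8);
WREG32(mmSDMA0_GFX_RB_BASE_HI + sdma_offsets[i], ring->gpu_addr >> 40);
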
773 * Returns 0 for success, error for failure.
778 return 0; in sdma_v3_0_rlc_resume()
787 * Returns 0 for success, error for failure.
805 return 0; in sdma_v3_0_start()
815 * Returns 0 for success, error for failure.
831 tmp = 0xCAFEDEAD; in sdma_v3_0_ring_test_ring()
843 amdgpu_ring_write(ring, 0xDEADBEEF); in sdma_v3_0_ring_test_ring()
846 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_ring_test_ring()
848 if (tmp == 0xDEADBEEF) in sdma_v3_0_ring_test_ring()
867 * Returns 0 on success, error on failure.
875 u32 tmp = 0; in sdma_v3_0_ring_test_ib()
884 tmp = 0xCAFEDEAD; in sdma_v3_0_ring_test_ib()
886 memset(&ib, 0, sizeof(ib)); in sdma_v3_0_ring_test_ib()
892 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v3_0_ring_test_ib()
897 ib.ptr[4] = 0xDEADBEEF; in sdma_v3_0_ring_test_ib()
908 if (r == 0) { in sdma_v3_0_ring_test_ib()
911 } else if (r < 0) { in sdma_v3_0_ring_test_ib()
915 if (tmp == 0xDEADBEEF) in sdma_v3_0_ring_test_ib()
916 r = 0; in sdma_v3_0_ring_test_ib()
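
File lines 831-916 are the write-back smoke test used for both the raw ring and a one-packet IB: seed a write-back word with 0xCAFEDEAD, submit a single-dword SDMA WRITE_LINEAR of 0xDEADBEEF to it, and poll until the value flips or the timeout expires. The submitted packet, sketched (macro names beyond those visible above are assumptions):

/* Sketch: the one-dword write at the heart of the ring/IB tests. */
adev->wb.wb[index] = cpu_to_le32(0xCAFEDEAD);	/* poison first */

amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
		  SDMA_PKT_HEADER_SUB_OP(SDMA_SUBOP_WRITE_LINEAR));
amdgpu_ring_write(ring, lower_32_bits(gpu_addr));
amdgpu_ring_write(ring, upper_32_bits(gpu_addr));
amdgpu_ring_write(ring, SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1));
amdgpu_ring_write(ring, 0xDEADBEEF);		/* value that must land */
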
946 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_vm_copy_pte()
975 for (; ndw > 0; ndw -= 2) { in sdma_v3_0_vm_write_pte()
1007 ib->ptr[ib->length_dw++] = 0; in sdma_v3_0_vm_set_pte_pde()
1024 for (i = 0; i < pad_count; i++) in sdma_v3_0_ring_pad_ib()
1025 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_pad_ib()
1048 SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(0) | in sdma_v3_0_ring_emit_pipeline_sync()
1051 amdgpu_ring_write(ring, addr & 0xfffffffc); in sdma_v3_0_ring_emit_pipeline_sync()
1052 amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff); in sdma_v3_0_ring_emit_pipeline_sync()
1054 amdgpu_ring_write(ring, 0xffffffff); /* mask */ in sdma_v3_0_ring_emit_pipeline_sync()
1055 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_pipeline_sync()
1075 SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(0) | in sdma_v3_0_ring_emit_vm_flush()
1076 SDMA_PKT_POLL_REGMEM_HEADER_FUNC(0)); /* always */ in sdma_v3_0_ring_emit_vm_flush()
1078 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_vm_flush()
1079 amdgpu_ring_write(ring, 0); /* reference */ in sdma_v3_0_ring_emit_vm_flush()
1080 amdgpu_ring_write(ring, 0); /* mask */ in sdma_v3_0_ring_emit_vm_flush()
1081 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_vm_flush()
1089 SDMA_PKT_SRBM_WRITE_HEADER_BYTE_EN(0xf)); in sdma_v3_0_ring_emit_wreg()
1112 return 0; in sdma_v3_0_early_init()
1145 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_sw_init()
1158 (i == 0) ? in sdma_v3_0_sw_init()
1174 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_sw_fini()
1178 return 0; in sdma_v3_0_sw_fini()
1202 return 0; in sdma_v3_0_hw_fini()
1237 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_wait_for_idle()
1242 return 0; in sdma_v3_0_wait_for_idle()
1251 u32 srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1264 adev->sdma.srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1272 u32 srbm_soft_reset = 0; in sdma_v3_0_pre_soft_reset()
1275 return 0; in sdma_v3_0_pre_soft_reset()
1285 return 0; in sdma_v3_0_pre_soft_reset()
1291 u32 srbm_soft_reset = 0; in sdma_v3_0_post_soft_reset()
1294 return 0; in sdma_v3_0_post_soft_reset()
1304 return 0; in sdma_v3_0_post_soft_reset()
1310 u32 srbm_soft_reset = 0; in sdma_v3_0_soft_reset()
1314 return 0; in sdma_v3_0_soft_reset()
1321 dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in sdma_v3_0_soft_reset()
1335 return 0; in sdma_v3_0_soft_reset()
1350 sdma_cntl = REG_SET_FIELD(sdma_cntl, SDMA0_CNTL, TRAP_ENABLE, 0); in sdma_v3_0_set_trap_irq_state()
1366 sdma_cntl = REG_SET_FIELD(sdma_cntl, SDMA0_CNTL, TRAP_ENABLE, 0); in sdma_v3_0_set_trap_irq_state()
1381 return 0; in sdma_v3_0_set_trap_irq_state()
1390 instance_id = (entry->ring_id & 0x3) >> 0; in sdma_v3_0_process_trap_irq()
1391 queue_id = (entry->ring_id & 0xc) >> 2; in sdma_v3_0_process_trap_irq()
1394 case 0: in sdma_v3_0_process_trap_irq()
1396 case 0: in sdma_v3_0_process_trap_irq()
1397 amdgpu_fence_process(&adev->sdma.instance[0].ring); in sdma_v3_0_process_trap_irq()
1409 case 0: in sdma_v3_0_process_trap_irq()
1421 return 0; in sdma_v3_0_process_trap_irq()
1431 instance_id = (entry->ring_id & 0x3) >> 0; in sdma_v3_0_process_illegal_inst_irq()
1432 queue_id = (entry->ring_id & 0xc) >> 2; in sdma_v3_0_process_illegal_inst_irq()
1434 if (instance_id <= 1 && queue_id == 0) in sdma_v3_0_process_illegal_inst_irq()
1436 return 0; in sdma_v3_0_process_illegal_inst_irq()
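
File lines 1390-1434 decode entry->ring_id identically in both interrupt handlers: bits [1:0] carry the SDMA instance and bits [3:2] the queue, with queue 0 being the gfx ring and queues 1/2 the RLC rings. Restated compactly (the field layout is read straight off the masks above):

/* Sketch: IH cookie layout shared by the SDMA v3 IRQ handlers. */
u8 instance_id = entry->ring_id & 0x3;		/* 0 = SDMA0, 1 = SDMA1 */
u8 queue_id    = (entry->ring_id & 0xc) >> 2;	/* 0 = gfx, 1/2 = rlc0/1 */

if (queue_id == 0)	/* only the gfx queue signals fences here */
	amdgpu_fence_process(&adev->sdma.instance[instance_id].ring);
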
1447 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1461 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1486 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1494 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1510 return 0; in sdma_v3_0_set_clockgating_state()
1524 return 0; in sdma_v3_0_set_clockgating_state()
1530 return 0; in sdma_v3_0_set_powergating_state()
1539 *flags = 0; in sdma_v3_0_get_clockgating_state()
1542 data = RREG32(mmSDMA0_CLK_CTRL + sdma_offsets[0]); in sdma_v3_0_get_clockgating_state()
1547 data = RREG32(mmSDMA0_POWER_CNTL + sdma_offsets[0]); in sdma_v3_0_get_clockgating_state()
1575 .align_mask = 0xf,
1604 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_set_ring_funcs()
1647 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_emit_copy_buffer()
1677 .copy_max_bytes = 0x3fffe0, /* not 0x3fffff due to HW limitation */
1681 .fill_max_bytes = 0x3fffe0, /* not 0x3fffff due to HW limitation */
1689 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v3_0_set_buffer_funcs()
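
The copy_max_bytes/fill_max_bytes caps at file lines 1677-1681 stay 0x20 short of the raw 22-bit count field maximum (0x3fffff) as a hardware workaround, so callers must split larger transfers across multiple packets. Roughly (a sketch of the generic splitting arithmetic; byte_count and num_loops are illustrative names, not code from this file):

/* Sketch: packets needed when one packet moves at most max_bytes. */
unsigned max_bytes = adev->mman.buffer_funcs->copy_max_bytes; /* 0x3fffe0 */
unsigned num_loops = DIV_ROUND_UP(byte_count, max_bytes);
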
1705 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_set_vm_pte_funcs()
1716 .minor = 0,
1717 .rev = 0,
1726 .rev = 0,