Lines matching "0" and "0x00000100" (query: +full:0 +full:x00000100) in the amdgpu SDMA v3.0 driver, sdma_v3_0.c

82 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
83 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
84 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
85 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
86 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
87 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
88 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
89 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
90 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
91 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
96 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
97 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
102 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
103 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
104 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
105 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
106 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
107 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
108 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
109 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
114 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
115 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
120 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
121 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
122 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
123 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
124 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
125 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
126 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
127 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
128 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
129 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
134 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
135 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
136 mmSDMA0_GFX_IB_CNTL, 0x800f0111, 0x00000100,
137 mmSDMA0_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
138 mmSDMA0_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
139 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
140 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
141 mmSDMA1_GFX_IB_CNTL, 0x800f0111, 0x00000100,
142 mmSDMA1_RLC0_IB_CNTL, 0x800f0111, 0x00000100,
143 mmSDMA1_RLC1_IB_CNTL, 0x800f0111, 0x00000100,
148 mmSDMA0_CHICKEN_BITS, 0xfc910007, 0x00810007,
149 mmSDMA0_CLK_CTRL, 0xff000fff, 0x00000000,
150 mmSDMA0_GFX_IB_CNTL, 0x00000100, 0x00000100,
151 mmSDMA0_POWER_CNTL, 0x00000800, 0x0003c800,
152 mmSDMA0_RLC0_IB_CNTL, 0x00000100, 0x00000100,
153 mmSDMA0_RLC1_IB_CNTL, 0x00000100, 0x00000100,
154 mmSDMA1_CHICKEN_BITS, 0xfc910007, 0x00810007,
155 mmSDMA1_CLK_CTRL, 0xff000fff, 0x00000000,
156 mmSDMA1_GFX_IB_CNTL, 0x00000100, 0x00000100,
157 mmSDMA1_POWER_CNTL, 0x00000800, 0x0003c800,
158 mmSDMA1_RLC0_IB_CNTL, 0x00000100, 0x00000100,
159 mmSDMA1_RLC1_IB_CNTL, 0x00000100, 0x00000100,
164 mmSDMA0_CLK_CTRL, 0xff000ff0, 0x00000100,
165 mmSDMA1_CLK_CTRL, 0xff000ff0, 0x00000100
170 mmSDMA0_GFX_IB_CNTL, 0x00000100, 0x00000100,
171 mmSDMA0_POWER_CNTL, 0x00000800, 0x0003c800,
172 mmSDMA0_RLC0_IB_CNTL, 0x00000100, 0x00000100,
173 mmSDMA0_RLC1_IB_CNTL, 0x00000100, 0x00000100,
178 mmSDMA0_CLK_CTRL, 0xffffffff, 0x00000100,
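
The hits above come in blocks separated by gaps in the source line numbers (82-91, 96-97, 102-109, 114-115, 120-129, 134-143, 148-159, 164-165, 170-173, 178); in sdma_v3_0.c these are the per-ASIC golden-settings tables and the matching MGCG/CGCG clock-gating init tables (Tonga, Fiji, Polaris, Carrizo and Stoney, though the array names themselves are not visible in the hits). Each table is a flat array of (register, AND mask, OR value) triplets. A minimal sketch of how such a table gets applied, modeled on amdgpu's register-sequence programming; the helper name and exact update rule here are assumptions:

    /* Sketch: apply (reg, and_mask, or_val) triplets as laid out in the
     * golden-settings tables above. Read-modify-write: clear the masked
     * bits, then OR in the new value. */
    static void apply_register_triplets(struct amdgpu_device *adev,
                                        const u32 *regs, u32 array_size)
    {
            u32 i, tmp;

            for (i = 0; i + 2 < array_size; i += 3) {
                    u32 reg      = regs[i + 0];
                    u32 and_mask = regs[i + 1]; /* bits this entry may touch */
                    u32 or_val   = regs[i + 2]; /* new value for those bits */

                    tmp = RREG32(reg);
                    tmp &= ~and_mask;
                    tmp |= or_val;
                    WREG32(reg, tmp);
            }
    }
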
254 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_free_microcode()
265 * Returns 0 on success, error on failure.
271 int err = 0, i; in sdma_v3_0_init_microcode()
306 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_init_microcode()
307 if (i == 0) in sdma_v3_0_init_microcode()
331 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_init_microcode()
402 for (i = 0; i < count; i++) in sdma_v3_0_ring_insert_nop()
403 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_insert_nop()
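
Lines 402-403 are the NOP-insertion helper: when the SDMA instance supports burst NOPs, the first NOP packet encodes how many padding dwords follow, so the engine consumes the whole run in one fetch. A hedged reconstruction of the loop body (macro and field names as I recall them from the VI SDMA packet headers):

    /* Sketch of sdma_v3_0_ring_insert_nop(): the first NOP carries a
     * COUNT of the (count - 1) plain NOP dwords that follow it. */
    for (i = 0; i < count; i++)
            if (sdma && sdma->burst_nop && (i == 0))
                    amdgpu_ring_write(ring, ring->funcs->nop |
                                      SDMA_PKT_NOP_HEADER_COUNT(count - 1));
            else
                    amdgpu_ring_write(ring, ring->funcs->nop);
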
431 SDMA_PKT_INDIRECT_HEADER_VMID(vmid & 0xf)); in sdma_v3_0_ring_emit_ib()
433 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v3_0_ring_emit_ib()
436 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_ib()
437 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_ib()
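
Lines 431-437 are from IB submission: an INDIRECT_BUFFER packet carrying the VMID, the 32-byte-aligned IB base, and the IB length in dwords. A sketch of the full packet in emission order (a reconstruction; the trailing zero dwords are, as far as I can tell, unused CSA address fields):

    /* Sketch: emit an SDMA INDIRECT_BUFFER packet for an IB */
    amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_INDIRECT) |
                      SDMA_PKT_INDIRECT_HEADER_VMID(vmid & 0xf));
    /* base address must be 32-byte aligned, hence the & 0xffffffe0 */
    amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0);
    amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr));
    amdgpu_ring_write(ring, ib->length_dw);
    amdgpu_ring_write(ring, 0); /* CSA address lo */
    amdgpu_ring_write(ring, 0); /* CSA address hi */
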
450 u32 ref_and_mask = 0; in sdma_v3_0_ring_emit_hdp_flush()
452 if (ring->me == 0) in sdma_v3_0_ring_emit_hdp_flush()
464 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_hdp_flush()
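
Lines 450-464 are the HDP flush: ring->me selects the per-instance ready bit (SDMA0 vs SDMA1) in GPU_HDP_FLUSH_DONE, and a POLL_REGMEM packet spins on that register until the bit matches, retrying up to 0xfff times. A condensed sketch; SDMA0_DONE_BIT/SDMA1_DONE_BIT stand in for the REG_SET_FIELD() construction of the mask in the real code, and the register byte offsets are assumptions:

    /* Sketch: poll GPU_HDP_FLUSH_DONE until our instance's bit is set */
    u32 ref_and_mask = (ring->me == 0) ? SDMA0_DONE_BIT : SDMA1_DONE_BIT;

    amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_POLL_REGMEM) |
                      SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(1) |
                      SDMA_PKT_POLL_REGMEM_HEADER_FUNC(3));   /* equal */
    amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_DONE << 2);
    amdgpu_ring_write(ring, mmGPU_HDP_FLUSH_REQ << 2);
    amdgpu_ring_write(ring, ref_and_mask);                    /* reference */
    amdgpu_ring_write(ring, ref_and_mask);                    /* mask */
    amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) |
                      SDMA_PKT_POLL_REGMEM_DW5_INTERVAL(10));
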
501 amdgpu_ring_write(ring, SDMA_PKT_TRAP_INT_CONTEXT_INT_CONTEXT(0)); in sdma_v3_0_ring_emit_fence()
516 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_stop()
518 rb_cntl = REG_SET_FIELD(rb_cntl, SDMA0_GFX_RB_CNTL, RB_ENABLE, 0); in sdma_v3_0_gfx_stop()
521 ib_cntl = REG_SET_FIELD(ib_cntl, SDMA0_GFX_IB_CNTL, IB_ENABLE, 0); in sdma_v3_0_gfx_stop()
548 u32 f32_cntl, phase_quantum = 0; in sdma_v3_0_ctx_switch_enable()
553 unsigned unit = 0; in sdma_v3_0_ctx_switch_enable()
575 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_ctx_switch_enable()
590 AUTO_CTXSW_ENABLE, 0); in sdma_v3_0_ctx_switch_enable()
617 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_enable()
620 f32_cntl = REG_SET_FIELD(f32_cntl, SDMA0_F32_CNTL, HALT, 0); in sdma_v3_0_enable()
633 * Returns 0 for success, error for failure.
644 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
649 for (j = 0; j < 16; j++) { in sdma_v3_0_gfx_resume()
650 vi_srbm_select(adev, 0, 0, 0, j); in sdma_v3_0_gfx_resume()
652 WREG32(mmSDMA0_GFX_VIRTUAL_ADDR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
653 WREG32(mmSDMA0_GFX_APE1_CNTL + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
655 vi_srbm_select(adev, 0, 0, 0, 0); in sdma_v3_0_gfx_resume()
659 adev->gfx.config.gb_addr_config & 0x70); in sdma_v3_0_gfx_resume()
661 WREG32(mmSDMA0_SEM_WAIT_FAIL_TIMER_CNTL + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
675 ring->wptr = 0; in sdma_v3_0_gfx_resume()
676 WREG32(mmSDMA0_GFX_RB_RPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
678 WREG32(mmSDMA0_GFX_IB_RPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
679 WREG32(mmSDMA0_GFX_IB_OFFSET + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
683 upper_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFF); in sdma_v3_0_gfx_resume()
685 lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC); in sdma_v3_0_gfx_resume()
699 doorbell = REG_SET_FIELD(doorbell, SDMA0_GFX_DOORBELL, ENABLE, 0); in sdma_v3_0_gfx_resume()
713 WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[i], 0); in sdma_v3_0_gfx_resume()
720 ENABLE, 0); in sdma_v3_0_gfx_resume()
742 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_gfx_resume()
749 return 0; in sdma_v3_0_gfx_resume()
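
The scattered gfx_resume() hits (source lines 644-749) trace per-instance ring bring-up: program per-VMID virtual-address and APE1 state under vi_srbm_select(), set the tiling config from gb_addr_config, reset the ring pointers, point the hardware at the rptr shadow in GPU memory, then route the doorbell and flip the enable bits. A sketch of the ring-pointer block in the middle (register names from the VI headers; the field packing is an assumption):

    /* Sketch: per-ring RB pointer setup inside the instance loop */
    ring->wptr = 0;
    WREG32(mmSDMA0_GFX_RB_RPTR + sdma_offsets[i], 0);
    WREG32(mmSDMA0_GFX_RB_WPTR + sdma_offsets[i], 0);
    /* rptr shadow lives in GPU memory; low 2 bits masked for alignment */
    WREG32(mmSDMA0_GFX_RB_RPTR_ADDR_HI + sdma_offsets[i],
           upper_32_bits(ring->rptr_gpu_addr));
    WREG32(mmSDMA0_GFX_RB_RPTR_ADDR_LO + sdma_offsets[i],
           lower_32_bits(ring->rptr_gpu_addr) & 0xFFFFFFFC);
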
758 * Returns 0 for success, error for failure.
763 return 0; in sdma_v3_0_rlc_resume()
772 * Returns 0 for success, error for failure.
790 return 0; in sdma_v3_0_start()
800 * Returns 0 for success, error for failure.
816 tmp = 0xCAFEDEAD; in sdma_v3_0_ring_test_ring()
828 amdgpu_ring_write(ring, 0xDEADBEEF); in sdma_v3_0_ring_test_ring()
831 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_ring_test_ring()
833 if (tmp == 0xDEADBEEF) in sdma_v3_0_ring_test_ring()
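
Lines 816-833 are the standard ring test: seed a location with the sentinel 0xCAFEDEAD, submit a packet that writes 0xDEADBEEF there, and poll until the value flips or usec_timeout expires. A minimal sketch of the handshake (scratch_reg is a placeholder for however the test allocates its target):

    /* Sketch: ring-test polling loop */
    tmp = 0xCAFEDEAD;
    WREG32(scratch_reg, tmp);            /* seed the sentinel */
    /* ... emit a write of 0xDEADBEEF to scratch_reg, commit the ring ... */
    for (i = 0; i < adev->usec_timeout; i++) {
            tmp = RREG32(scratch_reg);
            if (tmp == 0xDEADBEEF)       /* the engine ran our packet */
                    break;
            udelay(1);
    }
    r = (i < adev->usec_timeout) ? 0 : -ETIMEDOUT;
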
853 * Returns 0 on success, error on failure.
861 u32 tmp = 0; in sdma_v3_0_ring_test_ib()
870 tmp = 0xCAFEDEAD; in sdma_v3_0_ring_test_ib()
872 memset(&ib, 0, sizeof(ib)); in sdma_v3_0_ring_test_ib()
878 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v3_0_ring_test_ib()
883 ib.ptr[4] = 0xDEADBEEF; in sdma_v3_0_ring_test_ib()
894 if (r == 0) { in sdma_v3_0_ring_test_ib()
897 } else if (r < 0) { in sdma_v3_0_ring_test_ib()
901 if (tmp == 0xDEADBEEF) in sdma_v3_0_ring_test_ib()
902 r = 0; in sdma_v3_0_ring_test_ib()
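
The IB test (source lines 861-902) runs the same handshake through an indirect buffer: ib.ptr[0] opens an SDMA_OP_WRITE/WRITE_LINEAR packet and ib.ptr[4] carries the 0xDEADBEEF payload, with a fence wait replacing the register poll. Filling in the dwords between the two visible ones (a reconstruction; the COUNT macro name is from the VI SDMA headers as I recall them):

    ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |
            SDMA_PKT_HEADER_SUB_OP(SDMA_SUBOP_WRITE_LINEAR);
    ib.ptr[1] = lower_32_bits(gpu_addr);              /* destination */
    ib.ptr[2] = upper_32_bits(gpu_addr);
    ib.ptr[3] = SDMA_PKT_WRITE_UNTILED_DW_3_COUNT(1); /* one dword */
    ib.ptr[4] = 0xDEADBEEF;
    ib.length_dw = 5;   /* padded up with NOPs before submission */
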
932 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_vm_copy_pte()
961 for (; ndw > 0; ndw -= 2) { in sdma_v3_0_vm_write_pte()
993 ib->ptr[ib->length_dw++] = 0; in sdma_v3_0_vm_set_pte_pde()
1011 for (i = 0; i < pad_count; i++) in sdma_v3_0_ring_pad_ib()
1012 if (sdma && sdma->burst_nop && (i == 0)) in sdma_v3_0_ring_pad_ib()
1035 SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(0) | in sdma_v3_0_ring_emit_pipeline_sync()
1038 amdgpu_ring_write(ring, addr & 0xfffffffc); in sdma_v3_0_ring_emit_pipeline_sync()
1039 amdgpu_ring_write(ring, upper_32_bits(addr) & 0xffffffff); in sdma_v3_0_ring_emit_pipeline_sync()
1041 amdgpu_ring_write(ring, 0xffffffff); /* mask */ in sdma_v3_0_ring_emit_pipeline_sync()
1042 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_pipeline_sync()
1063 SDMA_PKT_POLL_REGMEM_HEADER_HDP_FLUSH(0) | in sdma_v3_0_ring_emit_vm_flush()
1064 SDMA_PKT_POLL_REGMEM_HEADER_FUNC(0)); /* always */ in sdma_v3_0_ring_emit_vm_flush()
1066 amdgpu_ring_write(ring, 0); in sdma_v3_0_ring_emit_vm_flush()
1067 amdgpu_ring_write(ring, 0); /* reference */ in sdma_v3_0_ring_emit_vm_flush()
1068 amdgpu_ring_write(ring, 0); /* mask */ in sdma_v3_0_ring_emit_vm_flush()
1069 amdgpu_ring_write(ring, SDMA_PKT_POLL_REGMEM_DW5_RETRY_COUNT(0xfff) | in sdma_v3_0_ring_emit_vm_flush()
1077 SDMA_PKT_SRBM_WRITE_HEADER_BYTE_EN(0xf)); in sdma_v3_0_ring_emit_wreg()
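
Line 1077 is the tail of sdma_v3_0_ring_emit_wreg(): register writes from the ring go out as an SRBM_WRITE packet with all four byte enables set. The whole emit is short enough to reconstruct (hedged; macro names per the VI packet definitions):

    /* Sketch: write MMIO register `reg` with `val` from the ring */
    amdgpu_ring_write(ring, SDMA_PKT_HEADER_OP(SDMA_OP_SRBM_WRITE) |
                      SDMA_PKT_SRBM_WRITE_HEADER_BYTE_EN(0xf));
    amdgpu_ring_write(ring, reg);
    amdgpu_ring_write(ring, val);
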
1105 return 0; in sdma_v3_0_early_init()
1132 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_sw_init()
1144 (i == 0) ? AMDGPU_SDMA_IRQ_INSTANCE0 : in sdma_v3_0_sw_init()
1159 for (i = 0; i < adev->sdma.num_instances; i++) in sdma_v3_0_sw_fini()
1163 return 0; in sdma_v3_0_sw_fini()
1187 return 0; in sdma_v3_0_hw_fini()
1222 for (i = 0; i < adev->usec_timeout; i++) { in sdma_v3_0_wait_for_idle()
1227 return 0; in sdma_v3_0_wait_for_idle()
1236 u32 srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1249 adev->sdma.srbm_soft_reset = 0; in sdma_v3_0_check_soft_reset()
1257 u32 srbm_soft_reset = 0; in sdma_v3_0_pre_soft_reset()
1260 return 0; in sdma_v3_0_pre_soft_reset()
1270 return 0; in sdma_v3_0_pre_soft_reset()
1276 u32 srbm_soft_reset = 0; in sdma_v3_0_post_soft_reset()
1279 return 0; in sdma_v3_0_post_soft_reset()
1289 return 0; in sdma_v3_0_post_soft_reset()
1295 u32 srbm_soft_reset = 0; in sdma_v3_0_soft_reset()
1299 return 0; in sdma_v3_0_soft_reset()
1306 dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in sdma_v3_0_soft_reset()
1320 return 0; in sdma_v3_0_soft_reset()
1335 sdma_cntl = REG_SET_FIELD(sdma_cntl, SDMA0_CNTL, TRAP_ENABLE, 0); in sdma_v3_0_set_trap_irq_state()
1351 sdma_cntl = REG_SET_FIELD(sdma_cntl, SDMA0_CNTL, TRAP_ENABLE, 0); in sdma_v3_0_set_trap_irq_state()
1366 return 0; in sdma_v3_0_set_trap_irq_state()
1375 instance_id = (entry->ring_id & 0x3) >> 0; in sdma_v3_0_process_trap_irq()
1376 queue_id = (entry->ring_id & 0xc) >> 2; in sdma_v3_0_process_trap_irq()
1379 case 0: in sdma_v3_0_process_trap_irq()
1381 case 0: in sdma_v3_0_process_trap_irq()
1382 amdgpu_fence_process(&adev->sdma.instance[0].ring); in sdma_v3_0_process_trap_irq()
1394 case 0: in sdma_v3_0_process_trap_irq()
1406 return 0; in sdma_v3_0_process_trap_irq()
1416 instance_id = (entry->ring_id & 0x3) >> 0; in sdma_v3_0_process_illegal_inst_irq()
1417 queue_id = (entry->ring_id & 0xc) >> 2; in sdma_v3_0_process_illegal_inst_irq()
1419 if (instance_id <= 1 && queue_id == 0) in sdma_v3_0_process_illegal_inst_irq()
1421 return 0; in sdma_v3_0_process_illegal_inst_irq()
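
Both interrupt handlers (source lines 1375-1421) decode the same ring_id packing: bits [1:0] select the SDMA instance and bits [3:2] the queue within it (queue 0 is the gfx ring, then the RLC queues). A sketch of the dispatch under that layout; the real trap handler spells this out with nested switch statements rather than indexing:

    instance_id = entry->ring_id & 0x3;        /* bits [1:0]: instance */
    queue_id    = (entry->ring_id & 0xc) >> 2; /* bits [3:2]: queue */

    if (queue_id == 0)  /* trap came from the gfx queue */
            amdgpu_fence_process(&adev->sdma.instance[instance_id].ring);
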
1432 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1446 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_clock_gating()
1471 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1479 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_update_sdma_medium_grain_light_sleep()
1495 return 0; in sdma_v3_0_set_clockgating_state()
1509 return 0; in sdma_v3_0_set_clockgating_state()
1515 return 0; in sdma_v3_0_set_powergating_state()
1524 *flags = 0; in sdma_v3_0_get_clockgating_state()
1527 data = RREG32(mmSDMA0_CLK_CTRL + sdma_offsets[0]); in sdma_v3_0_get_clockgating_state()
1532 data = RREG32(mmSDMA0_POWER_CNTL + sdma_offsets[0]); in sdma_v3_0_get_clockgating_state()
1560 .align_mask = 0xf,
1590 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_set_ring_funcs()
1634 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in sdma_v3_0_emit_copy_buffer()
1664 .copy_max_bytes = 0x3fffe0, /* not 0x3fffff due to HW limitation */
1668 .fill_max_bytes = 0x3fffe0, /* not 0x3fffff due to HW limitation */
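
The two 0x3fffe0 limits (source lines 1664-1668) follow from the packet's byte-count field: 22 bits would allow 0x3fffff, but the "HW limitation" noted in the comments caps usable transfers just below that, and 0x3fffe0 is exactly 0x3fffff rounded down to a 32-byte boundary. A worked check of that arithmetic (the alignment rationale is my reading; the kernel comment does not spell it out):

    #include <assert.h>
    #include <stdint.h>

    int main(void)
    {
            uint32_t field_max = (1u << 22) - 1;     /* 0x3fffff */
            uint32_t max_bytes = field_max & ~0x1fu; /* 32-byte floor */

            assert(max_bytes == 0x3fffe0);
            return 0;
    }
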
1676 adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring; in sdma_v3_0_set_buffer_funcs()
1692 for (i = 0; i < adev->sdma.num_instances; i++) { in sdma_v3_0_set_vm_pte_funcs()
1703 .minor = 0,
1704 .rev = 0,
1713 .rev = 0,