Lines Matching +full:sr +full:- +full:idle +full:- +full:ns

62 spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
65 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
73 spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
76 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
84 spin_lock_irqsave(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
87 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
95 spin_lock_irqsave(&rdev->pif_idx_lock, flags); in eg_pif_phy0_wreg()
98 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); in eg_pif_phy0_wreg()
106 spin_lock_irqsave(&rdev->pif_idx_lock, flags); in eg_pif_phy1_rreg()
109 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); in eg_pif_phy1_rreg()
117 spin_lock_irqsave(&rdev->pif_idx_lock, flags); in eg_pif_phy1_wreg()
120 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); in eg_pif_phy1_wreg()
999 switch (rdev->family) { in evergreen_init_golden_registers()
1084 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1090 * Returns 0 for success or -EINVAL for an invalid register
1107 return -EINVAL; in evergreen_get_allowed_info_register()
1161 return -ETIMEDOUT; in sumo_set_uvd_clock()
1283 readrq = pcie_get_readrq(rdev->pdev); in evergreen_fix_pci_max_read_req_size()
1284 v = ffs(readrq) - 8; in evergreen_fix_pci_max_read_req_size()
1289 pcie_set_readrq(rdev->pdev, 512); in evergreen_fix_pci_max_read_req_size()
1294 struct drm_device *dev = encoder->dev; in dce4_program_fmt()
1295 struct radeon_device *rdev = dev->dev_private; in dce4_program_fmt()
1297 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc); in dce4_program_fmt()
1306 dither = radeon_connector->dither; in dce4_program_fmt()
1310 if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT) in dce4_program_fmt()
1314 if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) || in dce4_program_fmt()
1315 (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2)) in dce4_program_fmt()
1345 WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp); in dce4_program_fmt()
1370 * dce4_wait_for_vblank - vblank wait asic callback.
1381 if (crtc >= rdev->num_crtc) in dce4_wait_for_vblank()
1406 * evergreen_page_flip - pageflip callback.
1419 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; in evergreen_page_flip()
1420 struct drm_framebuffer *fb = radeon_crtc->base.primary->fb; in evergreen_page_flip()
1423 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, in evergreen_page_flip()
1426 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, in evergreen_page_flip()
1427 fb->pitches[0] / fb->format->cpp[0]); in evergreen_page_flip()
1429 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset, in evergreen_page_flip()
1431 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset, in evergreen_page_flip()
1434 RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset); in evergreen_page_flip()
1438 * evergreen_page_flip_pending - check if page flip is still pending
1447 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id]; in evergreen_page_flip_pending()
1450 return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & in evergreen_page_flip_pending()
1460 if (rdev->family == CHIP_JUNIPER) { in evergreen_get_temp()
1467 actual_temp = temp / 2 - (0x200 - toffset); in evergreen_get_temp()
1478 actual_temp = -256; in evergreen_get_temp()
1496 int actual_temp = temp - 49; in sumo_get_temp()
1502 * sumo_pm_init_profile - Initialize power profiles callback.
1515 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; in sumo_pm_init_profile()
1516 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; in sumo_pm_init_profile()
1517 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; in sumo_pm_init_profile()
1518 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; in sumo_pm_init_profile()
1521 if (rdev->flags & RADEON_IS_MOBILITY) in sumo_pm_init_profile()
1526 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx; in sumo_pm_init_profile()
1527 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx; in sumo_pm_init_profile()
1528 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; in sumo_pm_init_profile()
1529 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; in sumo_pm_init_profile()
1531 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx; in sumo_pm_init_profile()
1532 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx; in sumo_pm_init_profile()
1533 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; in sumo_pm_init_profile()
1534 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; in sumo_pm_init_profile()
1536 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx; in sumo_pm_init_profile()
1537 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx; in sumo_pm_init_profile()
1538 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; in sumo_pm_init_profile()
1539 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0; in sumo_pm_init_profile()
1541 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx; in sumo_pm_init_profile()
1542 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx; in sumo_pm_init_profile()
1543 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; in sumo_pm_init_profile()
1544 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0; in sumo_pm_init_profile()
1548 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx; in sumo_pm_init_profile()
1549 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx; in sumo_pm_init_profile()
1550 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; in sumo_pm_init_profile()
1551 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = in sumo_pm_init_profile()
1552 rdev->pm.power_state[idx].num_clock_modes - 1; in sumo_pm_init_profile()
1554 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx; in sumo_pm_init_profile()
1555 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx; in sumo_pm_init_profile()
1556 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; in sumo_pm_init_profile()
1557 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = in sumo_pm_init_profile()
1558 rdev->pm.power_state[idx].num_clock_modes - 1; in sumo_pm_init_profile()
1562 * btc_pm_init_profile - Initialize power profiles callback.
1575 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; in btc_pm_init_profile()
1576 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; in btc_pm_init_profile()
1577 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; in btc_pm_init_profile()
1578 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2; in btc_pm_init_profile()
1583 if (rdev->flags & RADEON_IS_MOBILITY) in btc_pm_init_profile()
1588 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx; in btc_pm_init_profile()
1589 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx; in btc_pm_init_profile()
1590 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; in btc_pm_init_profile()
1591 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; in btc_pm_init_profile()
1593 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx; in btc_pm_init_profile()
1594 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx; in btc_pm_init_profile()
1595 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0; in btc_pm_init_profile()
1596 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1; in btc_pm_init_profile()
1598 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx; in btc_pm_init_profile()
1599 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx; in btc_pm_init_profile()
1600 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0; in btc_pm_init_profile()
1601 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2; in btc_pm_init_profile()
1603 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx; in btc_pm_init_profile()
1604 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx; in btc_pm_init_profile()
1605 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0; in btc_pm_init_profile()
1606 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0; in btc_pm_init_profile()
1608 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx; in btc_pm_init_profile()
1609 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx; in btc_pm_init_profile()
1610 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0; in btc_pm_init_profile()
1611 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1; in btc_pm_init_profile()
1613 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx; in btc_pm_init_profile()
1614 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx; in btc_pm_init_profile()
1615 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0; in btc_pm_init_profile()
1616 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2; in btc_pm_init_profile()
1620 * evergreen_pm_misc - set additional pm hw parameters callback.
1624 * Set non-clock parameters associated with a power state
1629 int req_ps_idx = rdev->pm.requested_power_state_index; in evergreen_pm_misc()
1630 int req_cm_idx = rdev->pm.requested_clock_mode_index; in evergreen_pm_misc()
1631 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx]; in evergreen_pm_misc()
1632 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage; in evergreen_pm_misc()
1634 if (voltage->type == VOLTAGE_SW) { in evergreen_pm_misc()
1636 if ((voltage->voltage & 0xff00) == 0xff00) in evergreen_pm_misc()
1638 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) { in evergreen_pm_misc()
1639 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC); in evergreen_pm_misc()
1640 rdev->pm.current_vddc = voltage->voltage; in evergreen_pm_misc()
1641 DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage); in evergreen_pm_misc()
1648 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) && in evergreen_pm_misc()
1649 (rdev->family >= CHIP_BARTS) && in evergreen_pm_misc()
1650 rdev->pm.active_crtc_count && in evergreen_pm_misc()
1651 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) || in evergreen_pm_misc()
1652 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX))) in evergreen_pm_misc()
1653 voltage = &rdev->pm.power_state[req_ps_idx]. in evergreen_pm_misc()
1654 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage; in evergreen_pm_misc()
1657 if ((voltage->vddci & 0xff00) == 0xff00) in evergreen_pm_misc()
1659 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) { in evergreen_pm_misc()
1660 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI); in evergreen_pm_misc()
1661 rdev->pm.current_vddci = voltage->vddci; in evergreen_pm_misc()
1662 DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci); in evergreen_pm_misc()
1668 * evergreen_pm_prepare - pre-power state change callback.
1676 struct drm_device *ddev = rdev->ddev; in evergreen_pm_prepare()
1682 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) { in evergreen_pm_prepare()
1684 if (radeon_crtc->enabled) { in evergreen_pm_prepare()
1685 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset); in evergreen_pm_prepare()
1687 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp); in evergreen_pm_prepare()
1693 * evergreen_pm_finish - post-power state change callback.
1701 struct drm_device *ddev = rdev->ddev; in evergreen_pm_finish()
1707 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) { in evergreen_pm_finish()
1709 if (radeon_crtc->enabled) { in evergreen_pm_finish()
1710 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset); in evergreen_pm_finish()
1712 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp); in evergreen_pm_finish()
1718 * evergreen_hpd_sense - hpd sense callback.
1735 * evergreen_hpd_set_polarity - hpd set polarity callback.
1757 * evergreen_hpd_init - hpd setup callback.
1766 struct drm_device *dev = rdev->ddev; in evergreen_hpd_init()
1772 list_for_each_entry(connector, &dev->mode_config.connector_list, head) { in evergreen_hpd_init()
1774 to_radeon_connector(connector)->hpd.hpd; in evergreen_hpd_init()
1776 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP || in evergreen_hpd_init()
1777 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) { in evergreen_hpd_init()
1798 * evergreen_hpd_fini - hpd tear down callback.
1807 struct drm_device *dev = rdev->ddev; in evergreen_hpd_fini()
1811 list_for_each_entry(connector, &dev->mode_config.connector_list, head) { in evergreen_hpd_fini()
1813 to_radeon_connector(connector)->hpd.hpd; in evergreen_hpd_fini()
1832 u32 pipe_offset = radeon_crtc->crtc_id * 0x20; in evergreen_line_buffer_adjust()
1840 * 0 - first half of lb (3840 * 2) in evergreen_line_buffer_adjust()
1841 * 1 - first 3/4 of lb (5760 * 2) in evergreen_line_buffer_adjust()
1842 * 2 - whole lb (7680 * 2), other crtc must be disabled in evergreen_line_buffer_adjust()
1843 * 3 - first 1/4 of lb (1920 * 2) in evergreen_line_buffer_adjust()
1845 * 4 - second half of lb (3840 * 2) in evergreen_line_buffer_adjust()
1846 * 5 - second 3/4 of lb (5760 * 2) in evergreen_line_buffer_adjust()
1847 * 6 - whole lb (7680 * 2), other crtc must be disabled in evergreen_line_buffer_adjust()
1848 * 7 - last 1/4 of lb (1920 * 2) in evergreen_line_buffer_adjust()
1852 * non-linked crtcs for maximum line buffer allocation. in evergreen_line_buffer_adjust()
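The mode values listed above are the encodings written into DC_LB_MEMORY_SPLIT a few matches below. A minimal hypothetical sketch of how one of them might be chosen (the helper name and the decision logic are illustrative assumptions; only the mode numbers come from the comment above):

    /* Hypothetical helper, not the driver's code: pick a DC_LB_MEMORY_SPLIT
     * encoding from the table above.  Take the whole line buffer only when
     * the paired CRTC is disabled, otherwise settle for the half split; the
     * odd CRTC of a pair uses the same splits shifted by 4. */
    static unsigned int pick_lb_split_mode(int other_crtc_enabled, int odd_crtc)
    {
            unsigned int mode = other_crtc_enabled ? 0 : 2; /* 0: half of lb, 2: whole lb */

            if (odd_crtc)
                    mode += 4; /* modes 4-7 mirror 0-3 for the second CRTC */
            return mode;
    }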
1854 if (radeon_crtc->base.enabled && mode) { in evergreen_line_buffer_adjust()
1868 if (radeon_crtc->crtc_id % 2) in evergreen_line_buffer_adjust()
1870 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp); in evergreen_line_buffer_adjust()
1875 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_line_buffer_adjust()
1883 if (radeon_crtc->base.enabled && mode) { in evergreen_line_buffer_adjust()
1940 u32 active_time; /* active display time in ns */
1941 u32 blank_time; /* blank time in ns */
1958 yclk.full = dfixed_const(wm->yclk); in evergreen_dram_bandwidth()
1960 dram_channels.full = dfixed_const(wm->dram_channels * 4); in evergreen_dram_bandwidth()
1978 yclk.full = dfixed_const(wm->yclk); in evergreen_dram_bandwidth_for_display()
1980 dram_channels.full = dfixed_const(wm->dram_channels * 4); in evergreen_dram_bandwidth_for_display()
1998 sclk.full = dfixed_const(wm->sclk); in evergreen_data_return_bandwidth()
2018 disp_clk.full = dfixed_const(wm->disp_clk); in evergreen_dmif_request_bandwidth()
2053 line_time.full = dfixed_const(wm->active_time + wm->blank_time); in evergreen_average_bandwidth()
2055 bpp.full = dfixed_const(wm->bytes_per_pixel); in evergreen_average_bandwidth()
2056 src_width.full = dfixed_const(wm->src_width); in evergreen_average_bandwidth()
2058 bandwidth.full = dfixed_mul(bandwidth, wm->vsc); in evergreen_average_bandwidth()
2066 /* First calculate the latency in ns */ in evergreen_latency_watermark()
2067 u32 mc_latency = 2000; /* 2000 ns. */ in evergreen_latency_watermark()
2071 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */ in evergreen_latency_watermark()
2072 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) + in evergreen_latency_watermark()
2073 (wm->num_heads * cursor_line_pair_return_time); in evergreen_latency_watermark()
2078 if (wm->num_heads == 0) in evergreen_latency_watermark()
2083 if ((wm->vsc.full > a.full) || in evergreen_latency_watermark()
2084 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) || in evergreen_latency_watermark()
2085 (wm->vtaps >= 5) || in evergreen_latency_watermark()
2086 ((wm->vsc.full >= a.full) && wm->interlaced)) in evergreen_latency_watermark()
2092 b.full = dfixed_const(wm->num_heads); in evergreen_latency_watermark()
2095 lb_fill_bw = min(dfixed_trunc(a), wm->disp_clk * wm->bytes_per_pixel / 1000); in evergreen_latency_watermark()
2097 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel); in evergreen_latency_watermark()
2104 if (line_fill_time < wm->active_time) in evergreen_latency_watermark()
2107 return latency + (line_fill_time - wm->active_time); in evergreen_latency_watermark()
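As a worked example for the dc pipe latency term above: wm->disp_clk is set from mode->clock (in kHz) in evergreen_program_watermarks() further down, so an assumed 148.5 MHz pixel clock gives dc_latency = 40000000 / 148500 ≈ 269 ns; the 2000 ns mc_latency and the per-head data-return times above are then added on top before the result is compared against the line fill time.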
2114 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads)) in evergreen_average_bandwidth_vs_dram_bandwidth_for_display()
2123 (evergreen_available_bandwidth(wm) / wm->num_heads)) in evergreen_average_bandwidth_vs_available_bandwidth()
2131 u32 lb_partitions = wm->lb_size / wm->src_width; in evergreen_check_latency_hiding()
2132 u32 line_time = wm->active_time + wm->blank_time; in evergreen_check_latency_hiding()
2138 if (wm->vsc.full > a.full) in evergreen_check_latency_hiding()
2141 if (lb_partitions <= (wm->vtaps + 1)) in evergreen_check_latency_hiding()
2147 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time); in evergreen_check_latency_hiding()
2159 struct drm_display_mode *mode = &radeon_crtc->base.mode; in evergreen_program_watermarks()
2168 u32 pipe_offset = radeon_crtc->crtc_id * 16; in evergreen_program_watermarks()
2172 if (radeon_crtc->base.enabled && num_heads && mode) { in evergreen_program_watermarks()
2173 active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000, in evergreen_program_watermarks()
2174 (u32)mode->clock); in evergreen_program_watermarks()
2175 line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000, in evergreen_program_watermarks()
2176 (u32)mode->clock); in evergreen_program_watermarks()
2183 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in evergreen_program_watermarks()
2189 wm_high.yclk = rdev->pm.current_mclk * 10; in evergreen_program_watermarks()
2190 wm_high.sclk = rdev->pm.current_sclk * 10; in evergreen_program_watermarks()
2193 wm_high.disp_clk = mode->clock; in evergreen_program_watermarks()
2194 wm_high.src_width = mode->crtc_hdisplay; in evergreen_program_watermarks()
2196 wm_high.blank_time = line_time - wm_high.active_time; in evergreen_program_watermarks()
2198 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in evergreen_program_watermarks()
2200 wm_high.vsc = radeon_crtc->vsc; in evergreen_program_watermarks()
2202 if (radeon_crtc->rmx_type != RMX_OFF) in evergreen_program_watermarks()
2210 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) { in evergreen_program_watermarks()
2216 wm_low.yclk = rdev->pm.current_mclk * 10; in evergreen_program_watermarks()
2217 wm_low.sclk = rdev->pm.current_sclk * 10; in evergreen_program_watermarks()
2220 wm_low.disp_clk = mode->clock; in evergreen_program_watermarks()
2221 wm_low.src_width = mode->crtc_hdisplay; in evergreen_program_watermarks()
2223 wm_low.blank_time = line_time - wm_low.active_time; in evergreen_program_watermarks()
2225 if (mode->flags & DRM_MODE_FLAG_INTERLACE) in evergreen_program_watermarks()
2227 wm_low.vsc = radeon_crtc->vsc; in evergreen_program_watermarks()
2229 if (radeon_crtc->rmx_type != RMX_OFF) in evergreen_program_watermarks()
2246 (rdev->disp_priority == 2)) { in evergreen_program_watermarks()
2253 (rdev->disp_priority == 2)) { in evergreen_program_watermarks()
2259 b.full = dfixed_const(mode->clock); in evergreen_program_watermarks()
2263 c.full = dfixed_mul(c, radeon_crtc->hsc); in evergreen_program_watermarks()
2271 b.full = dfixed_const(mode->clock); in evergreen_program_watermarks()
2275 c.full = dfixed_mul(c, radeon_crtc->hsc); in evergreen_program_watermarks()
2283 radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay); in evergreen_program_watermarks()
2307 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt); in evergreen_program_watermarks()
2308 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt); in evergreen_program_watermarks()
2311 radeon_crtc->line_time = line_time; in evergreen_program_watermarks()
2312 radeon_crtc->wm_high = latency_watermark_a; in evergreen_program_watermarks()
2313 radeon_crtc->wm_low = latency_watermark_b; in evergreen_program_watermarks()
2317 * evergreen_bandwidth_update - update display watermarks callback.
2331 if (!rdev->mode_info.mode_config_initialized) in evergreen_bandwidth_update()
2336 for (i = 0; i < rdev->num_crtc; i++) { in evergreen_bandwidth_update()
2337 if (rdev->mode_info.crtcs[i]->base.enabled) in evergreen_bandwidth_update()
2340 for (i = 0; i < rdev->num_crtc; i += 2) { in evergreen_bandwidth_update()
2341 mode0 = &rdev->mode_info.crtcs[i]->base.mode; in evergreen_bandwidth_update()
2342 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode; in evergreen_bandwidth_update()
2343 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1); in evergreen_bandwidth_update()
2344 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads); in evergreen_bandwidth_update()
2345 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0); in evergreen_bandwidth_update()
2346 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads); in evergreen_bandwidth_update()
2351 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2355 * Wait for the MC (memory controller) to be idle.
2357 * Returns 0 if the MC is idle, -1 if not.
2364 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_mc_wait_for_idle()
2371 return -1; in evergreen_mc_wait_for_idle()
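The two matches above are the usual bounded polling loop; a minimal sketch of that pattern, assuming the driver's RREG32()/udelay() helpers and an assumed busy-bit mask (the real register and mask are not shown in this listing):

    /* Sketch only: check a status register once per microsecond and give up
     * after rdev->usec_timeout iterations, as the matched lines suggest. */
    static int wait_for_mc_idle_sketch(struct radeon_device *rdev)
    {
            unsigned int i;

            for (i = 0; i < rdev->usec_timeout; i++) {
                    if (!(RREG32(SRBM_STATUS) & MC_BUSY_BITS)) /* assumed register/mask */
                            return 0;  /* MC is idle */
                    udelay(1);
            }
            return -1;  /* still busy after the timeout */
    }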
2385 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_pcie_gart_tlb_flush()
2405 if (rdev->gart.robj == NULL) { in evergreen_pcie_gart_enable()
2406 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); in evergreen_pcie_gart_enable()
2407 return -EINVAL; in evergreen_pcie_gart_enable()
2423 if (rdev->flags & RADEON_IS_IGP) { in evergreen_pcie_gart_enable()
2431 if ((rdev->family == CHIP_JUNIPER) || in evergreen_pcie_gart_enable()
2432 (rdev->family == CHIP_CYPRESS) || in evergreen_pcie_gart_enable()
2433 (rdev->family == CHIP_HEMLOCK) || in evergreen_pcie_gart_enable()
2434 (rdev->family == CHIP_BARTS)) in evergreen_pcie_gart_enable()
2441 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); in evergreen_pcie_gart_enable()
2442 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); in evergreen_pcie_gart_enable()
2443 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in evergreen_pcie_gart_enable()
2447 (u32)(rdev->dummy_page.addr >> 12)); in evergreen_pcie_gart_enable()
2452 (unsigned)(rdev->mc.gtt_size >> 20), in evergreen_pcie_gart_enable()
2453 (unsigned long long)rdev->gart.table_addr); in evergreen_pcie_gart_enable()
2454 rdev->gart.ready = true; in evergreen_pcie_gart_enable()
2674 save->vga_render_control = RREG32(VGA_RENDER_CONTROL); in evergreen_mc_stop()
2675 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL); in evergreen_mc_stop()
2681 for (i = 0; i < rdev->num_crtc; i++) { in evergreen_mc_stop()
2684 save->crtc_enabled[i] = true; in evergreen_mc_stop()
2706 for (j = 0; j < rdev->usec_timeout; j++) { in evergreen_mc_stop()
2728 save->crtc_enabled[i] = false; in evergreen_mc_stop()
2731 save->crtc_enabled[i] = false; in evergreen_mc_stop()
2749 for (i = 0; i < rdev->num_crtc; i++) { in evergreen_mc_stop()
2750 if (save->crtc_enabled[i]) { in evergreen_mc_stop()
2771 for (i = 0; i < rdev->num_crtc; i++) { in evergreen_mc_resume()
2773 upper_32_bits(rdev->mc.vram_start)); in evergreen_mc_resume()
2775 upper_32_bits(rdev->mc.vram_start)); in evergreen_mc_resume()
2777 (u32)rdev->mc.vram_start); in evergreen_mc_resume()
2779 (u32)rdev->mc.vram_start); in evergreen_mc_resume()
2783 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start)); in evergreen_mc_resume()
2784 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start); in evergreen_mc_resume()
2788 for (i = 0; i < rdev->num_crtc; i++) { in evergreen_mc_resume()
2789 if (save->crtc_enabled[i]) { in evergreen_mc_resume()
2805 for (j = 0; j < rdev->usec_timeout; j++) { in evergreen_mc_resume()
2821 for (i = 0; i < rdev->num_crtc; i++) { in evergreen_mc_resume()
2822 if (save->crtc_enabled[i]) { in evergreen_mc_resume()
2838 for (j = 0; j < rdev->usec_timeout; j++) { in evergreen_mc_resume()
2847 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control); in evergreen_mc_resume()
2849 WREG32(VGA_RENDER_CONTROL, save->vga_render_control); in evergreen_mc_resume()
2871 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in evergreen_mc_program()
2876 if (rdev->flags & RADEON_IS_AGP) { in evergreen_mc_program()
2877 if (rdev->mc.vram_start < rdev->mc.gtt_start) { in evergreen_mc_program()
2880 rdev->mc.vram_start >> 12); in evergreen_mc_program()
2882 rdev->mc.gtt_end >> 12); in evergreen_mc_program()
2886 rdev->mc.gtt_start >> 12); in evergreen_mc_program()
2888 rdev->mc.vram_end >> 12); in evergreen_mc_program()
2892 rdev->mc.vram_start >> 12); in evergreen_mc_program()
2894 rdev->mc.vram_end >> 12); in evergreen_mc_program()
2896 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12); in evergreen_mc_program()
2898 if ((rdev->family == CHIP_PALM) || in evergreen_mc_program()
2899 (rdev->family == CHIP_SUMO) || in evergreen_mc_program()
2900 (rdev->family == CHIP_SUMO2)) { in evergreen_mc_program()
2902 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24; in evergreen_mc_program()
2903 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20; in evergreen_mc_program()
2906 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16; in evergreen_mc_program()
2907 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF); in evergreen_mc_program()
2909 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8)); in evergreen_mc_program()
2912 if (rdev->flags & RADEON_IS_AGP) { in evergreen_mc_program()
2913 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16); in evergreen_mc_program()
2914 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16); in evergreen_mc_program()
2915 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22); in evergreen_mc_program()
2922 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in evergreen_mc_program()
2935 struct radeon_ring *ring = &rdev->ring[ib->ring]; in evergreen_ring_ib_execute()
2942 if (ring->rptr_save_reg) { in evergreen_ring_ib_execute()
2943 next_rptr = ring->wptr + 3 + 4; in evergreen_ring_ib_execute()
2945 radeon_ring_write(ring, ((ring->rptr_save_reg - in evergreen_ring_ib_execute()
2948 } else if (rdev->wb.enabled) { in evergreen_ring_ib_execute()
2949 next_rptr = ring->wptr + 5 + 4; in evergreen_ring_ib_execute()
2951 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc); in evergreen_ring_ib_execute()
2952 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18)); in evergreen_ring_ib_execute()
2962 (ib->gpu_addr & 0xFFFFFFFC)); in evergreen_ring_ib_execute()
2963 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF); in evergreen_ring_ib_execute()
2964 radeon_ring_write(ring, ib->length_dw); in evergreen_ring_ib_execute()
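The 3 + 4 and 5 + 4 offsets above appear to count the dwords about to be emitted: three for a SET_CONFIG_REG write of rptr_save_reg (or five for a MEM_WRITE to the writeback slot), plus four for the INDIRECT_BUFFER packet whose last three dwords are visible in the matches just above, so next_rptr ends up pointing just past this IB submission.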
2973 if (!rdev->me_fw || !rdev->pfp_fw) in evergreen_cp_load_microcode()
2974 return -EINVAL; in evergreen_cp_load_microcode()
2983 fw_data = (const __be32 *)rdev->pfp_fw->data; in evergreen_cp_load_microcode()
2989 fw_data = (const __be32 *)rdev->me_fw->data; in evergreen_cp_load_microcode()
3002 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in evergreen_cp_start()
3014 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1); in evergreen_cp_start()
3068 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in evergreen_cp_resume()
3086 rb_bufsz = order_base_2(ring->ring_size / 8); in evergreen_cp_resume()
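A small arithmetic example for the line above, assuming a 1 MiB ring: ring_size / 8 = 131072 and order_base_2(131072) = 17, so rb_bufsz = 17; the divide by 8 presumably converts the byte size into the 8-byte units whose log2 the ring-buffer size field expects.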
3101 ring->wptr = 0; in evergreen_cp_resume()
3102 WREG32(CP_RB_WPTR, ring->wptr); in evergreen_cp_resume()
3106 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC)); in evergreen_cp_resume()
3107 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF); in evergreen_cp_resume()
3108 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); in evergreen_cp_resume()
3110 if (rdev->wb.enabled) in evergreen_cp_resume()
3120 WREG32(CP_RB_BASE, ring->gpu_addr >> 8); in evergreen_cp_resume()
3124 ring->ready = true; in evergreen_cp_resume()
3127 ring->ready = false; in evergreen_cp_resume()
3157 switch (rdev->family) { in evergreen_gpu_init()
3160 rdev->config.evergreen.num_ses = 2; in evergreen_gpu_init()
3161 rdev->config.evergreen.max_pipes = 4; in evergreen_gpu_init()
3162 rdev->config.evergreen.max_tile_pipes = 8; in evergreen_gpu_init()
3163 rdev->config.evergreen.max_simds = 10; in evergreen_gpu_init()
3164 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3165 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3166 rdev->config.evergreen.max_threads = 248; in evergreen_gpu_init()
3167 rdev->config.evergreen.max_gs_threads = 32; in evergreen_gpu_init()
3168 rdev->config.evergreen.max_stack_entries = 512; in evergreen_gpu_init()
3169 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3170 rdev->config.evergreen.sx_max_export_size = 256; in evergreen_gpu_init()
3171 rdev->config.evergreen.sx_max_export_pos_size = 64; in evergreen_gpu_init()
3172 rdev->config.evergreen.sx_max_export_smx_size = 192; in evergreen_gpu_init()
3173 rdev->config.evergreen.max_hw_contexts = 8; in evergreen_gpu_init()
3174 rdev->config.evergreen.sq_num_cf_insts = 2; in evergreen_gpu_init()
3176 rdev->config.evergreen.sc_prim_fifo_size = 0x100; in evergreen_gpu_init()
3177 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3178 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3182 rdev->config.evergreen.num_ses = 1; in evergreen_gpu_init()
3183 rdev->config.evergreen.max_pipes = 4; in evergreen_gpu_init()
3184 rdev->config.evergreen.max_tile_pipes = 4; in evergreen_gpu_init()
3185 rdev->config.evergreen.max_simds = 10; in evergreen_gpu_init()
3186 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3187 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3188 rdev->config.evergreen.max_threads = 248; in evergreen_gpu_init()
3189 rdev->config.evergreen.max_gs_threads = 32; in evergreen_gpu_init()
3190 rdev->config.evergreen.max_stack_entries = 512; in evergreen_gpu_init()
3191 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3192 rdev->config.evergreen.sx_max_export_size = 256; in evergreen_gpu_init()
3193 rdev->config.evergreen.sx_max_export_pos_size = 64; in evergreen_gpu_init()
3194 rdev->config.evergreen.sx_max_export_smx_size = 192; in evergreen_gpu_init()
3195 rdev->config.evergreen.max_hw_contexts = 8; in evergreen_gpu_init()
3196 rdev->config.evergreen.sq_num_cf_insts = 2; in evergreen_gpu_init()
3198 rdev->config.evergreen.sc_prim_fifo_size = 0x100; in evergreen_gpu_init()
3199 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3200 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3204 rdev->config.evergreen.num_ses = 1; in evergreen_gpu_init()
3205 rdev->config.evergreen.max_pipes = 4; in evergreen_gpu_init()
3206 rdev->config.evergreen.max_tile_pipes = 4; in evergreen_gpu_init()
3207 rdev->config.evergreen.max_simds = 5; in evergreen_gpu_init()
3208 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3209 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3210 rdev->config.evergreen.max_threads = 248; in evergreen_gpu_init()
3211 rdev->config.evergreen.max_gs_threads = 32; in evergreen_gpu_init()
3212 rdev->config.evergreen.max_stack_entries = 256; in evergreen_gpu_init()
3213 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3214 rdev->config.evergreen.sx_max_export_size = 256; in evergreen_gpu_init()
3215 rdev->config.evergreen.sx_max_export_pos_size = 64; in evergreen_gpu_init()
3216 rdev->config.evergreen.sx_max_export_smx_size = 192; in evergreen_gpu_init()
3217 rdev->config.evergreen.max_hw_contexts = 8; in evergreen_gpu_init()
3218 rdev->config.evergreen.sq_num_cf_insts = 2; in evergreen_gpu_init()
3220 rdev->config.evergreen.sc_prim_fifo_size = 0x100; in evergreen_gpu_init()
3221 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3222 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3227 rdev->config.evergreen.num_ses = 1; in evergreen_gpu_init()
3228 rdev->config.evergreen.max_pipes = 2; in evergreen_gpu_init()
3229 rdev->config.evergreen.max_tile_pipes = 2; in evergreen_gpu_init()
3230 rdev->config.evergreen.max_simds = 2; in evergreen_gpu_init()
3231 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3232 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3233 rdev->config.evergreen.max_threads = 192; in evergreen_gpu_init()
3234 rdev->config.evergreen.max_gs_threads = 16; in evergreen_gpu_init()
3235 rdev->config.evergreen.max_stack_entries = 256; in evergreen_gpu_init()
3236 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3237 rdev->config.evergreen.sx_max_export_size = 128; in evergreen_gpu_init()
3238 rdev->config.evergreen.sx_max_export_pos_size = 32; in evergreen_gpu_init()
3239 rdev->config.evergreen.sx_max_export_smx_size = 96; in evergreen_gpu_init()
3240 rdev->config.evergreen.max_hw_contexts = 4; in evergreen_gpu_init()
3241 rdev->config.evergreen.sq_num_cf_insts = 1; in evergreen_gpu_init()
3243 rdev->config.evergreen.sc_prim_fifo_size = 0x40; in evergreen_gpu_init()
3244 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3245 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3249 rdev->config.evergreen.num_ses = 1; in evergreen_gpu_init()
3250 rdev->config.evergreen.max_pipes = 2; in evergreen_gpu_init()
3251 rdev->config.evergreen.max_tile_pipes = 2; in evergreen_gpu_init()
3252 rdev->config.evergreen.max_simds = 2; in evergreen_gpu_init()
3253 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3254 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3255 rdev->config.evergreen.max_threads = 192; in evergreen_gpu_init()
3256 rdev->config.evergreen.max_gs_threads = 16; in evergreen_gpu_init()
3257 rdev->config.evergreen.max_stack_entries = 256; in evergreen_gpu_init()
3258 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3259 rdev->config.evergreen.sx_max_export_size = 128; in evergreen_gpu_init()
3260 rdev->config.evergreen.sx_max_export_pos_size = 32; in evergreen_gpu_init()
3261 rdev->config.evergreen.sx_max_export_smx_size = 96; in evergreen_gpu_init()
3262 rdev->config.evergreen.max_hw_contexts = 4; in evergreen_gpu_init()
3263 rdev->config.evergreen.sq_num_cf_insts = 1; in evergreen_gpu_init()
3265 rdev->config.evergreen.sc_prim_fifo_size = 0x40; in evergreen_gpu_init()
3266 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3267 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3271 rdev->config.evergreen.num_ses = 1; in evergreen_gpu_init()
3272 rdev->config.evergreen.max_pipes = 4; in evergreen_gpu_init()
3273 rdev->config.evergreen.max_tile_pipes = 4; in evergreen_gpu_init()
3274 if (rdev->pdev->device == 0x9648) in evergreen_gpu_init()
3275 rdev->config.evergreen.max_simds = 3; in evergreen_gpu_init()
3276 else if ((rdev->pdev->device == 0x9647) || in evergreen_gpu_init()
3277 (rdev->pdev->device == 0x964a)) in evergreen_gpu_init()
3278 rdev->config.evergreen.max_simds = 4; in evergreen_gpu_init()
3280 rdev->config.evergreen.max_simds = 5; in evergreen_gpu_init()
3281 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3282 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3283 rdev->config.evergreen.max_threads = 248; in evergreen_gpu_init()
3284 rdev->config.evergreen.max_gs_threads = 32; in evergreen_gpu_init()
3285 rdev->config.evergreen.max_stack_entries = 256; in evergreen_gpu_init()
3286 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3287 rdev->config.evergreen.sx_max_export_size = 256; in evergreen_gpu_init()
3288 rdev->config.evergreen.sx_max_export_pos_size = 64; in evergreen_gpu_init()
3289 rdev->config.evergreen.sx_max_export_smx_size = 192; in evergreen_gpu_init()
3290 rdev->config.evergreen.max_hw_contexts = 8; in evergreen_gpu_init()
3291 rdev->config.evergreen.sq_num_cf_insts = 2; in evergreen_gpu_init()
3293 rdev->config.evergreen.sc_prim_fifo_size = 0x40; in evergreen_gpu_init()
3294 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3295 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3299 rdev->config.evergreen.num_ses = 1; in evergreen_gpu_init()
3300 rdev->config.evergreen.max_pipes = 4; in evergreen_gpu_init()
3301 rdev->config.evergreen.max_tile_pipes = 4; in evergreen_gpu_init()
3302 rdev->config.evergreen.max_simds = 2; in evergreen_gpu_init()
3303 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3304 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3305 rdev->config.evergreen.max_threads = 248; in evergreen_gpu_init()
3306 rdev->config.evergreen.max_gs_threads = 32; in evergreen_gpu_init()
3307 rdev->config.evergreen.max_stack_entries = 512; in evergreen_gpu_init()
3308 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3309 rdev->config.evergreen.sx_max_export_size = 256; in evergreen_gpu_init()
3310 rdev->config.evergreen.sx_max_export_pos_size = 64; in evergreen_gpu_init()
3311 rdev->config.evergreen.sx_max_export_smx_size = 192; in evergreen_gpu_init()
3312 rdev->config.evergreen.max_hw_contexts = 4; in evergreen_gpu_init()
3313 rdev->config.evergreen.sq_num_cf_insts = 2; in evergreen_gpu_init()
3315 rdev->config.evergreen.sc_prim_fifo_size = 0x40; in evergreen_gpu_init()
3316 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3317 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3321 rdev->config.evergreen.num_ses = 2; in evergreen_gpu_init()
3322 rdev->config.evergreen.max_pipes = 4; in evergreen_gpu_init()
3323 rdev->config.evergreen.max_tile_pipes = 8; in evergreen_gpu_init()
3324 rdev->config.evergreen.max_simds = 7; in evergreen_gpu_init()
3325 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3326 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3327 rdev->config.evergreen.max_threads = 248; in evergreen_gpu_init()
3328 rdev->config.evergreen.max_gs_threads = 32; in evergreen_gpu_init()
3329 rdev->config.evergreen.max_stack_entries = 512; in evergreen_gpu_init()
3330 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3331 rdev->config.evergreen.sx_max_export_size = 256; in evergreen_gpu_init()
3332 rdev->config.evergreen.sx_max_export_pos_size = 64; in evergreen_gpu_init()
3333 rdev->config.evergreen.sx_max_export_smx_size = 192; in evergreen_gpu_init()
3334 rdev->config.evergreen.max_hw_contexts = 8; in evergreen_gpu_init()
3335 rdev->config.evergreen.sq_num_cf_insts = 2; in evergreen_gpu_init()
3337 rdev->config.evergreen.sc_prim_fifo_size = 0x100; in evergreen_gpu_init()
3338 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3339 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3343 rdev->config.evergreen.num_ses = 1; in evergreen_gpu_init()
3344 rdev->config.evergreen.max_pipes = 4; in evergreen_gpu_init()
3345 rdev->config.evergreen.max_tile_pipes = 4; in evergreen_gpu_init()
3346 rdev->config.evergreen.max_simds = 6; in evergreen_gpu_init()
3347 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3348 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3349 rdev->config.evergreen.max_threads = 248; in evergreen_gpu_init()
3350 rdev->config.evergreen.max_gs_threads = 32; in evergreen_gpu_init()
3351 rdev->config.evergreen.max_stack_entries = 256; in evergreen_gpu_init()
3352 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3353 rdev->config.evergreen.sx_max_export_size = 256; in evergreen_gpu_init()
3354 rdev->config.evergreen.sx_max_export_pos_size = 64; in evergreen_gpu_init()
3355 rdev->config.evergreen.sx_max_export_smx_size = 192; in evergreen_gpu_init()
3356 rdev->config.evergreen.max_hw_contexts = 8; in evergreen_gpu_init()
3357 rdev->config.evergreen.sq_num_cf_insts = 2; in evergreen_gpu_init()
3359 rdev->config.evergreen.sc_prim_fifo_size = 0x100; in evergreen_gpu_init()
3360 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3361 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3365 rdev->config.evergreen.num_ses = 1; in evergreen_gpu_init()
3366 rdev->config.evergreen.max_pipes = 2; in evergreen_gpu_init()
3367 rdev->config.evergreen.max_tile_pipes = 2; in evergreen_gpu_init()
3368 rdev->config.evergreen.max_simds = 2; in evergreen_gpu_init()
3369 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses; in evergreen_gpu_init()
3370 rdev->config.evergreen.max_gprs = 256; in evergreen_gpu_init()
3371 rdev->config.evergreen.max_threads = 192; in evergreen_gpu_init()
3372 rdev->config.evergreen.max_gs_threads = 16; in evergreen_gpu_init()
3373 rdev->config.evergreen.max_stack_entries = 256; in evergreen_gpu_init()
3374 rdev->config.evergreen.sx_num_of_sets = 4; in evergreen_gpu_init()
3375 rdev->config.evergreen.sx_max_export_size = 128; in evergreen_gpu_init()
3376 rdev->config.evergreen.sx_max_export_pos_size = 32; in evergreen_gpu_init()
3377 rdev->config.evergreen.sx_max_export_smx_size = 96; in evergreen_gpu_init()
3378 rdev->config.evergreen.max_hw_contexts = 4; in evergreen_gpu_init()
3379 rdev->config.evergreen.sq_num_cf_insts = 1; in evergreen_gpu_init()
3381 rdev->config.evergreen.sc_prim_fifo_size = 0x40; in evergreen_gpu_init()
3382 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30; in evergreen_gpu_init()
3383 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130; in evergreen_gpu_init()
3404 if ((rdev->family == CHIP_PALM) || in evergreen_gpu_init()
3405 (rdev->family == CHIP_SUMO) || in evergreen_gpu_init()
3406 (rdev->family == CHIP_SUMO2)) in evergreen_gpu_init()
3418 rdev->config.evergreen.tile_config = 0; in evergreen_gpu_init()
3419 switch (rdev->config.evergreen.max_tile_pipes) { in evergreen_gpu_init()
3422 rdev->config.evergreen.tile_config |= (0 << 0); in evergreen_gpu_init()
3425 rdev->config.evergreen.tile_config |= (1 << 0); in evergreen_gpu_init()
3428 rdev->config.evergreen.tile_config |= (2 << 0); in evergreen_gpu_init()
3431 rdev->config.evergreen.tile_config |= (3 << 0); in evergreen_gpu_init()
3435 if (rdev->flags & RADEON_IS_IGP) in evergreen_gpu_init()
3436 rdev->config.evergreen.tile_config |= 1 << 4; in evergreen_gpu_init()
3440 rdev->config.evergreen.tile_config |= 0 << 4; in evergreen_gpu_init()
3443 rdev->config.evergreen.tile_config |= 1 << 4; in evergreen_gpu_init()
3447 rdev->config.evergreen.tile_config |= 2 << 4; in evergreen_gpu_init()
3451 rdev->config.evergreen.tile_config |= 0 << 8; in evergreen_gpu_init()
3452 rdev->config.evergreen.tile_config |= in evergreen_gpu_init()
3455 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) { in evergreen_gpu_init()
3465 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) { in evergreen_gpu_init()
3478 for (i = 0; i < rdev->config.evergreen.max_backends; i++) in evergreen_gpu_init()
3482 for (i = 0; i < rdev->config.evergreen.max_backends; i++) in evergreen_gpu_init()
3486 for (i = 0; i < rdev->config.evergreen.num_ses; i++) { in evergreen_gpu_init()
3492 simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds; in evergreen_gpu_init()
3496 rdev->config.evergreen.active_simds = hweight32(~tmp); in evergreen_gpu_init()
3509 if ((rdev->config.evergreen.max_backends == 1) && in evergreen_gpu_init()
3510 (rdev->flags & RADEON_IS_IGP)) { in evergreen_gpu_init()
3520 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends, in evergreen_gpu_init()
3523 rdev->config.evergreen.backend_map = tmp; in evergreen_gpu_init()
3549 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets); in evergreen_gpu_init()
3552 if (rdev->family <= CHIP_SUMO2) in evergreen_gpu_init()
3555 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - in evergreen_gpu_init()
3556 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) | in evergreen_gpu_init()
3557 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1))); in evergreen_gpu_init()
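Plugging in the values shown in the family tables above (sx_max_export_size = 256, sx_max_export_pos_size = 64, sx_max_export_smx_size = 192 on the larger parts), the three fields encode 256/4 - 1 = 63, 64/4 - 1 = 15 and 192/4 - 1 = 47 respectively.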
3559 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) | in evergreen_gpu_init()
3560 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) | in evergreen_gpu_init()
3561 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size))); in evergreen_gpu_init()
3568 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) | in evergreen_gpu_init()
3585 switch (rdev->family) { in evergreen_gpu_init()
3600 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32); in evergreen_gpu_init()
3601 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32); in evergreen_gpu_init()
3603 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32); in evergreen_gpu_init()
3604 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32); in evergreen_gpu_init()
3605 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32); in evergreen_gpu_init()
3606 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32); in evergreen_gpu_init()
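With max_gprs = 256 (the value every family table above uses), the splits work out to (256 - 8) * 12 / 32 = 93 GPRs for PS, 46 for VS, 31 each for GS and ES, and 23 each for HS and LS; the 4 * 2 = 8 registers subtracted up front are presumably set aside as clause temporaries.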
3608 switch (rdev->family) { in evergreen_gpu_init()
3621 …sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count)… in evergreen_gpu_init()
3622 …sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count)… in evergreen_gpu_init()
3623 …sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count)… in evergreen_gpu_init()
3624 …sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count… in evergreen_gpu_init()
3625 …sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_coun… in evergreen_gpu_init()
3627 …sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6… in evergreen_gpu_init()
3628 …sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / … in evergreen_gpu_init()
3629 …sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6… in evergreen_gpu_init()
3630 …sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / … in evergreen_gpu_init()
3631 …sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6… in evergreen_gpu_init()
3632 …sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / … in evergreen_gpu_init()
3649 switch (rdev->family) { in evergreen_gpu_init()
3719 rdev->mc.vram_is_ddr = true; in evergreen_mc_init()
3720 if ((rdev->family == CHIP_PALM) || in evergreen_mc_init()
3721 (rdev->family == CHIP_SUMO) || in evergreen_mc_init()
3722 (rdev->family == CHIP_SUMO2)) in evergreen_mc_init()
3749 rdev->mc.vram_width = numchan * chansize; in evergreen_mc_init()
3751 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0); in evergreen_mc_init()
3752 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0); in evergreen_mc_init()
3754 if ((rdev->family == CHIP_PALM) || in evergreen_mc_init()
3755 (rdev->family == CHIP_SUMO) || in evergreen_mc_init()
3756 (rdev->family == CHIP_SUMO2)) { in evergreen_mc_init()
3758 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE); in evergreen_mc_init()
3759 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE); in evergreen_mc_init()
3762 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; in evergreen_mc_init()
3763 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL; in evergreen_mc_init()
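The two branches above differ only in units: on the IGP parts (Palm/Sumo/Sumo2) CONFIG_MEMSIZE presumably already reports the carved-out system memory in bytes, while on discrete boards it reports megabytes, hence the * 1024ULL * 1024ULL conversion before mc_vram_size and real_vram_size are stored.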
3765 rdev->mc.visible_vram_size = rdev->mc.aper_size; in evergreen_mc_init()
3766 r700_vram_gtt_location(rdev, &rdev->mc); in evergreen_mc_init()
3774 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n", in evergreen_print_gpu_status_regs()
3776 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n", in evergreen_print_gpu_status_regs()
3778 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n", in evergreen_print_gpu_status_regs()
3780 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n", in evergreen_print_gpu_status_regs()
3782 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n", in evergreen_print_gpu_status_regs()
3784 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n", in evergreen_print_gpu_status_regs()
3786 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n", in evergreen_print_gpu_status_regs()
3788 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n", in evergreen_print_gpu_status_regs()
3790 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n", in evergreen_print_gpu_status_regs()
3792 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n", in evergreen_print_gpu_status_regs()
3794 if (rdev->family >= CHIP_CAYMAN) { in evergreen_print_gpu_status_regs()
3795 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n", in evergreen_print_gpu_status_regs()
3806 for (i = 0; i < rdev->num_crtc; i++) { in evergreen_is_display_hung()
3814 for (i = 0; i < rdev->num_crtc; i++) { in evergreen_is_display_hung()
3907 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); in evergreen_gpu_soft_reset()
3925 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in evergreen_gpu_soft_reset()
3970 if (!(rdev->flags & RADEON_IS_IGP)) { in evergreen_gpu_soft_reset()
3978 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); in evergreen_gpu_soft_reset()
3992 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in evergreen_gpu_soft_reset()
4017 dev_info(rdev->dev, "GPU pci config reset\n"); in evergreen_gpu_pci_config_reset()
4038 pci_clear_master(rdev->pdev); in evergreen_gpu_pci_config_reset()
4042 dev_warn(rdev->dev, "Wait for MC idle timed out !\n"); in evergreen_gpu_pci_config_reset()
4047 for (i = 0; i < rdev->usec_timeout; i++) { in evergreen_gpu_pci_config_reset()
4086 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4118 if (rdev->rlc.save_restore_obj) { in sumo_rlc_fini()
4119 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false); in sumo_rlc_fini()
4121 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r); in sumo_rlc_fini()
4122 radeon_bo_unpin(rdev->rlc.save_restore_obj); in sumo_rlc_fini()
4123 radeon_bo_unreserve(rdev->rlc.save_restore_obj); in sumo_rlc_fini()
4125 radeon_bo_unref(&rdev->rlc.save_restore_obj); in sumo_rlc_fini()
4126 rdev->rlc.save_restore_obj = NULL; in sumo_rlc_fini()
4130 if (rdev->rlc.clear_state_obj) { in sumo_rlc_fini()
4131 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false); in sumo_rlc_fini()
4133 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r); in sumo_rlc_fini()
4134 radeon_bo_unpin(rdev->rlc.clear_state_obj); in sumo_rlc_fini()
4135 radeon_bo_unreserve(rdev->rlc.clear_state_obj); in sumo_rlc_fini()
4137 radeon_bo_unref(&rdev->rlc.clear_state_obj); in sumo_rlc_fini()
4138 rdev->rlc.clear_state_obj = NULL; in sumo_rlc_fini()
4142 if (rdev->rlc.cp_table_obj) { in sumo_rlc_fini()
4143 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false); in sumo_rlc_fini()
4145 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r); in sumo_rlc_fini()
4146 radeon_bo_unpin(rdev->rlc.cp_table_obj); in sumo_rlc_fini()
4147 radeon_bo_unreserve(rdev->rlc.cp_table_obj); in sumo_rlc_fini()
4149 radeon_bo_unref(&rdev->rlc.cp_table_obj); in sumo_rlc_fini()
4150 rdev->rlc.cp_table_obj = NULL; in sumo_rlc_fini()
4166 src_ptr = rdev->rlc.reg_list; in sumo_rlc_init()
4167 dws = rdev->rlc.reg_list_size; in sumo_rlc_init()
4168 if (rdev->family >= CHIP_BONAIRE) { in sumo_rlc_init()
4171 cs_data = rdev->rlc.cs_data; in sumo_rlc_init()
4175 if (rdev->rlc.save_restore_obj == NULL) { in sumo_rlc_init()
4178 NULL, &rdev->rlc.save_restore_obj); in sumo_rlc_init()
4180 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r); in sumo_rlc_init()
4185 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false); in sumo_rlc_init()
4190 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM, in sumo_rlc_init()
4191 &rdev->rlc.save_restore_gpu_addr); in sumo_rlc_init()
4193 radeon_bo_unreserve(rdev->rlc.save_restore_obj); in sumo_rlc_init()
4194 dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r); in sumo_rlc_init()
4199 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr); in sumo_rlc_init()
4201 dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r); in sumo_rlc_init()
4205 /* write the sr buffer */ in sumo_rlc_init()
4206 dst_ptr = rdev->rlc.sr_ptr; in sumo_rlc_init()
4207 if (rdev->family >= CHIP_TAHITI) { in sumo_rlc_init()
4209 for (i = 0; i < rdev->rlc.reg_list_size; i++) in sumo_rlc_init()
4223 j = (((i - 1) * 3) / 2); in sumo_rlc_init()
4229 radeon_bo_kunmap(rdev->rlc.save_restore_obj); in sumo_rlc_init()
4230 radeon_bo_unreserve(rdev->rlc.save_restore_obj); in sumo_rlc_init()
4235 if (rdev->family >= CHIP_BONAIRE) { in sumo_rlc_init()
4236 rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev); in sumo_rlc_init()
4237 } else if (rdev->family >= CHIP_TAHITI) { in sumo_rlc_init()
4238 rdev->rlc.clear_state_size = si_get_csb_size(rdev); in sumo_rlc_init()
4239 dws = rdev->rlc.clear_state_size + (256 / 4); in sumo_rlc_init()
4251 rdev->rlc.clear_state_size = dws; in sumo_rlc_init()
4254 if (rdev->rlc.clear_state_obj == NULL) { in sumo_rlc_init()
4257 NULL, &rdev->rlc.clear_state_obj); in sumo_rlc_init()
4259 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r); in sumo_rlc_init()
4264 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false); in sumo_rlc_init()
4269 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM, in sumo_rlc_init()
4270 &rdev->rlc.clear_state_gpu_addr); in sumo_rlc_init()
4272 radeon_bo_unreserve(rdev->rlc.clear_state_obj); in sumo_rlc_init()
4273 dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r); in sumo_rlc_init()
4278 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr); in sumo_rlc_init()
4280 dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r); in sumo_rlc_init()
4285 dst_ptr = rdev->rlc.cs_ptr; in sumo_rlc_init()
4286 if (rdev->family >= CHIP_BONAIRE) { in sumo_rlc_init()
4288 } else if (rdev->family >= CHIP_TAHITI) { in sumo_rlc_init()
4289 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256; in sumo_rlc_init()
4292 dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size); in sumo_rlc_init()
4296 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4); in sumo_rlc_init()
4325 radeon_bo_kunmap(rdev->rlc.clear_state_obj); in sumo_rlc_init()
4326 radeon_bo_unreserve(rdev->rlc.clear_state_obj); in sumo_rlc_init()
4329 if (rdev->rlc.cp_table_size) { in sumo_rlc_init()
4330 if (rdev->rlc.cp_table_obj == NULL) { in sumo_rlc_init()
4331 r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, in sumo_rlc_init()
4334 NULL, &rdev->rlc.cp_table_obj); in sumo_rlc_init()
4336 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r); in sumo_rlc_init()
4342 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false); in sumo_rlc_init()
4344 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r); in sumo_rlc_init()
4348 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM, in sumo_rlc_init()
4349 &rdev->rlc.cp_table_gpu_addr); in sumo_rlc_init()
4351 radeon_bo_unreserve(rdev->rlc.cp_table_obj); in sumo_rlc_init()
4352 dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r); in sumo_rlc_init()
4356 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr); in sumo_rlc_init()
4358 dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r); in sumo_rlc_init()
4365 radeon_bo_kunmap(rdev->rlc.cp_table_obj); in sumo_rlc_init()
4366 radeon_bo_unreserve(rdev->rlc.cp_table_obj); in sumo_rlc_init()
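
The sumo_rlc_init() lines above walk the same ladder three times, once per RLC buffer (save/restore, clear state, CP table): create the buffer object if it does not exist yet, reserve it, pin it into VRAM to obtain a GPU address, kmap it to obtain a CPU pointer, and warn plus unwind on every failure. Below is a minimal user-space sketch of that bail-out shape; create_bo(), reserve_bo(), pin_bo(), map_bo() and teardown() are hypothetical stand-ins, not the radeon_bo_* API, and the driver presumably unwinds through its own matching fini path rather than teardown().

#include <stdio.h>

/* Hypothetical stand-ins for the create/reserve/pin/kmap steps.
 * create and reserve succeed; pin is made to fail so the unwind runs. */
static int create_bo(void)  { return 0; }
static int reserve_bo(void) { return 0; }
static int pin_bo(void)     { return -12; /* pretend -ENOMEM */ }
static int map_bo(void)     { return 0; }
static void teardown(void)  { puts("unwinding partially initialised buffers"); }

static int rlc_buffer_init(void)
{
	int r;

	r = create_bo();
	if (r) {
		fprintf(stderr, "(%d) create bo failed\n", r);
		goto fail;
	}
	r = reserve_bo();
	if (r) {
		fprintf(stderr, "(%d) reserve bo failed\n", r);
		goto fail;
	}
	r = pin_bo();
	if (r) {
		fprintf(stderr, "(%d) pin bo failed\n", r);
		goto fail;
	}
	r = map_bo();
	if (r) {
		fprintf(stderr, "(%d) map bo failed\n", r);
		goto fail;
	}
	return 0;

fail:
	teardown();	/* stand-in for whatever fini path the driver uses */
	return r;
}

int main(void)
{
	return rlc_buffer_init() ? 1 : 0;
}
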
4377 if (rdev->flags & RADEON_IS_IGP) { in evergreen_rlc_start()
4389 if (!rdev->rlc_fw) in evergreen_rlc_resume()
4390 return -EINVAL; in evergreen_rlc_resume()
4396 if (rdev->flags & RADEON_IS_IGP) { in evergreen_rlc_resume()
4397 if (rdev->family == CHIP_ARUBA) { in evergreen_rlc_resume()
4399 3 | (3 << (16 * rdev->config.cayman.max_shader_engines)); in evergreen_rlc_resume()
4402 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se; in evergreen_rlc_resume()
4404 if (tmp == rdev->config.cayman.max_simds_per_se) { in evergreen_rlc_resume()
4415 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8); in evergreen_rlc_resume()
4416 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8); in evergreen_rlc_resume()
4427 fw_data = (const __be32 *)rdev->rlc_fw->data; in evergreen_rlc_resume()
4428 if (rdev->family >= CHIP_ARUBA) { in evergreen_rlc_resume()
4433 } else if (rdev->family >= CHIP_CAYMAN) { in evergreen_rlc_resume()
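
The CHIP_ARUBA branch of evergreen_rlc_resume() builds two masks straight from the Cayman config: 3 | (3 << (16 * max_shader_engines)) sets the two low bits of each 16-bit per-shader-engine field, and OR-ing in 0xffffffff << max_simds_per_se forces every bit at or above the SIMD count, so only the low bits stay meaningful. A tiny runnable demonstration of those two expressions with made-up values (the surrounding register reads are not reproduced):

#include <stdio.h>

int main(void)
{
	/* Made-up values for illustration only. */
	unsigned int max_shader_engines = 1;	/* one 16-bit field per SE   */
	unsigned int max_simds_per_se   = 6;	/* SIMDs that actually exist */

	/* Two low bits of each shader-engine field are always on. */
	unsigned int always_on = 3u | (3u << (16 * max_shader_engines));

	/* Force every bit at or above max_simds_per_se, leaving only the
	 * low bits free to describe the SIMDs that are really present. */
	unsigned int tmp = 0x15u;		/* pretend per-SIMD status bits */
	tmp |= 0xffffffffu << max_simds_per_se;

	printf("always_on = 0x%08x\n", always_on);	/* 0x00030003 */
	printf("tmp       = 0x%08x\n", tmp);		/* 0xffffffd5 */
	return 0;
}
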
4455 if (crtc >= rdev->num_crtc) in evergreen_get_vblank_counter()
4466 if (rdev->family >= CHIP_CAYMAN) { in evergreen_disable_interrupt_state()
4479 for (i = 0; i < rdev->num_crtc; i++) in evergreen_disable_interrupt_state()
4481 for (i = 0; i < rdev->num_crtc; i++) in evergreen_disable_interrupt_state()
4503 if (!rdev->irq.installed) { in evergreen_irq_set()
4505 return -EINVAL; in evergreen_irq_set()
4508 if (!rdev->ih.enabled) { in evergreen_irq_set()
4515 if (rdev->family == CHIP_ARUBA) in evergreen_irq_set()
4524 if (rdev->family >= CHIP_CAYMAN) { in evergreen_irq_set()
4526 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { in evergreen_irq_set()
4530 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) { in evergreen_irq_set()
4534 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) { in evergreen_irq_set()
4539 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) { in evergreen_irq_set()
4546 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) { in evergreen_irq_set()
4551 if (rdev->family >= CHIP_CAYMAN) { in evergreen_irq_set()
4553 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) { in evergreen_irq_set()
4559 if (rdev->irq.dpm_thermal) { in evergreen_irq_set()
4564 if (rdev->family >= CHIP_CAYMAN) { in evergreen_irq_set()
4573 if (rdev->family >= CHIP_CAYMAN) in evergreen_irq_set()
4578 for (i = 0; i < rdev->num_crtc; i++) { in evergreen_irq_set()
4582 rdev->irq.crtc_vblank_int[i] || in evergreen_irq_set()
4583 atomic_read(&rdev->irq.pflip[i]), "vblank", i); in evergreen_irq_set()
4586 for (i = 0; i < rdev->num_crtc; i++) in evergreen_irq_set()
4593 rdev->irq.hpd[i], "HPD", i); in evergreen_irq_set()
4596 if (rdev->family == CHIP_ARUBA) in evergreen_irq_set()
4605 rdev->irq.afmt[i], "HDMI", i); in evergreen_irq_set()
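
evergreen_irq_set() refuses to touch the hardware when no handler is installed (-EINVAL) and otherwise turns per-ring atomic request counters and per-crtc flags into enable bits. A rough user-space sketch of that "counters in, enable mask out" shape; ring_int[], irq_installed, build_irq_mask() and the bit layout are all illustrative, not the driver's structures.

#include <errno.h>
#include <stdatomic.h>
#include <stdio.h>

#define NUM_RINGS 3

/* Per-ring "somebody wants this interrupt" counters, bumped elsewhere
 * (hypothetical layout standing in for rdev->irq.ring_int[]). */
static atomic_int ring_int[NUM_RINGS];
static int irq_installed = 1;

/* Fold the request counters into one enable bitmask, mirroring how the
 * fragment ORs an enable bit per active interrupt source. */
static int build_irq_mask(unsigned int *mask)
{
	unsigned int m = 0;

	if (!irq_installed)
		return -EINVAL;		/* same early bail as the driver */

	for (int i = 0; i < NUM_RINGS; i++)
		if (atomic_load(&ring_int[i]))
			m |= 1u << i;	/* hypothetical bit layout */

	*mask = m;
	return 0;
}

int main(void)
{
	unsigned int mask;

	atomic_store(&ring_int[1], 1);	/* pretend one ring has a waiter */
	if (build_irq_mask(&mask) == 0)
		printf("enable mask: 0x%x\n", mask);	/* 0x2 */
	return 0;
}
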
4618 u32 *grph_int = rdev->irq.stat_regs.evergreen.grph_int; in evergreen_irq_ack()
4619 u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int; in evergreen_irq_ack()
4620 u32 *afmt_status = rdev->irq.stat_regs.evergreen.afmt_status; in evergreen_irq_ack()
4625 if (i < rdev->num_crtc) in evergreen_irq_ack()
4630 for (i = 0; i < rdev->num_crtc; i += 2) { in evergreen_irq_ack()
4683 if (rdev->wb.enabled) in evergreen_get_ih_wptr()
4684 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]); in evergreen_get_ih_wptr()
4694 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n", in evergreen_get_ih_wptr()
4695 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask); in evergreen_get_ih_wptr()
4696 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask; in evergreen_get_ih_wptr()
4701 return (wptr & rdev->ih.ptr_mask); in evergreen_get_ih_wptr()
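
evergreen_get_ih_wptr() reads the write pointer from the write-back page when it is enabled and, on an overflow, jumps the read pointer to (wptr + 16) & ptr_mask so the oldest 16-byte entries are dropped; both pointers stay inside a power-of-two ring purely through that AND. A standalone demo of the masking arithmetic (ring size, pointer values and the stand-in overflow test are invented; the +16 skip and the mask come from the fragment):

#include <stdio.h>

int main(void)
{
	/* Power-of-two IH ring: mask = size - 1, like rdev->ih.ptr_mask. */
	const unsigned int ring_size = 1024;
	const unsigned int ptr_mask  = ring_size - 1;

	unsigned int rptr = 1000;	/* made-up read pointer              */
	unsigned int wptr = 1040;	/* hardware write pointer, past rptr */

	/* Overflow handling: jump the read pointer just past the write
	 * pointer, wrapping with the mask, as in the fragment's
	 * rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask. */
	if (wptr >= ring_size) {	/* stand-in for the overflow flag */
		printf("IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
		       wptr, rptr, (wptr + 16) & ptr_mask);
		rptr = (wptr + 16) & ptr_mask;
	}

	printf("wptr & mask = %u, rptr = %u\n", wptr & ptr_mask, rptr);
	return 0;
}
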
4706 u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int; in evergreen_irq_process()
4707 u32 *afmt_status = rdev->irq.stat_regs.evergreen.afmt_status; in evergreen_irq_process()
4721 if (!rdev->ih.enabled || rdev->shutdown) in evergreen_irq_process()
4728 if (atomic_xchg(&rdev->ih.lock, 1)) in evergreen_irq_process()
4731 rptr = rdev->ih.rptr; in evergreen_irq_process()
4743 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff; in evergreen_irq_process()
4744 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff; in evergreen_irq_process()
4753 crtc_idx = src_id - 1; in evergreen_irq_process()
4759 if (rdev->irq.crtc_vblank_int[crtc_idx]) { in evergreen_irq_process()
4760 drm_handle_vblank(rdev->ddev, crtc_idx); in evergreen_irq_process()
4761 rdev->pm.vblank_sync = true; in evergreen_irq_process()
4762 wake_up(&rdev->irq.vblank_queue); in evergreen_irq_process()
4764 if (atomic_read(&rdev->irq.pflip[crtc_idx])) { in evergreen_irq_process()
4779 DRM_DEBUG("IH: D%d %s - IH event w/o asserted irq bit?\n", in evergreen_irq_process()
4793 DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1); in evergreen_irq_process()
4795 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1); in evergreen_irq_process()
4805 hpd_idx = src_data - 6; in evergreen_irq_process()
4854 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data); in evergreen_irq_process()
4855 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in evergreen_irq_process()
4857 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in evergreen_irq_process()
4869 if (rdev->family >= CHIP_CAYMAN) { in evergreen_irq_process()
4890 rdev->pm.dpm.thermal.high_to_low = false; in evergreen_irq_process()
4895 rdev->pm.dpm.thermal.high_to_low = true; in evergreen_irq_process()
4898 case 233: /* GUI IDLE */ in evergreen_irq_process()
4899 DRM_DEBUG("IH: GUI idle\n"); in evergreen_irq_process()
4902 if (rdev->family >= CHIP_CAYMAN) { in evergreen_irq_process()
4914 rptr &= rdev->ih.ptr_mask; in evergreen_irq_process()
4918 schedule_work(&rdev->dp_work); in evergreen_irq_process()
4920 schedule_delayed_work(&rdev->hotplug_work, 0); in evergreen_irq_process()
4922 schedule_work(&rdev->audio_work); in evergreen_irq_process()
4923 if (queue_thermal && rdev->pm.dpm_enabled) in evergreen_irq_process()
4924 schedule_work(&rdev->pm.dpm.thermal.work); in evergreen_irq_process()
4925 rdev->ih.rptr = rptr; in evergreen_irq_process()
4926 atomic_set(&rdev->ih.lock, 0); in evergreen_irq_process()
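
The decode loop in evergreen_irq_process() extracts src_id and src_data from each ring entry with the 0xff / 0xfffffff masks shown above, then maps them to indices with plain arithmetic: the vblank path uses crtc src_id - 1, the pageflip path uses crtc (src_id - 8) >> 1, and one of the hot-plug branches derives its pin index as src_data - 6. A quick runnable check of those mappings (the sample IDs are arbitrary):

#include <stdio.h>

int main(void)
{
	unsigned int src_id, src_data;

	/* Display interrupts: source id N belongs to crtc N - 1. */
	src_id = 3;
	printf("vblank src_id %u -> crtc %u\n", src_id, src_id - 1);

	/* Page flips: two source ids per crtc starting at 8, hence the
	 * ((src_id - 8) >> 1) seen in the fragment. */
	src_id = 10;
	printf("flip   src_id %u -> crtc %u (D%u)\n",
	       src_id, (src_id - 8) >> 1, ((src_id - 8) >> 1) + 1);

	/* One HPD branch in the fragment derives its pin as src_data - 6. */
	src_data = 7;
	printf("hpd    src_data %u -> hpd pin %u\n", src_data, src_data - 6);
	return 0;
}
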
4940 if (!rdev->has_uvd) in evergreen_uvd_init()
4945 dev_err(rdev->dev, "failed UVD (%d) init.\n", r); in evergreen_uvd_init()
4947 * At this point rdev->uvd.vcpu_bo is NULL which trickles down in evergreen_uvd_init()
4952 rdev->has_uvd = false; in evergreen_uvd_init()
4955 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; in evergreen_uvd_init()
4956 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096); in evergreen_uvd_init()
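
evergreen_uvd_init() treats the decoder as optional: when the bring-up call fails it logs the error, clears rdev->has_uvd so later paths skip UVD entirely, and lets the rest of the device come up without it. A compact sketch of that degrade-instead-of-fail shape with hypothetical helpers (uvd_bringup() and the error value are invented):

#include <stdbool.h>
#include <stdio.h>

static bool has_uvd = true;

/* Hypothetical stand-in for the UVD bring-up call; negative on error. */
static int uvd_bringup(void) { return -19; /* pretend -ENODEV */ }

static void uvd_init(void)
{
	int r;

	if (!has_uvd)
		return;

	r = uvd_bringup();
	if (r) {
		fprintf(stderr, "failed UVD (%d) init.\n", r);
		/* Keep going without the decoder instead of failing the
		 * whole device init; later paths test has_uvd first. */
		has_uvd = false;
		return;
	}
	/* ...ring setup would follow here, as in the fragment... */
}

int main(void)
{
	uvd_init();
	printf("device up, uvd %s\n", has_uvd ? "enabled" : "disabled");
	return 0;
}
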
4963 if (!rdev->has_uvd) in evergreen_uvd_start()
4968 dev_err(rdev->dev, "failed UVD resume (%d).\n", r); in evergreen_uvd_start()
4973 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r); in evergreen_uvd_start()
4979 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in evergreen_uvd_start()
4987 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size) in evergreen_uvd_resume()
4990 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in evergreen_uvd_resume()
4991 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0)); in evergreen_uvd_resume()
4993 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r); in evergreen_uvd_resume()
4998 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r); in evergreen_uvd_resume()
5020 if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) { in evergreen_startup()
5028 if (rdev->flags & RADEON_IS_AGP) { in evergreen_startup()
5038 if (rdev->flags & RADEON_IS_IGP) { in evergreen_startup()
5039 rdev->rlc.reg_list = sumo_rlc_save_restore_register_list; in evergreen_startup()
5040 rdev->rlc.reg_list_size = in evergreen_startup()
5042 rdev->rlc.cs_data = evergreen_cs_data; in evergreen_startup()
5057 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in evergreen_startup()
5063 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in evergreen_startup()
5070 if (!rdev->irq.installed) { in evergreen_startup()
5084 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in evergreen_startup()
5085 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in evergreen_startup()
5090 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in evergreen_startup()
5091 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, in evergreen_startup()
5110 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); in evergreen_startup()
5131 dev_warn(rdev->dev, "GPU reset failed !\n"); in evergreen_resume()
5137 atom_asic_init(rdev->mode_info.atom_context); in evergreen_resume()
5142 if (rdev->pm.pm_method == PM_METHOD_DPM) in evergreen_resume()
5145 rdev->accel_working = true; in evergreen_resume()
5149 rdev->accel_working = false; in evergreen_resume()
5161 if (rdev->has_uvd) { in evergreen_suspend()
5187 return -EINVAL; in evergreen_init()
5190 if (!rdev->is_atom_bios) { in evergreen_init()
5191 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n"); in evergreen_init()
5192 return -EINVAL; in evergreen_init()
5201 dev_warn(rdev->dev, "GPU reset failed !\n"); in evergreen_init()
5204 if (!rdev->bios) { in evergreen_init()
5205 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); in evergreen_init()
5206 return -EINVAL; in evergreen_init()
5209 atom_asic_init(rdev->mode_info.atom_context); in evergreen_init()
5218 radeon_get_clock_info(rdev->ddev); in evergreen_init()
5222 if (rdev->flags & RADEON_IS_AGP) { in evergreen_init()
5237 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) { in evergreen_init()
5245 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) { in evergreen_init()
5257 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL; in evergreen_init()
5258 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024); in evergreen_init()
5260 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL; in evergreen_init()
5261 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024); in evergreen_init()
5265 rdev->ih.ring_obj = NULL; in evergreen_init()
5272 rdev->accel_working = true; in evergreen_init()
5275 dev_err(rdev->dev, "disabling GPU acceleration\n"); in evergreen_init()
5279 if (rdev->flags & RADEON_IS_IGP) in evergreen_init()
5285 rdev->accel_working = false; in evergreen_init()
5293 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) { in evergreen_init()
5295 return -EINVAL; in evergreen_init()
5309 if (rdev->flags & RADEON_IS_IGP) in evergreen_fini()
5323 kfree(rdev->bios); in evergreen_fini()
5324 rdev->bios = NULL; in evergreen_fini()
5334 if (rdev->flags & RADEON_IS_IGP) in evergreen_pcie_gen2_enable()
5337 if (!(rdev->flags & RADEON_IS_PCIE)) in evergreen_pcie_gen2_enable()
5344 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) && in evergreen_pcie_gen2_enable()
5345 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT)) in evergreen_pcie_gen2_enable()
5405 if (!(rdev->flags & RADEON_IS_PCIE)) in evergreen_program_aspm()
5408 switch (rdev->family) { in evergreen_program_aspm()
5425 if (rdev->flags & RADEON_IS_IGP) in evergreen_program_aspm()
5447 if (rdev->family >= CHIP_BARTS) in evergreen_program_aspm()
5454 if (rdev->family >= CHIP_BARTS) in evergreen_program_aspm()
5484 if (rdev->family >= CHIP_BARTS) { in evergreen_program_aspm()
5516 if (rdev->family >= CHIP_BARTS) { in evergreen_program_aspm()
5533 if (rdev->family < CHIP_BARTS) in evergreen_program_aspm()