Lines Matching +full:simple +full:- +full:framebuffer

2  * Copyright © 2006-2007 Intel Corporation
82 #define IRONLAKE_FDI_FREQ 2700000 /* in kHz for mode->clock */
102 struct drm_i915_private *dev_priv = dev->dev_private; in intel_fdi_link_freq()
272 * the range value for them is (actual_value - 2).
362 struct drm_device *dev = crtc->dev; in intel_ironlake_limit()
363 struct drm_i915_private *dev_priv = dev->dev_private; in intel_ironlake_limit()
391 struct drm_device *dev = crtc->dev; in intel_g4x_limit()
392 struct drm_i915_private *dev_priv = dev->dev_private; in intel_g4x_limit()
418 struct drm_device *dev = crtc->dev; in intel_limit()
447 clock->m = clock->m2 + 2; in pineview_clock()
448 clock->p = clock->p1 * clock->p2; in pineview_clock()
449 clock->vco = refclk * clock->m / clock->n; in pineview_clock()
450 clock->dot = clock->vco / clock->p; in pineview_clock()
459 clock->m = 5 * (clock->m1 + 2) + (clock->m2 + 2); in intel_clock()
460 clock->p = clock->p1 * clock->p2; in intel_clock()
461 clock->vco = refclk * clock->m / (clock->n + 2); in intel_clock()
462 clock->dot = clock->vco / clock->p; in intel_clock()
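
The two clock helpers above differ only in how M and the reference divider enter the formula: pineview_clock() takes m = m2 + 2 and divides by n directly, while intel_clock() builds m = 5*(m1 + 2) + (m2 + 2) and divides the reference by (n + 2); both then apply the post divider p = p1 * p2. A minimal standalone sketch of the intel_clock() arithmetic, with an illustrative 96 MHz reference and made-up divider values (not taken from the driver's limit tables):

#include <stdio.h>

struct dpll { int n, m1, m2, p1, p2, m, p, vco, dot; };

/* Same arithmetic as intel_clock() in the matches above:
 * m = 5*(m1+2) + (m2+2), vco = refclk * m / (n+2), dot = vco / p.
 * All frequencies are in kHz. */
static void i9xx_clock(int refclk_khz, struct dpll *c)
{
	c->m = 5 * (c->m1 + 2) + (c->m2 + 2);
	c->p = c->p1 * c->p2;
	c->vco = refclk_khz * c->m / (c->n + 2);
	c->dot = c->vco / c->p;
}

int main(void)
{
	/* Illustrative dividers, not from any real limit table. */
	struct dpll c = { .n = 3, .m1 = 10, .m2 = 8, .p1 = 2, .p2 = 10 };

	i9xx_clock(96000, &c);
	printf("vco = %d kHz, dot = %d kHz\n", c.vco, c.dot);
	return 0;
}
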
470 struct drm_device *dev = crtc->dev; in intel_pipe_has_type()
471 struct drm_mode_config *mode_config = &dev->mode_config; in intel_pipe_has_type()
474 list_for_each_entry(encoder, &mode_config->encoder_list, base.head) in intel_pipe_has_type()
475 if (encoder->base.crtc == crtc && encoder->type == type) in intel_pipe_has_type()
491 if (clock->p1 < limit->p1.min || limit->p1.max < clock->p1) in intel_PLL_is_valid()
493 if (clock->p < limit->p.min || limit->p.max < clock->p) in intel_PLL_is_valid()
495 if (clock->m2 < limit->m2.min || limit->m2.max < clock->m2) in intel_PLL_is_valid()
497 if (clock->m1 < limit->m1.min || limit->m1.max < clock->m1) in intel_PLL_is_valid()
499 if (clock->m1 <= clock->m2 && !IS_PINEVIEW(dev)) in intel_PLL_is_valid()
501 if (clock->m < limit->m.min || limit->m.max < clock->m) in intel_PLL_is_valid()
503 if (clock->n < limit->n.min || limit->n.max < clock->n) in intel_PLL_is_valid()
505 if (clock->vco < limit->vco.min || limit->vco.max < clock->vco) in intel_PLL_is_valid()
510 if (clock->dot < limit->dot.min || limit->dot.max < clock->dot) in intel_PLL_is_valid()
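
Taken together, the checks above are the whole of intel_PLL_is_valid(): every derived divider and the resulting VCO and dot clock must fall inside the platform's limit ranges, and m1 must be strictly greater than m2 on everything except Pineview. A condensed sketch of that pattern; the struct layout and the numbers in main() are illustrative stand-ins, not the driver's intel_limit tables:

#include <stdbool.h>

struct range  { int min, max; };
struct limits { struct range p1, p, m1, m2, m, n, vco, dot; };
struct pll    { int p1, p, m1, m2, m, n, vco, dot; };

static bool in_range(int v, struct range r)
{
	return v >= r.min && v <= r.max;
}

/* Mirrors the cascade in the matches: reject on the first out-of-range
 * value, and enforce m1 > m2 unless the platform is Pineview. */
static bool pll_is_valid(const struct pll *c, const struct limits *l,
			 bool is_pineview)
{
	if (!in_range(c->p1, l->p1) || !in_range(c->p, l->p))
		return false;
	if (!in_range(c->m2, l->m2) || !in_range(c->m1, l->m1))
		return false;
	if (c->m1 <= c->m2 && !is_pineview)
		return false;
	if (!in_range(c->m, l->m) || !in_range(c->n, l->n))
		return false;
	if (!in_range(c->vco, l->vco) || !in_range(c->dot, l->dot))
		return false;
	return true;
}

int main(void)
{
	struct limits lim = {
		.p1 = {1, 8},   .p = {5, 80},  .m1 = {10, 22}, .m2 = {5, 9},
		.m = {70, 120}, .n = {3, 8},   .vco = {1400000, 2800000},
		.dot = {20000, 400000},
	};
	struct pll c = { .p1 = 2, .p = 20, .m1 = 14, .m2 = 8, .m = 90,
			 .n = 4, .vco = 1440000, .dot = 72000 };

	return pll_is_valid(&c, &lim, false) ? 0 : 1;
}
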
521 struct drm_device *dev = crtc->dev; in intel_find_best_PLL()
522 struct drm_i915_private *dev_priv = dev->dev_private; in intel_find_best_PLL()
530 * settings for dual-channel. We haven't figured out how to in intel_find_best_PLL()
536 clock.p2 = limit->p2.p2_fast; in intel_find_best_PLL()
538 clock.p2 = limit->p2.p2_slow; in intel_find_best_PLL()
540 if (target < limit->p2.dot_limit) in intel_find_best_PLL()
541 clock.p2 = limit->p2.p2_slow; in intel_find_best_PLL()
543 clock.p2 = limit->p2.p2_fast; in intel_find_best_PLL()
548 for (clock.m1 = limit->m1.min; clock.m1 <= limit->m1.max; in intel_find_best_PLL()
550 for (clock.m2 = limit->m2.min; in intel_find_best_PLL()
551 clock.m2 <= limit->m2.max; clock.m2++) { in intel_find_best_PLL()
555 for (clock.n = limit->n.min; in intel_find_best_PLL()
556 clock.n <= limit->n.max; clock.n++) { in intel_find_best_PLL()
557 for (clock.p1 = limit->p1.min; in intel_find_best_PLL()
558 clock.p1 <= limit->p1.max; clock.p1++) { in intel_find_best_PLL()
566 this_err = abs(clock.dot - target); in intel_find_best_PLL()
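
intel_find_best_PLL() first fixes p2 (for LVDS it picks the fast or slow value based on dual- versus single-channel mode, otherwise on whether the target is below p2.dot_limit), then brute-forces m1, m2, n and p1 over the limit ranges and keeps the candidate whose computed dot clock lands closest to the target; the g4x variant below it walks the same ranges from the top downward instead. A compact standalone sketch of that search strategy; the limit ranges are illustrative, only the m1 > m2 constraint is kept, and the other intel_PLL_is_valid() checks are left out for brevity:

#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
#include <stdbool.h>

struct dpll  { int n, m1, m2, p1, p2, m, p, vco, dot; };
struct range { int min, max; };

/* Illustrative limit ranges; the driver takes these from per-platform tables. */
static const struct range n_lim  = { 3, 8 };
static const struct range m1_lim = { 10, 20 };
static const struct range m2_lim = { 5, 9 };
static const struct range p1_lim = { 1, 8 };

static void calc(int refclk, struct dpll *c)
{
	c->m = 5 * (c->m1 + 2) + (c->m2 + 2);
	c->p = c->p1 * c->p2;
	c->vco = refclk * c->m / (c->n + 2);
	c->dot = c->vco / c->p;
}

/* Brute force m1/m2/n/p1 with p2 already chosen, minimizing |dot - target|,
 * in the same spirit as intel_find_best_PLL(). */
static bool find_best_pll(int refclk, int target, int p2, struct dpll *best)
{
	int err = INT_MAX;
	struct dpll c = { .p2 = p2 };

	for (c.m1 = m1_lim.min; c.m1 <= m1_lim.max; c.m1++)
		for (c.m2 = m2_lim.min; c.m2 <= m2_lim.max && c.m2 < c.m1; c.m2++)
			for (c.n = n_lim.min; c.n <= n_lim.max; c.n++)
				for (c.p1 = p1_lim.min; c.p1 <= p1_lim.max; c.p1++) {
					calc(refclk, &c);
					if (abs(c.dot - target) < err) {
						err = abs(c.dot - target);
						*best = c;
					}
				}
	return err != INT_MAX;
}

int main(void)
{
	struct dpll best;

	if (find_best_pll(96000, 108000, 10, &best))   /* 108 MHz target, p2 = 10 */
		printf("dot = %d kHz (n=%d m1=%d m2=%d p1=%d)\n",
		       best.dot, best.n, best.m1, best.m2, best.p1);
	return 0;
}
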
583 struct drm_device *dev = crtc->dev; in intel_g4x_find_best_PLL()
584 struct drm_i915_private *dev_priv = dev->dev_private; in intel_g4x_find_best_PLL()
601 clock.p2 = limit->p2.p2_fast; in intel_g4x_find_best_PLL()
603 clock.p2 = limit->p2.p2_slow; in intel_g4x_find_best_PLL()
605 if (target < limit->p2.dot_limit) in intel_g4x_find_best_PLL()
606 clock.p2 = limit->p2.p2_slow; in intel_g4x_find_best_PLL()
608 clock.p2 = limit->p2.p2_fast; in intel_g4x_find_best_PLL()
612 max_n = limit->n.max; in intel_g4x_find_best_PLL()
614 for (clock.n = limit->n.min; clock.n <= max_n; clock.n++) { in intel_g4x_find_best_PLL()
616 for (clock.m1 = limit->m1.max; in intel_g4x_find_best_PLL()
617 clock.m1 >= limit->m1.min; clock.m1--) { in intel_g4x_find_best_PLL()
618 for (clock.m2 = limit->m2.max; in intel_g4x_find_best_PLL()
619 clock.m2 >= limit->m2.min; clock.m2--) { in intel_g4x_find_best_PLL()
620 for (clock.p1 = limit->p1.max; in intel_g4x_find_best_PLL()
621 clock.p1 >= limit->p1.min; clock.p1--) { in intel_g4x_find_best_PLL()
629 this_err = abs(clock.dot - target); in intel_g4x_find_best_PLL()
647 struct drm_device *dev = crtc->dev; in intel_find_pll_ironlake_dp()
696 * intel_wait_for_vblank - wait for vblank on a given pipe
705 struct drm_i915_private *dev_priv = dev->dev_private; in intel_wait_for_vblank()
732 * intel_wait_for_pipe_off - wait for pipe to turn off
750 struct drm_i915_private *dev_priv = dev->dev_private; in intel_wait_for_pipe_off()
752 if (INTEL_INFO(dev)->gen >= 4) { in intel_wait_for_pipe_off()
780 /* Only for pre-ILK configs */
806 if (HAS_PCH_CPT(dev_priv->dev)) { in assert_pch_pll()
870 if (dev_priv->info->gen == 5) in assert_fdi_tx_pll_enabled()
897 if (HAS_PCH_SPLIT(dev_priv->dev)) { in assert_panel_unlocked()
954 if (HAS_PCH_SPLIT(dev_priv->dev)) in assert_planes_disabled()
1001 if (HAS_PCH_CPT(dev_priv->dev)) { in dp_pipe_enabled()
1019 if (HAS_PCH_CPT(dev_priv->dev)) { in hdmi_pipe_enabled()
1035 if (HAS_PCH_CPT(dev_priv->dev)) { in lvds_pipe_enabled()
1050 if (HAS_PCH_CPT(dev_priv->dev)) { in adpa_pipe_enabled()
1106 * intel_enable_pll - enable a PLL
1114 * Note! This is for pre-ILK only.
1122 BUG_ON(dev_priv->info->gen >= 5); in intel_enable_pll()
1125 if (IS_MOBILE(dev_priv->dev) && !IS_I830(dev_priv->dev)) in intel_enable_pll()
1145 * intel_disable_pll - disable a PLL
1151 * Note! This is for pre-ILK only.
1159 if (pipe == PIPE_A && (dev_priv->quirks & QUIRK_PIPEA_FORCE)) in intel_disable_pll()
1173 * intel_enable_pch_pll - enable PCH PLL
1190 BUG_ON(dev_priv->info->gen < 5); in intel_enable_pch_pll()
1214 BUG_ON(dev_priv->info->gen < 5); in intel_disable_pch_pll()
1243 BUG_ON(dev_priv->info->gen < 5); in intel_enable_transcoder()
1255 if (HAS_PCH_IBX(dev_priv->dev)) { in intel_enable_transcoder()
1291 * intel_enable_pipe - enable a pipe, asserting requirements
1315 if (!HAS_PCH_SPLIT(dev_priv->dev)) in intel_enable_pipe()
1332 intel_wait_for_vblank(dev_priv->dev, pipe); in intel_enable_pipe()
1336 * intel_disable_pipe - disable a pipe, asserting requirements
1360 if (pipe == PIPE_A && (dev_priv->quirks & QUIRK_PIPEA_FORCE)) in intel_disable_pipe()
1369 intel_wait_for_pipe_off(dev_priv->dev, pipe); in intel_disable_pipe()
1373 * Plane regs are double buffered, going from enabled->disabled needs a
1384 * intel_enable_plane - enable a display plane on a given pipe
1407 intel_wait_for_vblank(dev_priv->dev, pipe); in intel_enable_plane()
1411 * intel_disable_plane - disable a display plane
1431 intel_wait_for_vblank(dev_priv->dev, pipe); in intel_disable_plane()
1489 struct drm_i915_private *dev_priv = dev->dev_private; in i8xx_disable_fbc()
1511 struct drm_device *dev = crtc->dev; in i8xx_enable_fbc()
1512 struct drm_i915_private *dev_priv = dev->dev_private; in i8xx_enable_fbc()
1513 struct drm_framebuffer *fb = crtc->fb; in i8xx_enable_fbc()
1515 struct drm_i915_gem_object *obj = intel_fb->obj; in i8xx_enable_fbc()
1521 cfb_pitch = dev_priv->cfb_size / FBC_LL_SIZE; in i8xx_enable_fbc()
1522 if (fb->pitches[0] < cfb_pitch) in i8xx_enable_fbc()
1523 cfb_pitch = fb->pitches[0]; in i8xx_enable_fbc()
1526 cfb_pitch = (cfb_pitch / 64) - 1; in i8xx_enable_fbc()
1527 plane = intel_crtc->plane == 0 ? FBC_CTL_PLANEA : FBC_CTL_PLANEB; in i8xx_enable_fbc()
1537 I915_WRITE(FBC_FENCE_OFF, crtc->y); in i8xx_enable_fbc()
1545 fbc_ctl |= obj->fence_reg; in i8xx_enable_fbc()
1549 cfb_pitch, crtc->y, intel_crtc->plane); in i8xx_enable_fbc()
1554 struct drm_i915_private *dev_priv = dev->dev_private; in i8xx_fbc_enabled()
1561 struct drm_device *dev = crtc->dev; in g4x_enable_fbc()
1562 struct drm_i915_private *dev_priv = dev->dev_private; in g4x_enable_fbc()
1563 struct drm_framebuffer *fb = crtc->fb; in g4x_enable_fbc()
1565 struct drm_i915_gem_object *obj = intel_fb->obj; in g4x_enable_fbc()
1567 int plane = intel_crtc->plane == 0 ? DPFC_CTL_PLANEA : DPFC_CTL_PLANEB; in g4x_enable_fbc()
1572 dpfc_ctl |= DPFC_CTL_FENCE_EN | obj->fence_reg; in g4x_enable_fbc()
1578 I915_WRITE(DPFC_FENCE_YOFF, crtc->y); in g4x_enable_fbc()
1583 DRM_DEBUG_KMS("enabled fbc on plane %d\n", intel_crtc->plane); in g4x_enable_fbc()
1588 struct drm_i915_private *dev_priv = dev->dev_private; in g4x_disable_fbc()
1603 struct drm_i915_private *dev_priv = dev->dev_private; in g4x_fbc_enabled()
1610 struct drm_i915_private *dev_priv = dev->dev_private; in sandybridge_blit_fbc_update()
1630 struct drm_device *dev = crtc->dev; in ironlake_enable_fbc()
1631 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_enable_fbc()
1632 struct drm_framebuffer *fb = crtc->fb; in ironlake_enable_fbc()
1634 struct drm_i915_gem_object *obj = intel_fb->obj; in ironlake_enable_fbc()
1636 int plane = intel_crtc->plane == 0 ? DPFC_CTL_PLANEA : DPFC_CTL_PLANEB; in ironlake_enable_fbc()
1643 /* Set persistent mode for front-buffer rendering, ala X. */ in ironlake_enable_fbc()
1645 dpfc_ctl |= (DPFC_CTL_FENCE_EN | obj->fence_reg); in ironlake_enable_fbc()
1651 I915_WRITE(ILK_DPFC_FENCE_YOFF, crtc->y); in ironlake_enable_fbc()
1652 I915_WRITE(ILK_FBC_RT_BASE, obj->gtt_offset | ILK_FBC_RT_VALID); in ironlake_enable_fbc()
1658 SNB_CPU_FENCE_ENABLE | obj->fence_reg); in ironlake_enable_fbc()
1659 I915_WRITE(DPFC_CPU_FENCE_OFFSET, crtc->y); in ironlake_enable_fbc()
1663 DRM_DEBUG_KMS("enabled fbc on plane %d\n", intel_crtc->plane); in ironlake_enable_fbc()
1668 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_disable_fbc()
1683 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_fbc_enabled()
1690 struct drm_i915_private *dev_priv = dev->dev_private; in intel_fbc_enabled()
1692 if (!dev_priv->display.fbc_enabled) in intel_fbc_enabled()
1695 return dev_priv->display.fbc_enabled(dev); in intel_fbc_enabled()
1703 struct drm_device *dev = work->crtc->dev; in intel_fbc_work_fn()
1704 struct drm_i915_private *dev_priv = dev->dev_private; in intel_fbc_work_fn()
1706 mutex_lock(&dev->struct_mutex); in intel_fbc_work_fn()
1707 if (work == dev_priv->fbc_work) { in intel_fbc_work_fn()
1711 if (work->crtc->fb == work->fb) { in intel_fbc_work_fn()
1712 dev_priv->display.enable_fbc(work->crtc, in intel_fbc_work_fn()
1713 work->interval); in intel_fbc_work_fn()
1715 dev_priv->cfb_plane = to_intel_crtc(work->crtc)->plane; in intel_fbc_work_fn()
1716 dev_priv->cfb_fb = work->crtc->fb->base.id; in intel_fbc_work_fn()
1717 dev_priv->cfb_y = work->crtc->y; in intel_fbc_work_fn()
1720 dev_priv->fbc_work = NULL; in intel_fbc_work_fn()
1722 mutex_unlock(&dev->struct_mutex); in intel_fbc_work_fn()
1729 if (dev_priv->fbc_work == NULL) in intel_cancel_fbc_work()
1735 * dev_priv->fbc_work, so we can perform the cancellation in intel_cancel_fbc_work()
1738 if (cancel_delayed_work(&dev_priv->fbc_work->work)) in intel_cancel_fbc_work()
1740 kfree(dev_priv->fbc_work); in intel_cancel_fbc_work()
1743 * wake-up (because the work was already running and waiting in intel_cancel_fbc_work()
1747 dev_priv->fbc_work = NULL; in intel_cancel_fbc_work()
1753 struct drm_device *dev = crtc->dev; in intel_enable_fbc()
1754 struct drm_i915_private *dev_priv = dev->dev_private; in intel_enable_fbc()
1756 if (!dev_priv->display.enable_fbc) in intel_enable_fbc()
1763 dev_priv->display.enable_fbc(crtc, interval); in intel_enable_fbc()
1767 work->crtc = crtc; in intel_enable_fbc()
1768 work->fb = crtc->fb; in intel_enable_fbc()
1769 work->interval = interval; in intel_enable_fbc()
1770 INIT_DELAYED_WORK(&work->work, intel_fbc_work_fn); in intel_enable_fbc()
1772 dev_priv->fbc_work = work; in intel_enable_fbc()
1783 * following the termination of the page-flipping sequence in intel_enable_fbc()
1784 * and indeed performing the enable as a co-routine and not in intel_enable_fbc()
1787 schedule_delayed_work(&work->work, msecs_to_jiffies(50)); in intel_enable_fbc()
1792 struct drm_i915_private *dev_priv = dev->dev_private; in intel_disable_fbc()
1796 if (!dev_priv->display.disable_fbc) in intel_disable_fbc()
1799 dev_priv->display.disable_fbc(dev); in intel_disable_fbc()
1800 dev_priv->cfb_plane = -1; in intel_disable_fbc()
1804 * intel_update_fbc - enable/disable FBC as needed
1807 * Set up the framebuffer compression hardware at mode set time. We
1809 * - plane A only (on pre-965)
1810 - no pixel multiply/line duplication in intel_update_fbc()
1811 * - no alpha buffer discard
1812 * - no dual wide
1813 * - framebuffer <= 2048 in width, 1536 in height
1824 struct drm_i915_private *dev_priv = dev->dev_private; in intel_update_fbc()
1844 * - more than one pipe is active in intel_update_fbc()
1845 * - changing FBC params (stride, fence, mode) in intel_update_fbc()
1846 * - new fb is too large to fit in compressed buffer in intel_update_fbc()
1847 * - going to an unsupported config (interlace, pixel multiply, etc.) in intel_update_fbc()
1849 list_for_each_entry(tmp_crtc, &dev->mode_config.crtc_list, head) { in intel_update_fbc()
1850 if (tmp_crtc->enabled && tmp_crtc->fb) { in intel_update_fbc()
1853 dev_priv->no_fbc_reason = FBC_MULTIPLE_PIPES; in intel_update_fbc()
1860 if (!crtc || crtc->fb == NULL) { in intel_update_fbc()
1862 dev_priv->no_fbc_reason = FBC_NO_OUTPUT; in intel_update_fbc()
1867 fb = crtc->fb; in intel_update_fbc()
1869 obj = intel_fb->obj; in intel_update_fbc()
1873 DRM_DEBUG_KMS("fbc set to per-chip default\n"); in intel_update_fbc()
1875 if (INTEL_INFO(dev)->gen <= 6) in intel_update_fbc()
1880 dev_priv->no_fbc_reason = FBC_MODULE_PARAM; in intel_update_fbc()
1883 if (intel_fb->obj->base.size > dev_priv->cfb_size) { in intel_update_fbc()
1884 DRM_DEBUG_KMS("framebuffer too large, disabling " in intel_update_fbc()
1886 dev_priv->no_fbc_reason = FBC_STOLEN_TOO_SMALL; in intel_update_fbc()
1889 if ((crtc->mode.flags & DRM_MODE_FLAG_INTERLACE) || in intel_update_fbc()
1890 (crtc->mode.flags & DRM_MODE_FLAG_DBLSCAN)) { in intel_update_fbc()
1893 dev_priv->no_fbc_reason = FBC_UNSUPPORTED_MODE; in intel_update_fbc()
1896 if ((crtc->mode.hdisplay > 2048) || in intel_update_fbc()
1897 (crtc->mode.vdisplay > 1536)) { in intel_update_fbc()
1899 dev_priv->no_fbc_reason = FBC_MODE_TOO_LARGE; in intel_update_fbc()
1902 if ((IS_I915GM(dev) || IS_I945GM(dev)) && intel_crtc->plane != 0) { in intel_update_fbc()
1904 dev_priv->no_fbc_reason = FBC_BAD_PLANE; in intel_update_fbc()
1911 if (obj->tiling_mode != I915_TILING_X || in intel_update_fbc()
1912 obj->fence_reg == I915_FENCE_REG_NONE) { in intel_update_fbc()
1913 DRM_DEBUG_KMS("framebuffer not tiled or fenced, disabling compression\n"); in intel_update_fbc()
1914 dev_priv->no_fbc_reason = FBC_NOT_TILED; in intel_update_fbc()
1923 * Note that we make the fundamental assumption that the fb->obj in intel_update_fbc()
1927 if (dev_priv->cfb_plane == intel_crtc->plane && in intel_update_fbc()
1928 dev_priv->cfb_fb == fb->base.id && in intel_update_fbc()
1929 dev_priv->cfb_y == crtc->y) in intel_update_fbc()
1934 * configuration (modeswitching) and after page-flipping in intel_update_fbc()
1936 * we disable the FBC at the start of the page-flip in intel_update_fbc()
1976 struct drm_i915_private *dev_priv = dev->dev_private; in intel_pin_and_fence_fb_obj()
1980 switch (obj->tiling_mode) { in intel_pin_and_fence_fb_obj()
1984 else if (INTEL_INFO(dev)->gen >= 4) in intel_pin_and_fence_fb_obj()
1996 return -EINVAL; in intel_pin_and_fence_fb_obj()
2001 dev_priv->mm.interruptible = false; in intel_pin_and_fence_fb_obj()
2006 /* Install a fence for tiled scan-out. Pre-i965 always needs a in intel_pin_and_fence_fb_obj()
2008 * framebuffer compression. For simplicity, we always install in intel_pin_and_fence_fb_obj()
2011 if (obj->tiling_mode != I915_TILING_NONE) { in intel_pin_and_fence_fb_obj()
2017 dev_priv->mm.interruptible = true; in intel_pin_and_fence_fb_obj()
2023 dev_priv->mm.interruptible = true; in intel_pin_and_fence_fb_obj()
2030 struct drm_device *dev = crtc->dev; in i9xx_update_plane()
2031 struct drm_i915_private *dev_priv = dev->dev_private; in i9xx_update_plane()
2035 int plane = intel_crtc->plane; in i9xx_update_plane()
2046 return -EINVAL; in i9xx_update_plane()
2050 obj = intel_fb->obj; in i9xx_update_plane()
2056 switch (fb->bits_per_pixel) { in i9xx_update_plane()
2061 if (fb->depth == 15) in i9xx_update_plane()
2071 DRM_ERROR("Unknown color depth %d\n", fb->bits_per_pixel); in i9xx_update_plane()
2072 return -EINVAL; in i9xx_update_plane()
2074 if (INTEL_INFO(dev)->gen >= 4) { in i9xx_update_plane()
2075 if (obj->tiling_mode != I915_TILING_NONE) in i9xx_update_plane()
2083 Start = obj->gtt_offset; in i9xx_update_plane()
2084 Offset = y * fb->pitches[0] + x * (fb->bits_per_pixel / 8); in i9xx_update_plane()
2087 Start, Offset, x, y, fb->pitches[0]); in i9xx_update_plane()
2088 I915_WRITE(DSPSTRIDE(plane), fb->pitches[0]); in i9xx_update_plane()
2089 if (INTEL_INFO(dev)->gen >= 4) { in i9xx_update_plane()
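
Both plane-update paths program scanout from the same two numbers: the object's GTT offset as the surface start, and a linear byte offset of y whole strides plus x whole pixels into the framebuffer, with the stride itself written to DSPSTRIDE. A minimal sketch of that offset arithmetic; the pitch and pixel size in main() are illustrative:

#include <stdio.h>

/* Linear byte offset of pixel (x, y), as computed for the plane registers
 * in the matches above: y full lines of 'pitch' bytes, plus x whole pixels. */
static unsigned int fb_linear_offset(int x, int y, unsigned int pitch_bytes,
				     int bits_per_pixel)
{
	return (unsigned int)y * pitch_bytes +
	       (unsigned int)x * (bits_per_pixel / 8);
}

int main(void)
{
	/* Illustrative 32 bpp framebuffer, 1920 pixels wide (pitch 7680 bytes). */
	printf("offset of (100, 10) = %u bytes\n",
	       fb_linear_offset(100, 10, 7680, 32));
	return 0;
}
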
2103 struct drm_device *dev = crtc->dev; in ironlake_update_plane()
2104 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_update_plane()
2108 int plane = intel_crtc->plane; in ironlake_update_plane()
2120 return -EINVAL; in ironlake_update_plane()
2124 obj = intel_fb->obj; in ironlake_update_plane()
2130 switch (fb->bits_per_pixel) { in ironlake_update_plane()
2135 if (fb->depth != 16) in ironlake_update_plane()
2136 return -EINVAL; in ironlake_update_plane()
2142 if (fb->depth == 24) in ironlake_update_plane()
2144 else if (fb->depth == 30) in ironlake_update_plane()
2147 return -EINVAL; in ironlake_update_plane()
2150 DRM_ERROR("Unknown color depth %d\n", fb->bits_per_pixel); in ironlake_update_plane()
2151 return -EINVAL; in ironlake_update_plane()
2154 if (obj->tiling_mode != I915_TILING_NONE) in ironlake_update_plane()
2164 Start = obj->gtt_offset; in ironlake_update_plane()
2165 Offset = y * fb->pitches[0] + x * (fb->bits_per_pixel / 8); in ironlake_update_plane()
2168 Start, Offset, x, y, fb->pitches[0]); in ironlake_update_plane()
2169 I915_WRITE(DSPSTRIDE(plane), fb->pitches[0]); in ironlake_update_plane()
2183 struct drm_device *dev = crtc->dev; in intel_pipe_set_base_atomic()
2184 struct drm_i915_private *dev_priv = dev->dev_private; in intel_pipe_set_base_atomic()
2187 ret = dev_priv->display.update_plane(crtc, fb, x, y); in intel_pipe_set_base_atomic()
2201 struct drm_device *dev = crtc->dev; in intel_pipe_set_base()
2207 if (!crtc->fb) { in intel_pipe_set_base()
2212 switch (intel_crtc->plane) { in intel_pipe_set_base()
2222 return -EINVAL; in intel_pipe_set_base()
2225 mutex_lock(&dev->struct_mutex); in intel_pipe_set_base()
2227 to_intel_framebuffer(crtc->fb)->obj, in intel_pipe_set_base()
2230 mutex_unlock(&dev->struct_mutex); in intel_pipe_set_base()
2236 struct drm_i915_private *dev_priv = dev->dev_private; in intel_pipe_set_base()
2237 struct drm_i915_gem_object *obj = to_intel_framebuffer(old_fb)->obj; in intel_pipe_set_base()
2239 wait_event(dev_priv->pending_flip_queue, in intel_pipe_set_base()
2240 atomic_read(&dev_priv->mm.wedged) || in intel_pipe_set_base()
2241 atomic_read(&obj->pending_flip) == 0); in intel_pipe_set_base()
2246 * framebuffer. in intel_pipe_set_base()
2255 ret = intel_pipe_set_base_atomic(crtc, crtc->fb, x, y, in intel_pipe_set_base()
2258 i915_gem_object_unpin(to_intel_framebuffer(crtc->fb)->obj); in intel_pipe_set_base()
2259 mutex_unlock(&dev->struct_mutex); in intel_pipe_set_base()
2265 intel_wait_for_vblank(dev, intel_crtc->pipe); in intel_pipe_set_base()
2266 i915_gem_object_unpin(to_intel_framebuffer(old_fb)->obj); in intel_pipe_set_base()
2269 mutex_unlock(&dev->struct_mutex); in intel_pipe_set_base()
2271 if (!dev->primary->master) in intel_pipe_set_base()
2274 master_priv = dev->primary->master->driver_priv; in intel_pipe_set_base()
2275 if (!master_priv->sarea_priv) in intel_pipe_set_base()
2278 if (intel_crtc->pipe) { in intel_pipe_set_base()
2279 master_priv->sarea_priv->pipeB_x = x; in intel_pipe_set_base()
2280 master_priv->sarea_priv->pipeB_y = y; in intel_pipe_set_base()
2282 master_priv->sarea_priv->pipeA_x = x; in intel_pipe_set_base()
2283 master_priv->sarea_priv->pipeA_y = y; in intel_pipe_set_base()
2291 struct drm_device *dev = crtc->dev; in ironlake_set_pll_edp()
2292 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_set_pll_edp()
2328 struct drm_device *dev = crtc->dev; in intel_fdi_normal_train()
2329 struct drm_i915_private *dev_priv = dev->dev_private; in intel_fdi_normal_train()
2331 int pipe = intel_crtc->pipe; in intel_fdi_normal_train()
2369 struct drm_i915_private *dev_priv = dev->dev_private; in cpt_phase_pointer_enable()
2382 struct drm_device *dev = crtc->dev; in ironlake_fdi_link_train()
2383 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_fdi_link_train()
2385 int pipe = intel_crtc->pipe; in ironlake_fdi_link_train()
2386 int plane = intel_crtc->plane; in ironlake_fdi_link_train()
2407 temp |= (intel_crtc->fdi_lanes - 1) << 19; in ironlake_fdi_link_train()
2486 struct drm_device *dev = crtc->dev; in gen6_fdi_link_train()
2487 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_fdi_link_train()
2489 int pipe = intel_crtc->pipe; in gen6_fdi_link_train()
2507 temp |= (intel_crtc->fdi_lanes - 1) << 19; in gen6_fdi_link_train()
2511 /* SNB-B */ in gen6_fdi_link_train()
2562 /* SNB-B */ in gen6_fdi_link_train()
2610 struct drm_device *dev = crtc->dev; in ivb_manual_fdi_link_train()
2611 struct drm_i915_private *dev_priv = dev->dev_private; in ivb_manual_fdi_link_train()
2613 int pipe = intel_crtc->pipe; in ivb_manual_fdi_link_train()
2631 temp |= (intel_crtc->fdi_lanes - 1) << 19; in ivb_manual_fdi_link_train()
2723 struct drm_device *dev = crtc->dev; in ironlake_fdi_pll_enable()
2724 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_fdi_pll_enable()
2726 int pipe = intel_crtc->pipe; in ironlake_fdi_pll_enable()
2737 temp |= (intel_crtc->fdi_lanes - 1) << 19; in ironlake_fdi_pll_enable()
2764 struct drm_i915_private *dev_priv = dev->dev_private; in cpt_phase_pointer_disable()
2775 struct drm_device *dev = crtc->dev; in ironlake_fdi_disable()
2776 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_fdi_disable()
2778 int pipe = intel_crtc->pipe; in ironlake_fdi_disable()
2837 struct drm_i915_private *dev_priv = dev->dev_private; in intel_clear_scanline_wait()
2856 if (crtc->fb == NULL) in intel_crtc_wait_for_pending_flips()
2859 obj = to_intel_framebuffer(crtc->fb)->obj; in intel_crtc_wait_for_pending_flips()
2860 dev_priv = crtc->dev->dev_private; in intel_crtc_wait_for_pending_flips()
2861 wait_event(dev_priv->pending_flip_queue, in intel_crtc_wait_for_pending_flips()
2862 atomic_read(&obj->pending_flip) == 0); in intel_crtc_wait_for_pending_flips()
2867 struct drm_device *dev = crtc->dev; in intel_crtc_driving_pch()
2868 struct drm_mode_config *mode_config = &dev->mode_config; in intel_crtc_driving_pch()
2872 * If there's a non-PCH eDP on this crtc, it must be DP_A, and that in intel_crtc_driving_pch()
2875 list_for_each_entry(encoder, &mode_config->encoder_list, base.head) { in intel_crtc_driving_pch()
2876 if (encoder->base.crtc != crtc) in intel_crtc_driving_pch()
2879 switch (encoder->type) { in intel_crtc_driving_pch()
2881 if (!intel_encoder_is_pch_edp(&encoder->base)) in intel_crtc_driving_pch()
2892 * - PCH PLLs
2893 * - FDI training & RX/TX
2894 * - update transcoder timings
2895 * - DP transcoding bits
2896 * - transcoder
2900 struct drm_device *dev = crtc->dev; in ironlake_pch_enable()
2901 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_pch_enable()
2903 int pipe = intel_crtc->pipe; in ironlake_pch_enable()
2907 dev_priv->display.fdi_link_train(crtc); in ironlake_pch_enable()
2912 transc_sel = intel_crtc->use_pll_a ? TRANSC_DPLLA_SEL : in ironlake_pch_enable()
2956 if (crtc->mode.flags & DRM_MODE_FLAG_PHSYNC) in ironlake_pch_enable()
2958 if (crtc->mode.flags & DRM_MODE_FLAG_PVSYNC) in ironlake_pch_enable()
2985 struct drm_i915_private *dev_priv = dev->dev_private; in intel_cpt_verify_modeset()
3003 struct drm_device *dev = crtc->dev; in ironlake_crtc_enable()
3004 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_crtc_enable()
3006 int pipe = intel_crtc->pipe; in ironlake_crtc_enable()
3007 int plane = intel_crtc->plane; in ironlake_crtc_enable()
3011 if (intel_crtc->active) in ironlake_crtc_enable()
3014 intel_crtc->active = true; in ironlake_crtc_enable()
3031 if (dev_priv->pch_pf_size && in ironlake_crtc_enable()
3033 /* Force use of hard-coded filter coefficients in ironlake_crtc_enable()
3034 * as some pre-programmed values are broken, in ironlake_crtc_enable()
3038 I915_WRITE(PF_WIN_POS(pipe), dev_priv->pch_pf_pos); in ironlake_crtc_enable()
3039 I915_WRITE(PF_WIN_SZ(pipe), dev_priv->pch_pf_size); in ironlake_crtc_enable()
3054 mutex_lock(&dev->struct_mutex); in ironlake_crtc_enable()
3056 mutex_unlock(&dev->struct_mutex); in ironlake_crtc_enable()
3063 struct drm_device *dev = crtc->dev; in ironlake_crtc_disable()
3064 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_crtc_disable()
3066 int pipe = intel_crtc->pipe; in ironlake_crtc_disable()
3067 int plane = intel_crtc->plane; in ironlake_crtc_disable()
3070 if (!intel_crtc->active) in ironlake_crtc_disable()
3079 if (dev_priv->cfb_plane == plane) in ironlake_crtc_disable()
3091 * the connector/encoder ->prepare instead, but we don't always have in ironlake_crtc_disable()
3127 if (!intel_crtc->no_pll) in ironlake_crtc_disable()
3151 intel_crtc->active = false; in ironlake_crtc_disable()
3154 mutex_lock(&dev->struct_mutex); in ironlake_crtc_disable()
3157 mutex_unlock(&dev->struct_mutex); in ironlake_crtc_disable()
3163 int pipe = intel_crtc->pipe; in ironlake_crtc_dpms()
3164 int plane = intel_crtc->plane; in ironlake_crtc_dpms()
3186 if (!enable && intel_crtc->overlay) { in intel_crtc_dpms_overlay()
3187 struct drm_device *dev = intel_crtc->base.dev; in intel_crtc_dpms_overlay()
3188 struct drm_i915_private *dev_priv = dev->dev_private; in intel_crtc_dpms_overlay()
3190 mutex_lock(&dev->struct_mutex); in intel_crtc_dpms_overlay()
3191 dev_priv->mm.interruptible = false; in intel_crtc_dpms_overlay()
3192 (void) intel_overlay_switch_off(intel_crtc->overlay); in intel_crtc_dpms_overlay()
3193 dev_priv->mm.interruptible = true; in intel_crtc_dpms_overlay()
3194 mutex_unlock(&dev->struct_mutex); in intel_crtc_dpms_overlay()
3204 struct drm_device *dev = crtc->dev; in i9xx_crtc_enable()
3205 struct drm_i915_private *dev_priv = dev->dev_private; in i9xx_crtc_enable()
3207 int pipe = intel_crtc->pipe; in i9xx_crtc_enable()
3208 int plane = intel_crtc->plane; in i9xx_crtc_enable()
3210 if (intel_crtc->active) in i9xx_crtc_enable()
3213 intel_crtc->active = true; in i9xx_crtc_enable()
3230 struct drm_device *dev = crtc->dev; in i9xx_crtc_disable()
3231 struct drm_i915_private *dev_priv = dev->dev_private; in i9xx_crtc_disable()
3233 int pipe = intel_crtc->pipe; in i9xx_crtc_disable()
3234 int plane = intel_crtc->plane; in i9xx_crtc_disable()
3236 if (!intel_crtc->active) in i9xx_crtc_disable()
3245 if (dev_priv->cfb_plane == plane) in i9xx_crtc_disable()
3252 intel_crtc->active = false; in i9xx_crtc_disable()
3280 struct drm_device *dev = crtc->dev; in intel_crtc_dpms()
3281 struct drm_i915_private *dev_priv = dev->dev_private; in intel_crtc_dpms()
3284 int pipe = intel_crtc->pipe; in intel_crtc_dpms()
3287 if (intel_crtc->dpms_mode == mode) in intel_crtc_dpms()
3290 intel_crtc->dpms_mode = mode; in intel_crtc_dpms()
3292 dev_priv->display.dpms(crtc, mode); in intel_crtc_dpms()
3294 if (!dev->primary->master) in intel_crtc_dpms()
3297 master_priv = dev->primary->master->driver_priv; in intel_crtc_dpms()
3298 if (!master_priv->sarea_priv) in intel_crtc_dpms()
3301 enabled = crtc->enabled && mode != DRM_MODE_DPMS_OFF; in intel_crtc_dpms()
3305 master_priv->sarea_priv->pipeA_w = enabled ? crtc->mode.hdisplay : 0; in intel_crtc_dpms()
3306 master_priv->sarea_priv->pipeA_h = enabled ? crtc->mode.vdisplay : 0; in intel_crtc_dpms()
3309 master_priv->sarea_priv->pipeB_w = enabled ? crtc->mode.hdisplay : 0; in intel_crtc_dpms()
3310 master_priv->sarea_priv->pipeB_h = enabled ? crtc->mode.vdisplay : 0; in intel_crtc_dpms()
3320 struct drm_crtc_helper_funcs *crtc_funcs = crtc->helper_private; in intel_crtc_disable()
3321 struct drm_device *dev = crtc->dev; in intel_crtc_disable()
3323 crtc_funcs->dpms(crtc, DRM_MODE_DPMS_OFF); in intel_crtc_disable()
3325 if (crtc->fb) { in intel_crtc_disable()
3326 mutex_lock(&dev->struct_mutex); in intel_crtc_disable()
3327 i915_gem_object_unpin(to_intel_framebuffer(crtc->fb)->obj); in intel_crtc_disable()
3328 mutex_unlock(&dev->struct_mutex); in intel_crtc_disable()
3362 struct drm_encoder_helper_funcs *encoder_funcs = encoder->helper_private; in intel_encoder_prepare()
3364 encoder_funcs->dpms(encoder, DRM_MODE_DPMS_OFF); in intel_encoder_prepare()
3369 struct drm_encoder_helper_funcs *encoder_funcs = encoder->helper_private; in intel_encoder_commit()
3370 struct drm_device *dev = encoder->dev; in intel_encoder_commit()
3372 struct intel_crtc *intel_crtc = to_intel_crtc(intel_encoder->base.crtc); in intel_encoder_commit()
3375 encoder_funcs->dpms(encoder, DRM_MODE_DPMS_ON); in intel_encoder_commit()
3378 intel_cpt_verify_modeset(dev, intel_crtc->pipe); in intel_encoder_commit()
3393 struct drm_device *dev = crtc->dev; in intel_crtc_mode_fixup()
3397 if (mode->clock * 3 > IRONLAKE_FDI_FREQ * 4) in intel_crtc_mode_fixup()
3404 if (adjusted_mode->crtc_htotal == 0) in intel_crtc_mode_fixup()
3429 pci_read_config_word(dev->pdev, GCFGC, &gcfgc); in i915gm_get_display_clock_speed()
3495 m_n->tu = 64; /* default size */ in ironlake_compute_m_n()
3498 m_n->gmch_m = bits_per_pixel * pixel_clock; in ironlake_compute_m_n()
3499 m_n->gmch_n = link_clock * nlanes * 8; in ironlake_compute_m_n()
3500 fdi_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n); in ironlake_compute_m_n()
3502 m_n->link_m = pixel_clock; in ironlake_compute_m_n()
3503 m_n->link_n = link_clock; in ironlake_compute_m_n()
3504 fdi_reduce_ratio(&m_n->link_m, &m_n->link_n); in ironlake_compute_m_n()
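
ironlake_compute_m_n() expresses two ratios for the DP/FDI link: the data M/N (payload bits per pixel clock versus link capacity, link_clock * lanes * 8 bits per clock) and the link M/N (pixel clock versus link clock), each reduced before being handed to the hardware. fdi_reduce_ratio() itself is not among the matches; the sketch below assumes it simply scales both terms down until they fit the M/N register width, so treat that helper as an approximation:

#include <stdio.h>

struct fdi_m_n { unsigned int tu, gmch_m, gmch_n, link_m, link_n; };

/* Assumption: the real fdi_reduce_ratio() is not in the matches; here we
 * just shift both terms right until they fit a 24-bit register field. */
static void reduce_ratio(unsigned int *num, unsigned int *den)
{
	while (*num > 0xffffff || *den > 0xffffff) {
		*num >>= 1;
		*den >>= 1;
	}
}

/* Same structure as ironlake_compute_m_n() in the matches: data M/N is
 * payload bits versus link capacity, link M/N is pixel clock versus
 * link clock. */
static void compute_m_n(int bits_per_pixel, int nlanes,
			int pixel_clock, int link_clock, struct fdi_m_n *m_n)
{
	m_n->tu = 64;                       /* default transfer unit size */

	m_n->gmch_m = bits_per_pixel * pixel_clock;
	m_n->gmch_n = link_clock * nlanes * 8;
	reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);

	m_n->link_m = pixel_clock;
	m_n->link_n = link_clock;
	reduce_ratio(&m_n->link_m, &m_n->link_n);
}

int main(void)
{
	struct fdi_m_n m_n;

	/* Illustrative numbers: 24 bpp, 4 lanes, 148500 kHz pixel clock,
	 * 270000 kHz link clock. */
	compute_m_n(24, 4, 148500, 270000, &m_n);
	printf("gmch %u/%u, link %u/%u\n",
	       m_n.gmch_m, m_n.gmch_n, m_n.link_m, m_n.link_n);
	return 0;
}
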
3655 * intel_calculate_wm - calculate watermark level
3688 entries_required = DIV_ROUND_UP(entries_required, wm->cacheline_size); in intel_calculate_wm()
3692 wm_size = fifo_size - (entries_required + wm->guard_size); in intel_calculate_wm()
3697 if (wm_size > (long)wm->max_wm) in intel_calculate_wm()
3698 wm_size = wm->max_wm; in intel_calculate_wm()
3700 wm_size = wm->default_wm; in intel_calculate_wm()
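
intel_calculate_wm() turns a latency budget into FIFO entries and reports how much of the FIFO remains: the entry count is rounded up to whole cachelines, then wm = fifo_size - (entries + guard_size), clamped to max_wm, with default_wm substituted when the result is not positive. The first half of the function is not among the matches, so the bytes-fetched-during-latency term below is an assumption following the usual form (pixel clock in kHz, bytes per pixel, latency in ns):

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

static int calculate_wm(long clock_khz, int bytes_pp, int latency_ns,
			int cacheline, int fifo_size, int guard,
			int max_wm, int default_wm)
{
	long entries, wm;

	/* Assumed form of the part not shown in the matches: bytes the pipe
	 * fetches while the memory latency elapses. */
	entries = (clock_khz / 1000) * bytes_pp * latency_ns / 1000;
	entries = DIV_ROUND_UP(entries, cacheline);   /* whole cachelines */

	/* From the matches: what is left of the FIFO after that burst. */
	wm = fifo_size - (entries + guard);
	if (wm > max_wm)
		wm = max_wm;
	if (wm <= 0)
		wm = default_wm;

	return (int)wm;
}

int main(void)
{
	/* Illustrative values; the real numbers come from the wm_info tables. */
	printf("wm = %d\n",
	       calculate_wm(148500, 4, 200, 64, 96, 2, 64, 1));
	return 0;
}
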
3716 {1, 0, 800, 400, 3382, 33382, 3983, 33983}, /* DDR2-400 SC */
3717 {1, 0, 800, 667, 3354, 33354, 3807, 33807}, /* DDR2-667 SC */
3718 {1, 0, 800, 800, 3347, 33347, 3763, 33763}, /* DDR2-800 SC */
3719 {1, 1, 800, 667, 6420, 36420, 6873, 36873}, /* DDR3-667 SC */
3720 {1, 1, 800, 800, 5902, 35902, 6318, 36318}, /* DDR3-800 SC */
3722 {1, 0, 667, 400, 3400, 33400, 4021, 34021}, /* DDR2-400 SC */
3723 {1, 0, 667, 667, 3372, 33372, 3845, 33845}, /* DDR2-667 SC */
3724 {1, 0, 667, 800, 3386, 33386, 3822, 33822}, /* DDR2-800 SC */
3725 {1, 1, 667, 667, 6438, 36438, 6911, 36911}, /* DDR3-667 SC */
3726 {1, 1, 667, 800, 5941, 35941, 6377, 36377}, /* DDR3-800 SC */
3728 {1, 0, 400, 400, 3472, 33472, 4173, 34173}, /* DDR2-400 SC */
3729 {1, 0, 400, 667, 3443, 33443, 3996, 33996}, /* DDR2-667 SC */
3730 {1, 0, 400, 800, 3430, 33430, 3946, 33946}, /* DDR2-800 SC */
3731 {1, 1, 400, 667, 6509, 36509, 7062, 37062}, /* DDR3-667 SC */
3732 {1, 1, 400, 800, 5985, 35985, 6501, 36501}, /* DDR3-800 SC */
3734 {0, 0, 800, 400, 3438, 33438, 4065, 34065}, /* DDR2-400 SC */
3735 {0, 0, 800, 667, 3410, 33410, 3889, 33889}, /* DDR2-667 SC */
3736 {0, 0, 800, 800, 3403, 33403, 3845, 33845}, /* DDR2-800 SC */
3737 {0, 1, 800, 667, 6476, 36476, 6955, 36955}, /* DDR3-667 SC */
3738 {0, 1, 800, 800, 5958, 35958, 6400, 36400}, /* DDR3-800 SC */
3740 {0, 0, 667, 400, 3456, 33456, 4103, 34106}, /* DDR2-400 SC */
3741 {0, 0, 667, 667, 3428, 33428, 3927, 33927}, /* DDR2-667 SC */
3742 {0, 0, 667, 800, 3443, 33443, 3905, 33905}, /* DDR2-800 SC */
3743 {0, 1, 667, 667, 6494, 36494, 6993, 36993}, /* DDR3-667 SC */
3744 {0, 1, 667, 800, 5998, 35998, 6460, 36460}, /* DDR3-800 SC */
3746 {0, 0, 400, 400, 3528, 33528, 4255, 34255}, /* DDR2-400 SC */
3747 {0, 0, 400, 667, 3500, 33500, 4079, 34079}, /* DDR2-667 SC */
3748 {0, 0, 400, 800, 3487, 33487, 4029, 34029}, /* DDR2-800 SC */
3749 {0, 1, 400, 667, 6566, 36566, 7145, 37145}, /* DDR3-667 SC */
3750 {0, 1, 400, 800, 6042, 36042, 6584, 36584}, /* DDR3-800 SC */
3766 if (is_desktop == latency->is_desktop && in intel_get_cxsr_latency()
3767 is_ddr3 == latency->is_ddr3 && in intel_get_cxsr_latency()
3768 fsb == latency->fsb_freq && mem == latency->mem_freq) in intel_get_cxsr_latency()
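
The table rows above appear to be keyed by {is_desktop, is_ddr3, FSB frequency, memory frequency}, with four latency values per row (the field names display_sr, cursor_sr, display_hpll_disable and cursor_hpll_disable show up in the pineview_update_wm() matches further down). intel_get_cxsr_latency() is a linear scan for the first row matching the current configuration, roughly as in this self-contained sketch with a trimmed two-row table:

#include <stdio.h>
#include <stddef.h>

struct cxsr_latency {
	int is_desktop, is_ddr3;
	int fsb_freq, mem_freq;          /* frequency keys, units as in the table */
	int display_sr, cursor_sr;
	int display_hpll_disable, cursor_hpll_disable;
};

/* Two rows copied from the listing above, for illustration only. */
static const struct cxsr_latency cxsr_table[] = {
	{1, 0, 800, 400, 3382, 33382, 3983, 33983},   /* DDR2-400 SC */
	{1, 1, 800, 667, 6420, 36420, 6873, 36873},   /* DDR3-667 SC */
};

static const struct cxsr_latency *get_cxsr_latency(int is_desktop, int is_ddr3,
						   int fsb, int mem)
{
	size_t i;

	for (i = 0; i < sizeof(cxsr_table) / sizeof(cxsr_table[0]); i++) {
		const struct cxsr_latency *l = &cxsr_table[i];

		if (is_desktop == l->is_desktop && is_ddr3 == l->is_ddr3 &&
		    fsb == l->fsb_freq && mem == l->mem_freq)
			return l;
	}
	return NULL;                     /* no matching row */
}

int main(void)
{
	const struct cxsr_latency *l = get_cxsr_latency(1, 1, 800, 667);

	if (l)
		printf("display_sr latency = %d\n", l->display_sr);
	return 0;
}
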
3779 struct drm_i915_private *dev_priv = dev->dev_private; in pineview_disable_cxsr()
3787 * - memory configuration (speed, channels)
3788 * - chipset
3789 * - current MCH state
3803 struct drm_i915_private *dev_priv = dev->dev_private; in i9xx_get_fifo_size()
3809 size = ((dsparb >> DSPARB_CSTART_SHIFT) & 0x7f) - size; in i9xx_get_fifo_size()
3811 DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d\n", dsparb, in i9xx_get_fifo_size()
3819 struct drm_i915_private *dev_priv = dev->dev_private; in i85x_get_fifo_size()
3825 size = ((dsparb >> DSPARB_BEND_SHIFT) & 0x1ff) - size; in i85x_get_fifo_size()
3828 DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d\n", dsparb, in i85x_get_fifo_size()
3836 struct drm_i915_private *dev_priv = dev->dev_private; in i845_get_fifo_size()
3843 DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d\n", dsparb, in i845_get_fifo_size()
3852 struct drm_i915_private *dev_priv = dev->dev_private; in i830_get_fifo_size()
3859 DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d\n", dsparb, in i830_get_fifo_size()
3869 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { in single_enabled_crtc()
3870 if (crtc->enabled && crtc->fb) { in single_enabled_crtc()
3882 struct drm_i915_private *dev_priv = dev->dev_private; in pineview_update_wm()
3888 latency = intel_get_cxsr_latency(IS_PINEVIEW_G(dev), dev_priv->is_ddr3, in pineview_update_wm()
3889 dev_priv->fsb_freq, dev_priv->mem_freq); in pineview_update_wm()
3898 int clock = crtc->mode.clock; in pineview_update_wm()
3899 int pixel_size = crtc->fb->bits_per_pixel / 8; in pineview_update_wm()
3904 pixel_size, latency->display_sr); in pineview_update_wm()
3914 pixel_size, latency->cursor_sr); in pineview_update_wm()
3923 pixel_size, latency->display_hpll_disable); in pineview_update_wm()
3932 pixel_size, latency->cursor_hpll_disable); in pineview_update_wm()
3942 DRM_DEBUG_KMS("Self-refresh is enabled\n"); in pineview_update_wm()
3945 DRM_DEBUG_KMS("Self-refresh is disabled\n"); in pineview_update_wm()
3964 if (crtc->fb == NULL || !crtc->enabled) { in g4x_compute_wm0()
3965 *cursor_wm = cursor->guard_size; in g4x_compute_wm0()
3966 *plane_wm = display->guard_size; in g4x_compute_wm0()
3970 htotal = crtc->mode.htotal; in g4x_compute_wm0()
3971 hdisplay = crtc->mode.hdisplay; in g4x_compute_wm0()
3972 clock = crtc->mode.clock; in g4x_compute_wm0()
3973 pixel_size = crtc->fb->bits_per_pixel / 8; in g4x_compute_wm0()
3977 tlb_miss = display->fifo_size*display->cacheline_size - hdisplay * 8; in g4x_compute_wm0()
3980 entries = DIV_ROUND_UP(entries, display->cacheline_size); in g4x_compute_wm0()
3981 *plane_wm = entries + display->guard_size; in g4x_compute_wm0()
3982 if (*plane_wm > (int)display->max_wm) in g4x_compute_wm0()
3983 *plane_wm = display->max_wm; in g4x_compute_wm0()
3989 tlb_miss = cursor->fifo_size*cursor->cacheline_size - hdisplay * 8; in g4x_compute_wm0()
3992 entries = DIV_ROUND_UP(entries, cursor->cacheline_size); in g4x_compute_wm0()
3993 *cursor_wm = entries + cursor->guard_size; in g4x_compute_wm0()
3994 if (*cursor_wm > (int)cursor->max_wm) in g4x_compute_wm0()
3995 *cursor_wm = (int)cursor->max_wm; in g4x_compute_wm0()
4015 if (display_wm > display->max_wm) { in g4x_check_srwm()
4017 display_wm, display->max_wm); in g4x_check_srwm()
4021 if (cursor_wm > cursor->max_wm) { in g4x_check_srwm()
4023 cursor_wm, cursor->max_wm); in g4x_check_srwm()
4055 hdisplay = crtc->mode.hdisplay; in g4x_compute_srwm()
4056 htotal = crtc->mode.htotal; in g4x_compute_srwm()
4057 clock = crtc->mode.clock; in g4x_compute_srwm()
4058 pixel_size = crtc->fb->bits_per_pixel / 8; in g4x_compute_srwm()
4068 entries = DIV_ROUND_UP(min(small, large), display->cacheline_size); in g4x_compute_srwm()
4069 *display_wm = entries + display->guard_size; in g4x_compute_srwm()
4071 /* calculate the self-refresh watermark for display cursor */ in g4x_compute_srwm()
4073 entries = DIV_ROUND_UP(entries, cursor->cacheline_size); in g4x_compute_srwm()
4074 *cursor_wm = entries + cursor->guard_size; in g4x_compute_srwm()
4086 struct drm_i915_private *dev_priv = dev->dev_private; in g4x_update_wm()
4105 g4x_compute_srwm(dev, ffs(enabled) - 1, in g4x_update_wm()
4115 …DRM_DEBUG_KMS("Setting FIFO watermarks - A: plane=%d, cursor=%d, B: plane=%d, cursor=%d, SR: plane… in g4x_update_wm()
4136 struct drm_i915_private *dev_priv = dev->dev_private; in i965_update_wm()
4144 /* self-refresh has much higher latency */ in i965_update_wm()
4146 int clock = crtc->mode.clock; in i965_update_wm()
4147 int htotal = crtc->mode.htotal; in i965_update_wm()
4148 int hdisplay = crtc->mode.hdisplay; in i965_update_wm()
4149 int pixel_size = crtc->fb->bits_per_pixel / 8; in i965_update_wm()
4159 srwm = I965_FIFO_SIZE - entries; in i965_update_wm()
4163 DRM_DEBUG_KMS("self-refresh entries: %d, wm: %d\n", in i965_update_wm()
4170 cursor_sr = i965_cursor_wm_info.fifo_size - in i965_update_wm()
4176 DRM_DEBUG_KMS("self-refresh watermark: display plane %d " in i965_update_wm()
4188 DRM_DEBUG_KMS("Setting FIFO watermarks - A: 8, B: 8, C: 8, SR %d\n", in i965_update_wm()
4201 struct drm_i915_private *dev_priv = dev->dev_private; in i9xx_update_wm()
4217 fifo_size = dev_priv->display.get_fifo_size(dev, 0); in i9xx_update_wm()
4219 if (crtc->enabled && crtc->fb) { in i9xx_update_wm()
4220 planea_wm = intel_calculate_wm(crtc->mode.clock, in i9xx_update_wm()
4222 crtc->fb->bits_per_pixel / 8, in i9xx_update_wm()
4226 planea_wm = fifo_size - wm_info->guard_size; in i9xx_update_wm()
4228 fifo_size = dev_priv->display.get_fifo_size(dev, 1); in i9xx_update_wm()
4230 if (crtc->enabled && crtc->fb) { in i9xx_update_wm()
4231 planeb_wm = intel_calculate_wm(crtc->mode.clock, in i9xx_update_wm()
4233 crtc->fb->bits_per_pixel / 8, in i9xx_update_wm()
4240 planeb_wm = fifo_size - wm_info->guard_size; in i9xx_update_wm()
4242 DRM_DEBUG_KMS("FIFO watermarks - A: %d, B: %d\n", planea_wm, planeb_wm); in i9xx_update_wm()
4249 /* Play safe and disable self-refresh before adjusting watermarks. */ in i9xx_update_wm()
4257 /* self-refresh has much higher latency */ in i9xx_update_wm()
4259 int clock = enabled->mode.clock; in i9xx_update_wm()
4260 int htotal = enabled->mode.htotal; in i9xx_update_wm()
4261 int hdisplay = enabled->mode.hdisplay; in i9xx_update_wm()
4262 int pixel_size = enabled->fb->bits_per_pixel / 8; in i9xx_update_wm()
4271 entries = DIV_ROUND_UP(entries, wm_info->cacheline_size); in i9xx_update_wm()
4272 DRM_DEBUG_KMS("self-refresh entries: %d\n", entries); in i9xx_update_wm()
4273 srwm = wm_info->fifo_size - entries; in i9xx_update_wm()
4284 DRM_DEBUG_KMS("Setting FIFO watermarks - A: %d, B: %d, C: %d, SR %d\n", in i9xx_update_wm()
4312 struct drm_i915_private *dev_priv = dev->dev_private; in i830_update_wm()
4321 planea_wm = intel_calculate_wm(crtc->mode.clock, &i830_wm_info, in i830_update_wm()
4322 dev_priv->display.get_fifo_size(dev, 0), in i830_update_wm()
4323 crtc->fb->bits_per_pixel / 8, in i830_update_wm()
4328 DRM_DEBUG_KMS("Setting FIFO watermarks - A: %d\n", planea_wm); in i830_update_wm()
4348 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_check_srwm()
4363 if (display_wm > display->max_wm) { in ironlake_check_srwm()
4369 if (cursor_wm > cursor->max_wm) { in ironlake_check_srwm()
4384 * Compute watermark values of WM[1-3],
4405 hdisplay = crtc->mode.hdisplay; in ironlake_compute_srwm()
4406 htotal = crtc->mode.htotal; in ironlake_compute_srwm()
4407 clock = crtc->mode.clock; in ironlake_compute_srwm()
4408 pixel_size = crtc->fb->bits_per_pixel / 8; in ironlake_compute_srwm()
4418 entries = DIV_ROUND_UP(min(small, large), display->cacheline_size); in ironlake_compute_srwm()
4419 *display_wm = entries + display->guard_size; in ironlake_compute_srwm()
4427 /* calculate the self-refresh watermark for display cursor */ in ironlake_compute_srwm()
4429 entries = DIV_ROUND_UP(entries, cursor->cacheline_size); in ironlake_compute_srwm()
4430 *cursor_wm = entries + cursor->guard_size; in ironlake_compute_srwm()
4439 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_update_wm()
4452 DRM_DEBUG_KMS("FIFO watermarks For pipe A -" in ironlake_update_wm()
4466 DRM_DEBUG_KMS("FIFO watermarks For pipe B -" in ironlake_update_wm()
4473 * Calculate and update the self-refresh watermark only when one in ironlake_update_wm()
4482 enabled = ffs(enabled) - 1; in ironlake_update_wm()
4522 struct drm_i915_private *dev_priv = dev->dev_private; in sandybridge_update_wm()
4534 DRM_DEBUG_KMS("FIFO watermarks For pipe A -" in sandybridge_update_wm()
4546 DRM_DEBUG_KMS("FIFO watermarks For pipe B -" in sandybridge_update_wm()
4560 DRM_DEBUG_KMS("FIFO watermarks For pipe C -" in sandybridge_update_wm()
4567 * Calculate and update the self-refresh watermark only when one in sandybridge_update_wm()
4581 dev_priv->sprite_scaling_enabled) in sandybridge_update_wm()
4583 enabled = ffs(enabled) - 1; in sandybridge_update_wm()
4642 if (crtc->fb == NULL || !crtc->enabled) { in sandybridge_compute_sprite_wm()
4643 *sprite_wm = display->guard_size; in sandybridge_compute_sprite_wm()
4647 clock = crtc->mode.clock; in sandybridge_compute_sprite_wm()
4651 tlb_miss = display->fifo_size*display->cacheline_size - in sandybridge_compute_sprite_wm()
4655 entries = DIV_ROUND_UP(entries, display->cacheline_size); in sandybridge_compute_sprite_wm()
4656 *sprite_wm = entries + display->guard_size; in sandybridge_compute_sprite_wm()
4657 if (*sprite_wm > (int)display->max_wm) in sandybridge_compute_sprite_wm()
4658 *sprite_wm = display->max_wm; in sandybridge_compute_sprite_wm()
4682 clock = crtc->mode.clock; in sandybridge_compute_sprite_srwm()
4701 entries = DIV_ROUND_UP(min(small, large), display->cacheline_size); in sandybridge_compute_sprite_srwm()
4702 *sprite_wm = entries + display->guard_size; in sandybridge_compute_sprite_srwm()
4710 struct drm_i915_private *dev_priv = dev->dev_private; in sandybridge_update_sprite_wm()
4739 DRM_DEBUG_KMS("sprite watermarks For pipe %d - %d\n", pipe, sprite_wm); in sandybridge_update_sprite_wm()
4784 * intel_update_watermarks - update FIFO watermark values based on current modes
4790 * - normal (i.e. non-self-refresh)
4791 * - self-refresh (SR) mode
4792 * - lines are large relative to FIFO size (buffer can hold up to 2)
4793 * - lines are small relative to FIFO size (buffer can hold more than 2
4813 * to set the non-SR watermarks to 8.
4817 struct drm_i915_private *dev_priv = dev->dev_private; in intel_update_watermarks()
4819 if (dev_priv->display.update_wm) in intel_update_watermarks()
4820 dev_priv->display.update_wm(dev); in intel_update_watermarks()
4826 struct drm_i915_private *dev_priv = dev->dev_private; in intel_update_sprite_watermarks()
4828 if (dev_priv->display.update_sprite_wm) in intel_update_sprite_watermarks()
4829 dev_priv->display.update_sprite_wm(dev, pipe, sprite_width, in intel_update_sprite_watermarks()
4837 return dev_priv->lvds_use_ssc in intel_panel_use_ssc()
4838 && !(dev_priv->quirks & QUIRK_LVDS_SSC_DISABLE); in intel_panel_use_ssc()
4842 * intel_choose_pipe_bpp_dither - figure out what color depth the pipe should send
4847 * attached framebuffer, choose a good color depth to use on the pipe.
4866 struct drm_device *dev = crtc->dev; in intel_choose_pipe_bpp_dither()
4867 struct drm_i915_private *dev_priv = dev->dev_private; in intel_choose_pipe_bpp_dither()
4873 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { in intel_choose_pipe_bpp_dither()
4876 if (encoder->crtc != crtc) in intel_choose_pipe_bpp_dither()
4879 if (intel_encoder->type == INTEL_OUTPUT_LVDS) { in intel_choose_pipe_bpp_dither()
4895 if (intel_encoder->type == INTEL_OUTPUT_EDP) { in intel_choose_pipe_bpp_dither()
4897 unsigned int edp_bpc = dev_priv->edp.bpp / 3; in intel_choose_pipe_bpp_dither()
4907 list_for_each_entry(connector, &dev->mode_config.connector_list, in intel_choose_pipe_bpp_dither()
4909 if (connector->encoder != encoder) in intel_choose_pipe_bpp_dither()
4913 if (connector->display_info.bpc && in intel_choose_pipe_bpp_dither()
4914 connector->display_info.bpc < display_bpc) { in intel_choose_pipe_bpp_dither()
4915 …ping display bpc (was %d) to EDID reported max of %d\n", display_bpc, connector->display_info.bpc); in intel_choose_pipe_bpp_dither()
4916 display_bpc = connector->display_info.bpc; in intel_choose_pipe_bpp_dither()
4924 if (intel_encoder->type == INTEL_OUTPUT_HDMI) { in intel_choose_pipe_bpp_dither()
4935 if (mode->private_flags & INTEL_MODE_DP_FORCE_6BPC) { in intel_choose_pipe_bpp_dither()
4947 switch (crtc->fb->depth) { in intel_choose_pipe_bpp_dither()
4986 struct drm_device *dev = crtc->dev; in i9xx_crtc_mode_set()
4987 struct drm_i915_private *dev_priv = dev->dev_private; in i9xx_crtc_mode_set()
4989 int pipe = intel_crtc->pipe; in i9xx_crtc_mode_set()
4990 int plane = intel_crtc->plane; in i9xx_crtc_mode_set()
4996 struct drm_mode_config *mode_config = &dev->mode_config; in i9xx_crtc_mode_set()
5003 list_for_each_entry(encoder, &mode_config->encoder_list, base.head) { in i9xx_crtc_mode_set()
5004 if (encoder->base.crtc != crtc) in i9xx_crtc_mode_set()
5007 switch (encoder->type) { in i9xx_crtc_mode_set()
5014 if (encoder->needs_tv_clock) in i9xx_crtc_mode_set()
5035 refclk = dev_priv->lvds_ssc_freq * 1000; in i9xx_crtc_mode_set()
5050 ok = limit->find_pll(limit, crtc, adjusted_mode->clock, refclk, &clock); in i9xx_crtc_mode_set()
5053 return -EINVAL; in i9xx_crtc_mode_set()
5059 if (is_lvds && dev_priv->lvds_downclock_avail) { in i9xx_crtc_mode_set()
5060 has_reduced_clock = limit->find_pll(limit, crtc, in i9xx_crtc_mode_set()
5061 dev_priv->lvds_downclock, in i9xx_crtc_mode_set()
5079 if (adjusted_mode->clock >= 100000 in i9xx_crtc_mode_set()
5080 && adjusted_mode->clock < 140500) { in i9xx_crtc_mode_set()
5086 } else if (adjusted_mode->clock >= 140500 in i9xx_crtc_mode_set()
5087 && adjusted_mode->clock <= 200000) { in i9xx_crtc_mode_set()
5119 dpll |= (pixel_multiplier - 1) << SDVO_MULTIPLIER_SHIFT_HIRES; in i9xx_crtc_mode_set()
5128 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT_PINEVIEW; in i9xx_crtc_mode_set()
5130 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT; in i9xx_crtc_mode_set()
5132 dpll |= (1 << (reduced_clock.p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT; in i9xx_crtc_mode_set()
5148 if (INTEL_INFO(dev)->gen >= 4) in i9xx_crtc_mode_set()
5152 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT; in i9xx_crtc_mode_set()
5157 dpll |= (clock.p1 - 2) << DPLL_FPA01_P1_POST_DIV_SHIFT; in i9xx_crtc_mode_set()
5187 if (pipe == 0 && INTEL_INFO(dev)->gen < 4) { in i9xx_crtc_mode_set()
5191 * XXX: No double-wide on 915GM pipe B. Is that the only reason for the in i9xx_crtc_mode_set()
5194 if (mode->clock > in i9xx_crtc_mode_set()
5195 dev_priv->display.get_display_clock_speed(dev) * 9 / 10) in i9xx_crtc_mode_set()
5204 if (mode->private_flags & INTEL_MODE_DP_FORCE_6BPC) { in i9xx_crtc_mode_set()
5235 temp |= dev_priv->lvds_border_bits; in i9xx_crtc_mode_set()
5236 /* Set the B0-B3 data pairs corresponding to whether we're going to in i9xx_crtc_mode_set()
5237 * set the DPLLs for dual-channel mode or not. in i9xx_crtc_mode_set()
5244 /* It would be nice to set 24 vs 18-bit mode (LVDS_A3_POWER_UP) in i9xx_crtc_mode_set()
5249 if (INTEL_INFO(dev)->gen >= 4) { in i9xx_crtc_mode_set()
5250 if (dev_priv->lvds_dither) in i9xx_crtc_mode_set()
5255 if (adjusted_mode->flags & DRM_MODE_FLAG_NHSYNC) in i9xx_crtc_mode_set()
5257 if (adjusted_mode->flags & DRM_MODE_FLAG_NVSYNC) in i9xx_crtc_mode_set()
5261 char flags[2] = "-+"; in i9xx_crtc_mode_set()
5284 if (INTEL_INFO(dev)->gen >= 4) { in i9xx_crtc_mode_set()
5289 temp = (temp - 1) << DPLL_MD_UDI_MULTIPLIER_SHIFT; in i9xx_crtc_mode_set()
5303 intel_crtc->lowfreq_avail = false; in i9xx_crtc_mode_set()
5306 intel_crtc->lowfreq_avail = true; in i9xx_crtc_mode_set()
5320 if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) { in i9xx_crtc_mode_set()
5323 adjusted_mode->crtc_vdisplay -= 1; in i9xx_crtc_mode_set()
5324 adjusted_mode->crtc_vtotal -= 1; in i9xx_crtc_mode_set()
5325 adjusted_mode->crtc_vblank_start -= 1; in i9xx_crtc_mode_set()
5326 adjusted_mode->crtc_vblank_end -= 1; in i9xx_crtc_mode_set()
5327 adjusted_mode->crtc_vsync_end -= 1; in i9xx_crtc_mode_set()
5328 adjusted_mode->crtc_vsync_start -= 1; in i9xx_crtc_mode_set()
5333 (adjusted_mode->crtc_hdisplay - 1) | in i9xx_crtc_mode_set()
5334 ((adjusted_mode->crtc_htotal - 1) << 16)); in i9xx_crtc_mode_set()
5336 (adjusted_mode->crtc_hblank_start - 1) | in i9xx_crtc_mode_set()
5337 ((adjusted_mode->crtc_hblank_end - 1) << 16)); in i9xx_crtc_mode_set()
5339 (adjusted_mode->crtc_hsync_start - 1) | in i9xx_crtc_mode_set()
5340 ((adjusted_mode->crtc_hsync_end - 1) << 16)); in i9xx_crtc_mode_set()
5343 (adjusted_mode->crtc_vdisplay - 1) | in i9xx_crtc_mode_set()
5344 ((adjusted_mode->crtc_vtotal - 1) << 16)); in i9xx_crtc_mode_set()
5346 (adjusted_mode->crtc_vblank_start - 1) | in i9xx_crtc_mode_set()
5347 ((adjusted_mode->crtc_vblank_end - 1) << 16)); in i9xx_crtc_mode_set()
5349 (adjusted_mode->crtc_vsync_start - 1) | in i9xx_crtc_mode_set()
5350 ((adjusted_mode->crtc_vsync_end - 1) << 16)); in i9xx_crtc_mode_set()
5356 ((mode->vdisplay - 1) << 16) | in i9xx_crtc_mode_set()
5357 (mode->hdisplay - 1)); in i9xx_crtc_mode_set()
5360 ((mode->hdisplay - 1) << 16) | (mode->vdisplay - 1)); in i9xx_crtc_mode_set()
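
All of the HTOTAL/HBLANK/HSYNC and VTOTAL/VBLANK/VSYNC writes above use one encoding: the hardware takes zero-based values, with the active/start figure in the low 16 bits and the total/end figure in the high 16 bits (the pipe-source and plane-size writes at the end swap which field goes in the high half). A tiny sketch of that packing; the 1920/2200 numbers are illustrative, not a real mode line:

#include <stdio.h>

/* Pack two zero-based timing values into one 32-bit register image:
 * low 16 bits = (first - 1), high 16 bits = (second - 1). */
static unsigned int pack_timing(unsigned int first, unsigned int second)
{
	return (first - 1) | ((second - 1) << 16);
}

int main(void)
{
	unsigned int hdisplay = 1920, htotal = 2200;   /* illustrative values */

	printf("HTOTAL register image = 0x%08x\n", pack_timing(hdisplay, htotal));
	return 0;
}
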
5384 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_init_pch_refclk()
5385 struct drm_mode_config *mode_config = &dev->mode_config; in ironlake_init_pch_refclk()
5396 list_for_each_entry(encoder, &mode_config->encoder_list, in ironlake_init_pch_refclk()
5398 switch (encoder->type) { in ironlake_init_pch_refclk()
5405 if (intel_encoder_is_pch_edp(&encoder->base)) in ironlake_init_pch_refclk()
5414 has_ck505 = dev_priv->display_clock_mode; in ironlake_init_pch_refclk()
5497 struct drm_device *dev = crtc->dev; in ironlake_get_refclk()
5498 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_get_refclk()
5500 struct drm_mode_config *mode_config = &dev->mode_config; in ironlake_get_refclk()
5505 list_for_each_entry(encoder, &mode_config->encoder_list, base.head) { in ironlake_get_refclk()
5506 if (encoder->base.crtc != crtc) in ironlake_get_refclk()
5509 switch (encoder->type) { in ironlake_get_refclk()
5522 dev_priv->lvds_ssc_freq); in ironlake_get_refclk()
5523 return dev_priv->lvds_ssc_freq * 1000; in ironlake_get_refclk()
5535 struct drm_device *dev = crtc->dev; in ironlake_crtc_mode_set()
5536 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_crtc_mode_set()
5538 int pipe = intel_crtc->pipe; in ironlake_crtc_mode_set()
5539 int plane = intel_crtc->plane; in ironlake_crtc_mode_set()
5546 struct drm_mode_config *mode_config = &dev->mode_config; in ironlake_crtc_mode_set()
5557 list_for_each_entry(encoder, &mode_config->encoder_list, base.head) { in ironlake_crtc_mode_set()
5558 if (encoder->base.crtc != crtc) in ironlake_crtc_mode_set()
5561 switch (encoder->type) { in ironlake_crtc_mode_set()
5568 if (encoder->needs_tv_clock) in ironlake_crtc_mode_set()
5596 ok = limit->find_pll(limit, crtc, adjusted_mode->clock, refclk, &clock); in ironlake_crtc_mode_set()
5599 return -EINVAL; in ironlake_crtc_mode_set()
5605 if (is_lvds && dev_priv->lvds_downclock_avail) { in ironlake_crtc_mode_set()
5606 has_reduced_clock = limit->find_pll(limit, crtc, in ironlake_crtc_mode_set()
5607 dev_priv->lvds_downclock, in ironlake_crtc_mode_set()
5625 if (adjusted_mode->clock >= 100000 in ironlake_crtc_mode_set()
5626 && adjusted_mode->clock < 140500) { in ironlake_crtc_mode_set()
5632 } else if (adjusted_mode->clock >= 140500 in ironlake_crtc_mode_set()
5633 && adjusted_mode->clock <= 200000) { in ironlake_crtc_mode_set()
5648 !intel_encoder_is_pch_edp(&has_edp_encoder->base)) { in ironlake_crtc_mode_set()
5649 target_clock = mode->clock; in ironlake_crtc_mode_set()
5655 if (is_dp || intel_encoder_is_pch_edp(&has_edp_encoder->base)) in ironlake_crtc_mode_set()
5656 target_clock = mode->clock; in ironlake_crtc_mode_set()
5658 target_clock = adjusted_mode->clock; in ironlake_crtc_mode_set()
5695 intel_crtc->bpp = pipe_bpp; in ironlake_crtc_mode_set()
5704 u32 bps = target_clock * intel_crtc->bpp * 21 / 20; in ironlake_crtc_mode_set()
5708 intel_crtc->fdi_lanes = lane; in ironlake_crtc_mode_set()
5712 ironlake_compute_m_n(intel_crtc->bpp, lane, target_clock, link_bw, in ironlake_crtc_mode_set()
5724 dev_priv->lvds_ssc_freq == 100) || in ironlake_crtc_mode_set()
5742 dpll |= (pixel_multiplier - 1) << PLL_REF_SDVO_HDMI_MULTIPLIER_SHIFT; in ironlake_crtc_mode_set()
5746 if (is_dp || intel_encoder_is_pch_edp(&has_edp_encoder->base)) in ironlake_crtc_mode_set()
5750 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA01_P1_POST_DIV_SHIFT; in ironlake_crtc_mode_set()
5752 dpll |= (1 << (clock.p1 - 1)) << DPLL_FPA1_P1_POST_DIV_SHIFT; in ironlake_crtc_mode_set()
5790 if (!intel_crtc->no_pll) { in ironlake_crtc_mode_set()
5792 intel_encoder_is_pch_edp(&has_edp_encoder->base)) { in ironlake_crtc_mode_set()
5802 intel_crtc->use_pll_a = true; in ironlake_crtc_mode_set()
5806 intel_crtc->use_pll_a = false; in ironlake_crtc_mode_set()
5810 return -EINVAL; in ironlake_crtc_mode_set()
5832 temp |= dev_priv->lvds_border_bits; in ironlake_crtc_mode_set()
5833 /* Set the B0-B3 data pairs corresponding to whether we're going to in ironlake_crtc_mode_set()
5834 * set the DPLLs for dual-channel mode or not. in ironlake_crtc_mode_set()
5841 /* It would be nice to set 24 vs 18-bit mode (LVDS_A3_POWER_UP) in ironlake_crtc_mode_set()
5845 if (adjusted_mode->flags & DRM_MODE_FLAG_NHSYNC) in ironlake_crtc_mode_set()
5847 if (adjusted_mode->flags & DRM_MODE_FLAG_NVSYNC) in ironlake_crtc_mode_set()
5851 char flags[2] = "-+"; in ironlake_crtc_mode_set()
5866 if ((is_lvds && dev_priv->lvds_dither) || dither) { in ironlake_crtc_mode_set()
5870 if (is_dp || intel_encoder_is_pch_edp(&has_edp_encoder->base)) { in ironlake_crtc_mode_set()
5873 /* For non-DP output, clear any trans DP clock recovery setting.*/ in ironlake_crtc_mode_set()
5880 if (!intel_crtc->no_pll && in ironlake_crtc_mode_set()
5882 intel_encoder_is_pch_edp(&has_edp_encoder->base))) { in ironlake_crtc_mode_set()
5897 intel_crtc->lowfreq_avail = false; in ironlake_crtc_mode_set()
5898 if (!intel_crtc->no_pll) { in ironlake_crtc_mode_set()
5901 intel_crtc->lowfreq_avail = true; in ironlake_crtc_mode_set()
5916 if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) { in ironlake_crtc_mode_set()
5919 adjusted_mode->crtc_vdisplay -= 1; in ironlake_crtc_mode_set()
5920 adjusted_mode->crtc_vtotal -= 1; in ironlake_crtc_mode_set()
5921 adjusted_mode->crtc_vblank_start -= 1; in ironlake_crtc_mode_set()
5922 adjusted_mode->crtc_vblank_end -= 1; in ironlake_crtc_mode_set()
5923 adjusted_mode->crtc_vsync_end -= 1; in ironlake_crtc_mode_set()
5924 adjusted_mode->crtc_vsync_start -= 1; in ironlake_crtc_mode_set()
5929 (adjusted_mode->crtc_hdisplay - 1) | in ironlake_crtc_mode_set()
5930 ((adjusted_mode->crtc_htotal - 1) << 16)); in ironlake_crtc_mode_set()
5932 (adjusted_mode->crtc_hblank_start - 1) | in ironlake_crtc_mode_set()
5933 ((adjusted_mode->crtc_hblank_end - 1) << 16)); in ironlake_crtc_mode_set()
5935 (adjusted_mode->crtc_hsync_start - 1) | in ironlake_crtc_mode_set()
5936 ((adjusted_mode->crtc_hsync_end - 1) << 16)); in ironlake_crtc_mode_set()
5939 (adjusted_mode->crtc_vdisplay - 1) | in ironlake_crtc_mode_set()
5940 ((adjusted_mode->crtc_vtotal - 1) << 16)); in ironlake_crtc_mode_set()
5942 (adjusted_mode->crtc_vblank_start - 1) | in ironlake_crtc_mode_set()
5943 ((adjusted_mode->crtc_vblank_end - 1) << 16)); in ironlake_crtc_mode_set()
5945 (adjusted_mode->crtc_vsync_start - 1) | in ironlake_crtc_mode_set()
5946 ((adjusted_mode->crtc_vsync_end - 1) << 16)); in ironlake_crtc_mode_set()
5952 ((mode->hdisplay - 1) << 16) | (mode->vdisplay - 1)); in ironlake_crtc_mode_set()
5960 !intel_encoder_is_pch_edp(&has_edp_encoder->base)) { in ironlake_crtc_mode_set()
5961 ironlake_set_pll_edp(crtc, adjusted_mode->clock); in ironlake_crtc_mode_set()
5991 struct drm_device *dev = crtc->dev; in intel_crtc_mode_set()
5992 struct drm_i915_private *dev_priv = dev->dev_private; in intel_crtc_mode_set()
5994 int pipe = intel_crtc->pipe; in intel_crtc_mode_set()
5999 ret = dev_priv->display.crtc_mode_set(crtc, mode, adjusted_mode, in intel_crtc_mode_set()
6004 intel_crtc->dpms_mode = DRM_MODE_DPMS_OFF; in intel_crtc_mode_set()
6006 intel_crtc->dpms_mode = DRM_MODE_DPMS_ON; in intel_crtc_mode_set()
6016 struct drm_i915_private *dev_priv = connector->dev->dev_private; in intel_eld_uptodate()
6017 uint8_t *eld = connector->eld; in intel_eld_uptodate()
6043 struct drm_i915_private *dev_priv = connector->dev->dev_private; in g4x_write_eld()
6044 uint8_t *eld = connector->eld; in g4x_write_eld()
6083 struct drm_i915_private *dev_priv = connector->dev->dev_private; in ironlake_write_eld()
6084 uint8_t *eld = connector->eld; in ironlake_write_eld()
6092 if (HAS_PCH_IBX(connector->dev)) { in ironlake_write_eld()
6102 i = to_intel_crtc(crtc)->pipe; in ironlake_write_eld()
6118 eldv = IBX_ELD_VALIDB << ((i - 1) * 4); in ironlake_write_eld()
6156 struct drm_crtc *crtc = encoder->crtc; in intel_write_eld()
6158 struct drm_device *dev = encoder->dev; in intel_write_eld()
6159 struct drm_i915_private *dev_priv = dev->dev_private; in intel_write_eld()
6166 connector->base.id, in intel_write_eld()
6168 connector->encoder->base.id, in intel_write_eld()
6169 drm_get_encoder_name(connector->encoder)); in intel_write_eld()
6171 connector->eld[6] = drm_av_sync_delay(connector, mode) / 2; in intel_write_eld()
6173 if (dev_priv->display.write_eld) in intel_write_eld()
6174 dev_priv->display.write_eld(connector, crtc); in intel_write_eld()
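Byte 6 of the ELD is filled above with drm_av_sync_delay() / 2, which suggests the field is expressed in 2 ms units; that unit, and the clamp to one byte below, are assumptions of this sketch rather than something stated in the fragments.

#include <stdint.h>
#include <stdio.h>

/* The assignment above stores drm_av_sync_delay() / 2 into eld[6];
 * the 2 ms unit and the clamp are assumptions of this sketch. */
static uint8_t eld_sync_delay_field(unsigned delay_ms)
{
	unsigned units = delay_ms / 2;

	return units > 0xff ? 0xff : (uint8_t)units;
}

int main(void)
{
	printf("20 ms -> 0x%02x\n", eld_sync_delay_field(20)); /* 0x0a */
	return 0;
}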
6180 struct drm_device *dev = crtc->dev; in intel_crtc_load_lut()
6181 struct drm_i915_private *dev_priv = dev->dev_private; in intel_crtc_load_lut()
6183 int palreg = PALETTE(intel_crtc->pipe); in intel_crtc_load_lut()
6187 if (!crtc->enabled || !intel_crtc->active) in intel_crtc_load_lut()
6192 palreg = LGC_PALETTE(intel_crtc->pipe); in intel_crtc_load_lut()
6196 (intel_crtc->lut_r[i] << 16) | in intel_crtc_load_lut()
6197 (intel_crtc->lut_g[i] << 8) | in intel_crtc_load_lut()
6198 intel_crtc->lut_b[i]); in intel_crtc_load_lut()
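The palette load above packs one 8-bit channel per byte into a single register word, and intel_crtc_gamma_set() further down keeps only the top byte of each 16-bit DRM gamma entry. A self-contained sketch of both conversions:

#include <assert.h>
#include <stdint.h>

/* One palette word as written above: 8 bits each of R, G, B. */
static uint32_t pack_lut_entry(uint8_t r, uint8_t g, uint8_t b)
{
	return ((uint32_t)r << 16) | ((uint32_t)g << 8) | b;
}

/* DRM gamma ramps are 16 bits per channel; the >> 8 used further down
 * keeps only the top byte. */
static uint8_t gamma16_to_lut8(uint16_t v)
{
	return (uint8_t)(v >> 8);
}

int main(void)
{
	assert(pack_lut_entry(0x12, 0x34, 0x56) == 0x123456);
	assert(gamma16_to_lut8(0xabcd) == 0xab);
	return 0;
}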
6204 struct drm_device *dev = crtc->dev; in i845_update_cursor()
6205 struct drm_i915_private *dev_priv = dev->dev_private; in i845_update_cursor()
6210 if (intel_crtc->cursor_visible == visible) in i845_update_cursor()
6229 intel_crtc->cursor_visible = visible; in i845_update_cursor()
6234 struct drm_device *dev = crtc->dev; in i9xx_update_cursor()
6235 struct drm_i915_private *dev_priv = dev->dev_private; in i9xx_update_cursor()
6237 int pipe = intel_crtc->pipe; in i9xx_update_cursor()
6240 if (intel_crtc->cursor_visible != visible) { in i9xx_update_cursor()
6252 intel_crtc->cursor_visible = visible; in i9xx_update_cursor()
6260 struct drm_device *dev = crtc->dev; in ivb_update_cursor()
6261 struct drm_i915_private *dev_priv = dev->dev_private; in ivb_update_cursor()
6263 int pipe = intel_crtc->pipe; in ivb_update_cursor()
6266 if (intel_crtc->cursor_visible != visible) { in ivb_update_cursor()
6277 intel_crtc->cursor_visible = visible; in ivb_update_cursor()
6283 /* If no part of the cursor is visible on the framebuffer, then the GPU may hang... */

6287 struct drm_device *dev = crtc->dev; in intel_crtc_update_cursor()
6288 struct drm_i915_private *dev_priv = dev->dev_private; in intel_crtc_update_cursor()
6290 int pipe = intel_crtc->pipe; in intel_crtc_update_cursor()
6291 int x = intel_crtc->cursor_x; in intel_crtc_update_cursor()
6292 int y = intel_crtc->cursor_y; in intel_crtc_update_cursor()
6298 if (on && crtc->enabled && crtc->fb) { in intel_crtc_update_cursor()
6299 base = intel_crtc->cursor_addr; in intel_crtc_update_cursor()
6300 if (x > (int) crtc->fb->width) in intel_crtc_update_cursor()
6303 if (y > (int) crtc->fb->height) in intel_crtc_update_cursor()
6309 if (x + intel_crtc->cursor_width < 0) in intel_crtc_update_cursor()
6313 x = -x; in intel_crtc_update_cursor()
6318 if (y + intel_crtc->cursor_height < 0) in intel_crtc_update_cursor()
6322 y = -y; in intel_crtc_update_cursor()
6327 if (!visible && !intel_crtc->cursor_visible) in intel_crtc_update_cursor()
6342 intel_mark_busy(dev, to_intel_framebuffer(crtc->fb)->obj); in intel_crtc_update_cursor()
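The negative-coordinate handling above (x = -x, y = -y) matches a position register that stores a magnitude plus a per-axis sign flag rather than two's complement. The bit layout in this sketch (x low, y high, sign at bit 15 of each half) is an assumption made for illustration; the real definitions live in i915_reg.h.

#include <stdint.h>
#include <stdio.h>

/* Assumed layout for illustration: x in the low 16 bits, y in the high
 * 16 bits, sign flag at bit 15 of each half. */
#define CUR_POS_SIGN 0x8000u
#define CUR_X_SHIFT  0
#define CUR_Y_SHIFT  16

/* Negative coordinates become a magnitude plus the sign flag, mirroring
 * the x = -x / y = -y handling above. */
static uint32_t cursor_pos(int x, int y)
{
	uint32_t pos = 0;

	if (x < 0) {
		pos |= CUR_POS_SIGN << CUR_X_SHIFT;
		x = -x;
	}
	pos |= (uint32_t)x << CUR_X_SHIFT;

	if (y < 0) {
		pos |= CUR_POS_SIGN << CUR_Y_SHIFT;
		y = -y;
	}
	pos |= (uint32_t)y << CUR_Y_SHIFT;

	return pos;
}

int main(void)
{
	printf("(100, 50)  -> %08x\n", (unsigned)cursor_pos(100, 50));
	printf("(-10, -20) -> %08x\n", (unsigned)cursor_pos(-10, -20));
	return 0;
}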
6350 struct drm_device *dev = crtc->dev; in intel_crtc_cursor_set()
6351 struct drm_i915_private *dev_priv = dev->dev_private; in intel_crtc_cursor_set()
6364 mutex_lock(&dev->struct_mutex); in intel_crtc_cursor_set()
6371 return -EINVAL; in intel_crtc_cursor_set()
6375 if (&obj->base == NULL) in intel_crtc_cursor_set()
6376 return -ENOENT; in intel_crtc_cursor_set()
6378 if (obj->base.size < width * height * 4) { in intel_crtc_cursor_set()
6380 ret = -ENOMEM; in intel_crtc_cursor_set()
6384 /* we only need to pin inside GTT if the cursor is non-physical */ in intel_crtc_cursor_set()
6385 mutex_lock(&dev->struct_mutex); in intel_crtc_cursor_set()
6386 if (!dev_priv->info->cursor_needs_physical) { in intel_crtc_cursor_set()
6387 if (obj->tiling_mode) { in intel_crtc_cursor_set()
6389 ret = -EINVAL; in intel_crtc_cursor_set()
6405 addr = obj->gtt_offset; in intel_crtc_cursor_set()
6409 (intel_crtc->pipe == 0) ? I915_GEM_PHYS_CURSOR_0 : I915_GEM_PHYS_CURSOR_1, in intel_crtc_cursor_set()
6415 addr = obj->phys_obj->handle->busaddr; in intel_crtc_cursor_set()
6422 if (intel_crtc->cursor_bo) { in intel_crtc_cursor_set()
6423 if (dev_priv->info->cursor_needs_physical) { in intel_crtc_cursor_set()
6424 if (intel_crtc->cursor_bo != obj) in intel_crtc_cursor_set()
6425 i915_gem_detach_phys_object(dev, intel_crtc->cursor_bo); in intel_crtc_cursor_set()
6427 i915_gem_object_unpin(intel_crtc->cursor_bo); in intel_crtc_cursor_set()
6428 drm_gem_object_unreference(&intel_crtc->cursor_bo->base); in intel_crtc_cursor_set()
6431 mutex_unlock(&dev->struct_mutex); in intel_crtc_cursor_set()
6433 intel_crtc->cursor_addr = addr; in intel_crtc_cursor_set()
6434 intel_crtc->cursor_bo = obj; in intel_crtc_cursor_set()
6435 intel_crtc->cursor_width = width; in intel_crtc_cursor_set()
6436 intel_crtc->cursor_height = height; in intel_crtc_cursor_set()
6444 mutex_unlock(&dev->struct_mutex); in intel_crtc_cursor_set()
6446 drm_gem_object_unreference_unlocked(&obj->base); in intel_crtc_cursor_set()
6454 intel_crtc->cursor_x = x; in intel_crtc_cursor_move()
6455 intel_crtc->cursor_y = y; in intel_crtc_cursor_move()
6468 intel_crtc->lut_r[regno] = red >> 8; in intel_crtc_fb_gamma_set()
6469 intel_crtc->lut_g[regno] = green >> 8; in intel_crtc_fb_gamma_set()
6470 intel_crtc->lut_b[regno] = blue >> 8; in intel_crtc_fb_gamma_set()
6478 *red = intel_crtc->lut_r[regno] << 8; in intel_crtc_fb_gamma_get()
6479 *green = intel_crtc->lut_g[regno] << 8; in intel_crtc_fb_gamma_get()
6480 *blue = intel_crtc->lut_b[regno] << 8; in intel_crtc_fb_gamma_get()
6490 intel_crtc->lut_r[i] = red[i] >> 8; in intel_crtc_gamma_set()
6491 intel_crtc->lut_g[i] = green[i] >> 8; in intel_crtc_gamma_set()
6492 intel_crtc->lut_b[i] = blue[i] >> 8; in intel_crtc_gamma_set()
6499 * Get a pipe with a simple mode set on it for doing load-based monitor
6502 * It will be up to the load-detect code to adjust the pipe as appropriate for
6528 drm_gem_object_unreference_unlocked(&obj->base); in intel_framebuffer_create()
6529 return ERR_PTR(-ENOMEM); in intel_framebuffer_create()
6534 drm_gem_object_unreference_unlocked(&obj->base); in intel_framebuffer_create()
6539 return &intel_fb->base; in intel_framebuffer_create()
6552 u32 pitch = intel_framebuffer_pitch_for_width(mode->hdisplay, bpp); in intel_framebuffer_size_for_mode()
6553 return ALIGN(pitch * mode->vdisplay, PAGE_SIZE); in intel_framebuffer_size_for_mode()
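intel_framebuffer_size_for_mode() above rounds pitch * vdisplay up to a whole page; the pitch helper's body is not shown, so this sketch assumes it rounds the per-line byte count up to 64 bytes, the same 64-byte pitch rule intel_framebuffer_init() enforces further down.

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096u

/* Round x up to a power-of-two multiple, like the kernel's ALIGN(). */
static uint32_t align_up(uint32_t x, uint32_t a)
{
	return (x + a - 1) & ~(a - 1);
}

/* Assumed pitch rule: per-line bytes rounded up to 64; object size is
 * pitch * vdisplay rounded up to a whole page, as in the return above. */
static uint32_t fb_size_for_mode(int hdisplay, int vdisplay, int bpp)
{
	uint32_t pitch = align_up((uint32_t)hdisplay * (uint32_t)bpp / 8, 64);

	return align_up(pitch * (uint32_t)vdisplay, PAGE_SIZE);
}

int main(void)
{
	/* 1024x768 at 32 bpp: pitch 4096 bytes, size 3 MiB. */
	printf("%u bytes\n", (unsigned)fb_size_for_mode(1024, 768, 32));
	return 0;
}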
6567 return ERR_PTR(-ENOMEM); in intel_framebuffer_create_for_mode()
6569 mode_cmd.width = mode->hdisplay; in intel_framebuffer_create_for_mode()
6570 mode_cmd.height = mode->vdisplay; in intel_framebuffer_create_for_mode()
6582 struct drm_i915_private *dev_priv = dev->dev_private; in mode_fits_in_fbdev()
6586 if (dev_priv->fbdev == NULL) in mode_fits_in_fbdev()
6589 obj = dev_priv->fbdev->ifb.obj; in mode_fits_in_fbdev()
6593 fb = &dev_priv->fbdev->ifb.base; in mode_fits_in_fbdev()
6594 if (fb->pitches[0] < intel_framebuffer_pitch_for_width(mode->hdisplay, in mode_fits_in_fbdev()
6595 fb->bits_per_pixel)) in mode_fits_in_fbdev()
6598 if (obj->base.size < mode->vdisplay * fb->pitches[0]) in mode_fits_in_fbdev()
6611 struct drm_encoder *encoder = &intel_encoder->base; in intel_get_load_detect_pipe()
6613 struct drm_device *dev = encoder->dev; in intel_get_load_detect_pipe()
6615 int i = -1; in intel_get_load_detect_pipe()
6618 connector->base.id, drm_get_connector_name(connector), in intel_get_load_detect_pipe()
6619 encoder->base.id, drm_get_encoder_name(encoder)); in intel_get_load_detect_pipe()
6624 * - if the connector already has an assigned crtc, use it (but make in intel_get_load_detect_pipe()
6627 * - try to find the first unused crtc that can drive this connector, in intel_get_load_detect_pipe()
6632 if (encoder->crtc) { in intel_get_load_detect_pipe()
6633 crtc = encoder->crtc; in intel_get_load_detect_pipe()
6636 old->dpms_mode = intel_crtc->dpms_mode; in intel_get_load_detect_pipe()
6637 old->load_detect_temp = false; in intel_get_load_detect_pipe()
6640 if (intel_crtc->dpms_mode != DRM_MODE_DPMS_ON) { in intel_get_load_detect_pipe()
6644 crtc_funcs = crtc->helper_private; in intel_get_load_detect_pipe()
6645 crtc_funcs->dpms(crtc, DRM_MODE_DPMS_ON); in intel_get_load_detect_pipe()
6647 encoder_funcs = encoder->helper_private; in intel_get_load_detect_pipe()
6648 encoder_funcs->dpms(encoder, DRM_MODE_DPMS_ON); in intel_get_load_detect_pipe()
6655 list_for_each_entry(possible_crtc, &dev->mode_config.crtc_list, head) { in intel_get_load_detect_pipe()
6657 if (!(encoder->possible_crtcs & (1 << i))) in intel_get_load_detect_pipe()
6659 if (!possible_crtc->enabled) { in intel_get_load_detect_pipe()
6669 DRM_DEBUG_KMS("no pipe available for load-detect\n"); in intel_get_load_detect_pipe()
6673 encoder->crtc = crtc; in intel_get_load_detect_pipe()
6674 connector->encoder = encoder; in intel_get_load_detect_pipe()
6677 old->dpms_mode = intel_crtc->dpms_mode; in intel_get_load_detect_pipe()
6678 old->load_detect_temp = true; in intel_get_load_detect_pipe()
6679 old->release_fb = NULL; in intel_get_load_detect_pipe()
6684 old_fb = crtc->fb; in intel_get_load_detect_pipe()
6686 /* We need a framebuffer large enough to accommodate all accesses in intel_get_load_detect_pipe()
6693 crtc->fb = mode_fits_in_fbdev(dev, mode); in intel_get_load_detect_pipe()
6694 if (crtc->fb == NULL) { in intel_get_load_detect_pipe()
6695 DRM_DEBUG_KMS("creating tmp fb for load-detection\n"); in intel_get_load_detect_pipe()
6696 crtc->fb = intel_framebuffer_create_for_mode(dev, mode, 24, 32); in intel_get_load_detect_pipe()
6697 old->release_fb = crtc->fb; in intel_get_load_detect_pipe()
6699 DRM_DEBUG_KMS("reusing fbdev for load-detection framebuffer\n"); in intel_get_load_detect_pipe()
6700 if (IS_ERR(crtc->fb)) { in intel_get_load_detect_pipe()
6701 DRM_DEBUG_KMS("failed to allocate framebuffer for load-detection\n"); in intel_get_load_detect_pipe()
6702 crtc->fb = old_fb; in intel_get_load_detect_pipe()
6707 DRM_DEBUG_KMS("failed to set mode on load-detect pipe\n"); in intel_get_load_detect_pipe()
6708 if (old->release_fb) in intel_get_load_detect_pipe()
6709 old->release_fb->funcs->destroy(old->release_fb); in intel_get_load_detect_pipe()
6710 crtc->fb = old_fb; in intel_get_load_detect_pipe()
6715 intel_wait_for_vblank(dev, intel_crtc->pipe); in intel_get_load_detect_pipe()
6724 struct drm_encoder *encoder = &intel_encoder->base; in intel_release_load_detect_pipe()
6725 struct drm_device *dev = encoder->dev; in intel_release_load_detect_pipe()
6726 struct drm_crtc *crtc = encoder->crtc; in intel_release_load_detect_pipe()
6727 struct drm_encoder_helper_funcs *encoder_funcs = encoder->helper_private; in intel_release_load_detect_pipe()
6728 struct drm_crtc_helper_funcs *crtc_funcs = crtc->helper_private; in intel_release_load_detect_pipe()
6731 connector->base.id, drm_get_connector_name(connector), in intel_release_load_detect_pipe()
6732 encoder->base.id, drm_get_encoder_name(encoder)); in intel_release_load_detect_pipe()
6734 if (old->load_detect_temp) { in intel_release_load_detect_pipe()
6735 connector->encoder = NULL; in intel_release_load_detect_pipe()
6738 if (old->release_fb) in intel_release_load_detect_pipe()
6739 old->release_fb->funcs->destroy(old->release_fb); in intel_release_load_detect_pipe()
6745 if (old->dpms_mode != DRM_MODE_DPMS_ON) { in intel_release_load_detect_pipe()
6746 encoder_funcs->dpms(encoder, old->dpms_mode); in intel_release_load_detect_pipe()
6747 crtc_funcs->dpms(crtc, old->dpms_mode); in intel_release_load_detect_pipe()
6754 struct drm_i915_private *dev_priv = dev->dev_private; in intel_crtc_clock_get()
6756 int pipe = intel_crtc->pipe; in intel_crtc_clock_get()
6768 clock.n = ffs((fp & FP_N_PINEVIEW_DIV_MASK) >> FP_N_DIV_SHIFT) - 1; in intel_crtc_clock_get()
6842 struct drm_i915_private *dev_priv = dev->dev_private; in intel_crtc_mode_get()
6844 int pipe = intel_crtc->pipe; in intel_crtc_mode_get()
6855 mode->clock = intel_crtc_clock_get(dev, crtc); in intel_crtc_mode_get()
6856 mode->hdisplay = (htot & 0xffff) + 1; in intel_crtc_mode_get()
6857 mode->htotal = ((htot & 0xffff0000) >> 16) + 1; in intel_crtc_mode_get()
6858 mode->hsync_start = (hsync & 0xffff) + 1; in intel_crtc_mode_get()
6859 mode->hsync_end = ((hsync & 0xffff0000) >> 16) + 1; in intel_crtc_mode_get()
6860 mode->vdisplay = (vtot & 0xffff) + 1; in intel_crtc_mode_get()
6861 mode->vtotal = ((vtot & 0xffff0000) >> 16) + 1; in intel_crtc_mode_get()
6862 mode->vsync_start = (vsync & 0xffff) + 1; in intel_crtc_mode_get()
6863 mode->vsync_end = ((vsync & 0xffff0000) >> 16) + 1; in intel_crtc_mode_get()
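The mode readback above is the inverse of the mode-set packing: take each 16-bit half and add one back. A tiny round-trip check:

#include <assert.h>
#include <stdint.h>

static uint32_t pack_timing(int lo, int hi)
{
	return (uint32_t)(lo - 1) | ((uint32_t)(hi - 1) << 16);
}

/* Readback as done above: add one back to each 16-bit half. */
static void unpack_timing(uint32_t reg, int *lo, int *hi)
{
	*lo = (int)(reg & 0xffff) + 1;
	*hi = (int)((reg >> 16) & 0xffff) + 1;
}

int main(void)
{
	int lo, hi;

	unpack_timing(pack_timing(1024, 1344), &lo, &hi);
	assert(lo == 1024 && hi == 1344);
	return 0;
}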
6877 drm_i915_private_t *dev_priv = dev->dev_private; in intel_gpu_idle_timer()
6879 if (!list_empty(&dev_priv->mm.active_list)) { in intel_gpu_idle_timer()
6880 /* Still processing requests, so just re-arm the timer. */ in intel_gpu_idle_timer()
6881 mod_timer(&dev_priv->idle_timer, jiffies + in intel_gpu_idle_timer()
6886 dev_priv->busy = false; in intel_gpu_idle_timer()
6887 queue_work(dev_priv->wq, &dev_priv->idle_work); in intel_gpu_idle_timer()
6895 struct drm_crtc *crtc = &intel_crtc->base; in intel_crtc_idle_timer()
6896 drm_i915_private_t *dev_priv = crtc->dev->dev_private; in intel_crtc_idle_timer()
6899 intel_fb = to_intel_framebuffer(crtc->fb); in intel_crtc_idle_timer()
6900 if (intel_fb && intel_fb->obj->active) { in intel_crtc_idle_timer()
6901 /* The framebuffer is still being accessed by the GPU. */ in intel_crtc_idle_timer()
6902 mod_timer(&intel_crtc->idle_timer, jiffies + in intel_crtc_idle_timer()
6907 intel_crtc->busy = false; in intel_crtc_idle_timer()
6908 queue_work(dev_priv->wq, &dev_priv->idle_work); in intel_crtc_idle_timer()
6913 struct drm_device *dev = crtc->dev; in intel_increase_pllclock()
6914 drm_i915_private_t *dev_priv = dev->dev_private; in intel_increase_pllclock()
6916 int pipe = intel_crtc->pipe; in intel_increase_pllclock()
6923 if (!dev_priv->lvds_downclock_avail) in intel_increase_pllclock()
6947 mod_timer(&intel_crtc->idle_timer, jiffies + in intel_increase_pllclock()
6953 struct drm_device *dev = crtc->dev; in intel_decrease_pllclock()
6954 drm_i915_private_t *dev_priv = dev->dev_private; in intel_decrease_pllclock()
6956 int pipe = intel_crtc->pipe; in intel_decrease_pllclock()
6963 if (!dev_priv->lvds_downclock_avail) in intel_decrease_pllclock()
6970 if (!HAS_PIPE_CXSR(dev) && intel_crtc->lowfreq_avail) { in intel_decrease_pllclock()
6991 * intel_idle_update - adjust clocks for idleness
7001 struct drm_device *dev = dev_priv->dev; in intel_idle_update()
7008 mutex_lock(&dev->struct_mutex); in intel_idle_update()
7012 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { in intel_idle_update()
7014 if (!crtc->fb) in intel_idle_update()
7018 if (!intel_crtc->busy) in intel_idle_update()
7023 mutex_unlock(&dev->struct_mutex); in intel_idle_update()
7027 * intel_mark_busy - mark the GPU and possibly the display busy
7038 drm_i915_private_t *dev_priv = dev->dev_private; in intel_mark_busy()
7046 if (!dev_priv->busy) in intel_mark_busy()
7047 dev_priv->busy = true; in intel_mark_busy()
7049 mod_timer(&dev_priv->idle_timer, jiffies + in intel_mark_busy()
7052 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { in intel_mark_busy()
7053 if (!crtc->fb) in intel_mark_busy()
7057 intel_fb = to_intel_framebuffer(crtc->fb); in intel_mark_busy()
7058 if (intel_fb->obj == obj) { in intel_mark_busy()
7059 if (!intel_crtc->busy) { in intel_mark_busy()
7060 /* Non-busy -> busy, upclock */ in intel_mark_busy()
7062 intel_crtc->busy = true; in intel_mark_busy()
7064 /* Busy -> busy, put off timer */ in intel_mark_busy()
7065 mod_timer(&intel_crtc->idle_timer, jiffies + in intel_mark_busy()
7075 struct drm_device *dev = crtc->dev; in intel_crtc_destroy()
7079 spin_lock_irqsave(&dev->event_lock, flags); in intel_crtc_destroy()
7080 work = intel_crtc->unpin_work; in intel_crtc_destroy()
7081 intel_crtc->unpin_work = NULL; in intel_crtc_destroy()
7082 spin_unlock_irqrestore(&dev->event_lock, flags); in intel_crtc_destroy()
7085 cancel_work_sync(&work->work); in intel_crtc_destroy()
7099 mutex_lock(&work->dev->struct_mutex); in intel_unpin_work_fn()
7100 i915_gem_object_unpin(work->old_fb_obj); in intel_unpin_work_fn()
7101 drm_gem_object_unreference(&work->pending_flip_obj->base); in intel_unpin_work_fn()
7102 drm_gem_object_unreference(&work->old_fb_obj->base); in intel_unpin_work_fn()
7104 intel_update_fbc(work->dev); in intel_unpin_work_fn()
7105 mutex_unlock(&work->dev->struct_mutex); in intel_unpin_work_fn()
7112 drm_i915_private_t *dev_priv = dev->dev_private; in do_intel_finish_page_flip()
7126 spin_lock_irqsave(&dev->event_lock, flags); in do_intel_finish_page_flip()
7127 work = intel_crtc->unpin_work; in do_intel_finish_page_flip()
7128 if (work == NULL || !work->pending) { in do_intel_finish_page_flip()
7129 spin_unlock_irqrestore(&dev->event_lock, flags); in do_intel_finish_page_flip()
7133 intel_crtc->unpin_work = NULL; in do_intel_finish_page_flip()
7135 if (work->event) { in do_intel_finish_page_flip()
7136 e = work->event; in do_intel_finish_page_flip()
7137 e->event.sequence = drm_vblank_count_and_time(dev, intel_crtc->pipe, &tvbl); in do_intel_finish_page_flip()
7151 if (10 * (timeval_to_ns(&tnow) - timeval_to_ns(&tvbl)) > in do_intel_finish_page_flip()
7152 9 * crtc->framedur_ns) { in do_intel_finish_page_flip()
7153 e->event.sequence++; in do_intel_finish_page_flip()
7155 crtc->framedur_ns); in do_intel_finish_page_flip()
7158 e->event.tv_sec = tvbl.tv_sec; in do_intel_finish_page_flip()
7159 e->event.tv_usec = tvbl.tv_usec; in do_intel_finish_page_flip()
7161 list_add_tail(&e->base.link, in do_intel_finish_page_flip()
7162 &e->base.file_priv->event_list); in do_intel_finish_page_flip()
7163 wake_up_interruptible(&e->base.file_priv->event_wait); in do_intel_finish_page_flip()
7166 drm_vblank_put(dev, intel_crtc->pipe); in do_intel_finish_page_flip()
7168 spin_unlock_irqrestore(&dev->event_lock, flags); in do_intel_finish_page_flip()
7170 obj = work->old_fb_obj; in do_intel_finish_page_flip()
7172 atomic_clear_mask(1 << intel_crtc->plane, in do_intel_finish_page_flip()
7173 &obj->pending_flip.counter); in do_intel_finish_page_flip()
7174 if (atomic_read(&obj->pending_flip) == 0) in do_intel_finish_page_flip()
7175 wake_up(&dev_priv->pending_flip_queue); in do_intel_finish_page_flip()
7177 schedule_work(&work->work); in do_intel_finish_page_flip()
7179 trace_i915_flip_complete(intel_crtc->plane, work->pending_flip_obj); in do_intel_finish_page_flip()
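The sequence bump above fires when the flip completion is observed more than 90% of a frame after the vblank timestamp, on the assumption that the flip really belongs to the next vblank. The arithmetic, isolated into a standalone check:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* True when the completion is seen more than 90% of a frame after the
 * vblank timestamp, i.e. 10 * delta > 9 * framedur, as checked above.
 * All values are nanoseconds. */
static bool counts_for_next_vblank(int64_t tnow_ns, int64_t tvbl_ns,
				   int64_t framedur_ns)
{
	return 10 * (tnow_ns - tvbl_ns) > 9 * framedur_ns;
}

int main(void)
{
	int64_t frame = 16666667;	/* ~60 Hz frame duration */

	printf("%d\n", counts_for_next_vblank(1000000, 0, frame));	/* 0 */
	printf("%d\n", counts_for_next_vblank(16000000, 0, frame));	/* 1 */
	return 0;
}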
7184 drm_i915_private_t *dev_priv = dev->dev_private; in intel_finish_page_flip()
7185 struct drm_crtc *crtc = dev_priv->pipe_to_crtc_mapping[pipe]; in intel_finish_page_flip()
7192 drm_i915_private_t *dev_priv = dev->dev_private; in intel_finish_page_flip_plane()
7193 struct drm_crtc *crtc = dev_priv->plane_to_crtc_mapping[plane]; in intel_finish_page_flip_plane()
7200 drm_i915_private_t *dev_priv = dev->dev_private; in intel_prepare_page_flip()
7202 to_intel_crtc(dev_priv->plane_to_crtc_mapping[plane]); in intel_prepare_page_flip()
7205 spin_lock_irqsave(&dev->event_lock, flags); in intel_prepare_page_flip()
7206 if (intel_crtc->unpin_work) { in intel_prepare_page_flip()
7207 if ((++intel_crtc->unpin_work->pending) > 1) in intel_prepare_page_flip()
7212 spin_unlock_irqrestore(&dev->event_lock, flags); in intel_prepare_page_flip()
7220 struct drm_i915_private *dev_priv = dev->dev_private; in intel_gen2_queue_flip()
7231 offset = crtc->y * fb->pitches[0] + crtc->x * fb->bits_per_pixel/8; in intel_gen2_queue_flip()
7240 if (intel_crtc->plane) in intel_gen2_queue_flip()
7247 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane)); in intel_gen2_queue_flip()
7248 OUT_RING(fb->pitches[0]); in intel_gen2_queue_flip()
7249 OUT_RING(obj->gtt_offset + offset); in intel_gen2_queue_flip()
7261 struct drm_i915_private *dev_priv = dev->dev_private; in intel_gen3_queue_flip()
7272 offset = crtc->y * fb->pitches[0] + crtc->x * fb->bits_per_pixel/8; in intel_gen3_queue_flip()
7278 if (intel_crtc->plane) in intel_gen3_queue_flip()
7285 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane)); in intel_gen3_queue_flip()
7286 OUT_RING(fb->pitches[0]); in intel_gen3_queue_flip()
7287 OUT_RING(obj->gtt_offset + offset); in intel_gen3_queue_flip()
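The gen2/gen3 flip paths above compute the scanout offset as the byte position of pixel (crtc->x, crtc->y) within a linear framebuffer. The same arithmetic stands alone as:

#include <stdint.h>
#include <stdio.h>

/* Byte offset of pixel (x, y) in a linear framebuffer, as computed for
 * the flip commands above. */
static uint32_t fb_byte_offset(int x, int y, uint32_t pitch, int bits_per_pixel)
{
	return (uint32_t)y * pitch + (uint32_t)x * (uint32_t)bits_per_pixel / 8;
}

int main(void)
{
	/* Pixel (100, 10), 4096-byte pitch, 32 bpp -> 41360. */
	printf("%u\n", (unsigned)fb_byte_offset(100, 10, 4096, 32));
	return 0;
}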
7300 struct drm_i915_private *dev_priv = dev->dev_private; in intel_gen4_queue_flip()
7314 * Display Registers (which do not change across a page-flip) in intel_gen4_queue_flip()
7318 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane)); in intel_gen4_queue_flip()
7319 OUT_RING(fb->pitches[0]); in intel_gen4_queue_flip()
7320 OUT_RING(obj->gtt_offset | obj->tiling_mode); in intel_gen4_queue_flip()
7322 /* XXX Enabling the panel-fitter across page-flip is so far in intel_gen4_queue_flip()
7323 * untested on non-native modes, so ignore it for now. in intel_gen4_queue_flip()
7327 pipesrc = I915_READ(PIPESRC(intel_crtc->pipe)) & 0x0fff0fff; in intel_gen4_queue_flip()
7339 struct drm_i915_private *dev_priv = dev->dev_private; in intel_gen6_queue_flip()
7353 MI_DISPLAY_FLIP_PLANE(intel_crtc->plane)); in intel_gen6_queue_flip()
7354 OUT_RING(fb->pitches[0] | obj->tiling_mode); in intel_gen6_queue_flip()
7355 OUT_RING(obj->gtt_offset); in intel_gen6_queue_flip()
7357 pf = I915_READ(PF_CTL(intel_crtc->pipe)) & PF_ENABLE; in intel_gen6_queue_flip()
7358 pipesrc = I915_READ(PIPESRC(intel_crtc->pipe)) & 0x0fff0fff; in intel_gen6_queue_flip()
7376 struct drm_i915_private *dev_priv = dev->dev_private; in intel_gen7_queue_flip()
7378 struct intel_ring_buffer *ring = &dev_priv->ring[BCS]; in intel_gen7_queue_flip()
7389 intel_ring_emit(ring, MI_DISPLAY_FLIP_I915 | (intel_crtc->plane << 19)); in intel_gen7_queue_flip()
7390 intel_ring_emit(ring, (fb->pitches[0] | obj->tiling_mode)); in intel_gen7_queue_flip()
7391 intel_ring_emit(ring, (obj->gtt_offset)); in intel_gen7_queue_flip()
7403 return -ENODEV; in intel_default_queue_flip()
7410 struct drm_device *dev = crtc->dev; in intel_crtc_page_flip()
7411 struct drm_i915_private *dev_priv = dev->dev_private; in intel_crtc_page_flip()
7421 return -ENOMEM; in intel_crtc_page_flip()
7423 work->event = event; in intel_crtc_page_flip()
7424 work->dev = crtc->dev; in intel_crtc_page_flip()
7425 intel_fb = to_intel_framebuffer(crtc->fb); in intel_crtc_page_flip()
7426 work->old_fb_obj = intel_fb->obj; in intel_crtc_page_flip()
7427 INIT_WORK(&work->work, intel_unpin_work_fn); in intel_crtc_page_flip()
7429 ret = drm_vblank_get(dev, intel_crtc->pipe); in intel_crtc_page_flip()
7434 spin_lock_irqsave(&dev->event_lock, flags); in intel_crtc_page_flip()
7435 if (intel_crtc->unpin_work) { in intel_crtc_page_flip()
7436 spin_unlock_irqrestore(&dev->event_lock, flags); in intel_crtc_page_flip()
7438 drm_vblank_put(dev, intel_crtc->pipe); in intel_crtc_page_flip()
7441 return -EBUSY; in intel_crtc_page_flip()
7443 intel_crtc->unpin_work = work; in intel_crtc_page_flip()
7444 spin_unlock_irqrestore(&dev->event_lock, flags); in intel_crtc_page_flip()
7447 obj = intel_fb->obj; in intel_crtc_page_flip()
7449 mutex_lock(&dev->struct_mutex); in intel_crtc_page_flip()
7452 drm_gem_object_reference(&work->old_fb_obj->base); in intel_crtc_page_flip()
7453 drm_gem_object_reference(&obj->base); in intel_crtc_page_flip()
7455 crtc->fb = fb; in intel_crtc_page_flip()
7457 work->pending_flip_obj = obj; in intel_crtc_page_flip()
7459 work->enable_stall_check = true; in intel_crtc_page_flip()
7464 atomic_add(1 << intel_crtc->plane, &work->old_fb_obj->pending_flip); in intel_crtc_page_flip()
7466 ret = dev_priv->display.queue_flip(dev, crtc, fb, obj); in intel_crtc_page_flip()
7471 mutex_unlock(&dev->struct_mutex); in intel_crtc_page_flip()
7473 trace_i915_flip_request(intel_crtc->plane, obj); in intel_crtc_page_flip()
7478 atomic_sub(1 << intel_crtc->plane, &work->old_fb_obj->pending_flip); in intel_crtc_page_flip()
7479 drm_gem_object_unreference(&work->old_fb_obj->base); in intel_crtc_page_flip()
7480 drm_gem_object_unreference(&obj->base); in intel_crtc_page_flip()
7481 mutex_unlock(&dev->struct_mutex); in intel_crtc_page_flip()
7483 spin_lock_irqsave(&dev->event_lock, flags); in intel_crtc_page_flip()
7484 intel_crtc->unpin_work = NULL; in intel_crtc_page_flip()
7485 spin_unlock_irqrestore(&dev->event_lock, flags); in intel_crtc_page_flip()
7487 drm_vblank_put(dev, intel_crtc->pipe); in intel_crtc_page_flip()
7497 struct drm_i915_private *dev_priv = dev->dev_private; in intel_sanitize_modesetting()
7509 * not disable the pipes and planes in the correct order -- leaving in intel_sanitize_modesetting()
7532 struct drm_device *dev = crtc->dev; in intel_crtc_reset()
7538 intel_crtc->dpms_mode = -1; in intel_crtc_reset()
7543 intel_sanitize_modesetting(dev, intel_crtc->pipe, intel_crtc->plane); in intel_crtc_reset()
7568 drm_i915_private_t *dev_priv = dev->dev_private; in intel_crtc_init()
7576 drm_crtc_init(dev, &intel_crtc->base, &intel_crtc_funcs); in intel_crtc_init()
7578 drm_mode_crtc_set_gamma_size(&intel_crtc->base, 256); in intel_crtc_init()
7580 intel_crtc->lut_r[i] = i; in intel_crtc_init()
7581 intel_crtc->lut_g[i] = i; in intel_crtc_init()
7582 intel_crtc->lut_b[i] = i; in intel_crtc_init()
7585 /* Swap pipes & planes for FBC on pre-965 */ in intel_crtc_init()
7586 intel_crtc->pipe = pipe; in intel_crtc_init()
7587 intel_crtc->plane = pipe; in intel_crtc_init()
7590 intel_crtc->plane = !pipe; in intel_crtc_init()
7593 BUG_ON(pipe >= ARRAY_SIZE(dev_priv->plane_to_crtc_mapping) || in intel_crtc_init()
7594 dev_priv->plane_to_crtc_mapping[intel_crtc->plane] != NULL); in intel_crtc_init()
7595 dev_priv->plane_to_crtc_mapping[intel_crtc->plane] = &intel_crtc->base; in intel_crtc_init()
7596 dev_priv->pipe_to_crtc_mapping[intel_crtc->pipe] = &intel_crtc->base; in intel_crtc_init()
7598 intel_crtc_reset(&intel_crtc->base); in intel_crtc_init()
7599 intel_crtc->active = true; /* force the pipe off on setup_init_config */ in intel_crtc_init()
7600 intel_crtc->bpp = 24; /* default for pre-Ironlake */ in intel_crtc_init()
7604 intel_crtc->no_pll = true; in intel_crtc_init()
7612 drm_crtc_helper_add(&intel_crtc->base, &intel_helper_funcs); in intel_crtc_init()
7614 intel_crtc->busy = false; in intel_crtc_init()
7616 setup_timer(&intel_crtc->idle_timer, intel_crtc_idle_timer, in intel_crtc_init()
7623 drm_i915_private_t *dev_priv = dev->dev_private; in intel_get_pipe_from_crtc_id()
7630 return -EINVAL; in intel_get_pipe_from_crtc_id()
7633 drmmode_obj = drm_mode_object_find(dev, pipe_from_crtc_id->crtc_id, in intel_get_pipe_from_crtc_id()
7638 return -EINVAL; in intel_get_pipe_from_crtc_id()
7642 pipe_from_crtc_id->pipe = crtc->pipe; in intel_get_pipe_from_crtc_id()
7653 list_for_each_entry(encoder, &dev->mode_config.encoder_list, base.head) { in intel_encoder_clones()
7654 if (type_mask & encoder->clone_mask) in intel_encoder_clones()
7664 struct drm_i915_private *dev_priv = dev->dev_private; in has_edp_a()
7681 struct drm_i915_private *dev_priv = dev->dev_private; in intel_setup_outputs()
7776 list_for_each_entry(encoder, &dev->mode_config.encoder_list, base.head) { in intel_setup_outputs()
7777 encoder->base.possible_crtcs = encoder->crtc_mask; in intel_setup_outputs()
7778 encoder->base.possible_clones = in intel_setup_outputs()
7779 intel_encoder_clones(dev, encoder->clone_mask); in intel_setup_outputs()
7794 drm_gem_object_unreference_unlocked(&intel_fb->obj->base); in intel_user_framebuffer_destroy()
7804 struct drm_i915_gem_object *obj = intel_fb->obj; in intel_user_framebuffer_create_handle()
7806 return drm_gem_handle_create(file, &obj->base, handle); in intel_user_framebuffer_create_handle()
7821 if (obj->tiling_mode == I915_TILING_Y) in intel_framebuffer_init()
7822 return -EINVAL; in intel_framebuffer_init()
7824 if (mode_cmd->pitches[0] & 63) in intel_framebuffer_init()
7825 return -EINVAL; in intel_framebuffer_init()
7827 switch (mode_cmd->pixel_format) { in intel_framebuffer_init()
7844 return -EINVAL; in intel_framebuffer_init()
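The checks above reject Y-tiled objects and pitches that are not a multiple of 64 bytes before a user framebuffer is accepted. A compact model of just those two checks (the pixel-format switch is omitted):

#include <errno.h>
#include <stdio.h>

enum tiling { TILING_NONE, TILING_X, TILING_Y };

/* The two checks shown above: no Y tiling for this path, and the pitch
 * must be a multiple of 64 bytes. */
static int validate_fb(enum tiling tiling, unsigned pitch)
{
	if (tiling == TILING_Y)
		return -EINVAL;
	if (pitch & 63)
		return -EINVAL;
	return 0;
}

int main(void)
{
	printf("%d\n", validate_fb(TILING_X, 4096));	/* 0 */
	printf("%d\n", validate_fb(TILING_NONE, 4100));	/* -EINVAL */
	return 0;
}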
7847 ret = drm_framebuffer_init(dev, &intel_fb->base, &intel_fb_funcs); in intel_framebuffer_init()
7849 DRM_ERROR("framebuffer init failed %d\n", ret); in intel_framebuffer_init()
7853 drm_helper_mode_fill_fb_struct(&intel_fb->base, mode_cmd); in intel_framebuffer_init()
7854 intel_fb->obj = obj; in intel_framebuffer_init()
7866 mode_cmd->handles[0])); in intel_user_framebuffer_create()
7867 if (&obj->base == NULL) in intel_user_framebuffer_create()
7868 return ERR_PTR(-ENOENT); in intel_user_framebuffer_create()
7884 WARN_ON(!mutex_is_locked(&dev->struct_mutex)); in intel_alloc_context_page()
7900 DRM_ERROR("failed to set-domain on power context: %d\n", ret); in intel_alloc_context_page()
7909 drm_gem_object_unreference(&ctx->base); in intel_alloc_context_page()
7910 mutex_unlock(&dev->struct_mutex); in intel_alloc_context_page()
7916 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_set_drps()
7938 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_enable_drps()
7965 dev_priv->fmax = fmax; /* IPS callback will increase this */ in ironlake_enable_drps()
7966 dev_priv->fstart = fstart; in ironlake_enable_drps()
7968 dev_priv->max_delay = fstart; in ironlake_enable_drps()
7969 dev_priv->min_delay = fmin; in ironlake_enable_drps()
7970 dev_priv->cur_delay = fstart; in ironlake_enable_drps()
7993 dev_priv->last_count1 = I915_READ(0x112e4) + I915_READ(0x112e8) + in ironlake_enable_drps()
7995 dev_priv->last_time1 = jiffies_to_msecs(jiffies); in ironlake_enable_drps()
7996 dev_priv->last_count2 = I915_READ(0x112f4); in ironlake_enable_drps()
7997 getrawmonotonic(&dev_priv->last_time2); in ironlake_enable_drps()
8002 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_disable_drps()
8013 ironlake_set_drps(dev, dev_priv->fstart); in ironlake_disable_drps()
8023 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_set_rps()
8032 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_disable_rps()
8042 spin_lock_irq(&dev_priv->rps_lock); in gen6_disable_rps()
8043 dev_priv->pm_iir = 0; in gen6_disable_rps()
8044 spin_unlock_irq(&dev_priv->rps_lock); in gen6_disable_rps()
8066 struct drm_i915_private *dev_priv = dev->dev_private; in intel_init_emon()
8086 /* Program P-state weights to account for frequency power adjustment */ in intel_init_emon()
8132 dev_priv->corr = (lcfuse & LCFUSE_HIV_MASK); in intel_init_emon()
8146 if (INTEL_INFO(dev)->gen == 5) in intel_enable_rc6()
8152 if (INTEL_INFO(dev)->gen == 6) { in intel_enable_rc6()
8169 * auto-downclocking. in gen6_enable_rps()
8175 mutex_lock(&dev_priv->dev->struct_mutex); in gen6_enable_rps()
8188 I915_WRITE(RING_MAX_IDLE(dev_priv->ring[i].mmio_base), 10); in gen6_enable_rps()
8196 if (intel_enable_rc6(dev_priv->dev)) in gen6_enable_rps()
8198 ((IS_GEN7(dev_priv->dev)) ? GEN6_RC_CTL_RC6p_ENABLE : 0); in gen6_enable_rps()
8260 dev_priv->max_delay = max_freq; in gen6_enable_rps()
8261 dev_priv->min_delay = min_freq; in gen6_enable_rps()
8262 dev_priv->cur_delay = cur_freq; in gen6_enable_rps()
8273 spin_lock_irq(&dev_priv->rps_lock); in gen6_enable_rps()
8274 WARN_ON(dev_priv->pm_iir != 0); in gen6_enable_rps()
8276 spin_unlock_irq(&dev_priv->rps_lock); in gen6_enable_rps()
8281 mutex_unlock(&dev_priv->dev->struct_mutex); in gen6_enable_rps()
8301 mutex_lock(&dev_priv->dev->struct_mutex); in gen6_update_ring_freq()
8308 for (gpu_freq = dev_priv->max_delay; gpu_freq >= dev_priv->min_delay; in gen6_update_ring_freq()
8309 gpu_freq--) { in gen6_update_ring_freq()
8310 int diff = dev_priv->max_delay - gpu_freq; in gen6_update_ring_freq()
8319 ia_freq = max_ia_freq - ((diff * scaling_factor) / 2); in gen6_update_ring_freq()
8334 mutex_unlock(&dev_priv->dev->struct_mutex); in gen6_update_ring_freq()
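The loop above walks GPU frequency steps from max_delay down to min_delay and lowers the requested IA frequency by (diff * scaling_factor) / 2 per step. The values fed into this sketch (frequencies, scaling factor) are placeholders; the driver derives them from cpufreq and the RP state registers.

#include <stdio.h>

/* For each GPU frequency step below the maximum, lower the requested IA
 * frequency by (diff * scaling_factor) / 2, as in the loop above.  All
 * inputs here are placeholder values. */
static void build_ring_freq_table(int max_gpu, int min_gpu,
				  int max_ia_freq, int scaling_factor)
{
	for (int gpu_freq = max_gpu; gpu_freq >= min_gpu; gpu_freq--) {
		int diff = max_gpu - gpu_freq;
		int ia_freq = max_ia_freq - (diff * scaling_factor) / 2;

		printf("gpu step %d -> ia %d\n", gpu_freq, ia_freq);
	}
}

int main(void)
{
	build_ring_freq_table(12, 6, 3400, 180);
	return 0;
}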
8339 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_init_clock_gating()
8359 * order to enable memory self-refresh in ironlake_init_clock_gating()
8408 struct drm_i915_private *dev_priv = dev->dev_private; in gen6_init_clock_gating()
8438 * set in order to enable memory self-refresh and fbc: in gen6_init_clock_gating()
8466 struct drm_i915_private *dev_priv = dev->dev_private; in ivybridge_init_clock_gating()
8512 struct drm_i915_private *dev_priv = dev->dev_private; in g4x_init_clock_gating()
8530 struct drm_i915_private *dev_priv = dev->dev_private; in crestline_init_clock_gating()
8541 struct drm_i915_private *dev_priv = dev->dev_private; in broadwater_init_clock_gating()
8553 struct drm_i915_private *dev_priv = dev->dev_private; in gen3_init_clock_gating()
8563 struct drm_i915_private *dev_priv = dev->dev_private; in i85x_init_clock_gating()
8570 struct drm_i915_private *dev_priv = dev->dev_private; in i830_init_clock_gating()
8577 struct drm_i915_private *dev_priv = dev->dev_private; in ibx_init_clock_gating()
8589 struct drm_i915_private *dev_priv = dev->dev_private; in cpt_init_clock_gating()
8607 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_teardown_rc6()
8609 if (dev_priv->renderctx) { in ironlake_teardown_rc6()
8610 i915_gem_object_unpin(dev_priv->renderctx); in ironlake_teardown_rc6()
8611 drm_gem_object_unreference(&dev_priv->renderctx->base); in ironlake_teardown_rc6()
8612 dev_priv->renderctx = NULL; in ironlake_teardown_rc6()
8615 if (dev_priv->pwrctx) { in ironlake_teardown_rc6()
8616 i915_gem_object_unpin(dev_priv->pwrctx); in ironlake_teardown_rc6()
8617 drm_gem_object_unreference(&dev_priv->pwrctx->base); in ironlake_teardown_rc6()
8618 dev_priv->pwrctx = NULL; in ironlake_teardown_rc6()
8624 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_disable_rc6()
8644 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_setup_rc6()
8646 if (dev_priv->renderctx == NULL) in ironlake_setup_rc6()
8647 dev_priv->renderctx = intel_alloc_context_page(dev); in ironlake_setup_rc6()
8648 if (!dev_priv->renderctx) in ironlake_setup_rc6()
8649 return -ENOMEM; in ironlake_setup_rc6()
8651 if (dev_priv->pwrctx == NULL) in ironlake_setup_rc6()
8652 dev_priv->pwrctx = intel_alloc_context_page(dev); in ironlake_setup_rc6()
8653 if (!dev_priv->pwrctx) { in ironlake_setup_rc6()
8655 return -ENOMEM; in ironlake_setup_rc6()
8663 struct drm_i915_private *dev_priv = dev->dev_private; in ironlake_enable_rc6()
8672 mutex_lock(&dev->struct_mutex); in ironlake_enable_rc6()
8675 mutex_unlock(&dev->struct_mutex); in ironlake_enable_rc6()
8686 mutex_unlock(&dev->struct_mutex); in ironlake_enable_rc6()
8692 OUT_RING(dev_priv->renderctx->gtt_offset | in ironlake_enable_rc6()
8711 mutex_unlock(&dev->struct_mutex); in ironlake_enable_rc6()
8715 I915_WRITE(PWRCTXA, dev_priv->pwrctx->gtt_offset | PWRCTX_EN); in ironlake_enable_rc6()
8717 mutex_unlock(&dev->struct_mutex); in ironlake_enable_rc6()
8722 struct drm_i915_private *dev_priv = dev->dev_private; in intel_init_clock_gating()
8724 dev_priv->display.init_clock_gating(dev); in intel_init_clock_gating()
8726 if (dev_priv->display.init_pch_clock_gating) in intel_init_clock_gating()
8727 dev_priv->display.init_pch_clock_gating(dev); in intel_init_clock_gating()
8733 struct drm_i915_private *dev_priv = dev->dev_private; in intel_init_display()
8737 dev_priv->display.dpms = ironlake_crtc_dpms; in intel_init_display()
8738 dev_priv->display.crtc_mode_set = ironlake_crtc_mode_set; in intel_init_display()
8739 dev_priv->display.update_plane = ironlake_update_plane; in intel_init_display()
8741 dev_priv->display.dpms = i9xx_crtc_dpms; in intel_init_display()
8742 dev_priv->display.crtc_mode_set = i9xx_crtc_mode_set; in intel_init_display()
8743 dev_priv->display.update_plane = i9xx_update_plane; in intel_init_display()
8748 dev_priv->display.fbc_enabled = ironlake_fbc_enabled; in intel_init_display()
8749 dev_priv->display.enable_fbc = ironlake_enable_fbc; in intel_init_display()
8750 dev_priv->display.disable_fbc = ironlake_disable_fbc; in intel_init_display()
8752 dev_priv->display.fbc_enabled = g4x_fbc_enabled; in intel_init_display()
8753 dev_priv->display.enable_fbc = g4x_enable_fbc; in intel_init_display()
8754 dev_priv->display.disable_fbc = g4x_disable_fbc; in intel_init_display()
8756 dev_priv->display.fbc_enabled = i8xx_fbc_enabled; in intel_init_display()
8757 dev_priv->display.enable_fbc = i8xx_enable_fbc; in intel_init_display()
8758 dev_priv->display.disable_fbc = i8xx_disable_fbc; in intel_init_display()
8765 dev_priv->display.get_display_clock_speed = in intel_init_display()
8768 dev_priv->display.get_display_clock_speed = in intel_init_display()
8771 dev_priv->display.get_display_clock_speed = in intel_init_display()
8774 dev_priv->display.get_display_clock_speed = in intel_init_display()
8777 dev_priv->display.get_display_clock_speed = in intel_init_display()
8780 dev_priv->display.get_display_clock_speed = in intel_init_display()
8783 dev_priv->display.get_display_clock_speed = in intel_init_display()
8788 dev_priv->display.force_wake_get = __gen6_gt_force_wake_get; in intel_init_display()
8789 dev_priv->display.force_wake_put = __gen6_gt_force_wake_put; in intel_init_display()
8791 /* IVB configs may use multi-threaded forcewake */ in intel_init_display()
8795 /* A small trick here - if the bios hasn't configured MT forcewake, in intel_init_display()
8801 mutex_lock(&dev->struct_mutex); in intel_init_display()
8805 mutex_unlock(&dev->struct_mutex); in intel_init_display()
8809 dev_priv->display.force_wake_get = in intel_init_display()
8811 dev_priv->display.force_wake_put = in intel_init_display()
8817 dev_priv->display.init_pch_clock_gating = ibx_init_clock_gating; in intel_init_display()
8819 dev_priv->display.init_pch_clock_gating = cpt_init_clock_gating; in intel_init_display()
8823 dev_priv->display.update_wm = ironlake_update_wm; in intel_init_display()
8827 dev_priv->display.update_wm = NULL; in intel_init_display()
8829 dev_priv->display.fdi_link_train = ironlake_fdi_link_train; in intel_init_display()
8830 dev_priv->display.init_clock_gating = ironlake_init_clock_gating; in intel_init_display()
8831 dev_priv->display.write_eld = ironlake_write_eld; in intel_init_display()
8834 dev_priv->display.update_wm = sandybridge_update_wm; in intel_init_display()
8835 dev_priv->display.update_sprite_wm = sandybridge_update_sprite_wm; in intel_init_display()
8839 dev_priv->display.update_wm = NULL; in intel_init_display()
8841 dev_priv->display.fdi_link_train = gen6_fdi_link_train; in intel_init_display()
8842 dev_priv->display.init_clock_gating = gen6_init_clock_gating; in intel_init_display()
8843 dev_priv->display.write_eld = ironlake_write_eld; in intel_init_display()
8846 dev_priv->display.fdi_link_train = ivb_manual_fdi_link_train; in intel_init_display()
8848 dev_priv->display.update_wm = sandybridge_update_wm; in intel_init_display()
8849 dev_priv->display.update_sprite_wm = sandybridge_update_sprite_wm; in intel_init_display()
8853 dev_priv->display.update_wm = NULL; in intel_init_display()
8855 dev_priv->display.init_clock_gating = ivybridge_init_clock_gating; in intel_init_display()
8856 dev_priv->display.write_eld = ironlake_write_eld; in intel_init_display()
8858 dev_priv->display.update_wm = NULL; in intel_init_display()
8861 dev_priv->is_ddr3, in intel_init_display()
8862 dev_priv->fsb_freq, in intel_init_display()
8863 dev_priv->mem_freq)) { in intel_init_display()
8867 (dev_priv->is_ddr3 == 1) ? "3" : "2", in intel_init_display()
8868 dev_priv->fsb_freq, dev_priv->mem_freq); in intel_init_display()
8871 dev_priv->display.update_wm = NULL; in intel_init_display()
8873 dev_priv->display.update_wm = pineview_update_wm; in intel_init_display()
8874 dev_priv->display.init_clock_gating = gen3_init_clock_gating; in intel_init_display()
8876 dev_priv->display.write_eld = g4x_write_eld; in intel_init_display()
8877 dev_priv->display.update_wm = g4x_update_wm; in intel_init_display()
8878 dev_priv->display.init_clock_gating = g4x_init_clock_gating; in intel_init_display()
8880 dev_priv->display.update_wm = i965_update_wm; in intel_init_display()
8882 dev_priv->display.init_clock_gating = crestline_init_clock_gating; in intel_init_display()
8884 dev_priv->display.init_clock_gating = broadwater_init_clock_gating; in intel_init_display()
8886 dev_priv->display.update_wm = i9xx_update_wm; in intel_init_display()
8887 dev_priv->display.get_fifo_size = i9xx_get_fifo_size; in intel_init_display()
8888 dev_priv->display.init_clock_gating = gen3_init_clock_gating; in intel_init_display()
8890 dev_priv->display.update_wm = i830_update_wm; in intel_init_display()
8891 dev_priv->display.init_clock_gating = i85x_init_clock_gating; in intel_init_display()
8892 dev_priv->display.get_fifo_size = i830_get_fifo_size; in intel_init_display()
8894 dev_priv->display.update_wm = i9xx_update_wm; in intel_init_display()
8895 dev_priv->display.get_fifo_size = i85x_get_fifo_size; in intel_init_display()
8896 dev_priv->display.init_clock_gating = i85x_init_clock_gating; in intel_init_display()
8898 dev_priv->display.update_wm = i830_update_wm; in intel_init_display()
8899 dev_priv->display.init_clock_gating = i830_init_clock_gating; in intel_init_display()
8901 dev_priv->display.get_fifo_size = i845_get_fifo_size; in intel_init_display()
8903 dev_priv->display.get_fifo_size = i830_get_fifo_size; in intel_init_display()
8906 /* Default just returns -ENODEV to indicate unsupported */ in intel_init_display()
8907 dev_priv->display.queue_flip = intel_default_queue_flip; in intel_init_display()
8909 switch (INTEL_INFO(dev)->gen) { in intel_init_display()
8911 dev_priv->display.queue_flip = intel_gen2_queue_flip; in intel_init_display()
8915 dev_priv->display.queue_flip = intel_gen3_queue_flip; in intel_init_display()
8920 dev_priv->display.queue_flip = intel_gen4_queue_flip; in intel_init_display()
8924 dev_priv->display.queue_flip = intel_gen6_queue_flip; in intel_init_display()
8927 dev_priv->display.queue_flip = intel_gen7_queue_flip; in intel_init_display()
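intel_init_display() above fills a per-device table of function pointers once, chosen from the hardware generation and PCH type, so the hot paths call through dev_priv->display without re-checking the chip. A stripped-down model of that pattern, with invented generation numbers and stub implementations:

#include <stdio.h>

/* One function pointer per operation, picked once from the hardware
 * generation; generation numbers and stubs below are invented. */
struct display_funcs {
	int (*queue_flip)(void);
};

static int queue_flip_old(void)   { return 1; }
static int queue_flip_new(void)   { return 2; }
static int queue_flip_nodev(void) { return -1; /* -ENODEV in the driver */ }

static void init_display(struct display_funcs *f, int gen)
{
	f->queue_flip = queue_flip_nodev;	/* default: unsupported */

	switch (gen) {
	case 2:
	case 3:
		f->queue_flip = queue_flip_old;
		break;
	case 6:
	case 7:
		f->queue_flip = queue_flip_new;
		break;
	}
}

int main(void)
{
	struct display_funcs f;

	init_display(&f, 7);
	printf("%d\n", f.queue_flip());
	return 0;
}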
8939 struct drm_i915_private *dev_priv = dev->dev_private; in quirk_pipea_force()
8941 dev_priv->quirks |= QUIRK_PIPEA_FORCE; in quirk_pipea_force()
8950 struct drm_i915_private *dev_priv = dev->dev_private; in quirk_ssc_force_disable()
8951 dev_priv->quirks |= QUIRK_LVDS_SSC_DISABLE; in quirk_ssc_force_disable()
8969 /* Toshiba Protege R-205, S-209 needs pipe A force quirk */
8992 struct pci_dev *d = dev->pdev; in intel_init_quirks()
8998 if (d->device == q->device && in intel_init_quirks()
8999 (d->subsystem_vendor == q->subsystem_vendor || in intel_init_quirks()
9000 q->subsystem_vendor == PCI_ANY_ID) && in intel_init_quirks()
9001 (d->subsystem_device == q->subsystem_device || in intel_init_quirks()
9002 q->subsystem_device == PCI_ANY_ID)) in intel_init_quirks()
9003 q->hook(dev); in intel_init_quirks()
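The quirk loop above applies a hook when the PCI device id matches exactly and the subsystem ids either match or are wildcarded with PCI_ANY_ID. The same predicate as a standalone check, with entirely made-up ids:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ANY_ID 0xffffu	/* stand-in for PCI_ANY_ID */

struct quirk {
	uint16_t device, subsystem_vendor, subsystem_device;
};

/* Match rule from the loop above: exact device id, subsystem ids either
 * exact or wildcarded. */
static bool quirk_matches(const struct quirk *q, uint16_t device,
			  uint16_t sub_vendor, uint16_t sub_device)
{
	return q->device == device &&
	       (q->subsystem_vendor == sub_vendor ||
		q->subsystem_vendor == ANY_ID) &&
	       (q->subsystem_device == sub_device ||
		q->subsystem_device == ANY_ID);
}

int main(void)
{
	/* Entirely made-up ids. */
	struct quirk q = { 0x1234, ANY_ID, ANY_ID };

	printf("%d\n", quirk_matches(&q, 0x1234, 0xabcd, 0x0001));	/* 1 */
	printf("%d\n", quirk_matches(&q, 0x4321, 0xabcd, 0x0001));	/* 0 */
	return 0;
}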
9010 struct drm_i915_private *dev_priv = dev->dev_private; in i915_disable_vga()
9019 vga_get_uninterruptible(dev->pdev, VGA_RSRC_LEGACY_IO); in i915_disable_vga()
9023 vga_put(dev->pdev, VGA_RSRC_LEGACY_IO); in i915_disable_vga()
9032 struct drm_i915_private *dev_priv = dev->dev_private; in intel_modeset_init()
9037 dev->mode_config.min_width = 0; in intel_modeset_init()
9038 dev->mode_config.min_height = 0; in intel_modeset_init()
9040 dev->mode_config.funcs = (void *)&intel_mode_funcs; in intel_modeset_init()
9047 dev->mode_config.max_width = 2048; in intel_modeset_init()
9048 dev->mode_config.max_height = 2048; in intel_modeset_init()
9050 dev->mode_config.max_width = 4096; in intel_modeset_init()
9051 dev->mode_config.max_height = 4096; in intel_modeset_init()
9053 dev->mode_config.max_width = 8192; in intel_modeset_init()
9054 dev->mode_config.max_height = 8192; in intel_modeset_init()
9056 dev->mode_config.fb_base = dev->agp->base; in intel_modeset_init()
9059 dev_priv->num_pipe, dev_priv->num_pipe > 1 ? "s" : ""); in intel_modeset_init()
9061 for (i = 0; i < dev_priv->num_pipe; i++) { in intel_modeset_init()
9084 INIT_WORK(&dev_priv->idle_work, intel_idle_update); in intel_modeset_init()
9085 setup_timer(&dev_priv->idle_timer, intel_gpu_idle_timer, in intel_modeset_init()
9099 struct drm_i915_private *dev_priv = dev->dev_private; in intel_modeset_cleanup()
9104 mutex_lock(&dev->struct_mutex); in intel_modeset_cleanup()
9109 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { in intel_modeset_cleanup()
9111 if (!crtc->fb) in intel_modeset_cleanup()
9128 mutex_unlock(&dev->struct_mutex); in intel_modeset_cleanup()
9133 cancel_work_sync(&dev_priv->hotplug_work); in intel_modeset_cleanup()
9134 cancel_work_sync(&dev_priv->rps_work); in intel_modeset_cleanup()
9140 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { in intel_modeset_cleanup()
9142 del_timer_sync(&intel_crtc->idle_timer); in intel_modeset_cleanup()
9144 del_timer_sync(&dev_priv->idle_timer); in intel_modeset_cleanup()
9145 cancel_work_sync(&dev_priv->idle_work); in intel_modeset_cleanup()
9155 return &intel_attached_encoder(connector)->base; in intel_best_encoder()
9161 connector->encoder = encoder; in intel_connector_attach_encoder()
9162 drm_mode_connector_attach_encoder(&connector->base, in intel_connector_attach_encoder()
9163 &encoder->base); in intel_connector_attach_encoder()
9167 * set vga decode state - true == enable VGA decode
9171 struct drm_i915_private *dev_priv = dev->dev_private; in intel_modeset_vga_set_state()
9174 pci_read_config_word(dev_priv->bridge_dev, INTEL_GMCH_CTRL, &gmch_ctrl); in intel_modeset_vga_set_state()
9179 pci_write_config_word(dev_priv->bridge_dev, INTEL_GMCH_CTRL, gmch_ctrl); in intel_modeset_vga_set_state()
9220 drm_i915_private_t *dev_priv = dev->dev_private; in intel_display_capture_error_state()
9229 error->cursor[i].control = I915_READ(CURCNTR(i)); in intel_display_capture_error_state()
9230 error->cursor[i].position = I915_READ(CURPOS(i)); in intel_display_capture_error_state()
9231 error->cursor[i].base = I915_READ(CURBASE(i)); in intel_display_capture_error_state()
9233 error->plane[i].control = I915_READ(DSPCNTR(i)); in intel_display_capture_error_state()
9234 error->plane[i].stride = I915_READ(DSPSTRIDE(i)); in intel_display_capture_error_state()
9235 error->plane[i].size = I915_READ(DSPSIZE(i)); in intel_display_capture_error_state()
9236 error->plane[i].pos = I915_READ(DSPPOS(i)); in intel_display_capture_error_state()
9237 error->plane[i].addr = I915_READ(DSPADDR(i)); in intel_display_capture_error_state()
9238 if (INTEL_INFO(dev)->gen >= 4) { in intel_display_capture_error_state()
9239 error->plane[i].surface = I915_READ(DSPSURF(i)); in intel_display_capture_error_state()
9240 error->plane[i].tile_offset = I915_READ(DSPTILEOFF(i)); in intel_display_capture_error_state()
9243 error->pipe[i].conf = I915_READ(PIPECONF(i)); in intel_display_capture_error_state()
9244 error->pipe[i].source = I915_READ(PIPESRC(i)); in intel_display_capture_error_state()
9245 error->pipe[i].htotal = I915_READ(HTOTAL(i)); in intel_display_capture_error_state()
9246 error->pipe[i].hblank = I915_READ(HBLANK(i)); in intel_display_capture_error_state()
9247 error->pipe[i].hsync = I915_READ(HSYNC(i)); in intel_display_capture_error_state()
9248 error->pipe[i].vtotal = I915_READ(VTOTAL(i)); in intel_display_capture_error_state()
9249 error->pipe[i].vblank = I915_READ(VBLANK(i)); in intel_display_capture_error_state()
9250 error->pipe[i].vsync = I915_READ(VSYNC(i)); in intel_display_capture_error_state()
9265 seq_printf(m, " CONF: %08x\n", error->pipe[i].conf); in intel_display_print_error_state()
9266 seq_printf(m, " SRC: %08x\n", error->pipe[i].source); in intel_display_print_error_state()
9267 seq_printf(m, " HTOTAL: %08x\n", error->pipe[i].htotal); in intel_display_print_error_state()
9268 seq_printf(m, " HBLANK: %08x\n", error->pipe[i].hblank); in intel_display_print_error_state()
9269 seq_printf(m, " HSYNC: %08x\n", error->pipe[i].hsync); in intel_display_print_error_state()
9270 seq_printf(m, " VTOTAL: %08x\n", error->pipe[i].vtotal); in intel_display_print_error_state()
9271 seq_printf(m, " VBLANK: %08x\n", error->pipe[i].vblank); in intel_display_print_error_state()
9272 seq_printf(m, " VSYNC: %08x\n", error->pipe[i].vsync); in intel_display_print_error_state()
9275 seq_printf(m, " CNTR: %08x\n", error->plane[i].control); in intel_display_print_error_state()
9276 seq_printf(m, " STRIDE: %08x\n", error->plane[i].stride); in intel_display_print_error_state()
9277 seq_printf(m, " SIZE: %08x\n", error->plane[i].size); in intel_display_print_error_state()
9278 seq_printf(m, " POS: %08x\n", error->plane[i].pos); in intel_display_print_error_state()
9279 seq_printf(m, " ADDR: %08x\n", error->plane[i].addr); in intel_display_print_error_state()
9280 if (INTEL_INFO(dev)->gen >= 4) { in intel_display_print_error_state()
9281 seq_printf(m, " SURF: %08x\n", error->plane[i].surface); in intel_display_print_error_state()
9282 seq_printf(m, " TILEOFF: %08x\n", error->plane[i].tile_offset); in intel_display_print_error_state()
9286 seq_printf(m, " CNTR: %08x\n", error->cursor[i].control); in intel_display_print_error_state()
9287 seq_printf(m, " POS: %08x\n", error->cursor[i].position); in intel_display_print_error_state()
9288 seq_printf(m, " BASE: %08x\n", error->cursor[i].base); in intel_display_print_error_state()