Lines Matching +full:entry +full:- +full:latency
1 // SPDX-License-Identifier: MIT
70 i915->display.sagv.status != I915_SAGV_NOT_CONTROLLED; in intel_has_sagv()
86 ret = snb_pcode_read(&i915->uncore, in intel_sagv_block_time()
90 drm_dbg_kms(&i915->drm, "Couldn't read SAGV block time!\n"); in intel_sagv_block_time()
107 i915->display.sagv.status = I915_SAGV_NOT_CONTROLLED; in intel_sagv_init()
116 drm_WARN_ON(&i915->drm, i915->display.sagv.status == I915_SAGV_UNKNOWN); in intel_sagv_init()
118 i915->display.sagv.block_time_us = intel_sagv_block_time(i915); in intel_sagv_init()
120 drm_dbg_kms(&i915->drm, "SAGV supported: %s, original SAGV block time: %u us\n", in intel_sagv_init()
121 str_yes_no(intel_has_sagv(i915)), i915->display.sagv.block_time_us); in intel_sagv_init()
123 /* avoid overflow when adding with wm0 latency/etc. */ in intel_sagv_init()
124 if (drm_WARN(&i915->drm, i915->display.sagv.block_time_us > U16_MAX, in intel_sagv_init()
126 i915->display.sagv.block_time_us)) in intel_sagv_init()
127 i915->display.sagv.block_time_us = 0; in intel_sagv_init()
130 i915->display.sagv.block_time_us = 0; in intel_sagv_init()
140 * - <= 1 pipe enabled
141 * - All planes can enable watermarks for latencies >= SAGV engine block time
142 * - We're not using an interlaced display configuration
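[Editor's note] A minimal sketch of how those three conditions combine (an illustration, not the driver's code; the real checks are skl_crtc_can_enable_sagv() and intel_can_enable_sagv() below, and newer platforms relax the single-pipe rule to any power-of-two active_pipes mask, see line 428):

	/* Hedged sketch only - all three documented conditions must hold. */
	static bool sagv_ok_sketch(u8 active_pipes, bool interlaced,
				   bool all_planes_meet_latency)
	{
		return hweight8(active_pipes) <= 1 &&	/* <= 1 pipe enabled */
		       !interlaced &&			/* no interlaced mode */
		       all_planes_meet_latency;		/* wm latency >= SAGV block time */
	}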
151 if (i915->display.sagv.status == I915_SAGV_ENABLED) in skl_sagv_enable()
154 drm_dbg_kms(&i915->drm, "Enabling SAGV\n"); in skl_sagv_enable()
155 ret = snb_pcode_write(&i915->uncore, GEN9_PCODE_SAGV_CONTROL, in skl_sagv_enable()
161 * Some skl systems, pre-release machines in particular, in skl_sagv_enable()
164 if (IS_SKYLAKE(i915) && ret == -ENXIO) { in skl_sagv_enable()
165 drm_dbg(&i915->drm, "No SAGV found on system, ignoring\n"); in skl_sagv_enable()
166 i915->display.sagv.status = I915_SAGV_NOT_CONTROLLED; in skl_sagv_enable()
169 drm_err(&i915->drm, "Failed to enable SAGV\n"); in skl_sagv_enable()
173 i915->display.sagv.status = I915_SAGV_ENABLED; in skl_sagv_enable()
183 if (i915->display.sagv.status == I915_SAGV_DISABLED) in skl_sagv_disable()
186 drm_dbg_kms(&i915->drm, "Disabling SAGV\n"); in skl_sagv_disable()
188 ret = skl_pcode_request(&i915->uncore, GEN9_PCODE_SAGV_CONTROL, in skl_sagv_disable()
193 * Some skl systems, pre-release machines in particular, in skl_sagv_disable()
196 if (IS_SKYLAKE(i915) && ret == -ENXIO) { in skl_sagv_disable()
197 drm_dbg(&i915->drm, "No SAGV found on system, ignoring\n"); in skl_sagv_disable()
198 i915->display.sagv.status = I915_SAGV_NOT_CONTROLLED; in skl_sagv_disable()
201 drm_err(&i915->drm, "Failed to disable SAGV (%d)\n", ret); in skl_sagv_disable()
205 i915->display.sagv.status = I915_SAGV_DISABLED; in skl_sagv_disable()
210 struct drm_i915_private *i915 = to_i915(state->base.dev); in skl_sagv_pre_plane_update()
223 struct drm_i915_private *i915 = to_i915(state->base.dev); in skl_sagv_post_plane_update()
236 struct drm_i915_private *i915 = to_i915(state->base.dev); in icl_sagv_pre_plane_update()
246 old_mask = old_bw_state->qgv_points_mask; in icl_sagv_pre_plane_update()
247 new_mask = old_bw_state->qgv_points_mask | new_bw_state->qgv_points_mask; in icl_sagv_pre_plane_update()
252 WARN_ON(!new_bw_state->base.changed); in icl_sagv_pre_plane_update()
254 drm_dbg_kms(&i915->drm, "Restricting QGV points: 0x%x -> 0x%x\n", in icl_sagv_pre_plane_update()
268 struct drm_i915_private *i915 = to_i915(state->base.dev); in icl_sagv_post_plane_update()
278 old_mask = old_bw_state->qgv_points_mask | new_bw_state->qgv_points_mask; in icl_sagv_post_plane_update()
279 new_mask = new_bw_state->qgv_points_mask; in icl_sagv_post_plane_update()
284 WARN_ON(!new_bw_state->base.changed); in icl_sagv_post_plane_update()
286 drm_dbg_kms(&i915->drm, "Relaxing QGV points: 0x%x -> 0x%x\n", in icl_sagv_post_plane_update()
300 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_sagv_pre_plane_update()
305 * afford it due to a DBuf limitation - in case SAGV is completely in intel_sagv_pre_plane_update()
320 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_sagv_post_plane_update()
325 * afford it due to a DBuf limitation - in case SAGV is completely in intel_sagv_post_plane_update()
340 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in skl_crtc_can_enable_sagv()
341 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_crtc_can_enable_sagv()
348 if (!crtc_state->hw.active) in skl_crtc_can_enable_sagv()
351 if (crtc_state->hw.pipe_mode.flags & DRM_MODE_FLAG_INTERLACE) in skl_crtc_can_enable_sagv()
356 &crtc_state->wm.skl.optimal.planes[plane_id]; in skl_crtc_can_enable_sagv()
360 if (!wm->wm[0].enable) in skl_crtc_can_enable_sagv()
364 for (level = i915->display.wm.num_levels - 1; in skl_crtc_can_enable_sagv()
365 !wm->wm[level].enable; --level) in skl_crtc_can_enable_sagv()
378 &crtc_state->wm.skl.optimal.planes[plane_id]; in skl_crtc_can_enable_sagv()
384 if (wm->wm[0].enable && !wm->wm[max_level].can_sagv) in skl_crtc_can_enable_sagv()
393 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in tgl_crtc_can_enable_sagv()
396 if (!crtc_state->hw.active) in tgl_crtc_can_enable_sagv()
401 &crtc_state->wm.skl.optimal.planes[plane_id]; in tgl_crtc_can_enable_sagv()
403 if (wm->wm[0].enable && !wm->sagv.wm0.enable) in tgl_crtc_can_enable_sagv()
412 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in intel_crtc_can_enable_sagv()
413 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in intel_crtc_can_enable_sagv()
415 if (!i915->display.params.enable_sagv) in intel_crtc_can_enable_sagv()
428 bw_state->active_pipes && !is_power_of_2(bw_state->active_pipes)) in intel_can_enable_sagv()
431 return bw_state->pipe_sagv_reject == 0; in intel_can_enable_sagv()
436 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_compute_sagv_mask()
453 new_bw_state->pipe_sagv_reject &= ~BIT(crtc->pipe); in intel_compute_sagv_mask()
455 new_bw_state->pipe_sagv_reject |= BIT(crtc->pipe); in intel_compute_sagv_mask()
461 new_bw_state->active_pipes = in intel_compute_sagv_mask()
462 intel_calc_active_pipes(state, old_bw_state->active_pipes); in intel_compute_sagv_mask()
464 if (new_bw_state->active_pipes != old_bw_state->active_pipes) { in intel_compute_sagv_mask()
465 ret = intel_atomic_lock_global_state(&new_bw_state->base); in intel_compute_sagv_mask()
472 ret = intel_atomic_serialize_global_state(&new_bw_state->base); in intel_compute_sagv_mask()
475 } else if (new_bw_state->pipe_sagv_reject != old_bw_state->pipe_sagv_reject) { in intel_compute_sagv_mask()
476 ret = intel_atomic_lock_global_state(&new_bw_state->base); in intel_compute_sagv_mask()
483 struct skl_pipe_wm *pipe_wm = &new_crtc_state->wm.skl.optimal; in intel_compute_sagv_mask()
491 pipe_wm->use_sagv_wm = !HAS_HW_SAGV_WM(i915) && in intel_compute_sagv_mask()
499 static u16 skl_ddb_entry_init(struct skl_ddb_entry *entry, in skl_ddb_entry_init() argument
502 entry->start = start; in skl_ddb_entry_init()
503 entry->end = end; in skl_ddb_entry_init()
510 return DISPLAY_INFO(i915)->dbuf.size / in intel_dbuf_slice_size()
511 hweight8(DISPLAY_INFO(i915)->dbuf.slice_mask); in intel_dbuf_slice_size()
521 ddb->start = 0; in skl_ddb_entry_for_slices()
522 ddb->end = 0; in skl_ddb_entry_for_slices()
526 ddb->start = (ffs(slice_mask) - 1) * slice_size; in skl_ddb_entry_for_slices()
527 ddb->end = fls(slice_mask) * slice_size; in skl_ddb_entry_for_slices()
529 WARN_ON(ddb->start >= ddb->end); in skl_ddb_entry_for_slices()
530 WARN_ON(ddb->end > DISPLAY_INFO(i915)->dbuf.size); in skl_ddb_entry_for_slices()
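[Editor's note] A worked case for the mapping above (illustrative numbers, not from the source): with slice_mask = 0x6 (slices 1 and 2) and slice_size = 1024 blocks, ffs(0x6) = 2 and fls(0x6) = 3, giving ddb->start = (2 - 1) * 1024 = 1024 and ddb->end = 3 * 1024 = 3072.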
548 const struct skl_ddb_entry *entry) in skl_ddb_dbuf_slice_mask() argument
554 if (!skl_ddb_entry_size(entry)) in skl_ddb_dbuf_slice_mask()
557 start_slice = entry->start / slice_size; in skl_ddb_dbuf_slice_mask()
558 end_slice = (entry->end - 1) / slice_size; in skl_ddb_dbuf_slice_mask()
561 * Per plane DDB entry can in the worst case span multiple slices in skl_ddb_dbuf_slice_mask()
562 * but a single entry is always contiguous. in skl_ddb_dbuf_slice_mask()
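[Editor's note] Because the entry is contiguous, the mask built from the inclusive start_slice..end_slice range collapses to one expression; a sketch of the idea (assuming start_slice <= end_slice, which follows from end > start):

	u8 slice_mask = GENMASK(end_slice, start_slice);	/* bits start_slice..end_slice */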
574 const struct drm_display_mode *pipe_mode = &crtc_state->hw.pipe_mode; in intel_crtc_ddb_weight()
577 if (!crtc_state->hw.active) in intel_crtc_ddb_weight()
597 to_i915(dbuf_state->base.state->base.dev); in intel_crtc_dbuf_weights()
605 int weight = dbuf_state->weight[pipe]; in intel_crtc_dbuf_weights()
614 if (dbuf_state->slices[pipe] != dbuf_state->slices[for_pipe]) in intel_crtc_dbuf_weights()
630 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_crtc_allocate_ddb()
638 enum pipe pipe = crtc->pipe; in skl_crtc_allocate_ddb()
645 if (new_dbuf_state->weight[pipe] == 0) { in skl_crtc_allocate_ddb()
646 skl_ddb_entry_init(&new_dbuf_state->ddb[pipe], 0, 0); in skl_crtc_allocate_ddb()
650 dbuf_slice_mask = new_dbuf_state->slices[pipe]; in skl_crtc_allocate_ddb()
662 skl_ddb_entry_init(&new_dbuf_state->ddb[pipe], in skl_crtc_allocate_ddb()
663 ddb_slices.start - mbus_offset + start, in skl_crtc_allocate_ddb()
664 ddb_slices.start - mbus_offset + end); in skl_crtc_allocate_ddb()
667 if (old_dbuf_state->slices[pipe] == new_dbuf_state->slices[pipe] && in skl_crtc_allocate_ddb()
668 skl_ddb_entry_equal(&old_dbuf_state->ddb[pipe], in skl_crtc_allocate_ddb()
669 &new_dbuf_state->ddb[pipe])) in skl_crtc_allocate_ddb()
672 ret = intel_atomic_lock_global_state(&new_dbuf_state->base); in skl_crtc_allocate_ddb()
676 crtc_state = intel_atomic_get_crtc_state(&state->base, crtc); in skl_crtc_allocate_ddb()
684 crtc_state->wm.skl.ddb.start = mbus_offset + new_dbuf_state->ddb[pipe].start; in skl_crtc_allocate_ddb()
685 crtc_state->wm.skl.ddb.end = mbus_offset + new_dbuf_state->ddb[pipe].end; in skl_crtc_allocate_ddb()
687 drm_dbg_kms(&i915->drm, in skl_crtc_allocate_ddb()
688 … "[CRTC:%d:%s] dbuf slices 0x%x -> 0x%x, ddb (%d - %d) -> (%d - %d), active pipes 0x%x -> 0x%x\n", in skl_crtc_allocate_ddb()
689 crtc->base.base.id, crtc->base.name, in skl_crtc_allocate_ddb()
690 old_dbuf_state->slices[pipe], new_dbuf_state->slices[pipe], in skl_crtc_allocate_ddb()
691 old_dbuf_state->ddb[pipe].start, old_dbuf_state->ddb[pipe].end, in skl_crtc_allocate_ddb()
692 new_dbuf_state->ddb[pipe].start, new_dbuf_state->ddb[pipe].end, in skl_crtc_allocate_ddb()
693 old_dbuf_state->active_pipes, new_dbuf_state->active_pipes); in skl_crtc_allocate_ddb()
707 unsigned int latency,
715 unsigned int latency = i915->display.wm.skl_latency[level]; in skl_wm_latency() local
717 if (latency == 0) in skl_wm_latency()
726 latency += 4; in skl_wm_latency()
728 if (skl_needs_memory_bw_wa(i915) && wp && wp->x_tiled) in skl_wm_latency()
729 latency += 15; in skl_wm_latency()
731 return latency; in skl_wm_latency()
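[Editor's note] Worked example of the bumps above (illustrative numbers): a level whose raw skl_latency[] value is 4us, on a platform taking the +4us bump (its condition is elided in this listing) and with the memory bandwidth workaround active on an X-tiled fb, ends up at 4 + 4 + 15 = 23us; a raw 0us value returns early at the line-717 check, leaving the level disabled and skipping both bumps.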
738 struct intel_plane *plane = to_intel_plane(crtc_state->uapi.crtc->cursor); in skl_cursor_allocation()
739 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in skl_cursor_allocation()
749 crtc_state->pixel_rate, &wp, 0); in skl_cursor_allocation()
750 drm_WARN_ON(&i915->drm, ret); in skl_cursor_allocation()
752 for (level = 0; level < i915->display.wm.num_levels; level++) { in skl_cursor_allocation()
753 unsigned int latency = skl_wm_latency(i915, level, &wp); in skl_cursor_allocation() local
755 skl_compute_plane_wm(crtc_state, plane, level, latency, &wp, &wm, &wm); in skl_cursor_allocation()
765 static void skl_ddb_entry_init_from_hw(struct skl_ddb_entry *entry, u32 reg) in skl_ddb_entry_init_from_hw() argument
767 skl_ddb_entry_init(entry, in skl_ddb_entry_init_from_hw()
770 if (entry->end) in skl_ddb_entry_init_from_hw()
771 entry->end++; in skl_ddb_entry_init_from_hw()
804 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_pipe_ddb_get_hw_state()
806 enum pipe pipe = crtc->pipe; in skl_pipe_ddb_get_hw_state()
837 * as is from BSpec itself - that way it is at least easier
900 * as is from BSpec itself - that way it is at least easier
1290 * This function finds an entry with the same enabled pipe configuration and
1305 * still here - we will need it once those additional constraints in icl_compute_dbuf_slices()
1332 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_compute_dbuf_slices()
1333 enum pipe pipe = crtc->pipe; in skl_compute_dbuf_slices()
1354 struct drm_i915_private *i915 = to_i915(plane->base.dev); in use_minimal_wm0_only()
1357 crtc_state->uapi.async_flip && in use_minimal_wm0_only()
1358 plane->async_flip; in use_minimal_wm0_only()
1364 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in skl_total_relative_data_rate()
1365 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_total_relative_data_rate()
1373 data_rate += crtc_state->rel_data_rate[plane_id]; in skl_total_relative_data_rate()
1376 data_rate += crtc_state->rel_data_rate_y[plane_id]; in skl_total_relative_data_rate()
1387 const struct skl_plane_wm *wm = &pipe_wm->planes[plane_id]; in skl_plane_wm_level()
1389 if (level == 0 && pipe_wm->use_sagv_wm) in skl_plane_wm_level()
1390 return &wm->sagv.wm0; in skl_plane_wm_level()
1392 return &wm->wm[level]; in skl_plane_wm_level()
1399 const struct skl_plane_wm *wm = &pipe_wm->planes[plane_id]; in skl_plane_trans_wm()
1401 if (pipe_wm->use_sagv_wm) in skl_plane_trans_wm()
1402 return &wm->sagv.trans_wm; in skl_plane_trans_wm()
1404 return &wm->trans_wm; in skl_plane_trans_wm()
1422 if (wm->min_ddb_alloc > skl_ddb_entry_size(ddb)) in skl_check_wm_level()
1430 if (wm->min_ddb_alloc > skl_ddb_entry_size(ddb_y) || in skl_check_nv12_wm_level()
1431 uv_wm->min_ddb_alloc > skl_ddb_entry_size(ddb)) { in skl_check_nv12_wm_level()
1452 return level > 0 && !wm->wm[level].enable; in skl_need_wm_copy_wa()
1469 extra = min_t(u16, iter->size, in skl_allocate_plane_ddb()
1470 DIV64_U64_ROUND_UP(iter->size * data_rate, in skl_allocate_plane_ddb()
1471 iter->data_rate)); in skl_allocate_plane_ddb()
1472 iter->size -= extra; in skl_allocate_plane_ddb()
1473 iter->data_rate -= data_rate; in skl_allocate_plane_ddb()
1477 * Keep ddb entry of all disabled planes explicitly zeroed in skl_allocate_plane_ddb()
1481 size = wm->min_ddb_alloc + extra; in skl_allocate_plane_ddb()
1483 iter->start = skl_ddb_entry_init(ddb, iter->start, in skl_allocate_plane_ddb()
1484 iter->start + size); in skl_allocate_plane_ddb()
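[Editor's note] A worked split for the rule above (illustrative numbers): with iter->size = 100 blocks left, iter->data_rate = 120 and this plane contributing data_rate = 30, extra = min(100, DIV64_U64_ROUND_UP(100 * 30, 120)) = 25; the plane's entry then spans min_ddb_alloc + 25 blocks, and the iterator continues with size 75 and data_rate 90 for the remaining planes.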
1491 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_crtc_allocate_plane_ddb()
1496 const struct skl_ddb_entry *alloc = &dbuf_state->ddb[crtc->pipe]; in skl_crtc_allocate_plane_ddb()
1497 int num_active = hweight8(dbuf_state->active_pipes); in skl_crtc_allocate_plane_ddb()
1505 memset(crtc_state->wm.skl.plane_ddb, 0, sizeof(crtc_state->wm.skl.plane_ddb)); in skl_crtc_allocate_plane_ddb()
1506 memset(crtc_state->wm.skl.plane_ddb_y, 0, sizeof(crtc_state->wm.skl.plane_ddb_y)); in skl_crtc_allocate_plane_ddb()
1508 if (!crtc_state->hw.active) in skl_crtc_allocate_plane_ddb()
1511 iter.start = alloc->start; in skl_crtc_allocate_plane_ddb()
1519 iter.size -= cursor_size; in skl_crtc_allocate_plane_ddb()
1520 skl_ddb_entry_init(&crtc_state->wm.skl.plane_ddb[PLANE_CURSOR], in skl_crtc_allocate_plane_ddb()
1521 alloc->end - cursor_size, alloc->end); in skl_crtc_allocate_plane_ddb()
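[Editor's note] For example (illustrative numbers): with alloc = (0, 512) and cursor_size = 32, the cursor is pinned to the top of the pipe's range as (480, 512), and iter.size drops by 32 before the remaining planes are divided up.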
1530 for (level = i915->display.wm.num_levels - 1; level >= 0; level--) { in skl_crtc_allocate_plane_ddb()
1534 &crtc_state->wm.skl.optimal.planes[plane_id]; in skl_crtc_allocate_plane_ddb()
1538 &crtc_state->wm.skl.plane_ddb[plane_id]; in skl_crtc_allocate_plane_ddb()
1540 if (wm->wm[level].min_ddb_alloc > skl_ddb_entry_size(ddb)) { in skl_crtc_allocate_plane_ddb()
1541 drm_WARN_ON(&i915->drm, in skl_crtc_allocate_plane_ddb()
1542 wm->wm[level].min_ddb_alloc != U16_MAX); in skl_crtc_allocate_plane_ddb()
1549 blocks += wm->wm[level].min_ddb_alloc; in skl_crtc_allocate_plane_ddb()
1550 blocks += wm->uv_wm[level].min_ddb_alloc; in skl_crtc_allocate_plane_ddb()
1554 iter.size -= blocks; in skl_crtc_allocate_plane_ddb()
1560 drm_dbg_kms(&i915->drm, in skl_crtc_allocate_plane_ddb()
1562 drm_dbg_kms(&i915->drm, "minimum required %d/%d\n", in skl_crtc_allocate_plane_ddb()
1564 return -EINVAL; in skl_crtc_allocate_plane_ddb()
1578 &crtc_state->wm.skl.plane_ddb[plane_id]; in skl_crtc_allocate_plane_ddb()
1580 &crtc_state->wm.skl.plane_ddb_y[plane_id]; in skl_crtc_allocate_plane_ddb()
1582 &crtc_state->wm.skl.optimal.planes[plane_id]; in skl_crtc_allocate_plane_ddb()
1588 crtc_state->nv12_planes & BIT(plane_id)) { in skl_crtc_allocate_plane_ddb()
1589 skl_allocate_plane_ddb(&iter, ddb_y, &wm->wm[level], in skl_crtc_allocate_plane_ddb()
1590 crtc_state->rel_data_rate_y[plane_id]); in skl_crtc_allocate_plane_ddb()
1591 skl_allocate_plane_ddb(&iter, ddb, &wm->uv_wm[level], in skl_crtc_allocate_plane_ddb()
1592 crtc_state->rel_data_rate[plane_id]); in skl_crtc_allocate_plane_ddb()
1594 skl_allocate_plane_ddb(&iter, ddb, &wm->wm[level], in skl_crtc_allocate_plane_ddb()
1595 crtc_state->rel_data_rate[plane_id]); in skl_crtc_allocate_plane_ddb()
1598 drm_WARN_ON(&i915->drm, iter.size != 0 || iter.data_rate != 0); in skl_crtc_allocate_plane_ddb()
1606 for (level++; level < i915->display.wm.num_levels; level++) { in skl_crtc_allocate_plane_ddb()
1609 &crtc_state->wm.skl.plane_ddb[plane_id]; in skl_crtc_allocate_plane_ddb()
1611 &crtc_state->wm.skl.plane_ddb_y[plane_id]; in skl_crtc_allocate_plane_ddb()
1613 &crtc_state->wm.skl.optimal.planes[plane_id]; in skl_crtc_allocate_plane_ddb()
1616 crtc_state->nv12_planes & BIT(plane_id)) in skl_crtc_allocate_plane_ddb()
1617 skl_check_nv12_wm_level(&wm->wm[level], in skl_crtc_allocate_plane_ddb()
1618 &wm->uv_wm[level], in skl_crtc_allocate_plane_ddb()
1621 skl_check_wm_level(&wm->wm[level], ddb); in skl_crtc_allocate_plane_ddb()
1624 wm->wm[level].blocks = wm->wm[level - 1].blocks; in skl_crtc_allocate_plane_ddb()
1625 wm->wm[level].lines = wm->wm[level - 1].lines; in skl_crtc_allocate_plane_ddb()
1626 wm->wm[level].ignore_lines = wm->wm[level - 1].ignore_lines; in skl_crtc_allocate_plane_ddb()
1637 &crtc_state->wm.skl.plane_ddb[plane_id]; in skl_crtc_allocate_plane_ddb()
1639 &crtc_state->wm.skl.plane_ddb_y[plane_id]; in skl_crtc_allocate_plane_ddb()
1641 &crtc_state->wm.skl.optimal.planes[plane_id]; in skl_crtc_allocate_plane_ddb()
1644 crtc_state->nv12_planes & BIT(plane_id)) { in skl_crtc_allocate_plane_ddb()
1645 skl_check_wm_level(&wm->trans_wm, ddb_y); in skl_crtc_allocate_plane_ddb()
1649 skl_check_wm_level(&wm->trans_wm, ddb); in skl_crtc_allocate_plane_ddb()
1652 skl_check_wm_level(&wm->sagv.wm0, ddb); in skl_crtc_allocate_plane_ddb()
1653 skl_check_wm_level(&wm->sagv.trans_wm, ddb); in skl_crtc_allocate_plane_ddb()
1660 * The max latency should be 257 (the max the punit can encode is 255 and we add 2us
1661 * for the read latency) and cpp should always be <= 8, so that
1667 u8 cpp, u32 latency, u32 dbuf_block_size) in skl_wm_method1() argument
1672 if (latency == 0) in skl_wm_method1()
1675 wm_intermediate_val = latency * pixel_rate * cpp; in skl_wm_method1()
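[Editor's note] A back-of-envelope bound for the u32 intermediate above (an editor's check, not from the source): with latency <= 257 and cpp <= 8 the fixed multiplier is at most 257 * 8 = 2056, and U32_MAX / 2056 ~= 2.09 * 10^6, so any pixel_rate below roughly 2.09 GHz (the value is in kHz) keeps latency * pixel_rate * cpp within 32 bits.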
1685 skl_wm_method2(u32 pixel_rate, u32 pipe_htotal, u32 latency, in skl_wm_method2() argument
1691 if (latency == 0) in skl_wm_method2()
1694 wm_intermediate_val = latency * pixel_rate; in skl_wm_method2()
1704 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in intel_get_linetime_us()
1709 if (!crtc_state->hw.active) in intel_get_linetime_us()
1712 pixel_rate = crtc_state->pixel_rate; in intel_get_linetime_us()
1714 if (drm_WARN_ON(&i915->drm, pixel_rate == 0)) in intel_get_linetime_us()
1717 crtc_htotal = crtc_state->hw.pipe_mode.crtc_htotal; in intel_get_linetime_us()
1730 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in skl_compute_wm_params()
1731 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_compute_wm_params()
1737 drm_dbg_kms(&i915->drm, in skl_compute_wm_params()
1739 return -EINVAL; in skl_compute_wm_params()
1742 wp->x_tiled = modifier == I915_FORMAT_MOD_X_TILED; in skl_compute_wm_params()
1743 wp->y_tiled = modifier != I915_FORMAT_MOD_X_TILED && in skl_compute_wm_params()
1745 wp->rc_surface = intel_fb_is_ccs_modifier(modifier); in skl_compute_wm_params()
1746 wp->is_planar = intel_format_info_is_yuv_semiplanar(format, modifier); in skl_compute_wm_params()
1748 wp->width = width; in skl_compute_wm_params()
1749 if (color_plane == 1 && wp->is_planar) in skl_compute_wm_params()
1750 wp->width /= 2; in skl_compute_wm_params()
1752 wp->cpp = format->cpp[color_plane]; in skl_compute_wm_params()
1753 wp->plane_pixel_rate = plane_pixel_rate; in skl_compute_wm_params()
1756 modifier == I915_FORMAT_MOD_Yf_TILED && wp->cpp == 1) in skl_compute_wm_params()
1757 wp->dbuf_block_size = 256; in skl_compute_wm_params()
1759 wp->dbuf_block_size = 512; in skl_compute_wm_params()
1762 switch (wp->cpp) { in skl_compute_wm_params()
1764 wp->y_min_scanlines = 16; in skl_compute_wm_params()
1767 wp->y_min_scanlines = 8; in skl_compute_wm_params()
1770 wp->y_min_scanlines = 4; in skl_compute_wm_params()
1773 MISSING_CASE(wp->cpp); in skl_compute_wm_params()
1774 return -EINVAL; in skl_compute_wm_params()
1777 wp->y_min_scanlines = 4; in skl_compute_wm_params()
1781 wp->y_min_scanlines *= 2; in skl_compute_wm_params()
1783 wp->plane_bytes_per_line = wp->width * wp->cpp; in skl_compute_wm_params()
1784 if (wp->y_tiled) { in skl_compute_wm_params()
1785 interm_pbpl = DIV_ROUND_UP(wp->plane_bytes_per_line * in skl_compute_wm_params()
1786 wp->y_min_scanlines, in skl_compute_wm_params()
1787 wp->dbuf_block_size); in skl_compute_wm_params()
1792 wp->plane_blocks_per_line = div_fixed16(interm_pbpl, in skl_compute_wm_params()
1793 wp->y_min_scanlines); in skl_compute_wm_params()
1795 interm_pbpl = DIV_ROUND_UP(wp->plane_bytes_per_line, in skl_compute_wm_params()
1796 wp->dbuf_block_size); in skl_compute_wm_params()
1798 if (!wp->x_tiled || DISPLAY_VER(i915) >= 10) in skl_compute_wm_params()
1801 wp->plane_blocks_per_line = u32_to_fixed16(interm_pbpl); in skl_compute_wm_params()
1804 wp->y_tile_minimum = mul_u32_fixed16(wp->y_min_scanlines, in skl_compute_wm_params()
1805 wp->plane_blocks_per_line); in skl_compute_wm_params()
1807 wp->linetime_us = fixed16_to_u32_round_up(intel_get_linetime_us(crtc_state)); in skl_compute_wm_params()
1817 const struct drm_framebuffer *fb = plane_state->hw.fb; in skl_compute_plane_wm_params()
1825 width = drm_rect_width(&plane_state->uapi.src) >> 16; in skl_compute_plane_wm_params()
1828 fb->format, fb->modifier, in skl_compute_plane_wm_params()
1829 plane_state->hw.rotation, in skl_compute_plane_wm_params()
1854 unsigned int latency, in skl_compute_plane_wm() argument
1859 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in skl_compute_plane_wm()
1864 if (latency == 0 || in skl_compute_plane_wm()
1867 result->min_ddb_alloc = U16_MAX; in skl_compute_plane_wm()
1871 method1 = skl_wm_method1(i915, wp->plane_pixel_rate, in skl_compute_plane_wm()
1872 wp->cpp, latency, wp->dbuf_block_size); in skl_compute_plane_wm()
1873 method2 = skl_wm_method2(wp->plane_pixel_rate, in skl_compute_plane_wm()
1874 crtc_state->hw.pipe_mode.crtc_htotal, in skl_compute_plane_wm()
1875 latency, in skl_compute_plane_wm()
1876 wp->plane_blocks_per_line); in skl_compute_plane_wm()
1878 if (wp->y_tiled) { in skl_compute_plane_wm()
1879 selected_result = max_fixed16(method2, wp->y_tile_minimum); in skl_compute_plane_wm()
1881 if ((wp->cpp * crtc_state->hw.pipe_mode.crtc_htotal / in skl_compute_plane_wm()
1882 wp->dbuf_block_size < 1) && in skl_compute_plane_wm()
1883 (wp->plane_bytes_per_line / wp->dbuf_block_size < 1)) { in skl_compute_plane_wm()
1885 } else if (latency >= wp->linetime_us) { in skl_compute_plane_wm()
1909 * channels' impact on the level 0 memory latency and the relevant in skl_compute_plane_wm()
1914 fixed16_to_u32_round_up(wp->plane_blocks_per_line)); in skl_compute_plane_wm()
1916 wp->plane_blocks_per_line); in skl_compute_plane_wm()
1920 if (level == 0 && wp->rc_surface) in skl_compute_plane_wm()
1921 blocks += fixed16_to_u32_round_up(wp->y_tile_minimum); in skl_compute_plane_wm()
1925 if (wp->y_tiled) { in skl_compute_plane_wm()
1926 blocks += fixed16_to_u32_round_up(wp->y_tile_minimum); in skl_compute_plane_wm()
1927 lines += wp->y_min_scanlines; in skl_compute_plane_wm()
1933 * Make sure result blocks for higher latency levels are in skl_compute_plane_wm()
1938 if (result_prev->blocks > blocks) in skl_compute_plane_wm()
1939 blocks = result_prev->blocks; in skl_compute_plane_wm()
1944 if (wp->y_tiled) { in skl_compute_plane_wm()
1947 if (lines % wp->y_min_scanlines == 0) in skl_compute_plane_wm()
1948 extra_lines = wp->y_min_scanlines; in skl_compute_plane_wm()
1950 extra_lines = wp->y_min_scanlines * 2 - in skl_compute_plane_wm()
1951 lines % wp->y_min_scanlines; in skl_compute_plane_wm()
1954 wp->plane_blocks_per_line); in skl_compute_plane_wm()
1965 result->min_ddb_alloc = U16_MAX; in skl_compute_plane_wm()
1975 result->blocks = blocks; in skl_compute_plane_wm()
1976 result->lines = lines; in skl_compute_plane_wm()
1977 /* Bspec says: value >= plane ddb allocation -> invalid, hence the +1 here */ in skl_compute_plane_wm()
1978 result->min_ddb_alloc = max(min_ddb_alloc, blocks) + 1; in skl_compute_plane_wm()
1979 result->enable = true; in skl_compute_plane_wm()
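[Editor's note] For example (illustrative numbers): blocks = 31 and min_ddb_alloc = 28 yields result->min_ddb_alloc = max(28, 31) + 1 = 32, since per the quoted BSpec rule a watermark value merely equal to the plane's ddb allocation is already invalid.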
1981 if (DISPLAY_VER(i915) < 12 && i915->display.sagv.block_time_us) in skl_compute_plane_wm()
1982 result->can_sagv = latency >= i915->display.sagv.block_time_us; in skl_compute_plane_wm()
1991 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in skl_compute_wm_levels()
1995 for (level = 0; level < i915->display.wm.num_levels; level++) { in skl_compute_wm_levels()
1997 unsigned int latency = skl_wm_latency(i915, level, wm_params); in skl_compute_wm_levels() local
1999 skl_compute_plane_wm(crtc_state, plane, level, latency, in skl_compute_wm_levels()
2011 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); in tgl_compute_sagv_wm()
2012 struct skl_wm_level *sagv_wm = &plane_wm->sagv.wm0; in tgl_compute_sagv_wm()
2013 struct skl_wm_level *levels = plane_wm->wm; in tgl_compute_sagv_wm()
2014 unsigned int latency = 0; in tgl_compute_sagv_wm() local
2016 if (i915->display.sagv.block_time_us) in tgl_compute_sagv_wm()
2017 latency = i915->display.sagv.block_time_us + in tgl_compute_sagv_wm()
2020 skl_compute_plane_wm(crtc_state, plane, 0, latency, in tgl_compute_sagv_wm()
2060 * letters. The value wm_l0->blocks is actually Result Blocks, but in skl_compute_transition_wm()
2067 wm0_blocks = wm0->blocks - 1; in skl_compute_transition_wm()
2069 if (wp->y_tiled) { in skl_compute_transition_wm()
2071 (u16)mul_round_up_u32_fixed16(2, wp->y_tile_minimum); in skl_compute_transition_wm()
2083 trans_wm->blocks = blocks; in skl_compute_transition_wm()
2084 trans_wm->min_ddb_alloc = max_t(u16, wm0->min_ddb_alloc, blocks + 1); in skl_compute_transition_wm()
2085 trans_wm->enable = true; in skl_compute_transition_wm()
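[Editor's note] For instance (illustrative numbers): with wm0->min_ddb_alloc = 12 and computed transition blocks = 15, trans_wm->min_ddb_alloc = max(12, 15 + 1) = 16, keeping the transition watermark's reservation at least one block above its block count, mirroring the +1 rule at line 1978.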
2092 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in skl_build_plane_wm_single()
2093 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_build_plane_wm_single()
2094 struct skl_plane_wm *wm = &crtc_state->wm.skl.raw.planes[plane->id]; in skl_build_plane_wm_single()
2103 skl_compute_wm_levels(crtc_state, plane, &wm_params, wm->wm); in skl_build_plane_wm_single()
2105 skl_compute_transition_wm(i915, &wm->trans_wm, in skl_build_plane_wm_single()
2106 &wm->wm[0], &wm_params); in skl_build_plane_wm_single()
2111 skl_compute_transition_wm(i915, &wm->sagv.trans_wm, in skl_build_plane_wm_single()
2112 &wm->sagv.wm0, &wm_params); in skl_build_plane_wm_single()
2122 struct skl_plane_wm *wm = &crtc_state->wm.skl.raw.planes[plane->id]; in skl_build_plane_wm_uv()
2126 wm->is_planar = true; in skl_build_plane_wm_uv()
2134 skl_compute_wm_levels(crtc_state, plane, &wm_params, wm->uv_wm); in skl_build_plane_wm_uv()
2142 struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane); in skl_build_plane_wm()
2143 enum plane_id plane_id = plane->id; in skl_build_plane_wm()
2144 struct skl_plane_wm *wm = &crtc_state->wm.skl.raw.planes[plane_id]; in skl_build_plane_wm()
2145 const struct drm_framebuffer *fb = plane_state->hw.fb; in skl_build_plane_wm()
2158 if (fb->format->is_yuv && fb->format->num_planes > 1) { in skl_build_plane_wm()
2171 struct intel_plane *plane = to_intel_plane(plane_state->uapi.plane); in icl_build_plane_wm()
2172 struct drm_i915_private *i915 = to_i915(plane->base.dev); in icl_build_plane_wm()
2173 enum plane_id plane_id = plane->id; in icl_build_plane_wm()
2174 struct skl_plane_wm *wm = &crtc_state->wm.skl.raw.planes[plane_id]; in icl_build_plane_wm()
2178 if (plane_state->planar_slave) in icl_build_plane_wm()
2183 if (plane_state->planar_linked_plane) { in icl_build_plane_wm()
2184 const struct drm_framebuffer *fb = plane_state->hw.fb; in icl_build_plane_wm()
2186 drm_WARN_ON(&i915->drm, in icl_build_plane_wm()
2188 drm_WARN_ON(&i915->drm, !fb->format->is_yuv || in icl_build_plane_wm()
2189 fb->format->num_planes == 1); in icl_build_plane_wm()
2192 plane_state->planar_linked_plane, 0); in icl_build_plane_wm()
2212 int wm0_lines, int latency) in skl_is_vblank_too_short() argument
2215 &crtc_state->hw.adjusted_mode; in skl_is_vblank_too_short()
2217 /* FIXME missing scaler and DSC pre-fill time */ in skl_is_vblank_too_short()
2218 return crtc_state->framestart_delay + in skl_is_vblank_too_short()
2219 intel_usecs_to_scanlines(adjusted_mode, latency) + in skl_is_vblank_too_short()
2221 adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vblank_start; in skl_is_vblank_too_short()
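[Editor's note] A worked instance (illustrative numbers, assuming intel_usecs_to_scanlines() rounds up as usecs * crtc_clock / (1000 * crtc_htotal)): for 1920x1080@60 (crtc_clock 148500 kHz, crtc_htotal 2200, crtc_vtotal 1125, crtc_vblank_start 1084), a 20us latency is ceil(20 * 148500 / 2200000) = 2 scanlines, so framestart_delay = 1 plus 2 plus wm0_lines = 4 gives 7, well under the 1125 - 1084 = 41 vblank lines, i.e. not too short.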
2226 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in skl_max_wm0_lines()
2231 const struct skl_plane_wm *wm = &crtc_state->wm.skl.optimal.planes[plane_id]; in skl_max_wm0_lines()
2234 wm0_lines = max_t(int, wm0_lines, wm->wm[0].lines); in skl_max_wm0_lines()
2243 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in skl_max_wm_level_for_vblank()
2244 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_max_wm_level_for_vblank()
2247 for (level = i915->display.wm.num_levels - 1; level >= 0; level--) { in skl_max_wm_level_for_vblank()
2248 int latency; in skl_max_wm_level_for_vblank() local
2250 /* FIXME should we care about the latency w/a's? */ in skl_max_wm_level_for_vblank()
2251 latency = skl_wm_latency(i915, level, NULL); in skl_max_wm_level_for_vblank()
2252 if (latency == 0) in skl_max_wm_level_for_vblank()
2255 /* FIXME is it correct to use 0 latency for wm0 here? */ in skl_max_wm_level_for_vblank()
2257 latency = 0; in skl_max_wm_level_for_vblank()
2259 if (!skl_is_vblank_too_short(crtc_state, wm0_lines, latency)) in skl_max_wm_level_for_vblank()
2263 return -EINVAL; in skl_max_wm_level_for_vblank()
2268 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); in skl_wm_check_vblank()
2269 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_wm_check_vblank()
2272 if (!crtc_state->hw.active) in skl_wm_check_vblank()
2285 crtc_state->wm_level_disabled = level < i915->display.wm.num_levels - 1; in skl_wm_check_vblank()
2287 for (level++; level < i915->display.wm.num_levels; level++) { in skl_wm_check_vblank()
2292 &crtc_state->wm.skl.optimal.planes[plane_id]; in skl_wm_check_vblank()
2298 wm->wm[level].enable = false; in skl_wm_check_vblank()
2299 wm->uv_wm[level].enable = false; in skl_wm_check_vblank()
2304 i915->display.sagv.block_time_us && in skl_wm_check_vblank()
2306 i915->display.sagv.block_time_us)) { in skl_wm_check_vblank()
2311 &crtc_state->wm.skl.optimal.planes[plane_id]; in skl_wm_check_vblank()
2313 wm->sagv.wm0.enable = false; in skl_wm_check_vblank()
2314 wm->sagv.trans_wm.enable = false; in skl_wm_check_vblank()
2324 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_build_pipe_wm()
2333 * FIXME should perhaps check {old,new}_plane_crtc->hw.crtc in skl_build_pipe_wm()
2337 if (plane->pipe != crtc->pipe) in skl_build_pipe_wm()
2348 crtc_state->wm.skl.optimal = crtc_state->wm.skl.raw; in skl_build_pipe_wm()
2355 const struct skl_ddb_entry *entry) in skl_ddb_entry_write() argument
2357 if (entry->end) in skl_ddb_entry_write()
2359 PLANE_BUF_END(entry->end - 1) | in skl_ddb_entry_write()
2360 PLANE_BUF_START(entry->start)); in skl_ddb_entry_write()
2371 if (level->enable) in skl_write_wm_level()
2373 if (level->ignore_lines) in skl_write_wm_level()
2375 val |= REG_FIELD_PREP(PLANE_WM_BLOCKS_MASK, level->blocks); in skl_write_wm_level()
2376 val |= REG_FIELD_PREP(PLANE_WM_LINES_MASK, level->lines); in skl_write_wm_level()
2384 struct drm_i915_private *i915 = to_i915(plane->base.dev); in skl_write_plane_wm()
2385 enum plane_id plane_id = plane->id; in skl_write_plane_wm()
2386 enum pipe pipe = plane->pipe; in skl_write_plane_wm()
2387 const struct skl_pipe_wm *pipe_wm = &crtc_state->wm.skl.optimal; in skl_write_plane_wm()
2389 &crtc_state->wm.skl.plane_ddb[plane_id]; in skl_write_plane_wm()
2391 &crtc_state->wm.skl.plane_ddb_y[plane_id]; in skl_write_plane_wm()
2394 for (level = 0; level < i915->display.wm.num_levels; level++) in skl_write_plane_wm()
2402 const struct skl_plane_wm *wm = &pipe_wm->planes[plane_id]; in skl_write_plane_wm()
2405 &wm->sagv.wm0); in skl_write_plane_wm()
2407 &wm->sagv.trans_wm); in skl_write_plane_wm()
2421 struct drm_i915_private *i915 = to_i915(plane->base.dev); in skl_write_cursor_wm()
2422 enum plane_id plane_id = plane->id; in skl_write_cursor_wm()
2423 enum pipe pipe = plane->pipe; in skl_write_cursor_wm()
2424 const struct skl_pipe_wm *pipe_wm = &crtc_state->wm.skl.optimal; in skl_write_cursor_wm()
2426 &crtc_state->wm.skl.plane_ddb[plane_id]; in skl_write_cursor_wm()
2429 for (level = 0; level < i915->display.wm.num_levels; level++) in skl_write_cursor_wm()
2437 const struct skl_plane_wm *wm = &pipe_wm->planes[plane_id]; in skl_write_cursor_wm()
2440 &wm->sagv.wm0); in skl_write_cursor_wm()
2442 &wm->sagv.trans_wm); in skl_write_cursor_wm()
2451 return l1->enable == l2->enable && in skl_wm_level_equals()
2452 l1->ignore_lines == l2->ignore_lines && in skl_wm_level_equals()
2453 l1->lines == l2->lines && in skl_wm_level_equals()
2454 l1->blocks == l2->blocks; in skl_wm_level_equals()
2463 for (level = 0; level < i915->display.wm.num_levels; level++) { in skl_plane_wm_equals()
2469 if (!skl_wm_level_equals(&wm1->wm[level], &wm2->wm[level])) in skl_plane_wm_equals()
2473 return skl_wm_level_equals(&wm1->trans_wm, &wm2->trans_wm) && in skl_plane_wm_equals()
2474 skl_wm_level_equals(&wm1->sagv.wm0, &wm2->sagv.wm0) && in skl_plane_wm_equals()
2475 skl_wm_level_equals(&wm1->sagv.trans_wm, &wm2->sagv.trans_wm); in skl_plane_wm_equals()
2481 return a->start < b->end && b->start < a->end; in skl_ddb_entries_overlap()
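[Editor's note] Note the semantics this test implies: entries that merely touch do not overlap, e.g. a = (0, 512) and b = (512, 1024) fail because b->start < a->end evaluates as 512 < 512.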
2487 if (a->end && b->end) { in skl_ddb_entry_union()
2488 a->start = min(a->start, b->start); in skl_ddb_entry_union()
2489 a->end = max(a->end, b->end); in skl_ddb_entry_union()
2490 } else if (b->end) { in skl_ddb_entry_union()
2491 a->start = b->start; in skl_ddb_entry_union()
2492 a->end = b->end; in skl_ddb_entry_union()
2515 struct intel_atomic_state *state = to_intel_atomic_state(new_crtc_state->uapi.state); in skl_ddb_add_affected_planes()
2516 struct intel_crtc *crtc = to_intel_crtc(new_crtc_state->uapi.crtc); in skl_ddb_add_affected_planes()
2517 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_ddb_add_affected_planes()
2520 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in skl_ddb_add_affected_planes()
2522 enum plane_id plane_id = plane->id; in skl_ddb_add_affected_planes()
2524 if (skl_ddb_entry_equal(&old_crtc_state->wm.skl.plane_ddb[plane_id], in skl_ddb_add_affected_planes()
2525 &new_crtc_state->wm.skl.plane_ddb[plane_id]) && in skl_ddb_add_affected_planes()
2526 skl_ddb_entry_equal(&old_crtc_state->wm.skl.plane_ddb_y[plane_id], in skl_ddb_add_affected_planes()
2527 &new_crtc_state->wm.skl.plane_ddb_y[plane_id])) in skl_ddb_add_affected_planes()
2534 new_crtc_state->update_planes |= BIT(plane_id); in skl_ddb_add_affected_planes()
2535 new_crtc_state->async_flip_planes = 0; in skl_ddb_add_affected_planes()
2536 new_crtc_state->do_async_flip = false; in skl_ddb_add_affected_planes()
2544 struct drm_i915_private *i915 = to_i915(dbuf_state->base.state->base.dev); in intel_dbuf_enabled_slices()
2555 enabled_slices |= dbuf_state->slices[pipe]; in intel_dbuf_enabled_slices()
2563 struct drm_i915_private *i915 = to_i915(state->base.dev); in skl_compute_ddb()
2583 new_dbuf_state->active_pipes = in skl_compute_ddb()
2584 intel_calc_active_pipes(state, old_dbuf_state->active_pipes); in skl_compute_ddb()
2586 if (old_dbuf_state->active_pipes != new_dbuf_state->active_pipes) { in skl_compute_ddb()
2587 ret = intel_atomic_lock_global_state(&new_dbuf_state->base); in skl_compute_ddb()
2593 new_dbuf_state->joined_mbus = in skl_compute_ddb()
2594 adlp_check_mbus_joined(new_dbuf_state->active_pipes); in skl_compute_ddb()
2596 for_each_intel_crtc(&i915->drm, crtc) { in skl_compute_ddb()
2597 enum pipe pipe = crtc->pipe; in skl_compute_ddb()
2599 new_dbuf_state->slices[pipe] = in skl_compute_ddb()
2600 skl_compute_dbuf_slices(crtc, new_dbuf_state->active_pipes, in skl_compute_ddb()
2601 new_dbuf_state->joined_mbus); in skl_compute_ddb()
2603 if (old_dbuf_state->slices[pipe] == new_dbuf_state->slices[pipe]) in skl_compute_ddb()
2606 ret = intel_atomic_lock_global_state(&new_dbuf_state->base); in skl_compute_ddb()
2611 new_dbuf_state->enabled_slices = intel_dbuf_enabled_slices(new_dbuf_state); in skl_compute_ddb()
2613 if (old_dbuf_state->enabled_slices != new_dbuf_state->enabled_slices || in skl_compute_ddb()
2614 old_dbuf_state->joined_mbus != new_dbuf_state->joined_mbus) { in skl_compute_ddb()
2615 ret = intel_atomic_serialize_global_state(&new_dbuf_state->base); in skl_compute_ddb()
2619 if (old_dbuf_state->joined_mbus != new_dbuf_state->joined_mbus) { in skl_compute_ddb()
2626 drm_dbg_kms(&i915->drm, in skl_compute_ddb()
2627 "Enabled dbuf slices 0x%x -> 0x%x (total dbuf slices 0x%x), mbus joined? %s->%s\n", in skl_compute_ddb()
2628 old_dbuf_state->enabled_slices, in skl_compute_ddb()
2629 new_dbuf_state->enabled_slices, in skl_compute_ddb()
2630 DISPLAY_INFO(i915)->dbuf.slice_mask, in skl_compute_ddb()
2631 str_yes_no(old_dbuf_state->joined_mbus), in skl_compute_ddb()
2632 str_yes_no(new_dbuf_state->joined_mbus)); in skl_compute_ddb()
2636 enum pipe pipe = crtc->pipe; in skl_compute_ddb()
2638 new_dbuf_state->weight[pipe] = intel_crtc_ddb_weight(new_crtc_state); in skl_compute_ddb()
2640 if (old_dbuf_state->weight[pipe] == new_dbuf_state->weight[pipe]) in skl_compute_ddb()
2643 ret = intel_atomic_lock_global_state(&new_dbuf_state->base); in skl_compute_ddb()
2648 for_each_intel_crtc(&i915->drm, crtc) { in skl_compute_ddb()
2677 struct drm_i915_private *i915 = to_i915(state->base.dev); in skl_print_wm_changes()
2691 old_pipe_wm = &old_crtc_state->wm.skl.optimal; in skl_print_wm_changes()
2692 new_pipe_wm = &new_crtc_state->wm.skl.optimal; in skl_print_wm_changes()
2694 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in skl_print_wm_changes()
2695 enum plane_id plane_id = plane->id; in skl_print_wm_changes()
2698 old = &old_crtc_state->wm.skl.plane_ddb[plane_id]; in skl_print_wm_changes()
2699 new = &new_crtc_state->wm.skl.plane_ddb[plane_id]; in skl_print_wm_changes()
2704 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2705 "[PLANE:%d:%s] ddb (%4d - %4d) -> (%4d - %4d), size %4d -> %4d\n", in skl_print_wm_changes()
2706 plane->base.base.id, plane->base.name, in skl_print_wm_changes()
2707 old->start, old->end, new->start, new->end, in skl_print_wm_changes()
2711 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in skl_print_wm_changes()
2712 enum plane_id plane_id = plane->id; in skl_print_wm_changes()
2715 old_wm = &old_pipe_wm->planes[plane_id]; in skl_print_wm_changes()
2716 new_wm = &new_pipe_wm->planes[plane_id]; in skl_print_wm_changes()
2721 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2723 " -> %cwm0,%cwm1,%cwm2,%cwm3,%cwm4,%cwm5,%cwm6,%cwm7,%ctwm,%cswm,%cstwm\n", in skl_print_wm_changes()
2724 plane->base.base.id, plane->base.name, in skl_print_wm_changes()
2725 enast(old_wm->wm[0].enable), enast(old_wm->wm[1].enable), in skl_print_wm_changes()
2726 enast(old_wm->wm[2].enable), enast(old_wm->wm[3].enable), in skl_print_wm_changes()
2727 enast(old_wm->wm[4].enable), enast(old_wm->wm[5].enable), in skl_print_wm_changes()
2728 enast(old_wm->wm[6].enable), enast(old_wm->wm[7].enable), in skl_print_wm_changes()
2729 enast(old_wm->trans_wm.enable), in skl_print_wm_changes()
2730 enast(old_wm->sagv.wm0.enable), in skl_print_wm_changes()
2731 enast(old_wm->sagv.trans_wm.enable), in skl_print_wm_changes()
2732 enast(new_wm->wm[0].enable), enast(new_wm->wm[1].enable), in skl_print_wm_changes()
2733 enast(new_wm->wm[2].enable), enast(new_wm->wm[3].enable), in skl_print_wm_changes()
2734 enast(new_wm->wm[4].enable), enast(new_wm->wm[5].enable), in skl_print_wm_changes()
2735 enast(new_wm->wm[6].enable), enast(new_wm->wm[7].enable), in skl_print_wm_changes()
2736 enast(new_wm->trans_wm.enable), in skl_print_wm_changes()
2737 enast(new_wm->sagv.wm0.enable), in skl_print_wm_changes()
2738 enast(new_wm->sagv.trans_wm.enable)); in skl_print_wm_changes()
2740 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2742 " -> %c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%3d,%c%4d\n", in skl_print_wm_changes()
2743 plane->base.base.id, plane->base.name, in skl_print_wm_changes()
2744 enast(old_wm->wm[0].ignore_lines), old_wm->wm[0].lines, in skl_print_wm_changes()
2745 enast(old_wm->wm[1].ignore_lines), old_wm->wm[1].lines, in skl_print_wm_changes()
2746 enast(old_wm->wm[2].ignore_lines), old_wm->wm[2].lines, in skl_print_wm_changes()
2747 enast(old_wm->wm[3].ignore_lines), old_wm->wm[3].lines, in skl_print_wm_changes()
2748 enast(old_wm->wm[4].ignore_lines), old_wm->wm[4].lines, in skl_print_wm_changes()
2749 enast(old_wm->wm[5].ignore_lines), old_wm->wm[5].lines, in skl_print_wm_changes()
2750 enast(old_wm->wm[6].ignore_lines), old_wm->wm[6].lines, in skl_print_wm_changes()
2751 enast(old_wm->wm[7].ignore_lines), old_wm->wm[7].lines, in skl_print_wm_changes()
2752 enast(old_wm->trans_wm.ignore_lines), old_wm->trans_wm.lines, in skl_print_wm_changes()
2753 enast(old_wm->sagv.wm0.ignore_lines), old_wm->sagv.wm0.lines, in skl_print_wm_changes()
2754 enast(old_wm->sagv.trans_wm.ignore_lines), old_wm->sagv.trans_wm.lines, in skl_print_wm_changes()
2755 enast(new_wm->wm[0].ignore_lines), new_wm->wm[0].lines, in skl_print_wm_changes()
2756 enast(new_wm->wm[1].ignore_lines), new_wm->wm[1].lines, in skl_print_wm_changes()
2757 enast(new_wm->wm[2].ignore_lines), new_wm->wm[2].lines, in skl_print_wm_changes()
2758 enast(new_wm->wm[3].ignore_lines), new_wm->wm[3].lines, in skl_print_wm_changes()
2759 enast(new_wm->wm[4].ignore_lines), new_wm->wm[4].lines, in skl_print_wm_changes()
2760 enast(new_wm->wm[5].ignore_lines), new_wm->wm[5].lines, in skl_print_wm_changes()
2761 enast(new_wm->wm[6].ignore_lines), new_wm->wm[6].lines, in skl_print_wm_changes()
2762 enast(new_wm->wm[7].ignore_lines), new_wm->wm[7].lines, in skl_print_wm_changes()
2763 enast(new_wm->trans_wm.ignore_lines), new_wm->trans_wm.lines, in skl_print_wm_changes()
2764 enast(new_wm->sagv.wm0.ignore_lines), new_wm->sagv.wm0.lines, in skl_print_wm_changes()
2765 enast(new_wm->sagv.trans_wm.ignore_lines), new_wm->sagv.trans_wm.lines); in skl_print_wm_changes()
2767 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2769 " -> %4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%5d\n", in skl_print_wm_changes()
2770 plane->base.base.id, plane->base.name, in skl_print_wm_changes()
2771 old_wm->wm[0].blocks, old_wm->wm[1].blocks, in skl_print_wm_changes()
2772 old_wm->wm[2].blocks, old_wm->wm[3].blocks, in skl_print_wm_changes()
2773 old_wm->wm[4].blocks, old_wm->wm[5].blocks, in skl_print_wm_changes()
2774 old_wm->wm[6].blocks, old_wm->wm[7].blocks, in skl_print_wm_changes()
2775 old_wm->trans_wm.blocks, in skl_print_wm_changes()
2776 old_wm->sagv.wm0.blocks, in skl_print_wm_changes()
2777 old_wm->sagv.trans_wm.blocks, in skl_print_wm_changes()
2778 new_wm->wm[0].blocks, new_wm->wm[1].blocks, in skl_print_wm_changes()
2779 new_wm->wm[2].blocks, new_wm->wm[3].blocks, in skl_print_wm_changes()
2780 new_wm->wm[4].blocks, new_wm->wm[5].blocks, in skl_print_wm_changes()
2781 new_wm->wm[6].blocks, new_wm->wm[7].blocks, in skl_print_wm_changes()
2782 new_wm->trans_wm.blocks, in skl_print_wm_changes()
2783 new_wm->sagv.wm0.blocks, in skl_print_wm_changes()
2784 new_wm->sagv.trans_wm.blocks); in skl_print_wm_changes()
2786 drm_dbg_kms(&i915->drm, in skl_print_wm_changes()
2788 " -> %4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%4d,%5d\n", in skl_print_wm_changes()
2789 plane->base.base.id, plane->base.name, in skl_print_wm_changes()
2790 old_wm->wm[0].min_ddb_alloc, old_wm->wm[1].min_ddb_alloc, in skl_print_wm_changes()
2791 old_wm->wm[2].min_ddb_alloc, old_wm->wm[3].min_ddb_alloc, in skl_print_wm_changes()
2792 old_wm->wm[4].min_ddb_alloc, old_wm->wm[5].min_ddb_alloc, in skl_print_wm_changes()
2793 old_wm->wm[6].min_ddb_alloc, old_wm->wm[7].min_ddb_alloc, in skl_print_wm_changes()
2794 old_wm->trans_wm.min_ddb_alloc, in skl_print_wm_changes()
2795 old_wm->sagv.wm0.min_ddb_alloc, in skl_print_wm_changes()
2796 old_wm->sagv.trans_wm.min_ddb_alloc, in skl_print_wm_changes()
2797 new_wm->wm[0].min_ddb_alloc, new_wm->wm[1].min_ddb_alloc, in skl_print_wm_changes()
2798 new_wm->wm[2].min_ddb_alloc, new_wm->wm[3].min_ddb_alloc, in skl_print_wm_changes()
2799 new_wm->wm[4].min_ddb_alloc, new_wm->wm[5].min_ddb_alloc, in skl_print_wm_changes()
2800 new_wm->wm[6].min_ddb_alloc, new_wm->wm[7].min_ddb_alloc, in skl_print_wm_changes()
2801 new_wm->trans_wm.min_ddb_alloc, in skl_print_wm_changes()
2802 new_wm->sagv.wm0.min_ddb_alloc, in skl_print_wm_changes()
2803 new_wm->sagv.trans_wm.min_ddb_alloc); in skl_print_wm_changes()
2812 struct drm_i915_private *i915 = to_i915(plane->base.dev); in skl_plane_selected_wm_equals()
2815 for (level = 0; level < i915->display.wm.num_levels; level++) { in skl_plane_selected_wm_equals()
2821 if (!skl_wm_level_equals(skl_plane_wm_level(old_pipe_wm, plane->id, level), in skl_plane_selected_wm_equals()
2822 skl_plane_wm_level(new_pipe_wm, plane->id, level))) in skl_plane_selected_wm_equals()
2827 const struct skl_plane_wm *old_wm = &old_pipe_wm->planes[plane->id]; in skl_plane_selected_wm_equals()
2828 const struct skl_plane_wm *new_wm = &new_pipe_wm->planes[plane->id]; in skl_plane_selected_wm_equals()
2830 if (!skl_wm_level_equals(&old_wm->sagv.wm0, &new_wm->sagv.wm0) || in skl_plane_selected_wm_equals()
2831 !skl_wm_level_equals(&old_wm->sagv.trans_wm, &new_wm->sagv.trans_wm)) in skl_plane_selected_wm_equals()
2835 return skl_wm_level_equals(skl_plane_trans_wm(old_pipe_wm, plane->id), in skl_plane_selected_wm_equals()
2836 skl_plane_trans_wm(new_pipe_wm, plane->id)); in skl_plane_selected_wm_equals()
2864 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_wm_add_affected_planes()
2871 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in skl_wm_add_affected_planes()
2873 enum plane_id plane_id = plane->id; in skl_wm_add_affected_planes()
2878 * is non-zero, whereas we want all disabled planes to in skl_wm_add_affected_planes()
2885 &old_crtc_state->wm.skl.optimal, in skl_wm_add_affected_planes()
2886 &new_crtc_state->wm.skl.optimal)) in skl_wm_add_affected_planes()
2893 new_crtc_state->update_planes |= BIT(plane_id); in skl_wm_add_affected_planes()
2894 new_crtc_state->async_flip_planes = 0; in skl_wm_add_affected_planes()
2895 new_crtc_state->do_async_flip = false; in skl_wm_add_affected_planes()
2940 level->enable = val & PLANE_WM_EN; in skl_wm_level_from_reg_val()
2941 level->ignore_lines = val & PLANE_WM_IGNORE_LINES; in skl_wm_level_from_reg_val()
2942 level->blocks = REG_FIELD_GET(PLANE_WM_BLOCKS_MASK, val); in skl_wm_level_from_reg_val()
2943 level->lines = REG_FIELD_GET(PLANE_WM_LINES_MASK, val); in skl_wm_level_from_reg_val()
2949 struct drm_i915_private *i915 = to_i915(crtc->base.dev); in skl_pipe_wm_get_hw_state()
2950 enum pipe pipe = crtc->pipe; in skl_pipe_wm_get_hw_state()
2956 struct skl_plane_wm *wm = &out->planes[plane_id]; in skl_pipe_wm_get_hw_state()
2958 for (level = 0; level < i915->display.wm.num_levels; level++) { in skl_pipe_wm_get_hw_state()
2964 skl_wm_level_from_reg_val(val, &wm->wm[level]); in skl_pipe_wm_get_hw_state()
2972 skl_wm_level_from_reg_val(val, &wm->trans_wm); in skl_pipe_wm_get_hw_state()
2980 skl_wm_level_from_reg_val(val, &wm->sagv.wm0); in skl_pipe_wm_get_hw_state()
2987 skl_wm_level_from_reg_val(val, &wm->sagv.trans_wm); in skl_pipe_wm_get_hw_state()
2989 wm->sagv.wm0 = wm->wm[0]; in skl_pipe_wm_get_hw_state()
2990 wm->sagv.trans_wm = wm->trans_wm; in skl_pipe_wm_get_hw_state()
2998 to_intel_dbuf_state(i915->display.dbuf.obj.state); in skl_wm_get_hw_state()
3002 dbuf_state->joined_mbus = intel_de_read(i915, MBUS_CTL) & MBUS_JOIN; in skl_wm_get_hw_state()
3004 for_each_intel_crtc(&i915->drm, crtc) { in skl_wm_get_hw_state()
3006 to_intel_crtc_state(crtc->base.state); in skl_wm_get_hw_state()
3007 enum pipe pipe = crtc->pipe; in skl_wm_get_hw_state()
3012 memset(&crtc_state->wm.skl.optimal, 0, in skl_wm_get_hw_state()
3013 sizeof(crtc_state->wm.skl.optimal)); in skl_wm_get_hw_state()
3014 if (crtc_state->hw.active) in skl_wm_get_hw_state()
3015 skl_pipe_wm_get_hw_state(crtc, &crtc_state->wm.skl.optimal); in skl_wm_get_hw_state()
3016 crtc_state->wm.skl.raw = crtc_state->wm.skl.optimal; in skl_wm_get_hw_state()
3018 memset(&dbuf_state->ddb[pipe], 0, sizeof(dbuf_state->ddb[pipe])); in skl_wm_get_hw_state()
3022 &crtc_state->wm.skl.plane_ddb[plane_id]; in skl_wm_get_hw_state()
3024 &crtc_state->wm.skl.plane_ddb_y[plane_id]; in skl_wm_get_hw_state()
3026 if (!crtc_state->hw.active) in skl_wm_get_hw_state()
3029 skl_ddb_get_hw_plane_state(i915, crtc->pipe, in skl_wm_get_hw_state()
3032 skl_ddb_entry_union(&dbuf_state->ddb[pipe], ddb); in skl_wm_get_hw_state()
3033 skl_ddb_entry_union(&dbuf_state->ddb[pipe], ddb_y); in skl_wm_get_hw_state()
3036 dbuf_state->weight[pipe] = intel_crtc_ddb_weight(crtc_state); in skl_wm_get_hw_state()
3042 slices = skl_compute_dbuf_slices(crtc, dbuf_state->active_pipes, in skl_wm_get_hw_state()
3043 dbuf_state->joined_mbus); in skl_wm_get_hw_state()
3045 crtc_state->wm.skl.ddb.start = mbus_offset + dbuf_state->ddb[pipe].start; in skl_wm_get_hw_state()
3046 crtc_state->wm.skl.ddb.end = mbus_offset + dbuf_state->ddb[pipe].end; in skl_wm_get_hw_state()
3049 dbuf_state->slices[pipe] = in skl_wm_get_hw_state()
3050 skl_ddb_dbuf_slice_mask(i915, &crtc_state->wm.skl.ddb); in skl_wm_get_hw_state()
3052 drm_dbg_kms(&i915->drm, in skl_wm_get_hw_state()
3053 "[CRTC:%d:%s] dbuf slices 0x%x, ddb (%d - %d), active pipes 0x%x, mbus joined: %s\n", in skl_wm_get_hw_state()
3054 crtc->base.base.id, crtc->base.name, in skl_wm_get_hw_state()
3055 dbuf_state->slices[pipe], dbuf_state->ddb[pipe].start, in skl_wm_get_hw_state()
3056 dbuf_state->ddb[pipe].end, dbuf_state->active_pipes, in skl_wm_get_hw_state()
3057 str_yes_no(dbuf_state->joined_mbus)); in skl_wm_get_hw_state()
3060 dbuf_state->enabled_slices = i915->display.dbuf.enabled_slices; in skl_wm_get_hw_state()
3066 to_intel_dbuf_state(i915->display.dbuf.obj.state); in skl_dbuf_is_misconfigured()
3070 for_each_intel_crtc(&i915->drm, crtc) { in skl_dbuf_is_misconfigured()
3072 to_intel_crtc_state(crtc->base.state); in skl_dbuf_is_misconfigured()
3074 entries[crtc->pipe] = crtc_state->wm.skl.ddb; in skl_dbuf_is_misconfigured()
3077 for_each_intel_crtc(&i915->drm, crtc) { in skl_dbuf_is_misconfigured()
3079 to_intel_crtc_state(crtc->base.state); in skl_dbuf_is_misconfigured()
3082 slices = skl_compute_dbuf_slices(crtc, dbuf_state->active_pipes, in skl_dbuf_is_misconfigured()
3083 dbuf_state->joined_mbus); in skl_dbuf_is_misconfigured()
3084 if (dbuf_state->slices[crtc->pipe] & ~slices) in skl_dbuf_is_misconfigured()
3087 if (skl_ddb_allocation_overlaps(&crtc_state->wm.skl.ddb, entries, in skl_dbuf_is_misconfigured()
3088 I915_MAX_PIPES, crtc->pipe)) in skl_dbuf_is_misconfigured()
3113 drm_dbg_kms(&i915->drm, "BIOS has misprogrammed the DBUF, disabling all planes\n"); in skl_wm_sanitize()
3115 for_each_intel_crtc(&i915->drm, crtc) { in skl_wm_sanitize()
3116 struct intel_plane *plane = to_intel_plane(crtc->base.primary); in skl_wm_sanitize()
3118 to_intel_plane_state(plane->base.state); in skl_wm_sanitize()
3120 to_intel_crtc_state(crtc->base.state); in skl_wm_sanitize()
3122 if (plane_state->uapi.visible) in skl_wm_sanitize()
3125 drm_WARN_ON(&i915->drm, crtc_state->active_planes != 0); in skl_wm_sanitize()
3127 memset(&crtc_state->wm.skl.ddb, 0, sizeof(crtc_state->wm.skl.ddb)); in skl_wm_sanitize()
3140 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_wm_state_verify()
3148 const struct skl_pipe_wm *sw_wm = &new_crtc_state->wm.skl.optimal; in intel_wm_state_verify()
3153 if (DISPLAY_VER(i915) < 9 || !new_crtc_state->hw.active) in intel_wm_state_verify()
3160 skl_pipe_wm_get_hw_state(crtc, &hw->wm); in intel_wm_state_verify()
3162 skl_pipe_ddb_get_hw_state(crtc, hw->ddb, hw->ddb_y); in intel_wm_state_verify()
3167 hw_enabled_slices != i915->display.dbuf.enabled_slices) in intel_wm_state_verify()
3168 drm_err(&i915->drm, in intel_wm_state_verify()
3170 i915->display.dbuf.enabled_slices, in intel_wm_state_verify()
3173 for_each_intel_plane_on_crtc(&i915->drm, crtc, plane) { in intel_wm_state_verify()
3178 for (level = 0; level < i915->display.wm.num_levels; level++) { in intel_wm_state_verify()
3179 hw_wm_level = &hw->wm.planes[plane->id].wm[level]; in intel_wm_state_verify()
3180 sw_wm_level = skl_plane_wm_level(sw_wm, plane->id, level); in intel_wm_state_verify()
3185 drm_err(&i915->drm, in intel_wm_state_verify()
3187 plane->base.base.id, plane->base.name, level, in intel_wm_state_verify()
3188 sw_wm_level->enable, in intel_wm_state_verify()
3189 sw_wm_level->blocks, in intel_wm_state_verify()
3190 sw_wm_level->lines, in intel_wm_state_verify()
3191 hw_wm_level->enable, in intel_wm_state_verify()
3192 hw_wm_level->blocks, in intel_wm_state_verify()
3193 hw_wm_level->lines); in intel_wm_state_verify()
3196 hw_wm_level = &hw->wm.planes[plane->id].trans_wm; in intel_wm_state_verify()
3197 sw_wm_level = skl_plane_trans_wm(sw_wm, plane->id); in intel_wm_state_verify()
3200 drm_err(&i915->drm, in intel_wm_state_verify()
3202 plane->base.base.id, plane->base.name, in intel_wm_state_verify()
3203 sw_wm_level->enable, in intel_wm_state_verify()
3204 sw_wm_level->blocks, in intel_wm_state_verify()
3205 sw_wm_level->lines, in intel_wm_state_verify()
3206 hw_wm_level->enable, in intel_wm_state_verify()
3207 hw_wm_level->blocks, in intel_wm_state_verify()
3208 hw_wm_level->lines); in intel_wm_state_verify()
3211 hw_wm_level = &hw->wm.planes[plane->id].sagv.wm0; in intel_wm_state_verify()
3212 sw_wm_level = &sw_wm->planes[plane->id].sagv.wm0; in intel_wm_state_verify()
3216 drm_err(&i915->drm, in intel_wm_state_verify()
3218 plane->base.base.id, plane->base.name, in intel_wm_state_verify()
3219 sw_wm_level->enable, in intel_wm_state_verify()
3220 sw_wm_level->blocks, in intel_wm_state_verify()
3221 sw_wm_level->lines, in intel_wm_state_verify()
3222 hw_wm_level->enable, in intel_wm_state_verify()
3223 hw_wm_level->blocks, in intel_wm_state_verify()
3224 hw_wm_level->lines); in intel_wm_state_verify()
3227 hw_wm_level = &hw->wm.planes[plane->id].sagv.trans_wm; in intel_wm_state_verify()
3228 sw_wm_level = &sw_wm->planes[plane->id].sagv.trans_wm; in intel_wm_state_verify()
3232 drm_err(&i915->drm, in intel_wm_state_verify()
3234 plane->base.base.id, plane->base.name, in intel_wm_state_verify()
3235 sw_wm_level->enable, in intel_wm_state_verify()
3236 sw_wm_level->blocks, in intel_wm_state_verify()
3237 sw_wm_level->lines, in intel_wm_state_verify()
3238 hw_wm_level->enable, in intel_wm_state_verify()
3239 hw_wm_level->blocks, in intel_wm_state_verify()
3240 hw_wm_level->lines); in intel_wm_state_verify()
3244 hw_ddb_entry = &hw->ddb[PLANE_CURSOR]; in intel_wm_state_verify()
3245 sw_ddb_entry = &new_crtc_state->wm.skl.plane_ddb[PLANE_CURSOR]; in intel_wm_state_verify()
3248 drm_err(&i915->drm, in intel_wm_state_verify()
3250 plane->base.base.id, plane->base.name, in intel_wm_state_verify()
3251 sw_ddb_entry->start, sw_ddb_entry->end, in intel_wm_state_verify()
3252 hw_ddb_entry->start, hw_ddb_entry->end); in intel_wm_state_verify()
3261 return i915->display.wm.ipc_enabled; in skl_watermark_ipc_enabled()
3283 return i915->dram_info.symmetric_memory; in skl_watermark_ipc_can_enable()
3293 i915->display.wm.ipc_enabled = skl_watermark_ipc_can_enable(i915); in skl_watermark_ipc_init()
3302 bool wm_lv_0_adjust_needed = i915->dram_info.wm_lv_0_adjust_needed; in adjust_wm_latency()
3306 * If a level n (n > 1) has a 0us latency, all levels m (m >= n) in adjust_wm_latency()
3323 * punit doesn't take into account the read latency so we need in adjust_wm_latency()
3333 * WA Level-0 adjustment for 16GB DIMMs: SKL+ in adjust_wm_latency()
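[Editor's note] A minimal sketch of the 0us rule quoted at line 3306 (assuming wm[] holds the per-level latencies; the driver folds this into adjust_wm_latency() along with the read-latency and 16GB-DIMM adjustments above):

	for (level = 1; level < num_levels; level++) {
		if (wm[level] == 0) {
			for (i = level + 1; i < num_levels; i++)
				wm[i] = 0;
			break;
		}
	}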
3344 int num_levels = i915->display.wm.num_levels; in mtl_read_wm_latency()
3364 int num_levels = i915->display.wm.num_levels; in skl_read_wm_latency()
3372 ret = snb_pcode_read(&i915->uncore, GEN9_PCODE_READ_MEM_LATENCY, &val, NULL); in skl_read_wm_latency()
3374 drm_err(&i915->drm, "SKL Mailbox read error = %d\n", ret); in skl_read_wm_latency()
3385 ret = snb_pcode_read(&i915->uncore, GEN9_PCODE_READ_MEM_LATENCY, &val, NULL); in skl_read_wm_latency()
3387 drm_err(&i915->drm, "SKL Mailbox read error = %d\n", ret); in skl_read_wm_latency()
3402 i915->display.wm.num_levels = 6; in skl_setup_wm_latency()
3404 i915->display.wm.num_levels = 8; in skl_setup_wm_latency()
3407 mtl_read_wm_latency(i915, i915->display.wm.skl_latency); in skl_setup_wm_latency()
3409 skl_read_wm_latency(i915, i915->display.wm.skl_latency); in skl_setup_wm_latency()
3411 intel_print_wm_latency(i915, "Gen9 Plane", i915->display.wm.skl_latency); in skl_setup_wm_latency()
3425 i915->display.funcs.wm = &skl_wm_funcs; in skl_wm_init()
3432 dbuf_state = kmemdup(obj->state, sizeof(*dbuf_state), GFP_KERNEL); in intel_dbuf_duplicate_state()
3436 return &dbuf_state->base; in intel_dbuf_duplicate_state()
3453 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_atomic_get_dbuf_state()
3456 dbuf_state = intel_atomic_get_global_obj_state(state, &i915->display.dbuf.obj); in intel_atomic_get_dbuf_state()
3469 return -ENOMEM; in intel_dbuf_init()
3471 intel_atomic_global_obj_init(i915, &i915->display.dbuf.obj, in intel_dbuf_init()
3472 &dbuf_state->base, &intel_dbuf_funcs); in intel_dbuf_init()
3483 struct drm_i915_private *i915 = to_i915(state->base.dev); in update_mbus_pre_enable()
3496 if (dbuf_state->joined_mbus) { in update_mbus_pre_enable()
3518 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_dbuf_pre_plane_update()
3525 (new_dbuf_state->enabled_slices == old_dbuf_state->enabled_slices && in intel_dbuf_pre_plane_update()
3526 new_dbuf_state->joined_mbus == old_dbuf_state->joined_mbus)) in intel_dbuf_pre_plane_update()
3529 WARN_ON(!new_dbuf_state->base.changed); in intel_dbuf_pre_plane_update()
3533 old_dbuf_state->enabled_slices | in intel_dbuf_pre_plane_update()
3534 new_dbuf_state->enabled_slices); in intel_dbuf_pre_plane_update()
3539 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_dbuf_post_plane_update()
3546 (new_dbuf_state->enabled_slices == old_dbuf_state->enabled_slices && in intel_dbuf_post_plane_update()
3547 new_dbuf_state->joined_mbus == old_dbuf_state->joined_mbus)) in intel_dbuf_post_plane_update()
3550 WARN_ON(!new_dbuf_state->base.changed); in intel_dbuf_post_plane_update()
3553 new_dbuf_state->enabled_slices); in intel_dbuf_post_plane_update()
3577 struct drm_i915_private *i915 = to_i915(state->base.dev); in intel_mbus_dbox_update()
3590 (new_dbuf_state->joined_mbus == old_dbuf_state->joined_mbus && in intel_mbus_dbox_update()
3591 new_dbuf_state->active_pipes == old_dbuf_state->active_pipes)) in intel_mbus_dbox_update()
3604 val |= new_dbuf_state->joined_mbus ? MBUS_DBOX_A_CREDIT(12) : in intel_mbus_dbox_update()
3607 /* Wa_22010947358:adl-p */ in intel_mbus_dbox_update()
3608 val |= new_dbuf_state->joined_mbus ? MBUS_DBOX_A_CREDIT(6) : in intel_mbus_dbox_update()
3629 if (!new_crtc_state->hw.active) in intel_mbus_dbox_update()
3633 if (xelpdp_is_only_pipe_per_dbuf_bank(crtc->pipe, in intel_mbus_dbox_update()
3634 new_dbuf_state->active_pipes)) in intel_mbus_dbox_update()
3640 intel_de_write(i915, PIPE_MBUS_DBOX_CTL(crtc->pipe), pipe_val); in intel_mbus_dbox_update()
3646 struct drm_i915_private *i915 = m->private; in skl_watermark_ipc_status_show()
3655 struct drm_i915_private *i915 = inode->i_private; in skl_watermark_ipc_status_open()
3664 struct seq_file *m = file->private_data; in skl_watermark_ipc_status_write()
3665 struct drm_i915_private *i915 = m->private; in skl_watermark_ipc_status_write()
3674 with_intel_runtime_pm(&i915->runtime_pm, wakeref) { in skl_watermark_ipc_status_write()
3676 drm_info(&i915->drm, in skl_watermark_ipc_status_write()
3678 i915->display.wm.ipc_enabled = enable; in skl_watermark_ipc_status_write()
3696 struct drm_i915_private *i915 = m->private; in intel_sagv_status_show()
3706 str_enabled_disabled(i915->display.params.enable_sagv)); in intel_sagv_status_show()
3707 seq_printf(m, "SAGV status: %s\n", sagv_status[i915->display.sagv.status]); in intel_sagv_status_show()
3708 seq_printf(m, "SAGV block time: %d usec\n", i915->display.sagv.block_time_us); in intel_sagv_status_show()
3717 struct drm_minor *minor = i915->drm.primary; in skl_watermark_debugfs_register()
3720 debugfs_create_file("i915_ipc_status", 0644, minor->debugfs_root, i915, in skl_watermark_debugfs_register()
3724 debugfs_create_file("i915_sagv_status", 0444, minor->debugfs_root, i915, in skl_watermark_debugfs_register()
3732 for (level = i915->display.wm.num_levels - 1; level >= 0; level--) { in skl_watermark_max_latency()
3733 unsigned int latency = skl_wm_latency(i915, level, NULL); in skl_watermark_max_latency() local
3735 if (latency) in skl_watermark_max_latency()
3736 return latency; in skl_watermark_max_latency()