Lines Matching +full:cs +full:- +full:x
2 * Copyright(c) 2011-2016 Intel Corporation. All rights reserved.
162 struct intel_gvt *gvt = engine->i915->gvt; in load_render_mocs()
163 struct intel_uncore *uncore = engine->uncore; in load_render_mocs()
164 u32 cnt = gvt->engine_mmio_list.mocs_mmio_offset_list_cnt; in load_render_mocs()
165 u32 *regs = gvt->engine_mmio_list.mocs_mmio_offset_list; in load_render_mocs()
174 if (!HAS_ENGINE(engine->gt, ring_id)) in load_render_mocs()
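
load_render_mocs() snapshots the hardware MOCS control values of every engine that is actually present, walking the per-engine offset list set up at init time. A minimal standalone sketch of that snapshot loop follows; the offsets, table size and mocs_read32() helper are illustrative stand-ins, not the real gvt/uncore definitions.

/* Sketch: save per-engine MOCS control registers into a host-side
 * table, skipping engines that are not present. All names below are
 * stand-ins for the real gvt/uncore definitions. */
#include <stdint.h>
#include <stdio.h>

#define NUM_ENGINES   4
#define MOCS_ENTRIES 64            /* GEN9_MOCS_SIZE in the real code */

static uint32_t mocs_read32(uint32_t offset)
{
    return offset ^ 0x5a5a5a5a;    /* placeholder for a raw register read */
}

int main(void)
{
    /* per-engine base offsets of the MOCS control registers (made up) */
    const uint32_t mocs_base[NUM_ENGINES] = { 0xc800, 0xc900, 0xca00, 0xcb00 };
    const int engine_present[NUM_ENGINES] = { 1, 1, 0, 1 };
    static uint32_t control_table[NUM_ENGINES][MOCS_ENTRIES];

    for (int ring_id = 0; ring_id < NUM_ENGINES; ring_id++) {
        if (!engine_present[ring_id])          /* mirrors the HAS_ENGINE() check */
            continue;
        for (int i = 0; i < MOCS_ENTRIES; i++)
            control_table[ring_id][i] =
                mocs_read32(mocs_base[ring_id] + 4 * i);
    }

    printf("engine 0, entry 0: 0x%08x\n", (unsigned int)control_table[0][0]);
    return 0;
}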
198 u32 *cs; in restore_context_mmio_for_inhibit() local
201 struct intel_gvt *gvt = vgpu->gvt; in restore_context_mmio_for_inhibit()
202 int ring_id = req->engine->id; in restore_context_mmio_for_inhibit()
203 int count = gvt->engine_mmio_list.ctx_mmio_count[ring_id]; in restore_context_mmio_for_inhibit()
208 ret = req->engine->emit_flush(req, EMIT_BARRIER); in restore_context_mmio_for_inhibit()
212 cs = intel_ring_begin(req, count * 2 + 2); in restore_context_mmio_for_inhibit()
213 if (IS_ERR(cs)) in restore_context_mmio_for_inhibit()
214 return PTR_ERR(cs); in restore_context_mmio_for_inhibit()
216 *cs++ = MI_LOAD_REGISTER_IMM(count); in restore_context_mmio_for_inhibit()
217 for (mmio = gvt->engine_mmio_list.mmio; in restore_context_mmio_for_inhibit()
218 i915_mmio_reg_valid(mmio->reg); mmio++) { in restore_context_mmio_for_inhibit()
219 if (mmio->id != ring_id || !mmio->in_context) in restore_context_mmio_for_inhibit()
222 *cs++ = i915_mmio_reg_offset(mmio->reg); in restore_context_mmio_for_inhibit()
223 *cs++ = vgpu_vreg_t(vgpu, mmio->reg) | (mmio->mask << 16); in restore_context_mmio_for_inhibit()
224 gvt_dbg_core("add lri reg pair 0x%x:0x%x in inhibit ctx, vgpu:%d, ring_id:%d\n", in restore_context_mmio_for_inhibit()
225 *(cs-2), *(cs-1), vgpu->id, ring_id); in restore_context_mmio_for_inhibit()
228 *cs++ = MI_NOOP; in restore_context_mmio_for_inhibit()
229 intel_ring_advance(req, cs); in restore_context_mmio_for_inhibit()
231 ret = req->engine->emit_flush(req, EMIT_BARRIER); in restore_context_mmio_for_inhibit()
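
The emission above reserves count * 2 + 2 dwords of ring space: one MI_LOAD_REGISTER_IMM(count) header, count offset/value pairs, and a trailing MI_NOOP that keeps the sequence an even number of dwords. A standalone sketch of the same packing into a plain dword buffer follows; the register offsets and values are made up, and the opcode encoding is spelled out here only for illustration.

/* Sketch: build an MI_LOAD_REGISTER_IMM packet in a dword buffer.
 * The header encodes opcode 0x22 and a length field of 2*n - 1
 * (total dwords minus two), matching the usual i915 definition. */
#include <stdint.h>
#include <stdio.h>

#define MI_NOOP                 0u
#define MI_LOAD_REGISTER_IMM(n) ((0x22u << 23) | (2u * (n) - 1u))

struct reg_val { uint32_t offset, value; };

int main(void)
{
    const struct reg_val pairs[] = {           /* illustrative offset/value pairs */
        { 0x229c, 0xffff0000u },
        { 0x2098, 0x00000000u },
        { 0x20c0, 0xffff0000u },
    };
    enum { COUNT = sizeof(pairs) / sizeof(pairs[0]) };
    uint32_t ring[COUNT * 2 + 2], *cs = ring;  /* count * 2 + 2 dwords */

    *cs++ = MI_LOAD_REGISTER_IMM(COUNT);
    for (int i = 0; i < COUNT; i++) {
        *cs++ = pairs[i].offset;
        *cs++ = pairs[i].value;
    }
    *cs++ = MI_NOOP;                           /* pad to an even dword count */

    printf("emitted %d dwords\n", (int)(cs - ring));
    return 0;
}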
243 u32 *cs; in restore_render_mocs_control_for_inhibit() local
245 cs = intel_ring_begin(req, 2 * GEN9_MOCS_SIZE + 2); in restore_render_mocs_control_for_inhibit()
246 if (IS_ERR(cs)) in restore_render_mocs_control_for_inhibit()
247 return PTR_ERR(cs); in restore_render_mocs_control_for_inhibit()
249 *cs++ = MI_LOAD_REGISTER_IMM(GEN9_MOCS_SIZE); in restore_render_mocs_control_for_inhibit()
252 *cs++ = i915_mmio_reg_offset(GEN9_GFX_MOCS(index)); in restore_render_mocs_control_for_inhibit()
253 *cs++ = vgpu_vreg_t(vgpu, GEN9_GFX_MOCS(index)); in restore_render_mocs_control_for_inhibit()
254 gvt_dbg_core("add lri reg pair 0x%x:0x%x in inhibit ctx, vgpu:%d, ring_id:%d\n", in restore_render_mocs_control_for_inhibit()
255 *(cs-2), *(cs-1), vgpu->id, req->engine->id); in restore_render_mocs_control_for_inhibit()
259 *cs++ = MI_NOOP; in restore_render_mocs_control_for_inhibit()
260 intel_ring_advance(req, cs); in restore_render_mocs_control_for_inhibit()
270 u32 *cs; in restore_render_mocs_l3cc_for_inhibit() local
272 cs = intel_ring_begin(req, 2 * GEN9_MOCS_SIZE / 2 + 2); in restore_render_mocs_l3cc_for_inhibit()
273 if (IS_ERR(cs)) in restore_render_mocs_l3cc_for_inhibit()
274 return PTR_ERR(cs); in restore_render_mocs_l3cc_for_inhibit()
276 *cs++ = MI_LOAD_REGISTER_IMM(GEN9_MOCS_SIZE / 2); in restore_render_mocs_l3cc_for_inhibit()
279 *cs++ = i915_mmio_reg_offset(GEN9_LNCFCMOCS(index)); in restore_render_mocs_l3cc_for_inhibit()
280 *cs++ = vgpu_vreg_t(vgpu, GEN9_LNCFCMOCS(index)); in restore_render_mocs_l3cc_for_inhibit()
281 gvt_dbg_core("add lri reg pair 0x%x:0x%x in inhibit ctx, vgpu:%d, ring_id:%d\n", in restore_render_mocs_l3cc_for_inhibit()
282 *(cs-2), *(cs-1), vgpu->id, req->engine->id); in restore_render_mocs_l3cc_for_inhibit()
286 *cs++ = MI_NOOP; in restore_render_mocs_l3cc_for_inhibit()
287 intel_ring_advance(req, cs); in restore_render_mocs_l3cc_for_inhibit()
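
Only GEN9_MOCS_SIZE / 2 register writes are emitted here because each 32-bit GEN9_LNCFCMOCS register carries two 16-bit L3CC entries. A small standalone sketch of that packing, with made-up entry values:

/* Sketch: two 16-bit L3CC MOCS entries share one 32-bit LNCFCMOCS
 * register, so n entries need only n/2 register writes. */
#include <stdint.h>
#include <stdio.h>

static uint32_t l3cc_combine(uint16_t low, uint16_t high)
{
    return (uint32_t)low | ((uint32_t)high << 16);
}

int main(void)
{
    const uint16_t l3cc[] = { 0x0010, 0x0030, 0x0010, 0x0030 };  /* made up */
    const unsigned int n = sizeof(l3cc) / sizeof(l3cc[0]);

    for (unsigned int i = 0; i < n / 2; i++) {
        uint32_t reg = l3cc_combine(l3cc[2 * i], l3cc[2 * i + 1]);
        printf("LNCFCMOCS(%u) <- 0x%08x\n", i, (unsigned int)reg);
    }
    return 0;
}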
301 u32 *cs; in intel_vgpu_restore_inhibit_context() local
303 cs = intel_ring_begin(req, 2); in intel_vgpu_restore_inhibit_context()
304 if (IS_ERR(cs)) in intel_vgpu_restore_inhibit_context()
305 return PTR_ERR(cs); in intel_vgpu_restore_inhibit_context()
307 *cs++ = MI_ARB_ON_OFF | MI_ARB_DISABLE; in intel_vgpu_restore_inhibit_context()
308 *cs++ = MI_NOOP; in intel_vgpu_restore_inhibit_context()
309 intel_ring_advance(req, cs); in intel_vgpu_restore_inhibit_context()
316 if (req->engine->id != RCS0) in intel_vgpu_restore_inhibit_context()
328 cs = intel_ring_begin(req, 2); in intel_vgpu_restore_inhibit_context()
329 if (IS_ERR(cs)) in intel_vgpu_restore_inhibit_context()
330 return PTR_ERR(cs); in intel_vgpu_restore_inhibit_context()
332 *cs++ = MI_ARB_ON_OFF | MI_ARB_ENABLE; in intel_vgpu_restore_inhibit_context()
333 *cs++ = MI_NOOP; in intel_vgpu_restore_inhibit_context()
334 intel_ring_advance(req, cs); in intel_vgpu_restore_inhibit_context()
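
The whole restore is bracketed by MI_ARB_ON_OFF: arbitration is disabled first so the sequence cannot be preempted mid-restore, the MOCS tables are replayed only on the render ring, and arbitration is re-enabled at the end even when an earlier step failed. A compact sketch of that ordering, with stub emitters standing in for the real LRI functions:

/* Sketch of the restore ordering in intel_vgpu_restore_inhibit_context().
 * The emit_*() helpers are stubs; they only print what would be emitted. */
#include <stdio.h>

static int emit_arb(int enable)     { printf("MI_ARB_ON_OFF %s\n", enable ? "enable" : "disable"); return 0; }
static int emit_ctx_mmio(void)      { printf("LRI: in-context MMIOs\n"); return 0; }
static int emit_mocs_control(void)  { printf("LRI: MOCS control table\n"); return 0; }
static int emit_mocs_l3cc(void)     { printf("LRI: MOCS L3CC table\n"); return 0; }

static int restore_inhibit_context(int is_rcs)
{
    int ret;

    ret = emit_arb(0);              /* no preemption while restoring */
    if (ret)
        return ret;

    ret = emit_ctx_mmio();
    if (ret)
        goto out;

    if (!is_rcs)                    /* MOCS registers live on the render ring only */
        goto out;

    ret = emit_mocs_control();
    if (ret)
        goto out;

    ret = emit_mocs_l3cc();
out:
    emit_arb(1);                    /* re-enable arbitration even on error */
    return ret;
}

int main(void)
{
    return restore_inhibit_context(1);
}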
350 struct intel_uncore *uncore = engine->uncore; in handle_tlb_pending_event()
351 struct intel_vgpu_submission *s = &vgpu->submission; in handle_tlb_pending_event()
352 u32 *regs = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list; in handle_tlb_pending_event()
353 u32 cnt = vgpu->gvt->engine_mmio_list.tlb_mmio_offset_list_cnt; in handle_tlb_pending_event()
360 if (drm_WARN_ON(&engine->i915->drm, engine->id >= cnt)) in handle_tlb_pending_event()
363 if (!test_and_clear_bit(engine->id, (void *)s->tlb_handle_pending)) in handle_tlb_pending_event()
366 reg = _MMIO(regs[engine->id]); in handle_tlb_pending_event()
375 if (engine->id == RCS0 && INTEL_GEN(engine->i915) >= 9) in handle_tlb_pending_event()
384 engine->name); in handle_tlb_pending_event()
390 gvt_dbg_core("invalidate TLB for ring %s\n", engine->name); in handle_tlb_pending_event()
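
handle_tlb_pending_event() clears the per-engine pending bit, writes the engine's TLB invalidate register with forcewake held, and then polls within a bounded timeout for the hardware to clear it. A standalone sketch of the write-then-poll pattern; the register model and the retry bound are simplified stand-ins for the real uncore accessors and wait helpers:

/* Sketch: kick a TLB invalidation and poll for completion. The fake
 * register clears itself on the first read to model the hardware
 * finishing the flush. */
#include <stdint.h>
#include <stdio.h>

static uint32_t tlb_reg;                       /* fake register backing store */

static void reg_write(uint32_t v) { tlb_reg = v; }

static uint32_t reg_read(void)
{
    uint32_t v = tlb_reg;
    tlb_reg = 0;                               /* simulate completion */
    return v;
}

int main(void)
{
    int retries = 50;                          /* bounded wait so a stuck flush cannot hang us */

    reg_write(0x1);                            /* start the TLB invalidation */

    while (reg_read() != 0) {                  /* poll until the register reads back zero */
        if (--retries < 0) {
            fprintf(stderr, "timeout waiting for TLB invalidate\n");
            return 1;
        }
    }

    printf("TLB invalidated\n");
    return 0;
}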
403 struct intel_uncore *uncore = engine->uncore; in switch_mocs()
408 if (drm_WARN_ON(&engine->i915->drm, engine->id >= ARRAY_SIZE(regs))) in switch_mocs()
411 if (engine->id == RCS0 && IS_GEN(engine->i915, 9)) in switch_mocs()
417 offset.reg = regs[engine->id]; in switch_mocs()
422 old_v = gen9_render_mocs.control_table[engine->id][i]; in switch_mocs()
426 new_v = gen9_render_mocs.control_table[engine->id][i]; in switch_mocs()
434 if (engine->id == RCS0) { in switch_mocs()
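
switch_mocs() takes the outgoing value from the host snapshot (or the previous vGPU's virtual registers) and the incoming value from the next vGPU, and rewrites a MOCS entry only when the two differ, avoiding redundant MMIO traffic on every context switch. A sketch of that diff-before-write loop, with made-up tables and a stand-in write helper:

/* Sketch: rewrite only the MOCS entries whose value actually changes
 * between the outgoing and incoming owner of the engine. */
#include <stdint.h>
#include <stdio.h>

#define MOCS_ENTRIES 8

static void mmio_write(unsigned int index, uint32_t v)
{
    printf("MOCS[%u] <- 0x%08x\n", index, (unsigned int)v);
}

int main(void)
{
    const uint32_t old_v[MOCS_ENTRIES] = { 1, 2, 3, 4, 5, 6, 7, 8 };
    const uint32_t new_v[MOCS_ENTRIES] = { 1, 2, 9, 4, 5, 9, 7, 8 };

    for (unsigned int i = 0; i < MOCS_ENTRIES; i++) {
        if (old_v[i] == new_v[i])       /* unchanged: skip the write */
            continue;
        mmio_write(i, new_v[i]);
    }
    return 0;
}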
458 const u32 *reg_state = ce->lrc_reg_state; in is_inhibit_context()
471 struct intel_uncore *uncore = engine->uncore; in switch_mmio()
476 if (INTEL_GEN(engine->i915) >= 9) in switch_mmio()
479 for (mmio = engine->i915->gvt->engine_mmio_list.mmio; in switch_mmio()
480 i915_mmio_reg_valid(mmio->reg); mmio++) { in switch_mmio()
481 if (mmio->id != engine->id) in switch_mmio()
488 if (IS_GEN(engine->i915, 9) && mmio->in_context) in switch_mmio()
493 vgpu_vreg_t(pre, mmio->reg) = in switch_mmio()
494 intel_uncore_read_fw(uncore, mmio->reg); in switch_mmio()
495 if (mmio->mask) in switch_mmio()
496 vgpu_vreg_t(pre, mmio->reg) &= in switch_mmio()
497 ~(mmio->mask << 16); in switch_mmio()
498 old_v = vgpu_vreg_t(pre, mmio->reg); in switch_mmio()
500 old_v = mmio->value = in switch_mmio()
501 intel_uncore_read_fw(uncore, mmio->reg); in switch_mmio()
506 s = &next->submission; in switch_mmio()
512 if (mmio->in_context && in switch_mmio()
513 !is_inhibit_context(s->shadow[engine->id])) in switch_mmio()
516 if (mmio->mask) in switch_mmio()
517 new_v = vgpu_vreg_t(next, mmio->reg) | in switch_mmio()
518 (mmio->mask << 16); in switch_mmio()
520 new_v = vgpu_vreg_t(next, mmio->reg); in switch_mmio()
522 if (mmio->in_context) in switch_mmio()
524 if (mmio->mask) in switch_mmio()
525 new_v = mmio->value | (mmio->mask << 16); in switch_mmio()
527 new_v = mmio->value; in switch_mmio()
530 intel_uncore_write_fw(uncore, mmio->reg, new_v); in switch_mmio()
532 trace_render_mmio(pre ? pre->id : 0, in switch_mmio()
533 next ? next->id : 0, in switch_mmio()
535 i915_mmio_reg_offset(mmio->reg), in switch_mmio()
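
The mask handling above follows the usual i915 masked-register convention: the top 16 bits of the written value are a write-enable mask and the bottom 16 bits carry the data, so switch_mmio() writes value | (mask << 16) for the incoming vGPU and strips the mask bits when saving the outgoing value. A small sketch of how such a write lands in the register, with illustrative values:

/* Sketch: compose a masked MMIO write and show which bits the
 * hardware actually updates. */
#include <stdint.h>
#include <stdio.h>

static uint32_t masked_write(uint16_t mask, uint16_t value)
{
    return ((uint32_t)mask << 16) | value;
}

/* What the register holds after such a write, given its old contents. */
static uint16_t apply_masked(uint16_t old, uint32_t write)
{
    uint16_t mask = (uint16_t)(write >> 16);
    uint16_t value = (uint16_t)write;

    return (uint16_t)((old & ~mask) | (value & mask));
}

int main(void)
{
    uint16_t hw = 0x00f0;                        /* current register state */
    uint32_t w = masked_write(0x000f, 0x0005);   /* update the low nibble only */

    printf("write 0x%08x: 0x%04x -> 0x%04x\n",
           (unsigned int)w, (unsigned int)hw, (unsigned int)apply_masked(hw, w));
    return 0;
}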
544 * intel_gvt_switch_mmio - switch mmio context of specific engine
557 engine->name)) in intel_gvt_switch_mmio()
560 gvt_dbg_render("switch ring %s from %s to %s\n", engine->name, in intel_gvt_switch_mmio()
568 intel_uncore_forcewake_get(engine->uncore, FORCEWAKE_ALL); in intel_gvt_switch_mmio()
570 intel_uncore_forcewake_put(engine->uncore, FORCEWAKE_ALL); in intel_gvt_switch_mmio()
574 * intel_gvt_init_engine_mmio_context - Initialize the engine mmio list
582 if (INTEL_GEN(gvt->gt->i915) >= 9) { in intel_gvt_init_engine_mmio_context()
583 gvt->engine_mmio_list.mmio = gen9_engine_mmio_list; in intel_gvt_init_engine_mmio_context()
584 gvt->engine_mmio_list.tlb_mmio_offset_list = gen8_tlb_mmio_offset_list; in intel_gvt_init_engine_mmio_context()
585 gvt->engine_mmio_list.tlb_mmio_offset_list_cnt = ARRAY_SIZE(gen8_tlb_mmio_offset_list); in intel_gvt_init_engine_mmio_context()
586 gvt->engine_mmio_list.mocs_mmio_offset_list = gen9_mocs_mmio_offset_list; in intel_gvt_init_engine_mmio_context()
587 gvt->engine_mmio_list.mocs_mmio_offset_list_cnt = ARRAY_SIZE(gen9_mocs_mmio_offset_list); in intel_gvt_init_engine_mmio_context()
589 gvt->engine_mmio_list.mmio = gen8_engine_mmio_list; in intel_gvt_init_engine_mmio_context()
590 gvt->engine_mmio_list.tlb_mmio_offset_list = gen8_tlb_mmio_offset_list; in intel_gvt_init_engine_mmio_context()
591 gvt->engine_mmio_list.tlb_mmio_offset_list_cnt = ARRAY_SIZE(gen8_tlb_mmio_offset_list); in intel_gvt_init_engine_mmio_context()
594 for (mmio = gvt->engine_mmio_list.mmio; in intel_gvt_init_engine_mmio_context()
595 i915_mmio_reg_valid(mmio->reg); mmio++) { in intel_gvt_init_engine_mmio_context()
596 if (mmio->in_context) { in intel_gvt_init_engine_mmio_context()
597 gvt->engine_mmio_list.ctx_mmio_count[mmio->id]++; in intel_gvt_init_engine_mmio_context()
598 intel_gvt_mmio_set_sr_in_ctx(gvt, mmio->reg.reg); in intel_gvt_init_engine_mmio_context()
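
The init loop above walks the engine MMIO list, which is terminated by an invalid register, and counts the in-context entries per engine so the inhibit-context LRI later knows how many register/value pairs to reserve. A simplified standalone sketch with a stand-in struct layout:

/* Sketch: count in-context registers per engine while walking a
 * sentinel-terminated MMIO list. The struct and offsets are stand-ins
 * for the real struct engine_mmio entries. */
#include <stdint.h>
#include <stdio.h>

#define NUM_ENGINES 4

struct engine_mmio {
    int      id;           /* owning engine */
    uint32_t reg;          /* offset; 0 terminates the list */
    int      in_context;   /* saved/restored through the context image */
};

int main(void)
{
    const struct engine_mmio list[] = {
        { 0, 0x229c, 1 },
        { 0, 0x7014, 1 },
        { 1, 0x1229c, 0 },
        { 0, 0, 0 },                   /* sentinel: invalid register */
    };
    int ctx_mmio_count[NUM_ENGINES] = { 0 };

    for (const struct engine_mmio *m = list; m->reg; m++)
        if (m->in_context)
            ctx_mmio_count[m->id]++;

    printf("engine 0 in-context MMIOs: %d\n", ctx_mmio_count[0]);
    return 0;
}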