Lines Matching +full:post +full:-
1 // SPDX-License-Identifier: MIT
17 * Emits a PIPE_CONTROL with a non-zero post-sync operation, for
21 * [DevSNB-C+{W/A}] Before any depth stall flush (including those
22 * produced by non-pipelined state commands), software needs to first
23 * send a PIPE_CONTROL with no bits set except Post-Sync Operation !=
26 * [Dev-SNB{W/A}]: Before a PIPE_CONTROL with Write Cache Flush Enable
27 * =1, a PIPE_CONTROL with any non-zero post-sync-op is required.
31 * [Dev-SNB{W/A}]: Pipe-control with CS-stall bit set must be sent
32 * BEFORE the pipe-control with a post-sync op and no write-cache
40 * - Render Target Cache Flush Enable ([12] of DW1)
41 * - Depth Cache Flush Enable ([0] of DW1)
42 * - Stall at Pixel Scoreboard ([1] of DW1)
43 * - Depth Stall ([13] of DW1)
44 * - Post-Sync Operation ([13] of DW1)
45 * - Notify Enable ([8] of DW1)"
49 * Post-sync nonzero is what triggered this second workaround, so we
57 intel_gt_scratch_offset(rq->engine->gt, in gen6_emit_post_sync_nonzero_flush()
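Read together, the workarounds quoted above force the driver to emit a stall-only PIPE_CONTROL followed by a PIPE_CONTROL whose only job is a non-zero post-sync (QW) write, before any real flush is attempted. Below is a minimal sketch of that sequence, assuming the i915 ring helpers (intel_ring_begin/intel_ring_advance), the GFX_OP_PIPE_CONTROL and PIPE_CONTROL_* definitions, and a GGTT scratch slot as the dummy write target; the scratch field name is an assumption for illustration, not necessarily the one used in this file:

static int gen6_emit_post_sync_nonzero_flush(struct i915_request *rq)
{
        /* Dummy post-sync write target in the GGTT scratch area (assumed field name). */
        u32 scratch_addr =
                intel_gt_scratch_offset(rq->engine->gt,
                                        INTEL_GT_SCRATCH_FIELD_RENDER_FLUSH);
        u32 *cs;

        cs = intel_ring_begin(rq, 6);
        if (IS_ERR(cs))
                return PTR_ERR(cs);

        /* First W/A: CS stall + stall at pixel scoreboard, no post-sync write. */
        *cs++ = GFX_OP_PIPE_CONTROL(5);
        *cs++ = PIPE_CONTROL_CS_STALL | PIPE_CONTROL_STALL_AT_SCOREBOARD;
        *cs++ = scratch_addr | PIPE_CONTROL_GLOBAL_GTT;
        *cs++ = 0; /* low dword */
        *cs++ = 0; /* high dword */
        *cs++ = MI_NOOP;
        intel_ring_advance(rq, cs);

        cs = intel_ring_begin(rq, 6);
        if (IS_ERR(cs))
                return PTR_ERR(cs);

        /* Second W/A: non-zero post-sync op (QW write) to the scratch address. */
        *cs++ = GFX_OP_PIPE_CONTROL(5);
        *cs++ = PIPE_CONTROL_QW_WRITE;
        *cs++ = scratch_addr | PIPE_CONTROL_GLOBAL_GTT;
        *cs++ = 0;
        *cs++ = 0;
        *cs++ = MI_NOOP;
        intel_ring_advance(rq, cs);

        return 0;
}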
91 intel_gt_scratch_offset(rq->engine->gt, in gen6_emit_flush_rcs()
124 * TLB invalidate requires a post-sync write. in gen6_emit_flush_rcs()
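The note that a TLB invalidate requires a post-sync write is what forces the QW-write bit into the flush flags: whenever the invalidate bits are selected, a post-sync write (again aimed at a GGTT scratch slot) is ORed in as well. A hedged sketch of that flag selection, using the EMIT_FLUSH/EMIT_INVALIDATE mode bits and PIPE_CONTROL_* names assumed from the i915 headers; the helper name and the exact set of invalidate bits are illustrative, not the file's own:

static u32 gen6_flush_flags(u32 mode)
{
        u32 flags = 0;

        if (mode & EMIT_FLUSH) {
                flags |= PIPE_CONTROL_RENDER_TARGET_CACHE_FLUSH;
                flags |= PIPE_CONTROL_DEPTH_CACHE_FLUSH;
        }
        if (mode & EMIT_INVALIDATE) {
                flags |= PIPE_CONTROL_TLB_INVALIDATE;
                flags |= PIPE_CONTROL_INSTRUCTION_CACHE_INVALIDATE;
                flags |= PIPE_CONTROL_TEXTURE_CACHE_INVALIDATE;
                flags |= PIPE_CONTROL_VF_CACHE_INVALIDATE;
                flags |= PIPE_CONTROL_CONST_CACHE_INVALIDATE;
                flags |= PIPE_CONTROL_STATE_CACHE_INVALIDATE;
                /* TLB invalidate requires a post-sync write, so force one. */
                flags |= PIPE_CONTROL_QW_WRITE | PIPE_CONTROL_CS_STALL;
        }

        return flags;
}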
152 *cs++ = intel_gt_scratch_offset(rq->engine->gt, in gen6_emit_breadcrumb_rcs()
164 *cs++ = i915_request_active_timeline(rq)->hwsp_offset | in gen6_emit_breadcrumb_rcs()
166 *cs++ = rq->fence.seqno; in gen6_emit_breadcrumb_rcs()
171 rq->tail = intel_ring_offset(rq, cs); in gen6_emit_breadcrumb_rcs()
172 assert_ring_tail_valid(rq->ring, rq->tail); in gen6_emit_breadcrumb_rcs()
196 * Bspec vol 1c.3 - blitter engine command streamer: in mi_flush_dw()
199 * Post-Sync Operation field is a value of 1h or 3h." in mi_flush_dw()
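On the non-render rings the post-sync requirement surfaces through MI_FLUSH_DW instead: because of the Bspec restriction quoted above on the destination address when the Post-Sync Operation field is 1h or 3h, the flush's DW store is directed at a reserved slot in the hardware status page rather than an arbitrary address. A rough sketch under those assumptions, taking the MI_FLUSH_DW_* and HWS scratch constants to exist as named in the i915 headers:

static int mi_flush_dw(struct i915_request *rq, u32 flags)
{
        u32 cmd, *cs;

        cs = intel_ring_begin(rq, 4);
        if (IS_ERR(cs))
                return PTR_ERR(cs);

        /* Post-sync DW store so later commands are ordered after the flush completes. */
        cmd = MI_FLUSH_DW | MI_FLUSH_DW_STORE_INDEX | MI_FLUSH_DW_OP_STOREDW | flags;

        *cs++ = cmd;
        /*
         * Bspec vol 1c.3 (blitter engine command streamer): destination address
         * restrictions apply when the Post-Sync Operation field is 1h or 3h, so
         * write into a reserved HWS scratch slot rather than an arbitrary address.
         */
        *cs++ = I915_GEM_HWS_SCRATCH_ADDR | MI_FLUSH_DW_USE_GTT;
        *cs++ = 0;
        *cs++ = MI_NOOP;

        intel_ring_advance(rq, cs);

        return 0;
}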
291 intel_gt_scratch_offset(rq->engine->gt, in gen7_emit_flush_rcs()
300 * read-cache invalidate bits set) must have the CS_STALL bit set. We in gen7_emit_flush_rcs()
306 * CS_STALL suggests at least a post-sync write. in gen7_emit_flush_rcs()
332 * Workaround: we must issue a pipe_control with CS-stall bit in gen7_emit_flush_rcs()
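The gen7 rule spelled out above (a PIPE_CONTROL with the CS-stall bit set must be issued before a PIPE_CONTROL that has the state-cache-invalidate bit set) is typically satisfied by a preparatory, stall-only PIPE_CONTROL. A minimal sketch of such a stall, with a hypothetical helper name and the same assumed ring helpers as before:

static int gen7_cs_stall_wa(struct i915_request *rq)
{
        u32 *cs;

        cs = intel_ring_begin(rq, 4);
        if (IS_ERR(cs))
                return PTR_ERR(cs);

        /* Stall-only PIPE_CONTROL: CS stall + scoreboard stall, no flushes, no post-sync. */
        *cs++ = GFX_OP_PIPE_CONTROL(4);
        *cs++ = PIPE_CONTROL_CS_STALL | PIPE_CONTROL_STALL_AT_SCOREBOARD;
        *cs++ = 0;
        *cs++ = 0;
        intel_ring_advance(rq, cs);

        return 0;
}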
362 *cs++ = i915_request_active_timeline(rq)->hwsp_offset; in gen7_emit_breadcrumb_rcs()
363 *cs++ = rq->fence.seqno; in gen7_emit_breadcrumb_rcs()
368 rq->tail = intel_ring_offset(rq, cs); in gen7_emit_breadcrumb_rcs()
369 assert_ring_tail_valid(rq->ring, rq->tail); in gen7_emit_breadcrumb_rcs()
376 GEM_BUG_ON(i915_request_active_timeline(rq)->hwsp_ggtt != rq->engine->status_page.vma); in gen6_emit_breadcrumb_xcs()
377 GEM_BUG_ON(offset_in_page(i915_request_active_timeline(rq)->hwsp_offset) != I915_GEM_HWS_SEQNO_ADDR); in gen6_emit_breadcrumb_xcs()
381 *cs++ = rq->fence.seqno; in gen6_emit_breadcrumb_xcs()
385 rq->tail = intel_ring_offset(rq, cs); in gen6_emit_breadcrumb_xcs()
386 assert_ring_tail_valid(rq->ring, rq->tail); in gen6_emit_breadcrumb_xcs()
396 GEM_BUG_ON(i915_request_active_timeline(rq)->hwsp_ggtt != rq->engine->status_page.vma); in gen7_emit_breadcrumb_xcs()
397 GEM_BUG_ON(offset_in_page(i915_request_active_timeline(rq)->hwsp_offset) != I915_GEM_HWS_SEQNO_ADDR); in gen7_emit_breadcrumb_xcs()
402 *cs++ = rq->fence.seqno; in gen7_emit_breadcrumb_xcs()
407 *cs++ = rq->fence.seqno; in gen7_emit_breadcrumb_xcs()
417 rq->tail = intel_ring_offset(rq, cs); in gen7_emit_breadcrumb_xcs()
418 assert_ring_tail_valid(rq->ring, rq->tail); in gen7_emit_breadcrumb_xcs()
427 ~(engine->irq_enable_mask | engine->irq_keep_mask)); in gen6_irq_enable()
432 gen5_gt_enable_irq(engine->gt, engine->irq_enable_mask); in gen6_irq_enable()
437 ENGINE_WRITE(engine, RING_IMR, ~engine->irq_keep_mask); in gen6_irq_disable()
438 gen5_gt_disable_irq(engine->gt, engine->irq_enable_mask); in gen6_irq_disable()
443 ENGINE_WRITE(engine, RING_IMR, ~engine->irq_enable_mask); in hsw_irq_enable_vecs()
448 gen6_gt_pm_unmask_irq(engine->gt, engine->irq_enable_mask); in hsw_irq_enable_vecs()
454 gen6_gt_pm_mask_irq(engine->gt, engine->irq_enable_mask); in hsw_irq_disable_vecs()