Lines Matching full:mmio
30 static void assert_iir_is_zero(struct xe_gt *mmio, struct xe_reg reg) in assert_iir_is_zero() argument
32 u32 val = xe_mmio_read32(mmio, reg); in assert_iir_is_zero()
37 drm_WARN(&gt_to_xe(mmio)->drm, 1, in assert_iir_is_zero()
40 xe_mmio_write32(mmio, reg, 0xffffffff); in assert_iir_is_zero()
41 xe_mmio_read32(mmio, reg); in assert_iir_is_zero()
42 xe_mmio_write32(mmio, reg, 0xffffffff); in assert_iir_is_zero()
43 xe_mmio_read32(mmio, reg); in assert_iir_is_zero()
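The assert_iir_is_zero() lines above show the driver's defensive IIR handling: warn if a supposedly idle interrupt identity register still reads non-zero, then clear it twice with a read back after each write. Below is a minimal standalone sketch of that pattern; fake_regs[], fake_read32() and fake_write32() are invented stand-ins for the real xe_mmio_read32()/xe_mmio_write32() accessors.

/*
 * Sketch only: the fake register file below is invented; the driver uses
 * xe_mmio_read32()/xe_mmio_write32() on struct xe_reg registers.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t fake_regs[8];                   /* stand-in for MMIO space */

static uint32_t fake_read32(unsigned int reg)
{
        return fake_regs[reg];
}

static void fake_write32(unsigned int reg, uint32_t val)
{
        fake_regs[reg] &= ~val;                 /* model write-1-to-clear IIR bits */
}

static void assert_iir_is_zero_sketch(unsigned int reg)
{
        uint32_t val = fake_read32(reg);

        if (val == 0)
                return;

        fprintf(stderr, "interrupt register %u is not zero: 0x%08x\n",
                reg, (unsigned int)val);

        /* Clear twice, reading back after each write, as in the listing. */
        fake_write32(reg, 0xffffffff);
        fake_read32(reg);
        fake_write32(reg, 0xffffffff);
        fake_read32(reg);
}

int main(void)
{
        fake_regs[2] = 0x11;                    /* pretend a stale event is latched */
        assert_iir_is_zero_sketch(2);
        printf("after clear: 0x%08x\n", (unsigned int)fake_regs[2]);
        return 0;
}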
52 struct xe_gt *mmio = tile->primary_gt; in unmask_and_enable() local
58 assert_iir_is_zero(mmio, IIR(irqregs)); in unmask_and_enable()
60 xe_mmio_write32(mmio, IER(irqregs), bits); in unmask_and_enable()
61 xe_mmio_write32(mmio, IMR(irqregs), ~bits); in unmask_and_enable()
64 xe_mmio_read32(mmio, IMR(irqregs)); in unmask_and_enable()
70 struct xe_gt *mmio = tile->primary_gt; in mask_and_disable() local
72 xe_mmio_write32(mmio, IMR(irqregs), ~0); in mask_and_disable()
74 xe_mmio_read32(mmio, IMR(irqregs)); in mask_and_disable()
76 xe_mmio_write32(mmio, IER(irqregs), 0); in mask_and_disable()
79 xe_mmio_write32(mmio, IIR(irqregs), ~0); in mask_and_disable()
80 xe_mmio_read32(mmio, IIR(irqregs)); in mask_and_disable()
81 xe_mmio_write32(mmio, IIR(irqregs), ~0); in mask_and_disable()
82 xe_mmio_read32(mmio, IIR(irqregs)); in mask_and_disable()
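unmask_and_enable() and mask_and_disable() program the IER/IMR/IIR triplet in a fixed order, with posting reads where a write must land before the next step. The following compilable sketch models that ordering; the wr32()/rd32() helpers and FAKE_* register names are hypothetical, standing in for xe_mmio_write32()/xe_mmio_read32() on the per-block IMR()/IER()/IIR() offsets.

/*
 * Sketch only: FAKE_* and wr32()/rd32() are invented; the real code programs
 * the IMR/IER/IIR registers of one interrupt block.
 */
#include <stdint.h>

enum fake_reg { FAKE_IMR, FAKE_IER, FAKE_IIR, FAKE_NUM };

static uint32_t fake_regs[FAKE_NUM];

static void wr32(enum fake_reg r, uint32_t v)
{
        fake_regs[r] = v;
}

static uint32_t rd32(enum fake_reg r)
{
        return fake_regs[r];
}

static void unmask_and_enable_sketch(uint32_t bits)
{
        wr32(FAKE_IER, bits);           /* enable only the wanted sources */
        wr32(FAKE_IMR, ~bits);          /* unmask those same sources */
        rd32(FAKE_IMR);                 /* posting read to flush the unmask */
}

static void mask_and_disable_sketch(void)
{
        wr32(FAKE_IMR, ~0u);            /* mask everything before touching IER */
        rd32(FAKE_IMR);                 /* posting read */
        wr32(FAKE_IER, 0);              /* no source may assert the interrupt */
        wr32(FAKE_IIR, ~0u);            /* the real IIR is write-1-to-clear; a
                                         * plain store stands in for it here */
        rd32(FAKE_IIR);
        wr32(FAKE_IIR, ~0u);            /* clear a second time, as in the listing */
        rd32(FAKE_IIR);
}

int main(void)
{
        unmask_and_enable_sketch(0x3);
        mask_and_disable_sketch();
        return 0;
}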
87 struct xe_gt *mmio = xe_root_mmio_gt(xe); in xelp_intr_disable() local
89 xe_mmio_write32(mmio, GFX_MSTR_IRQ, 0); in xelp_intr_disable()
97 return xe_mmio_read32(mmio, GFX_MSTR_IRQ); in xelp_intr_disable()
103 struct xe_gt *mmio = xe_root_mmio_gt(xe); in gu_misc_irq_ack() local
109 iir = xe_mmio_read32(mmio, IIR(GU_MISC_IRQ_OFFSET)); in gu_misc_irq_ack()
111 xe_mmio_write32(mmio, IIR(GU_MISC_IRQ_OFFSET), iir); in gu_misc_irq_ack()
118 struct xe_gt *mmio = xe_root_mmio_gt(xe); in xelp_intr_enable() local
120 xe_mmio_write32(mmio, GFX_MSTR_IRQ, MASTER_IRQ); in xelp_intr_enable()
122 xe_mmio_read32(mmio, GFX_MSTR_IRQ); in xelp_intr_enable()
194 struct xe_gt *mmio, in gt_engine_identity() argument
203 xe_mmio_write32(mmio, IIR_REG_SELECTOR(bank), BIT(bit)); in gt_engine_identity()
211 ident = xe_mmio_read32(mmio, INTR_IDENTITY_REG(bank)); in gt_engine_identity()
221 xe_mmio_write32(mmio, INTR_IDENTITY_REG(bank), ident); in gt_engine_identity()
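gt_engine_identity() performs the selector/identity handshake: select a single bit of a GT interrupt bank, read the identity register until it reports valid, then write the identity value back to clear the latch. The sketch below models that flow; every name in it (FAKE_*, wr32(), rd32(), the bounded spin count) is invented for illustration and only stands in for IIR_REG_SELECTOR(), INTR_IDENTITY_REG() and the driver's timed poll.

/*
 * Sketch only: fake registers and a fixed spin count replace the real
 * hardware handshake and timeout handling.
 */
#include <stdint.h>
#include <stdio.h>

#define FAKE_SELECTOR   0
#define FAKE_IDENTITY   1
#define FAKE_VALID      (1u << 31)

static uint32_t fake_regs[2];

static void wr32(int r, uint32_t v)
{
        fake_regs[r] = v;
}

static uint32_t rd32(int r)
{
        return fake_regs[r];
}

static uint32_t engine_identity_sketch(unsigned int bit)
{
        uint32_t ident;
        int spins = 100;                        /* bounded poll instead of a real timeout */

        wr32(FAKE_SELECTOR, 1u << bit);         /* latch a single interrupt source */

        /* Fake hardware: make the identity show up immediately. */
        fake_regs[FAKE_IDENTITY] = FAKE_VALID | bit;

        do {
                ident = rd32(FAKE_IDENTITY);
        } while (!(ident & FAKE_VALID) && --spins);

        if (!(ident & FAKE_VALID)) {
                fprintf(stderr, "identity for bit %u never became valid\n", bit);
                return 0;
        }

        wr32(FAKE_IDENTITY, ident);             /* write back to clear the latch */
        return ident;
}

int main(void)
{
        printf("identity: 0x%08x\n", (unsigned int)engine_identity_sketch(3));
        return 0;
}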
268 struct xe_gt *mmio = tile->primary_gt; in gt_irq_handler() local
280 intr_dw[bank] = xe_mmio_read32(mmio, GT_INTR_DW(bank)); in gt_irq_handler()
282 identity[bit] = gt_engine_identity(xe, mmio, bank, bit); in gt_irq_handler()
283 xe_mmio_write32(mmio, GT_INTR_DW(bank), intr_dw[bank]); in gt_irq_handler()
354 struct xe_gt *mmio = xe_root_mmio_gt(xe); in dg1_intr_disable() local
358 xe_mmio_write32(mmio, DG1_MSTR_TILE_INTR, 0); in dg1_intr_disable()
361 val = xe_mmio_read32(mmio, DG1_MSTR_TILE_INTR); in dg1_intr_disable()
365 xe_mmio_write32(mmio, DG1_MSTR_TILE_INTR, val); in dg1_intr_disable()
372 struct xe_gt *mmio = xe_root_mmio_gt(xe); in dg1_intr_enable() local
374 xe_mmio_write32(mmio, DG1_MSTR_TILE_INTR, DG1_MSTR_IRQ); in dg1_intr_enable()
376 xe_mmio_read32(mmio, DG1_MSTR_TILE_INTR); in dg1_intr_enable()
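dg1_intr_disable() writes 0 to the master tile interrupt register, then reads it back to pick up any tile bits that latched in that window and acks them by writing the value back; dg1_intr_enable() is the mirror image, with a posting read after setting the master bit. A hedged standalone sketch of that disable/ack/enable sequence, using an invented fake master register in place of DG1_MSTR_TILE_INTR:

/*
 * Sketch only: fake_master, wr32() and rd32() are invented; the driver does
 * this on DG1_MSTR_TILE_INTR via xe_mmio_write32()/xe_mmio_read32().
 */
#include <stdint.h>

static uint32_t fake_master;                    /* stand-in for the master tile register */

static void wr32(uint32_t v)
{
        fake_master = v;
}

static uint32_t rd32(void)
{
        return fake_master;
}

static uint32_t master_intr_disable_sketch(void)
{
        uint32_t val;

        wr32(0);                        /* stop new interrupts at the top level */

        val = rd32();                   /* check for bits raised before the disable landed */
        if (!val)
                return 0;

        wr32(val);                      /* ack those stray tile bits */
        return val;
}

static void master_intr_enable_sketch(uint32_t master_bit)
{
        wr32(master_bit);               /* re-enable the master interrupt */
        rd32();                         /* posting read so the enable is flushed */
}

int main(void)
{
        master_intr_disable_sketch();
        master_intr_enable_sketch(1u << 31);
        return 0;
}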
409 struct xe_gt *mmio = tile->primary_gt; in dg1_irq_handler() local
414 master_ctl = xe_mmio_read32(mmio, GFX_MSTR_IRQ); in dg1_irq_handler()
418 * and all MMIO reads will be returned with all 1's. Ignore this in dg1_irq_handler()
427 xe_mmio_write32(mmio, GFX_MSTR_IRQ, master_ctl); in dg1_irq_handler()
450 struct xe_gt *mmio = tile->primary_gt; in gt_irq_reset() local
458 xe_mmio_write32(mmio, RENDER_COPY_INTR_ENABLE, 0); in gt_irq_reset()
459 xe_mmio_write32(mmio, VCS_VECS_INTR_ENABLE, 0); in gt_irq_reset()
461 xe_mmio_write32(mmio, CCS_RSVD_INTR_ENABLE, 0); in gt_irq_reset()
464 xe_mmio_write32(mmio, RCS0_RSVD_INTR_MASK, ~0); in gt_irq_reset()
465 xe_mmio_write32(mmio, BCS_RSVD_INTR_MASK, ~0); in gt_irq_reset()
467 xe_mmio_write32(mmio, XEHPC_BCS1_BCS2_INTR_MASK, ~0); in gt_irq_reset()
469 xe_mmio_write32(mmio, XEHPC_BCS3_BCS4_INTR_MASK, ~0); in gt_irq_reset()
471 xe_mmio_write32(mmio, XEHPC_BCS5_BCS6_INTR_MASK, ~0); in gt_irq_reset()
473 xe_mmio_write32(mmio, XEHPC_BCS7_BCS8_INTR_MASK, ~0); in gt_irq_reset()
474 xe_mmio_write32(mmio, VCS0_VCS1_INTR_MASK, ~0); in gt_irq_reset()
475 xe_mmio_write32(mmio, VCS2_VCS3_INTR_MASK, ~0); in gt_irq_reset()
476 xe_mmio_write32(mmio, VECS0_VECS1_INTR_MASK, ~0); in gt_irq_reset()
478 xe_mmio_write32(mmio, CCS0_CCS1_INTR_MASK, ~0); in gt_irq_reset()
480 xe_mmio_write32(mmio, CCS2_CCS3_INTR_MASK, ~0); in gt_irq_reset()
485 xe_mmio_write32(mmio, GUNIT_GSC_INTR_ENABLE, 0); in gt_irq_reset()
486 xe_mmio_write32(mmio, GUNIT_GSC_INTR_MASK, ~0); in gt_irq_reset()
489 xe_mmio_write32(mmio, GPM_WGBOXPERF_INTR_ENABLE, 0); in gt_irq_reset()
490 xe_mmio_write32(mmio, GPM_WGBOXPERF_INTR_MASK, ~0); in gt_irq_reset()
491 xe_mmio_write32(mmio, GUC_SG_INTR_ENABLE, 0); in gt_irq_reset()
492 xe_mmio_write32(mmio, GUC_SG_INTR_MASK, ~0); in gt_irq_reset()
516 struct xe_gt *mmio = tile->primary_gt; in dg1_irq_reset_mstr() local
518 xe_mmio_write32(mmio, GFX_MSTR_IRQ, ~0); in dg1_irq_reset_mstr()
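gt_irq_reset() and dg1_irq_reset_mstr() apply one rule across the board: every *_INTR_ENABLE register is written to 0, every *_INTR_MASK register to ~0, and the master IRQ register is finally cleared by writing all ones. A short sketch applying the same rule over a table of fake enable/mask pairs; the table and helper are illustrative only, since the driver writes each engine's registers explicitly as listed above.

/*
 * Sketch only: the fake_irq_regs table is invented; the driver touches each
 * engine's real *_INTR_ENABLE and *_INTR_MASK register individually.
 */
#include <stdint.h>
#include <stddef.h>

struct fake_irq_regs {
        uint32_t enable;
        uint32_t mask;
};

static struct fake_irq_regs fake_engines[4];

static void gt_irq_reset_sketch(void)
{
        size_t i;

        for (i = 0; i < sizeof(fake_engines) / sizeof(fake_engines[0]); i++) {
                fake_engines[i].enable = 0;     /* no source may raise an interrupt */
                fake_engines[i].mask = ~0u;     /* and mask everything as well */
        }
}

int main(void)
{
        gt_irq_reset_sketch();
        return 0;
}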