/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2022 Intel Corporation
 */

#ifndef __INTEL_ENGINE_REGS__
#define __INTEL_ENGINE_REGS__

#include "i915_reg_defs.h"

#define RING_TAIL(base)				_MMIO((base) + 0x30)
#define   TAIL_ADDR				0x001FFFF8
#define RING_HEAD(base)				_MMIO((base) + 0x34)
#define   HEAD_WRAP_COUNT			0xFFE00000
#define   HEAD_WRAP_ONE				0x00200000
#define   HEAD_ADDR				0x001FFFFC
#define RING_START(base)			_MMIO((base) + 0x38)
#define RING_CTL(base)				_MMIO((base) + 0x3c)
#define   RING_CTL_SIZE(size)			((size) - PAGE_SIZE) /* in bytes -> pages */
#define   RING_NR_PAGES				0x001FF000
#define   RING_REPORT_MASK			0x00000006
#define   RING_REPORT_64K			0x00000002
#define   RING_REPORT_128K			0x00000004
#define   RING_NO_REPORT			0x00000000
#define   RING_VALID_MASK			0x00000001
#define   RING_VALID				0x00000001
#define   RING_INVALID				0x00000000
#define   RING_WAIT_I8XX			(1 << 0) /* gen2, PRBx_HEAD */
#define   RING_WAIT				(1 << 11) /* gen3+, PRBx_CTL */
#define   RING_WAIT_SEMAPHORE			(1 << 10) /* gen6+ */
#define RING_SYNC_0(base)			_MMIO((base) + 0x40)
#define RING_SYNC_1(base)			_MMIO((base) + 0x44)
#define RING_SYNC_2(base)			_MMIO((base) + 0x48)
#define GEN6_RVSYNC				(RING_SYNC_0(RENDER_RING_BASE))
#define GEN6_RBSYNC				(RING_SYNC_1(RENDER_RING_BASE))
#define GEN6_RVESYNC				(RING_SYNC_2(RENDER_RING_BASE))
#define GEN6_VBSYNC				(RING_SYNC_0(GEN6_BSD_RING_BASE))
#define GEN6_VRSYNC				(RING_SYNC_1(GEN6_BSD_RING_BASE))
#define GEN6_VVESYNC				(RING_SYNC_2(GEN6_BSD_RING_BASE))
#define GEN6_BRSYNC				(RING_SYNC_0(BLT_RING_BASE))
#define GEN6_BVSYNC				(RING_SYNC_1(BLT_RING_BASE))
#define GEN6_BVESYNC				(RING_SYNC_2(BLT_RING_BASE))
#define GEN6_VEBSYNC				(RING_SYNC_0(VEBOX_RING_BASE))
#define GEN6_VERSYNC				(RING_SYNC_1(VEBOX_RING_BASE))
#define GEN6_VEVSYNC				(RING_SYNC_2(VEBOX_RING_BASE))
#define RING_PSMI_CTL(base)			_MMIO((base) + 0x50)
#define   GEN8_RC_SEMA_IDLE_MSG_DISABLE		REG_BIT(12)
#define   GEN8_FF_DOP_CLOCK_GATE_DISABLE	REG_BIT(10)
#define   GEN12_WAIT_FOR_EVENT_POWER_DOWN_DISABLE REG_BIT(7)
#define   GEN6_BSD_GO_INDICATOR			REG_BIT(4)
#define   GEN6_BSD_SLEEP_INDICATOR		REG_BIT(3)
#define   GEN6_BSD_SLEEP_FLUSH_DISABLE		REG_BIT(2)
#define   GEN6_PSMI_SLEEP_MSG_DISABLE		REG_BIT(0)
#define RING_MAX_IDLE(base)			_MMIO((base) + 0x54)
#define PWRCTX_MAXCNT(base)			_MMIO((base) + 0x54)
#define   IDLE_TIME_MASK			0xFFFFF
#define RING_ACTHD_UDW(base)			_MMIO((base) + 0x5c)
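/*
 * Illustrative sketch (not part of the original header): bringing up a legacy
 * ring buffer with the RING_HEAD/RING_TAIL/RING_START/RING_CTL registers above
 * typically zeroes head and tail, points RING_START at the ring's GGTT address
 * and then writes RING_CTL with the size field plus the valid bit. The
 * ENGINE_READ()/ENGINE_WRITE() accessors are assumed to resolve against
 * engine->mmio_base as in gt/intel_engine.h; the real sequence in
 * intel_ring_submission.c differs in detail.
 *
 *	ENGINE_WRITE(engine, RING_HEAD, 0);
 *	ENGINE_WRITE(engine, RING_TAIL, 0);
 *	ENGINE_WRITE(engine, RING_START, i915_ggtt_offset(ring->vma));
 *	ENGINE_WRITE(engine, RING_CTL, RING_CTL_SIZE(ring->size) | RING_VALID);
 *	if (!(ENGINE_READ(engine, RING_CTL) & RING_VALID))
 *		drm_err(&engine->i915->drm, "ring failed to start\n");
 */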
#define RING_DMA_FADD_UDW(base)			_MMIO((base) + 0x60) /* gen8+ */
#define RING_IPEIR(base)			_MMIO((base) + 0x64)
#define RING_IPEHR(base)			_MMIO((base) + 0x68)
#define RING_INSTDONE(base)			_MMIO((base) + 0x6c)
#define RING_INSTPS(base)			_MMIO((base) + 0x70)
#define RING_DMA_FADD(base)			_MMIO((base) + 0x78)
#define RING_ACTHD(base)			_MMIO((base) + 0x74)
#define RING_HWS_PGA(base)			_MMIO((base) + 0x80)
#define RING_CMD_BUF_CCTL(base)			_MMIO((base) + 0x84)
#define IPEIR(base)				_MMIO((base) + 0x88)
#define IPEHR(base)				_MMIO((base) + 0x8c)
#define RING_ID(base)				_MMIO((base) + 0x8c)
#define RING_NOPID(base)			_MMIO((base) + 0x94)
#define RING_HWSTAM(base)			_MMIO((base) + 0x98)
#define RING_MI_MODE(base)			_MMIO((base) + 0x9c)
#define RING_IMR(base)				_MMIO((base) + 0xa8)
#define RING_EIR(base)				_MMIO((base) + 0xb0)
#define RING_EMR(base)				_MMIO((base) + 0xb4)
#define RING_ESR(base)				_MMIO((base) + 0xb8)
#define RING_INSTPM(base)			_MMIO((base) + 0xc0)
#define RING_CMD_CCTL(base)			_MMIO((base) + 0xc4)
#define ACTHD(base)				_MMIO((base) + 0xc8)
#define GEN8_R_PWR_CLK_STATE(base)		_MMIO((base) + 0xc8)
#define   GEN8_RPCS_ENABLE			(1 << 31)
#define   GEN8_RPCS_S_CNT_ENABLE		(1 << 18)
#define   GEN8_RPCS_S_CNT_SHIFT			15
#define   GEN8_RPCS_S_CNT_MASK			(0x7 << GEN8_RPCS_S_CNT_SHIFT)
#define   GEN11_RPCS_S_CNT_SHIFT		12
#define   GEN11_RPCS_S_CNT_MASK			(0x3f << GEN11_RPCS_S_CNT_SHIFT)
#define   GEN8_RPCS_SS_CNT_ENABLE		(1 << 11)
#define   GEN8_RPCS_SS_CNT_SHIFT		8
#define   GEN8_RPCS_SS_CNT_MASK			(0x7 << GEN8_RPCS_SS_CNT_SHIFT)
#define   GEN8_RPCS_EU_MAX_SHIFT		4
#define   GEN8_RPCS_EU_MAX_MASK			(0xf << GEN8_RPCS_EU_MAX_SHIFT)
#define   GEN8_RPCS_EU_MIN_SHIFT		0
#define   GEN8_RPCS_EU_MIN_MASK			(0xf << GEN8_RPCS_EU_MIN_SHIFT)

#define RING_RESET_CTL(base)			_MMIO((base) + 0xd0)
#define   RESET_CTL_CAT_ERROR			REG_BIT(2)
#define   RESET_CTL_READY_TO_RESET		REG_BIT(1)
#define   RESET_CTL_REQUEST_RESET		REG_BIT(0)
#define DMA_FADD_I8XX(base)			_MMIO((base) + 0xd0)
#define RING_BBSTATE(base)			_MMIO((base) + 0x110)
#define   RING_BB_PPGTT				(1 << 5)
#define RING_SBBADDR(base)			_MMIO((base) + 0x114) /* hsw+ */
#define RING_SBBSTATE(base)			_MMIO((base) + 0x118) /* hsw+ */
#define RING_SBBADDR_UDW(base)			_MMIO((base) + 0x11c) /* gen8+ */
#define RING_BBADDR(base)			_MMIO((base) + 0x140)
#define RING_BBADDR_UDW(base)			_MMIO((base) + 0x168) /* gen8+ */
#define CCID(base)				_MMIO((base) + 0x180)
#define   CCID_EN				BIT(0)
#define   CCID_EXTENDED_STATE_RESTORE		BIT(2)
#define   CCID_EXTENDED_STATE_SAVE		BIT(3)
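/*
 * Illustrative sketch (not part of the original header): RING_RESET_CTL above
 * is a masked register, so a reset request sets the bit together with its
 * mask in the upper 16 bits and then polls for the ready acknowledgement
 * before the engine reset itself is asserted. This mirrors, in simplified
 * form, the flow used by the per-engine reset code in gt/intel_reset.c; the
 * 700us fast timeout is an assumption for the sketch.
 *
 *	intel_uncore_write_fw(uncore, RING_RESET_CTL(base),
 *			      _MASKED_BIT_ENABLE(RESET_CTL_REQUEST_RESET));
 *	int ret = __intel_wait_for_register_fw(uncore, RING_RESET_CTL(base),
 *					       RESET_CTL_READY_TO_RESET,
 *					       RESET_CTL_READY_TO_RESET,
 *					       700, 0, NULL);
 */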
#define RING_BB_PER_CTX_PTR(base)		_MMIO((base) + 0x1c0) /* gen8+ */
#define RING_INDIRECT_CTX(base)			_MMIO((base) + 0x1c4) /* gen8+ */
#define RING_INDIRECT_CTX_OFFSET(base)		_MMIO((base) + 0x1c8) /* gen8+ */
#define ECOSKPD(base)				_MMIO((base) + 0x1d0)
#define   ECO_CONSTANT_BUFFER_SR_DISABLE	REG_BIT(4)
#define   ECO_GATING_CX_ONLY			REG_BIT(3)
#define   GEN6_BLITTER_FBC_NOTIFY		REG_BIT(3)
#define   ECO_FLIP_DONE				REG_BIT(0)
#define   GEN6_BLITTER_LOCK_SHIFT		16

#define BLIT_CCTL(base)				_MMIO((base) + 0x204)
#define   BLIT_CCTL_DST_MOCS_MASK		REG_GENMASK(14, 8)
#define   BLIT_CCTL_SRC_MOCS_MASK		REG_GENMASK(6, 0)
#define   BLIT_CCTL_MASK			(BLIT_CCTL_DST_MOCS_MASK | \
						 BLIT_CCTL_SRC_MOCS_MASK)
#define   BLIT_CCTL_MOCS(dst, src) \
		(REG_FIELD_PREP(BLIT_CCTL_DST_MOCS_MASK, (dst) << 1) | \
		 REG_FIELD_PREP(BLIT_CCTL_SRC_MOCS_MASK, (src) << 1))

/*
 * CMD_CCTL read/write fields take a MOCS value and _not_ a table index.
 * The lsb of each can be considered a separate enabling bit for encryption.
 * 6:0 == default MOCS value for reads  =>  6:1 == table index for reads.
 * 13:7 == default MOCS value for writes  =>  13:8 == table index for writes.
 * 15:14 == Reserved  =>  31:30 are set to 0.
 */
#define CMD_CCTL_WRITE_OVERRIDE_MASK		REG_GENMASK(13, 7)
#define CMD_CCTL_READ_OVERRIDE_MASK		REG_GENMASK(6, 0)
#define CMD_CCTL_MOCS_MASK			(CMD_CCTL_WRITE_OVERRIDE_MASK | \
						 CMD_CCTL_READ_OVERRIDE_MASK)
#define CMD_CCTL_MOCS_OVERRIDE(write, read) \
		(REG_FIELD_PREP(CMD_CCTL_WRITE_OVERRIDE_MASK, (write) << 1) | \
		 REG_FIELD_PREP(CMD_CCTL_READ_OVERRIDE_MASK, (read) << 1))
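/*
 * Illustrative sketch (not part of the original header): per the comment
 * above, CMD_CCTL_MOCS_OVERRIDE() takes MOCS *table indices* and shifts them
 * into the value fields, leaving the encryption lsb of each field clear. An
 * engine workaround pinning command caching to a single entry might look
 * roughly like this; "mocs_uc" is a hypothetical index and wa_write_clr_set()
 * is the helper used for such overrides in gt/intel_workarounds.c.
 *
 *	wa_write_clr_set(wal, RING_CMD_CCTL(engine->mmio_base),
 *			 CMD_CCTL_MOCS_MASK,
 *			 CMD_CCTL_MOCS_OVERRIDE(mocs_uc, mocs_uc));
 */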

#define MI_PREDICATE_RESULT_2(base)		_MMIO((base) + 0x3bc)
#define   LOWER_SLICE_ENABLED			(1 << 0)
#define   LOWER_SLICE_DISABLED			(0 << 0)
#define MI_PREDICATE_SRC0(base)			_MMIO((base) + 0x400)
#define MI_PREDICATE_SRC0_UDW(base)		_MMIO((base) + 0x400 + 4)
#define MI_PREDICATE_SRC1(base)			_MMIO((base) + 0x408)
#define MI_PREDICATE_SRC1_UDW(base)		_MMIO((base) + 0x408 + 4)
#define MI_PREDICATE_DATA(base)			_MMIO((base) + 0x410)
#define MI_PREDICATE_RESULT(base)		_MMIO((base) + 0x418)
#define MI_PREDICATE_RESULT_1(base)		_MMIO((base) + 0x41c)

#define RING_PP_DIR_DCLV(base)			_MMIO((base) + 0x220)
#define   PP_DIR_DCLV_2G			0xffffffff
#define RING_PP_DIR_BASE(base)			_MMIO((base) + 0x228)
#define RING_ELSP(base)				_MMIO((base) + 0x230)
#define RING_EXECLIST_STATUS_LO(base)		_MMIO((base) + 0x234)
#define RING_EXECLIST_STATUS_HI(base)		_MMIO((base) + 0x234 + 4)
#define RING_CONTEXT_CONTROL(base)		_MMIO((base) + 0x244)
#define   CTX_CTRL_ENGINE_CTX_RESTORE_INHIBIT	REG_BIT(0)
#define   CTX_CTRL_RS_CTX_ENABLE		REG_BIT(1)
#define   CTX_CTRL_ENGINE_CTX_SAVE_INHIBIT	REG_BIT(2)
#define   CTX_CTRL_INHIBIT_SYN_CTX_SWITCH	REG_BIT(3)
#define   GEN12_CTX_CTRL_OAR_CONTEXT_ENABLE	REG_BIT(8)
#define RING_SEMA_WAIT_POLL(base)		_MMIO((base) + 0x24c)
#define GEN8_RING_PDP_UDW(base, n)		_MMIO((base) + 0x270 + (n) * 8 + 4)
#define GEN8_RING_PDP_LDW(base, n)		_MMIO((base) + 0x270 + (n) * 8)
#define RING_MODE_GEN7(base)			_MMIO((base) + 0x29c)
#define   GFX_RUN_LIST_ENABLE			(1 << 15)
#define   GFX_INTERRUPT_STEERING		(1 << 14)
#define   GFX_TLB_INVALIDATE_EXPLICIT		(1 << 13)
#define   GFX_SURFACE_FAULT_ENABLE		(1 << 12)
#define   GFX_REPLAY_MODE			(1 << 11)
#define   GFX_PSMI_GRANULARITY			(1 << 10)
#define   GFX_PPGTT_ENABLE			(1 << 9)
#define   GEN8_GFX_PPGTT_48B			(1 << 7)
#define   GFX_FORWARD_VBLANK_MASK		(3 << 5)
#define   GFX_FORWARD_VBLANK_NEVER		(0 << 5)
#define   GFX_FORWARD_VBLANK_ALWAYS		(1 << 5)
#define   GFX_FORWARD_VBLANK_COND		(2 << 5)
#define   GEN11_GFX_DISABLE_LEGACY_MODE		(1 << 3)
#define RING_TIMESTAMP(base)			_MMIO((base) + 0x358)
#define RING_TIMESTAMP_UDW(base)		_MMIO((base) + 0x358 + 4)
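/*
 * Illustrative sketch (not part of the original header): RING_MODE_GEN7 and
 * the GFX_* bits above form a masked register, so every write must repeat the
 * bit being changed in the upper 16 bits for it to take effect;
 * _MASKED_BIT_ENABLE()/_MASKED_BIT_DISABLE() handle that packing. For
 * example, execlists bring-up on gen11+ turns off the legacy ring-buffer mode
 * along these lines (ENGINE_WRITE_FW() assumed from gt/intel_engine.h):
 *
 *	ENGINE_WRITE_FW(engine, RING_MODE_GEN7,
 *			_MASKED_BIT_ENABLE(GEN11_GFX_DISABLE_LEGACY_MODE));
 */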
#define RING_CONTEXT_STATUS_PTR(base)		_MMIO((base) + 0x3a0)
#define RING_CTX_TIMESTAMP(base)		_MMIO((base) + 0x3a8) /* gen8+ */
#define RING_FORCE_TO_NONPRIV(base, i)		_MMIO(((base) + 0x4D0) + (i) * 4)
#define   RING_FORCE_TO_NONPRIV_ADDRESS_MASK	REG_GENMASK(25, 2)
#define   RING_FORCE_TO_NONPRIV_ACCESS_RW	(0 << 28)    /* CFL+ & Gen11+ */
#define   RING_FORCE_TO_NONPRIV_ACCESS_RD	(1 << 28)
#define   RING_FORCE_TO_NONPRIV_ACCESS_WR	(2 << 28)
#define   RING_FORCE_TO_NONPRIV_ACCESS_INVALID	(3 << 28)
#define   RING_FORCE_TO_NONPRIV_ACCESS_MASK	(3 << 28)
#define   RING_FORCE_TO_NONPRIV_RANGE_1		(0 << 0)     /* CFL+ & Gen11+ */
#define   RING_FORCE_TO_NONPRIV_RANGE_4		(1 << 0)
#define   RING_FORCE_TO_NONPRIV_RANGE_16	(2 << 0)
#define   RING_FORCE_TO_NONPRIV_RANGE_64	(3 << 0)
#define   RING_FORCE_TO_NONPRIV_RANGE_MASK	(3 << 0)
#define   RING_FORCE_TO_NONPRIV_MASK_VALID \
		(RING_FORCE_TO_NONPRIV_RANGE_MASK | RING_FORCE_TO_NONPRIV_ACCESS_MASK)
#define   RING_MAX_NONPRIV_SLOTS		12

#define RING_EXECLIST_SQ_CONTENTS(base)		_MMIO((base) + 0x510)
#define RING_PP_DIR_BASE_READ(base)		_MMIO((base) + 0x518)
#define RING_EXECLIST_CONTROL(base)		_MMIO((base) + 0x550)
#define   EL_CTRL_LOAD				REG_BIT(0)

/* There are 16 64-bit CS General Purpose Registers per-engine on Gen8+ */
#define GEN8_RING_CS_GPR(base, n)		_MMIO((base) + 0x600 + (n) * 8)
#define GEN8_RING_CS_GPR_UDW(base, n)		_MMIO((base) + 0x600 + (n) * 8 + 4)

#define GEN11_VCS_SFC_FORCED_LOCK(base)		_MMIO((base) + 0x88c)
#define   GEN11_VCS_SFC_FORCED_LOCK_BIT		(1 << 0)
#define GEN11_VCS_SFC_LOCK_STATUS(base)		_MMIO((base) + 0x890)
#define   GEN11_VCS_SFC_USAGE_BIT		(1 << 0)
#define   GEN11_VCS_SFC_LOCK_ACK_BIT		(1 << 1)

#define GEN11_VECS_SFC_FORCED_LOCK(base)	_MMIO((base) + 0x201c)
#define   GEN11_VECS_SFC_FORCED_LOCK_BIT	(1 << 0)
#define GEN11_VECS_SFC_LOCK_ACK(base)		_MMIO((base) + 0x2018)
#define   GEN11_VECS_SFC_LOCK_ACK_BIT		(1 << 0)
#define GEN11_VECS_SFC_USAGE(base)		_MMIO((base) + 0x2014)
#define   GEN11_VECS_SFC_USAGE_BIT		(1 << 0)

#define RING_HWS_PGA_GEN6(base)			_MMIO((base) + 0x2080)

#define GEN12_HCP_SFC_LOCK_STATUS(base)		_MMIO((base) + 0x2914)
#define   GEN12_HCP_SFC_LOCK_ACK_BIT		REG_BIT(1)
#define   GEN12_HCP_SFC_USAGE_BIT		REG_BIT(0)

#define VDBOX_CGCTL3F10(base)			_MMIO((base) + 0x3f10)
#define   IECPUNIT_CLKGATE_DIS			REG_BIT(22)

#define VDBOX_CGCTL3F18(base)			_MMIO((base) + 0x3f18)
#define   ALNUNIT_CLKGATE_DIS			REG_BIT(13)
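/*
 * Illustrative sketch (not part of the original header): each of the
 * RING_MAX_NONPRIV_SLOTS RING_FORCE_TO_NONPRIV slots above carries a register
 * offset in bits 25:2 plus access and range flags, so whitelisting a register
 * (and the following three dwords) for non-privileged reads could be packed
 * roughly as below. "reg" and "slot" are hypothetical locals; the real
 * whitelist helpers live in gt/intel_workarounds.c.
 *
 *	u32 val = i915_mmio_reg_offset(reg) |
 *		  RING_FORCE_TO_NONPRIV_ACCESS_RD |
 *		  RING_FORCE_TO_NONPRIV_RANGE_4;
 *
 *	intel_uncore_write(uncore,
 *			   RING_FORCE_TO_NONPRIV(engine->mmio_base, slot),
 *			   val);
 */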

#endif /* __INTEL_ENGINE_REGS__ */