Lines Matching +full:0 +full:xcfff

130 enum forcewake_domains fw = 0; in wal_get_fw_for_rmw()
134 for (i = 0, wa = wal->list; i < wal->count; i++, wa++) in wal_get_fw_for_rmw()
147 unsigned int start = 0, end = wal->count; in _wa_add()
203 GEM_BUG_ON(i915_mmio_reg_offset(wa_[0].reg) == in _wa_add()
206 i915_mmio_reg_offset(wa_[0].reg)) in _wa_add()
209 swap(wa_[1], wa_[0]); in _wa_add()
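The _wa_add() fragments above (the search bounds at line 147, the GEM_BUG_ON and swap at 203-209) keep the workaround list sorted by register offset. A minimal sketch of that tail-insertion step, using flattened hypothetical types (struct wa, struct wa_list) rather than the driver's i915_wa/i915_wa_list:

#include <stdint.h>

struct wa { uint32_t reg, clr, set, read_mask; };
struct wa_list { struct wa *list; unsigned int count; };

/* Append the new entry, then bubble it down until offsets are ordered
 * again; duplicate offsets are assumed to have been merged beforehand. */
static void wa_insert_sorted(struct wa_list *wal, const struct wa *wa)
{
	struct wa *p = &wal->list[wal->count++];

	*p = *wa;

	while (p-- > wal->list) {
		if (p[1].reg > p[0].reg)
			break;

		struct wa tmp = p[0];	/* swap(p[1], p[0]) */
		p[0] = p[1];
		p[1] = tmp;
	}
}

Keeping the list sorted this way is what lets later lookups use the binary-search bounds seen at line 147.
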
257 wa_write_clr_set(wal, reg, ~0, set); in wa_write()
263 wa_mcr_write_clr_set(wal, reg, ~0, set); in wa_mcr_write()
281 wa_write_clr_set(wal, reg, clr, 0); in wa_write_clr()
287 wa_mcr_write_clr_set(wal, reg, clr, 0); in wa_mcr_write_clr()
304 wa_add(wal, reg, 0, _MASKED_BIT_ENABLE(val), val, true); in wa_masked_en()
310 wa_mcr_add(wal, reg, 0, _MASKED_BIT_ENABLE(val), val, true); in wa_mcr_masked_en()
316 wa_add(wal, reg, 0, _MASKED_BIT_DISABLE(val), val, true); in wa_masked_dis()
322 wa_mcr_add(wal, reg, 0, _MASKED_BIT_DISABLE(val), val, true); in wa_mcr_masked_dis()
329 wa_add(wal, reg, 0, _MASKED_FIELD(mask, val), mask, true); in wa_masked_field_set()
336 wa_mcr_add(wal, reg, 0, _MASKED_FIELD(mask, val), mask, true); in wa_mcr_masked_field_set()
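The wa_write*/wa_mcr_*/wa_masked_* hits above all funnel into one add primitive carrying a (clear, set, read-mask) triple. A sketch of that layering with illustrative names (wa_add_entry() and friends, a fixed-size stand-in list) rather than the driver's API; the (bits << 16) | bits value mirrors what _MASKED_BIT_ENABLE() expands to for self-masking registers:

#include <stdint.h>

struct wa { uint32_t reg, clr, set, read_mask; int masked; };

static struct wa wa_sketch[64];		/* stand-in list, no bounds checking */
static unsigned int wa_sketch_count;

static void wa_add_entry(uint32_t reg, uint32_t clr, uint32_t set,
			 uint32_t read_mask, int masked)
{
	wa_sketch[wa_sketch_count++] =
		(struct wa){ reg, clr, set, read_mask, masked };
}

/* Full write: clear everything, set the new value, verify every bit. */
static void wa_write_sketch(uint32_t reg, uint32_t set)
{
	wa_add_entry(reg, ~0u, set, ~0u, 0);
}

/* Clear-only: drop bits and expect them to read back as zero. */
static void wa_write_clr_sketch(uint32_t reg, uint32_t clr)
{
	wa_add_entry(reg, clr, 0, clr, 0);
}

/* Masked register: the upper 16 bits select which low bits the write
 * touches, so no read-modify-write is needed; verify only the low bits. */
static void wa_masked_en_sketch(uint32_t reg, uint32_t bits)
{
	wa_add_entry(reg, 0, (bits << 16) | bits, bits, 1);
}
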
424 (IS_BROADWELL_GT3(i915) ? HDC_FENCE_DEST_SLM_DISABLE : 0)); in bdw_ctx_workarounds_init()
539 u8 vals[3] = { 0, 0, 0 }; in skl_tune_iz_hashing()
542 for (i = 0; i < 3; i++) { in skl_tune_iz_hashing()
553 * subslice_7eu[i] != 0 (because of the check above) and in skl_tune_iz_hashing()
556 * -> 0 <= ss <= 3; in skl_tune_iz_hashing()
562 if (vals[0] == 0 && vals[1] == 0 && vals[2] == 0) in skl_tune_iz_hashing()
569 GEN9_IZ_HASHING_MASK(0), in skl_tune_iz_hashing()
572 GEN9_IZ_HASHING(0, vals[0])); in skl_tune_iz_hashing()
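skl_tune_iz_hashing() above folds the per-slice 7-EU subslice masks into a single masked-field update of GEN7_GT_MODE. A rough sketch of that packing, assuming a 2-bit field per slice and a 3 - ss mapping; both are assumptions about the GEN9_IZ_HASHING encoding, not something visible in the lines shown:

#include <strings.h>	/* ffs() */
#include <stdint.h>

#define IZ_FIELD_MASK(s)	(0x3u << ((s) * 2))	/* assumed layout */
#define IZ_FIELD(s, v)		((uint32_t)(v) << ((s) * 2))

static void pack_iz_hashing(const uint8_t subslice_7eu[3],
			    uint32_t *mask, uint32_t *value)
{
	unsigned int i;

	*mask = 0;
	*value = 0;

	for (i = 0; i < 3; i++) {
		unsigned int ss;

		if (!subslice_7eu[i])
			continue;	/* the driver also requires exactly one 7-EU subslice */

		/* subslice_7eu[i] != 0 and at most 4 subslices -> 0 <= ss <= 3 */
		ss = ffs(subslice_7eu[i]) - 1;

		*mask |= IZ_FIELD_MASK(i);
		*value |= IZ_FIELD(i, 3 - ss);	/* assumed bias away from the 7-EU subslice */
	}
}
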
653 wa_mcr_add(wal, GEN10_CACHE_MODE_SS, 0, in icl_ctx_workarounds_init()
655 0 /* write-only, so skip validation */, in icl_ctx_workarounds_init()
668 wa_write(wal, IVB_FBC_RT_BASE, 0xFFFFFFFF & ~ILK_FBC_RT_VALID); in icl_ctx_workarounds_init()
670 0, in icl_ctx_workarounds_init()
671 0xFFFFFFFF); in icl_ctx_workarounds_init()
686 REG_FIELD_PREP(L3_PWM_TIMER_INIT_VAL_MASK, 0x7f)); in dg2_ctx_gt_tuning_init()
731 * the clear mask is just set to ~0 to make sure other bits are not in gen12_ctx_workarounds_init()
736 ~0, in gen12_ctx_workarounds_init()
738 0, false); in gen12_ctx_workarounds_init()
773 wa_masked_field_set(wal, VF_PREEMPTION, PREEMPTION_VERTEX_COUNT, 0x4000); in dg2_ctx_workarounds_init()
794 wa_add(wal, DRAW_WATERMARK, VERT_WM_VAL, 0x3FF, 0, false); in xelpg_ctx_gt_tuning_init()
808 PREEMPTION_VERTEX_COUNT, 0x4000); in xelpg_ctx_workarounds_init()
968 if (wal->count == 0) in intel_engine_emit_ctx_wa()
969 return 0; in intel_engine_emit_ctx_wa()
986 for (i = 0, wa = wal->list; i < wal->count; i++, wa++) { in intel_engine_emit_ctx_wa()
1015 return 0; in intel_engine_emit_ctx_wa()
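intel_engine_emit_ctx_wa() (lines 968-1015 above) walks the context workaround list and loads every register from the command stream. A generic sketch of that emission into a dword buffer, with a caller-supplied load-register-immediate header standing in for the real MI_LOAD_REGISTER_IMM() encoding:

#include <stdint.h>
#include <stddef.h>

struct wa { uint32_t reg, set; };

static size_t emit_ctx_wa(uint32_t *cs, uint32_t lri_header,
			  const struct wa *list, unsigned int count)
{
	uint32_t *start = cs;
	unsigned int i;

	if (count == 0)
		return 0;	/* nothing to emit, mirroring the early return above */

	*cs++ = lri_header;		/* e.g. "load register immediate, <count> pairs" */
	for (i = 0; i < count; i++) {
		*cs++ = list[i].reg;	/* register offset */
		*cs++ = list[i].set;	/* value to load */
	}
	if ((cs - start) & 1)
		*cs++ = 0;		/* no-op pad; assumes the stream wants qword alignment */

	return cs - start;		/* dwords written */
}
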
1084 HSW_ROW_CHICKEN3, 0, in hsw_gt_workarounds_init()
1086 0 /* XXX does this reg exist? */, true); in hsw_gt_workarounds_init()
1184 if (IS_KABYLAKE(gt->i915) && IS_GRAPHICS_STEP(gt->i915, 0, STEP_C0)) in kbl_gt_workarounds_init()
1268 * one of the higher subslices, we run the risk of reading back 0's or in icl_wa_init_mcr()
1271 subslice = __ffs(intel_sseu_get_hsw_subslices(sseu, 0)); in icl_wa_init_mcr()
1281 __add_mcr_wa(gt, wal, 0, subslice); in icl_wa_init_mcr()
1288 unsigned long slice, subslice = 0, slice_mask = 0; in xehp_init_mcr()
1289 u32 lncf_mask = 0; in xehp_init_mcr()
1327 lncf_mask |= (0x3 << (i * 2)); in xehp_init_mcr()
1344 if (IS_XEHPSDV(gt->i915) && slice_mask & BIT(0)) in xehp_init_mcr()
1362 __set_mcr_steering(wal, MCFG_MCR_SELECTOR, 0, 2); in xehp_init_mcr()
1363 __set_mcr_steering(wal, SF_MCR_SELECTOR, 0, 2); in xehp_init_mcr()
1370 __set_mcr_steering(wal, GAM_MCR_SELECTOR, 1, 0); in xehp_init_mcr()
1383 dss = intel_sseu_find_first_xehp_dss(&gt->info.sseu, 0, 0); in pvc_init_mcr()
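The icl_wa_init_mcr()/xehp_init_mcr()/pvc_init_mcr() hits above steer multicast register accesses at a slice/subslice that is actually present, so a fused-off unit does not hand back zeroes (see the comment at line 1268). A simplified sketch of that selection, with an open-coded first-set-bit helper in place of __ffs() and hypothetical mask parameters:

#include <stdint.h>

static unsigned int first_set_bit(uint32_t mask)
{
	unsigned int i = 0;

	while (!(mask & 1)) {	/* caller guarantees mask != 0, as __ffs() requires */
		mask >>= 1;
		i++;
	}
	return i;
}

static void pick_mcr_steering(uint32_t slice_mask, uint32_t subslice_mask,
			      unsigned int *slice, unsigned int *subslice)
{
	/* Any present instance will do; the lowest one is simply convenient. */
	*slice = first_set_bit(slice_mask);
	*subslice = first_set_bit(subslice_mask);
}
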
1496 0, 0, false); in gen12_gt_workarounds_init()
1609 0, 0, false); in dg2_gt_workarounds_init()
1738 if (MEDIA_VER_FULL(i915) == IP_VER(13, 0)) in gt_init_workarounds()
1805 "%s workaround lost on %s! (reg[%x]=0x%x, relevant bits were 0x%x vs expected 0x%x)\n", in wa_verify()
1833 for (i = 0, wa = wal->list; i < wal->count; i++, wa++) { in wa_list_apply()
1834 u32 val, old = 0; in wa_list_apply()
1885 for (i = 0, wa = wal->list; i < wal->count; i++, wa++) in wa_list_verify()
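wa_list_apply() and wa_verify() above boil down to a read-modify-write followed by a check of only the bits the workaround claims to own. A host-side sketch of both steps with the same field meanings (clr/set/read mask); the message format is copied from the listing:

#include <stdio.h>
#include <stdbool.h>

struct wa { unsigned int reg, clr, set, read_mask; };

/* Step 1: compute the new register value from the current one. */
static unsigned int wa_apply(unsigned int old, const struct wa *wa)
{
	return (old & ~wa->clr) | wa->set;
}

/* Step 2: complain if any bit covered by the read mask differs. */
static bool wa_check(const struct wa *wa, unsigned int cur,
		     const char *name, const char *from)
{
	if ((cur ^ wa->set) & wa->read_mask) {
		fprintf(stderr,
			"%s workaround lost on %s! (reg[%x]=0x%x, relevant bits were 0x%x vs expected 0x%x)\n",
			name, from, wa->reg, cur,
			cur & wa->read_mask, wa->set & wa->read_mask);
		return false;
	}
	return true;
}
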
2100 whitelist_reg_ext(w, _MMIO(0x2000 + engine->mmio_base), in icl_whitelist_build()
2103 whitelist_reg_ext(w, _MMIO(0x2014 + engine->mmio_base), in icl_whitelist_build()
2106 whitelist_reg_ext(w, _MMIO(0x23B0 + engine->mmio_base), in icl_whitelist_build()
2176 * Prevent read/write access to [0x4400, 0x4600) which covers in blacklist_trtt()
2181 whitelist_reg_ext(w, _MMIO(0x4400), in blacklist_trtt()
2184 whitelist_reg_ext(w, _MMIO(0x4500), in blacklist_trtt()
2262 for (i = 0, wa = wal->list; i < wal->count; i++, wa++) in intel_engine_apply_whitelist()
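The blacklist_trtt() comment above says the two entries at 0x4400 and 0x4500 block [0x4400, 0x4600), so each entry evidently spans 0x100 bytes, i.e. 64 dword registers. A quick check of that arithmetic:

#include <stdint.h>
#include <assert.h>

int main(void)
{
	const uint32_t span = 64 * sizeof(uint32_t);	/* 64 dwords = 0x100 bytes */

	assert(span == 0x100);
	assert(0x4400 + span == 0x4500);	/* first entry ends where the second begins */
	assert(0x4500 + span == 0x4600);	/* second entry ends at the top of the window */
	return 0;
}
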
2303 * Even on the few platforms where MOCS 0 is a in engine_fake_wa_init()
2308 drm_WARN_ON(&engine->i915->drm, mocs_r == 0); in engine_fake_wa_init()
2358 wa_mcr_add(wal, XEHP_HDC_CHICKEN0, 0, in rcs_engine_wa_init()
2360 0, true); in rcs_engine_wa_init()
2464 0); in rcs_engine_wa_init()
2491 * kernel-only register CS_DEBUG_MODE1 (0x20EC). Any granularity in rcs_engine_wa_init()
2497 * register CS_CHICKEN1 (0x2580). CS_CHICKEN1 is saved/restored on in rcs_engine_wa_init()
2524 * CS_CHICKEN1[0] does not disable object-level preemption as in rcs_engine_wa_init()
2525 * it is supposed to (nor does CS_DEBUG_MODE1[0] if we had been in rcs_engine_wa_init()
2586 GEN9_LNCF_NONIA_COHERENT_ATOMICS_ENABLE, 0); in rcs_engine_wa_init()
2588 GEN8_LQSQ_NONIA_COHERENT_ATOMICS_ENABLE, 0); in rcs_engine_wa_init()
2590 EVICTION_PERF_FIX_ENABLE, 0); in rcs_engine_wa_init()
2637 if (0) { /* causes HiZ corruption on ivb:gt1 */ in rcs_engine_wa_init()
2765 0, _MASKED_BIT_ENABLE(VS_TIMER_DISPATCH), in rcs_engine_wa_init()
2767 IS_I965G(i915) ? 0 : VS_TIMER_DISPATCH, true); in rcs_engine_wa_init()
2781 0, _MASKED_BIT_ENABLE(ECO_CONSTANT_BUFFER_SR_DISABLE), in rcs_engine_wa_init()
2782 0 /* XXX bit doesn't stick on Broadwater */, in rcs_engine_wa_init()
2956 * Note that register 0xE420 is write-only and cannot be read in general_render_compute_wa_init()
2960 wa_mcr_add(wal, GEN10_CACHE_MODE_SS, 0, in general_render_compute_wa_init()
2962 0 /* write-only, so skip validation */, in general_render_compute_wa_init()
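The GEN10_CACHE_MODE_SS hits above pass 0 as the read mask because, per the comment at line 2956, register 0xE420 is write-only. Fed into the check from the wa_check() sketch earlier, a zero mask makes verification vacuously pass, which is the intent:

#include <stdbool.h>

/* With read_mask == 0 this can never report a lost workaround, so
 * write-only registers are still programmed but never read back. */
static bool bits_lost(unsigned int cur, unsigned int set, unsigned int read_mask)
{
	return ((cur ^ set) & read_mask) != 0;
}
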
3022 { .start = 0x5500, .end = 0x55ff },
3023 { .start = 0x7000, .end = 0x7fff },
3024 { .start = 0x9400, .end = 0x97ff },
3025 { .start = 0xb000, .end = 0xb3ff },
3026 { .start = 0xe000, .end = 0xe7ff },
3031 { .start = 0x8150, .end = 0x815f },
3032 { .start = 0x9520, .end = 0x955f },
3033 { .start = 0xb100, .end = 0xb3ff },
3034 { .start = 0xde80, .end = 0xe8ff },
3035 { .start = 0x24a00, .end = 0x24a7f },
3040 { .start = 0x4000, .end = 0x4aff },
3041 { .start = 0x5200, .end = 0x52ff },
3042 { .start = 0x5400, .end = 0x7fff },
3043 { .start = 0x8140, .end = 0x815f },
3044 { .start = 0x8c80, .end = 0x8dff },
3045 { .start = 0x94d0, .end = 0x955f },
3046 { .start = 0x9680, .end = 0x96ff },
3047 { .start = 0xb000, .end = 0xb3ff },
3048 { .start = 0xc800, .end = 0xcfff },
3049 { .start = 0xd800, .end = 0xd8ff },
3050 { .start = 0xdc00, .end = 0xffff },
3051 { .start = 0x17000, .end = 0x17fff },
3052 { .start = 0x24a00, .end = 0x24a7f },
3075 for (i = 0; mcr_ranges[i].start; i++) in mcr_range()
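mcr_range() walks the range tables above until it hits the zero-start sentinel and reports whether an offset lands in any multicast range, whose read-back depends on steering and is therefore skipped during verification. A direct sketch with simplified types:

#include <stdint.h>
#include <stdbool.h>

struct reg_range { uint32_t start, end; };

static bool offset_in_ranges(const struct reg_range *ranges, uint32_t offset)
{
	unsigned int i;

	for (i = 0; ranges[i].start; i++) {	/* table ends with .start == 0 */
		if (offset >= ranges[i].start && offset <= ranges[i].end)
			return true;
	}
	return false;
}
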
3089 unsigned int i, count = 0; in wa_list_srm()
3097 for (i = 0, wa = wal->list; i < wal->count; i++, wa++) { in wa_list_srm()
3106 for (i = 0, wa = wal->list; i < wal->count; i++, wa++) { in wa_list_srm()
3115 *cs++ = 0; in wa_list_srm()
3119 return 0; in wa_list_srm()
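wa_list_srm() above counts the readable (non-multicast) registers, then emits one store-register-to-memory packet per entry, indexed so slot i of the results buffer lines up with workaround i. A generic sketch with a caller-supplied opcode and skip predicate in place of the real SRM encoding and mcr_range():

#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>

struct wa { uint32_t reg; };

static size_t emit_srm(uint32_t *cs, uint32_t srm_opcode,
		       const struct wa *list, unsigned int count,
		       uint64_t results_addr, bool (*skip)(uint32_t reg))
{
	uint32_t *start = cs;
	unsigned int i;

	for (i = 0; i < count; i++) {
		uint64_t dst = results_addr + sizeof(uint32_t) * i;

		if (skip && skip(list[i].reg))
			continue;	/* slot i is simply left unwritten */

		*cs++ = srm_opcode;
		*cs++ = list[i].reg;
		*cs++ = (uint32_t)dst;		/* destination, low dword */
		*cs++ = (uint32_t)(dst >> 32);	/* destination, high dword */
	}
	return cs - start;	/* dwords written */
}
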
3135 return 0; in engine_wa_list_verify()
3146 if (err == 0) in engine_wa_list_verify()
3151 err = i915_vma_pin_ww(vma, &ww, 0, 0, in engine_wa_list_verify()
3163 if (err == 0) in engine_wa_list_verify()
3174 if (i915_request_wait(rq, 0, HZ / 5) < 0) { in engine_wa_list_verify()
3185 err = 0; in engine_wa_list_verify()
3186 for (i = 0, wa = wal->list; i < wal->count; i++, wa++) { in engine_wa_list_verify()
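engine_wa_list_verify() finishes by comparing the read-back buffer against the expected values, skipping multicast entries and carrying on after a failure so every lost workaround gets reported. A sketch of that final loop using the same per-slot indexing as the SRM sketch above:

#include <stdint.h>
#include <stdbool.h>

struct wa { uint32_t reg, set, read_mask; };

static int verify_results(const struct wa *list, unsigned int count,
			  const uint32_t *results,
			  bool (*skip)(uint32_t reg))
{
	int err = 0;
	unsigned int i;

	for (i = 0; i < count; i++) {
		if (skip && skip(list[i].reg))
			continue;	/* read-back is not meaningful here */

		if ((results[i] ^ list[i].set) & list[i].read_mask)
			err = -1;	/* note it, but keep checking the rest */
	}
	return err;
}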