Home
last modified time | relevance | path

Searched refs:i915_vma_offset (Results 1 – 25 of 26) sorted by relevance

1 2

/linux/drivers/gpu/drm/i915/gt/
H A Dselftest_tlb.c25 GEM_BUG_ON(addr < i915_vma_offset(vma)); in vma_set_qw()
26 GEM_BUG_ON(addr >= i915_vma_offset(vma) + i915_vma_size(vma) + sizeof(val)); in vma_set_qw()
28 (addr - i915_vma_offset(vma)), val, 1); in vma_set_qw()
74 GEM_BUG_ON(i915_vma_offset(va) != addr); in pte_tlbinv()
119 *cs++ = lower_32_bits(i915_vma_offset(vma)); in pte_tlbinv()
120 *cs++ = upper_32_bits(i915_vma_offset(vma)); in pte_tlbinv()
130 err = rq->engine->emit_bb_start(rq, i915_vma_offset(vma), 0, 0); in pte_tlbinv()
160 .start = i915_vma_offset(vb), in pte_tlbinv()
H A Dselftest_engine_cs.c184 i915_vma_offset(batch), 8, in perf_mi_bb_start()
328 i915_vma_offset(base), 8, in perf_mi_noop()
338 i915_vma_offset(nop), in perf_mi_noop()
H A Dselftest_lrc.c1040 *cs++ = lower_32_bits(i915_vma_offset(scratch) + x); in store_context()
1041 *cs++ = upper_32_bits(i915_vma_offset(scratch) + x); in store_context()
1108 *cs++ = lower_32_bits(i915_vma_offset(b_before)); in record_registers()
1109 *cs++ = upper_32_bits(i915_vma_offset(b_before)); in record_registers()
1124 *cs++ = lower_32_bits(i915_vma_offset(b_after)); in record_registers()
1125 *cs++ = upper_32_bits(i915_vma_offset(b_after)); in record_registers()
1246 *cs++ = lower_32_bits(i915_vma_offset(batch)); in poison_registers()
1247 *cs++ = upper_32_bits(i915_vma_offset(batch)); in poison_registers()
H A Dselftest_hangcheck.c99 return i915_vma_offset(hws) + in hws_address()
184 *batch++ = lower_32_bits(i915_vma_offset(vma)); in hang_create_request()
185 *batch++ = upper_32_bits(i915_vma_offset(vma)); in hang_create_request()
198 *batch++ = lower_32_bits(i915_vma_offset(vma)); in hang_create_request()
211 *batch++ = lower_32_bits(i915_vma_offset(vma)); in hang_create_request()
223 *batch++ = lower_32_bits(i915_vma_offset(vma)); in hang_create_request()
238 err = rq->engine->emit_bb_start(rq, i915_vma_offset(vma), PAGE_SIZE, flags); in hang_create_request()
H A Dselftest_workarounds.c522 u64 addr = i915_vma_offset(scratch); in check_dirty_whitelist()
641 i915_vma_offset(batch), PAGE_SIZE, in check_dirty_whitelist()
869 u64 offset = i915_vma_offset(results) + sizeof(u32) * i; in read_whitelisted_registers()
939 err = engine->emit_bb_start(rq, i915_vma_offset(batch), 0, 0); in scrub_whitelisted_registers()
H A Dintel_renderstate.c68 u64 r = s + i915_vma_offset(so->vma); in render_state_setup()
H A Dselftest_ring_submission.c54 *cs++ = i915_vma_offset(vma) + 4000; in create_wally()
H A Dselftest_execlists.c2747 *cs++ = lower_32_bits(i915_vma_offset(vma)); in create_gang()
2748 *cs++ = upper_32_bits(i915_vma_offset(vma)); in create_gang()
2751 u64 offset = i915_vma_offset((*prev)->batch); in create_gang()
2776 i915_vma_offset(vma), in create_gang()
3103 addr = i915_vma_offset(result) + offset + i * sizeof(*cs); in create_gpr_user()
3113 *cs++ = lower_32_bits(i915_vma_offset(result)); in create_gpr_user()
3114 *cs++ = upper_32_bits(i915_vma_offset(result)); in create_gpr_user()
3192 i915_vma_offset(batch), in create_gpr_client()
3523 i915_vma_offset(vma), in smoke_submit()
H A Dgen7_renderclear.c109 return i915_vma_offset(bc->vma); in batch_addr()
H A Dintel_ring_submission.c939 i915_vma_offset(engine->wa_ctx.vma), 0, in clear_residuals()
/linux/drivers/gpu/drm/i915/gem/selftests/
H A Di915_gem_client_blt.c197 *cs++ = lower_32_bits(i915_vma_offset(dst->vma)); in prepare_blit()
198 *cs++ = upper_32_bits(i915_vma_offset(dst->vma)); in prepare_blit()
201 *cs++ = lower_32_bits(i915_vma_offset(src->vma)); in prepare_blit()
202 *cs++ = upper_32_bits(i915_vma_offset(src->vma)); in prepare_blit()
243 *cs++ = lower_32_bits(i915_vma_offset(dst->vma)); in prepare_blit()
245 *cs++ = upper_32_bits(i915_vma_offset(dst->vma)); in prepare_blit()
248 *cs++ = lower_32_bits(i915_vma_offset(src->vma)); in prepare_blit()
250 *cs++ = upper_32_bits(i915_vma_offset(src->vma)); in prepare_blit()
459 if (drm_mm_node_allocated(&vma->node) && i915_vma_offset(vma) != addr) { in pin_buffer()
469 GEM_BUG_ON(i915_vma_offset(vma) != addr); in pin_buffer()
[all …]
H A Digt_gem_utils.c66 offset += i915_vma_offset(vma); in igt_emit_store_dw()
146 i915_vma_offset(batch), in igt_gpu_fill_dw()
H A Di915_gem_context.c927 *cmd++ = lower_32_bits(i915_vma_offset(vma)); in rpcs_query_batch()
928 *cmd++ = upper_32_bits(i915_vma_offset(vma)); in rpcs_query_batch()
1013 i915_vma_offset(batch), in emit_rpcs_query()
1573 err = engine->emit_bb_start(rq, i915_vma_offset(vma), in write_to_scratch()
1679 *cmd++ = i915_vma_offset(vma) + result; in read_from_scratch()
1706 err = engine->emit_bb_start(rq, i915_vma_offset(vma), in read_from_scratch()
H A Di915_gem_mman.c1610 err = engine->emit_bb_start(rq, i915_vma_offset(vma), 0, 0); in __igt_mmap_gpu()
H A Dhuge_pages.c411 IS_ALIGNED(i915_vma_offset(vma), SZ_2M) && in igt_check_page_sizes()
/linux/drivers/gpu/drm/i915/selftests/
H A Digt_spinner.c120 return i915_vma_offset(hws) + seqno_offset(rq->fence.context); in hws_address()
194 *batch++ = lower_32_bits(i915_vma_offset(vma)); in igt_spinner_create_request()
195 *batch++ = upper_32_bits(i915_vma_offset(vma)); in igt_spinner_create_request()
210 err = engine->emit_bb_start(rq, i915_vma_offset(vma), PAGE_SIZE, flags); in igt_spinner_create_request()
H A Di915_request.c1012 i915_vma_offset(batch), in emit_bb_start()
1149 *cmd++ = lower_32_bits(i915_vma_offset(vma)); in recursive_batch()
1150 *cmd++ = upper_32_bits(i915_vma_offset(vma)); in recursive_batch()
1153 *cmd++ = lower_32_bits(i915_vma_offset(vma)); in recursive_batch()
1156 *cmd++ = lower_32_bits(i915_vma_offset(vma)); in recursive_batch()
/linux/drivers/gpu/drm/i915/
H A Di915_vma.h167 static inline u64 i915_vma_offset(const struct i915_vma *vma) in i915_vma_offset() function
177 GEM_BUG_ON(upper_32_bits(i915_vma_offset(vma))); in i915_ggtt_offset()
178 GEM_BUG_ON(upper_32_bits(i915_vma_offset(vma) + in i915_ggtt_offset()
180 return lower_32_bits(i915_vma_offset(vma)); in i915_ggtt_offset()
H A Di915_vma.c608 i915_vma_offset(vma), in i915_vma_pin_iomap()
702 if (alignment && !IS_ALIGNED(i915_vma_offset(vma), alignment)) in i915_vma_misplaced()
709 i915_vma_offset(vma) < (flags & PIN_OFFSET_MASK)) in i915_vma_misplaced()
713 i915_vma_offset(vma) != (flags & PIN_OFFSET_MASK)) in i915_vma_misplaced()
731 IS_ALIGNED(i915_vma_offset(vma), vma->fence_alignment)); in __i915_vma_set_map_and_fenceable()
H A Di915_cmd_parser.c1474 shadow_addr = gen8_canonical_addr(i915_vma_offset(shadow)); in intel_engine_cmd_parser()
1475 batch_addr = gen8_canonical_addr(i915_vma_offset(batch) + batch_offset); in intel_engine_cmd_parser()
H A Di915_debugfs.c207 i915_vma_offset(vma), i915_vma_size(vma), in i915_debugfs_describe_obj()
/linux/drivers/gpu/drm/i915/gem/
H A Di915_gem_execbuffer.c383 const u64 start = i915_vma_offset(vma); in eb_vma_misplaced()
676 if (entry->offset != i915_vma_offset(vma)) { in eb_reserve_vma()
677 entry->offset = i915_vma_offset(vma) | UPDATE; in eb_reserve_vma()
1020 if (entry->offset != i915_vma_offset(vma)) { in eb_validate_vmas()
1021 entry->offset = i915_vma_offset(vma) | UPDATE; in eb_validate_vmas()
1102 return gen8_canonical_addr((int)reloc->delta + i915_vma_offset(target)); in relocation_target()
1478 gen8_canonical_addr(i915_vma_offset(target->vma)) == reloc->presumed_offset) in eb_relocate_entry()
2400 i915_vma_offset(batch) + in eb_request_submit()
2411 i915_vma_offset(eb->trampoline) + in eb_request_submit()
/linux/drivers/gpu/drm/i915/gt/uc/
H A Dintel_gsc_uc_heci_cmd_submit.c184 err = engine->emit_bb_start(rq, i915_vma_offset(pkt->bb_vma), PAGE_SIZE, 0); in intel_gsc_uc_heci_cmd_submit_nonpriv()
/linux/drivers/gpu/drm/i915/display/
H A Dintel_dpt.c325 return i915_vma_offset(dpt_vma); in intel_dpt_offset()
/linux/drivers/gpu/drm/i915/pxp/
H A Dintel_pxp_gsccs.c101 pkt.addr_in = i915_vma_offset(exec_res->pkt_vma); in gsccs_send_message()

1 2