Lines Matching full:ee
(each hit below shows the source line number, the matching code, and the enclosing function; the trailing "argument" or "local" tags mark lines where ee is declared rather than merely used — all hits fall in the i915 GPU error-capture code)

440 				 const struct intel_engine_coredump *ee)  in error_print_instdone()  argument
447 ee->instdone.instdone); in error_print_instdone()
449 if (ee->engine->class != RENDER_CLASS || GRAPHICS_VER(m->i915) <= 3) in error_print_instdone()
453 ee->instdone.slice_common); in error_print_instdone()
458 for_each_ss_steering(iter, ee->engine->gt, slice, subslice) in error_print_instdone()
461 ee->instdone.sampler[slice][subslice]); in error_print_instdone()
463 for_each_ss_steering(iter, ee->engine->gt, slice, subslice) in error_print_instdone()
466 ee->instdone.row[slice][subslice]); in error_print_instdone()
472 for_each_ss_steering(iter, ee->engine->gt, slice, subslice) in error_print_instdone()
475 ee->instdone.geom_svg[slice][subslice]); in error_print_instdone()
479 ee->instdone.slice_common_extra[0]); in error_print_instdone()
481 ee->instdone.slice_common_extra[1]); in error_print_instdone()
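
The error_print_instdone() hits above walk the per-slice/per-subslice SAMPLER and ROW instdone snapshots with for_each_ss_steering(). A minimal userspace sketch of that shape, assuming hypothetical MAX_SLICES/MAX_SUBSLICES bounds and using a plain nested loop in place of the steering iterator:

/* Simplified sketch, not the i915 code: dump a 2-D per-slice/subslice array. */
#include <stdio.h>

#define MAX_SLICES    3		/* hypothetical bounds for the sketch */
#define MAX_SUBSLICES 8

struct instdone_snapshot {
	unsigned int sampler[MAX_SLICES][MAX_SUBSLICES];
	unsigned int row[MAX_SLICES][MAX_SUBSLICES];
};

static void print_instdone(const struct instdone_snapshot *id)
{
	/* for_each_ss_steering() only visits fused-on subslices; a plain
	 * nested loop stands in for it here. */
	for (int slice = 0; slice < MAX_SLICES; slice++)
		for (int ss = 0; ss < MAX_SUBSLICES; ss++)
			printf("  SAMPLER_INSTDONE[%d][%d]: 0x%08x\n",
			       slice, ss, id->sampler[slice][ss]);

	for (int slice = 0; slice < MAX_SLICES; slice++)
		for (int ss = 0; ss < MAX_SUBSLICES; ss++)
			printf("  ROW_INSTDONE[%d][%d]: 0x%08x\n",
			       slice, ss, id->row[slice][ss]);
}
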
525 intel_gpu_error_find_batch(const struct intel_engine_coredump *ee) in intel_gpu_error_find_batch() argument
527 return __find_vma(ee->vma, "batch"); in intel_gpu_error_find_batch()
531 const struct intel_engine_coredump *ee) in error_print_engine() argument
536 err_printf(m, "%s command stream:\n", ee->engine->name); in error_print_engine()
537 err_printf(m, " CCID: 0x%08x\n", ee->ccid); in error_print_engine()
538 err_printf(m, " START: 0x%08x\n", ee->start); in error_print_engine()
539 err_printf(m, " HEAD: 0x%08x [0x%08x]\n", ee->head, ee->rq_head); in error_print_engine()
541 ee->tail, ee->rq_post, ee->rq_tail); in error_print_engine()
542 err_printf(m, " CTL: 0x%08x\n", ee->ctl); in error_print_engine()
543 err_printf(m, " MODE: 0x%08x\n", ee->mode); in error_print_engine()
544 err_printf(m, " HWS: 0x%08x\n", ee->hws); in error_print_engine()
546 (u32)(ee->acthd>>32), (u32)ee->acthd); in error_print_engine()
547 err_printf(m, " IPEIR: 0x%08x\n", ee->ipeir); in error_print_engine()
548 err_printf(m, " IPEHR: 0x%08x\n", ee->ipehr); in error_print_engine()
549 err_printf(m, " ESR: 0x%08x\n", ee->esr); in error_print_engine()
551 error_print_instdone(m, ee); in error_print_engine()
553 batch = intel_gpu_error_find_batch(ee); in error_print_engine()
564 (u32)(ee->bbaddr>>32), (u32)ee->bbaddr); in error_print_engine()
565 err_printf(m, " BB_STATE: 0x%08x\n", ee->bbstate); in error_print_engine()
566 err_printf(m, " INSTPS: 0x%08x\n", ee->instps); in error_print_engine()
568 err_printf(m, " INSTPM: 0x%08x\n", ee->instpm); in error_print_engine()
569 err_printf(m, " FADDR: 0x%08x %08x\n", upper_32_bits(ee->faddr), in error_print_engine()
570 lower_32_bits(ee->faddr)); in error_print_engine()
572 err_printf(m, " RC PSMI: 0x%08x\n", ee->rc_psmi); in error_print_engine()
573 err_printf(m, " FAULT_REG: 0x%08x\n", ee->fault_reg); in error_print_engine()
576 err_printf(m, " NOPID: 0x%08x\n", ee->nopid); in error_print_engine()
577 err_printf(m, " EXCC: 0x%08x\n", ee->excc); in error_print_engine()
578 err_printf(m, " CMD_CCTL: 0x%08x\n", ee->cmd_cctl); in error_print_engine()
579 err_printf(m, " CSCMDOP: 0x%08x\n", ee->cscmdop); in error_print_engine()
580 err_printf(m, " CTX_SR_CTL: 0x%08x\n", ee->ctx_sr_ctl); in error_print_engine()
581 err_printf(m, " DMA_FADDR_HI: 0x%08x\n", ee->dma_faddr_hi); in error_print_engine()
582 err_printf(m, " DMA_FADDR_LO: 0x%08x\n", ee->dma_faddr_lo); in error_print_engine()
585 err_printf(m, " GFX_MODE: 0x%08x\n", ee->vm_info.gfx_mode); in error_print_engine()
591 i, ee->vm_info.pdp[i]); in error_print_engine()
594 ee->vm_info.pp_dir_base); in error_print_engine()
598 for (n = 0; n < ee->num_ports; n++) { in error_print_engine()
600 error_print_request(m, " ", &ee->execlist[n]); in error_print_engine()
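
error_print_engine() prints one register per line from the snapshot, splitting 64-bit values such as ACTHD and FADDR into two 32-bit halves. A simplified dump of the same format, using a stand-in struct rather than the real intel_engine_coredump:

/* Sketch of the error_print_engine() dump format with a stand-in struct;
 * 64-bit registers are printed as two 32-bit halves, as in the lines above. */
#include <stdio.h>

struct engine_dump {
	const char *name;
	unsigned long long acthd, faddr;
	unsigned int ccid, start, head, tail, ctl, mode, hws;
	unsigned int ipeir, ipehr, esr;
};

static void print_engine(const struct engine_dump *ee)
{
	printf("%s command stream:\n", ee->name);
	printf("  CCID:  0x%08x\n", ee->ccid);
	printf("  START: 0x%08x\n", ee->start);
	printf("  HEAD:  0x%08x\n", ee->head);
	printf("  TAIL:  0x%08x\n", ee->tail);
	printf("  CTL:   0x%08x\n", ee->ctl);
	printf("  MODE:  0x%08x\n", ee->mode);
	printf("  HWS:   0x%08x\n", ee->hws);
	printf("  ACTHD: 0x%08x %08x\n",
	       (unsigned int)(ee->acthd >> 32), (unsigned int)ee->acthd);
	printf("  IPEIR: 0x%08x\n", ee->ipeir);
	printf("  IPEHR: 0x%08x\n", ee->ipehr);
	printf("  ESR:   0x%08x\n", ee->esr);
	printf("  FADDR: 0x%08x %08x\n",
	       (unsigned int)(ee->faddr >> 32), (unsigned int)ee->faddr);
}
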
810 const struct intel_engine_coredump *ee; in err_print_gt_engines() local
812 for (ee = gt->engine; ee; ee = ee->next) { in err_print_gt_engines()
816 if (ee->guc_capture_node) in err_print_gt_engines()
817 intel_guc_capture_print_engine_node(m, ee); in err_print_gt_engines()
820 ee->engine->name); in err_print_gt_engines()
822 error_print_engine(m, ee); in err_print_gt_engines()
825 err_printf(m, " hung: %u\n", ee->hung); in err_print_gt_engines()
826 err_printf(m, " engine reset count: %u\n", ee->reset_count); in err_print_gt_engines()
827 error_print_context(m, " Active context: ", &ee->context); in err_print_gt_engines()
829 for (vma = ee->vma; vma; vma = vma->next) in err_print_gt_engines()
830 intel_gpu_error_print_vma(m, ee->engine, vma); in err_print_gt_engines()
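
err_print_gt_engines() walks the GT's singly linked list of engine coredumps and, inside each node, the nested list of captured VMAs. A pared-down model of those two chains and the double walk, with stand-in types:

/* Sketch only: a simplified model of the gt->engine / ee->vma chains. */
#include <stdio.h>

struct vma_dump {
	const char *name;
	struct vma_dump *next;
};

struct engine_dump {
	const char *name;
	unsigned int hung;
	struct vma_dump *vma;		/* head of captured buffers */
	struct engine_dump *next;	/* next engine on this GT */
};

struct gt_dump {
	struct engine_dump *engine;	/* singly linked list head */
};

static void print_gt_engines(const struct gt_dump *gt)
{
	/* Same shape as err_print_gt_engines(): outer walk over engines,
	 * inner walk over each engine's captured VMAs. */
	for (const struct engine_dump *ee = gt->engine; ee; ee = ee->next) {
		printf("%s command stream:\n", ee->name);
		printf("  hung: %u\n", ee->hung);
		for (const struct vma_dump *v = ee->vma; v; v = v->next)
			printf("  vma: %s\n", v->name);
	}
}
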
838 const struct intel_engine_coredump *ee; in __err_print_to_sgl() local
859 for (ee = error->gt ? error->gt->engine : NULL; ee; ee = ee->next) in __err_print_to_sgl()
861 ee->engine->name, in __err_print_to_sgl()
862 ee->context.comm, in __err_print_to_sgl()
863 ee->context.pid); in __err_print_to_sgl()
1050 struct intel_engine_coredump *ee = gt->engine; in cleanup_gt() local
1052 gt->engine = ee->next; in cleanup_gt()
1054 i915_vma_coredump_free(ee->vma); in cleanup_gt()
1055 intel_guc_capture_free_node(ee); in cleanup_gt()
1056 kfree(ee); in cleanup_gt()
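
cleanup_gt() tears the same list down by repeatedly unlinking the head node and releasing it; per entry the kernel also frees the VMA dumps (i915_vma_coredump_free()) and the GuC capture node. The pop-and-free loop in sketch form:

/* Sketch of the cleanup_gt() teardown pattern; fields other than the
 * list linkage are elided. */
#include <stdlib.h>

struct engine_dump {
	struct engine_dump *next;
	/* ...captured registers, VMA list, GuC node... */
};

struct gt_dump {
	struct engine_dump *engine;	/* singly linked list head */
};

static void cleanup_gt_engines(struct gt_dump *gt)
{
	/* Unlink the head, advance the list, then free the node, mirroring
	 * gt->engine = ee->next; ...; kfree(ee); above. */
	while (gt->engine) {
		struct engine_dump *ee = gt->engine;

		gt->engine = ee->next;
		free(ee);
	}
}

Popping from the head needs no separate cursor and handles an empty list for free.
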
1232 static void engine_record_registers(struct intel_engine_coredump *ee) in engine_record_registers() argument
1234 const struct intel_engine_cs *engine = ee->engine; in engine_record_registers()
1238 ee->rc_psmi = ENGINE_READ(engine, RING_PSMI_CTL); in engine_record_registers()
1246 ee->fault_reg = intel_uncore_read(engine->uncore, in engine_record_registers()
1250 ee->fault_reg = intel_gt_mcr_read_any(engine->gt, in engine_record_registers()
1253 ee->fault_reg = intel_uncore_read(engine->uncore, in engine_record_registers()
1256 ee->fault_reg = intel_uncore_read(engine->uncore, in engine_record_registers()
1259 ee->fault_reg = GEN6_RING_FAULT_REG_READ(engine); in engine_record_registers()
1263 ee->esr = ENGINE_READ(engine, RING_ESR); in engine_record_registers()
1264 ee->faddr = ENGINE_READ(engine, RING_DMA_FADD); in engine_record_registers()
1265 ee->ipeir = ENGINE_READ(engine, RING_IPEIR); in engine_record_registers()
1266 ee->ipehr = ENGINE_READ(engine, RING_IPEHR); in engine_record_registers()
1267 ee->instps = ENGINE_READ(engine, RING_INSTPS); in engine_record_registers()
1268 ee->bbaddr = ENGINE_READ(engine, RING_BBADDR); in engine_record_registers()
1269 ee->ccid = ENGINE_READ(engine, CCID); in engine_record_registers()
1271 ee->faddr |= (u64)ENGINE_READ(engine, RING_DMA_FADD_UDW) << 32; in engine_record_registers()
1272 ee->bbaddr |= (u64)ENGINE_READ(engine, RING_BBADDR_UDW) << 32; in engine_record_registers()
1274 ee->bbstate = ENGINE_READ(engine, RING_BBSTATE); in engine_record_registers()
1276 ee->faddr = ENGINE_READ(engine, DMA_FADD_I8XX); in engine_record_registers()
1277 ee->ipeir = ENGINE_READ(engine, IPEIR); in engine_record_registers()
1278 ee->ipehr = ENGINE_READ(engine, IPEHR); in engine_record_registers()
1282 ee->cmd_cctl = ENGINE_READ(engine, RING_CMD_CCTL); in engine_record_registers()
1283 ee->cscmdop = ENGINE_READ(engine, RING_CSCMDOP); in engine_record_registers()
1284 ee->ctx_sr_ctl = ENGINE_READ(engine, RING_CTX_SR_CTL); in engine_record_registers()
1285 ee->dma_faddr_hi = ENGINE_READ(engine, RING_DMA_FADD_UDW); in engine_record_registers()
1286 ee->dma_faddr_lo = ENGINE_READ(engine, RING_DMA_FADD); in engine_record_registers()
1287 ee->nopid = ENGINE_READ(engine, RING_NOPID); in engine_record_registers()
1288 ee->excc = ENGINE_READ(engine, RING_EXCC); in engine_record_registers()
1291 intel_engine_get_instdone(engine, &ee->instdone); in engine_record_registers()
1293 ee->instpm = ENGINE_READ(engine, RING_INSTPM); in engine_record_registers()
1294 ee->acthd = intel_engine_get_active_head(engine); in engine_record_registers()
1295 ee->start = ENGINE_READ(engine, RING_START); in engine_record_registers()
1296 ee->head = ENGINE_READ(engine, RING_HEAD); in engine_record_registers()
1297 ee->tail = ENGINE_READ(engine, RING_TAIL); in engine_record_registers()
1298 ee->ctl = ENGINE_READ(engine, RING_CTL); in engine_record_registers()
1300 ee->mode = ENGINE_READ(engine, RING_MI_MODE); in engine_record_registers()
1330 ee->hws = intel_uncore_read(engine->uncore, mmio); in engine_record_registers()
1333 ee->reset_count = i915_reset_engine_count(&i915->gpu_error, engine); in engine_record_registers()
1338 ee->vm_info.gfx_mode = ENGINE_READ(engine, RING_MODE_GEN7); in engine_record_registers()
1341 ee->vm_info.pp_dir_base = in engine_record_registers()
1344 ee->vm_info.pp_dir_base = in engine_record_registers()
1350 ee->vm_info.pdp[i] = in engine_record_registers()
1353 ee->vm_info.pdp[i] <<= 32; in engine_record_registers()
1354 ee->vm_info.pdp[i] |= in engine_record_registers()
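
engine_record_registers() fills the coredump from live register reads, with the exact set gated on the graphics version (the lines above show, for example, the upper halves of DMA_FADD and BBADDR being ORed in only on newer parts). The sketch below keeps only the shape of that version gating; the read_reg() callback, the offsets and the thresholds are illustrative stand-ins, not i915 definitions:

/* Sketch: version-gated register snapshot into a coredump struct. */
#include <stdint.h>

struct engine_regs {
	uint64_t faddr;
	uint32_t ipeir, ipehr, esr, start, head, tail, ctl;
};

/* Hypothetical MMIO accessor for the sketch. */
typedef uint32_t (*read_reg_fn)(uint32_t offset);

enum { REG_IPEIR = 0x64, REG_IPEHR = 0x68, REG_DMA_FADD = 0x78,
       REG_DMA_FADD_UDW = 0x60, REG_START = 0x38, REG_HEAD = 0x34,
       REG_TAIL = 0x30, REG_CTL = 0x3c, REG_ESR = 0xb8 };

static void record_registers(struct engine_regs *ee, read_reg_fn rd,
			     int graphics_ver)
{
	if (graphics_ver >= 4) {
		if (graphics_ver >= 12)
			ee->esr = rd(REG_ESR);	/* error status, newer parts only */
		ee->faddr = rd(REG_DMA_FADD);
		ee->ipeir = rd(REG_IPEIR);
		ee->ipehr = rd(REG_IPEHR);
		if (graphics_ver >= 8)	/* fault address widens to 64 bit */
			ee->faddr |= (uint64_t)rd(REG_DMA_FADD_UDW) << 32;
	} else {
		/* older parts: smaller, differently laid out register set */
		ee->faddr = rd(REG_DMA_FADD);
		ee->ipeir = rd(REG_IPEIR);
		ee->ipehr = rd(REG_IPEHR);
	}

	/* ring state is snapshotted the same way on every generation */
	ee->start = rd(REG_START);
	ee->head  = rd(REG_HEAD);
	ee->tail  = rd(REG_TAIL);
	ee->ctl   = rd(REG_CTL);
}
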
1384 static void engine_record_execlists(struct intel_engine_coredump *ee) in engine_record_execlists() argument
1386 const struct intel_engine_execlists * const el = &ee->engine->execlists; in engine_record_execlists()
1391 record_request(*port++, &ee->execlist[n++]); in engine_record_execlists()
1393 ee->num_ports = n; in engine_record_execlists()
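
engine_record_execlists() copies the requests currently occupying the submission ports into the fixed ee->execlist[] array and stores the count in ee->num_ports. A small sketch of that bounded copy, with stand-in request types and an assumed two-port limit:

/* Sketch: snapshot the active submission ports into a fixed array and
 * remember how many were in flight. Types here are stand-ins. */
#include <stddef.h>

#define EXEC_MAX_PORTS 2	/* illustrative port count */

struct request { unsigned long long seqno; };
struct request_dump { unsigned long long seqno; };

struct engine_dump {
	struct request_dump execlist[EXEC_MAX_PORTS];
	unsigned int num_ports;
};

static void record_execlists(struct engine_dump *ee,
			     struct request *const *active, size_t n_active)
{
	unsigned int n = 0;

	/* Walk the in-flight ports until a NULL slot or the array is full,
	 * recording one entry per port, then publish the count. */
	while (n < EXEC_MAX_PORTS && n < n_active && active[n]) {
		ee->execlist[n].seqno = active[n]->seqno;
		n++;
	}

	ee->num_ports = n;
}
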
1503 static void add_vma(struct intel_engine_coredump *ee, in add_vma() argument
1507 vma->next = ee->vma; in add_vma()
1508 ee->vma = vma; in add_vma()
1533 static void add_vma_coredump(struct intel_engine_coredump *ee, in add_vma_coredump() argument
1539 add_vma(ee, create_vma_coredump(gt, vma, name, compress)); in add_vma_coredump()
1545 struct intel_engine_coredump *ee; in intel_engine_coredump_alloc() local
1547 ee = kzalloc(sizeof(*ee), gfp); in intel_engine_coredump_alloc()
1548 if (!ee) in intel_engine_coredump_alloc()
1551 ee->engine = engine; in intel_engine_coredump_alloc()
1554 engine_record_registers(ee); in intel_engine_coredump_alloc()
1555 engine_record_execlists(ee); in intel_engine_coredump_alloc()
1558 return ee; in intel_engine_coredump_alloc()
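
intel_engine_coredump_alloc() is a zeroed, allowed-to-fail allocation followed by recording the engine pointer, its registers and its execlist state. The same pattern in userspace terms, with calloc() standing in for kzalloc():

/* Sketch of the allocation pattern: zeroed allocation that may fail,
 * then fill in the node before handing it back. */
#include <stdlib.h>

struct engine;			/* opaque in this sketch */

struct engine_dump {
	const struct engine *engine;
	struct engine_dump *next;
	/* ...snapshotted registers, execlists, VMA list... */
};

static struct engine_dump *engine_dump_alloc(const struct engine *engine)
{
	/* Allocation is best effort; the caller must cope with NULL. */
	struct engine_dump *ee = calloc(1, sizeof(*ee));

	if (!ee)
		return NULL;

	ee->engine = engine;
	/* the kernel then calls engine_record_registers(ee) and
	 * engine_record_execlists(ee) before returning */
	return ee;
}
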
1562 engine_coredump_add_context(struct intel_engine_coredump *ee, in engine_coredump_add_context() argument
1568 ee->simulated |= record_context(&ee->context, ce); in engine_coredump_add_context()
1569 if (ee->simulated) in engine_coredump_add_context()
1584 intel_engine_coredump_add_request(struct intel_engine_coredump *ee, in intel_engine_coredump_add_request() argument
1590 vma = engine_coredump_add_context(ee, rq->context, gfp); in intel_engine_coredump_add_request()
1602 ee->rq_head = rq->head; in intel_engine_coredump_add_request()
1603 ee->rq_post = rq->postfix; in intel_engine_coredump_add_request()
1604 ee->rq_tail = rq->tail; in intel_engine_coredump_add_request()
1610 intel_engine_coredump_add_vma(struct intel_engine_coredump *ee, in intel_engine_coredump_add_vma() argument
1614 const struct intel_engine_cs *engine = ee->engine; in intel_engine_coredump_add_vma()
1620 add_vma(ee, in intel_engine_coredump_add_vma()
1631 add_vma_coredump(ee, engine->gt, engine->status_page.vma, in intel_engine_coredump_add_vma()
1634 add_vma_coredump(ee, engine->gt, engine->wa_ctx.vma, in intel_engine_coredump_add_vma()
1644 struct intel_engine_coredump *ee; in capture_engine() local
1648 ee = intel_engine_coredump_alloc(engine, ALLOW_FAIL, dump_flags); in capture_engine()
1649 if (!ee) in capture_engine()
1658 capture = intel_engine_coredump_add_request(ee, rq, ATOMIC_MAYFAIL); in capture_engine()
1661 capture = engine_coredump_add_context(ee, ce, ATOMIC_MAYFAIL); in capture_engine()
1665 intel_engine_coredump_add_vma(ee, capture, compress); in capture_engine()
1668 intel_guc_capture_get_matching_node(engine->gt, ee, ce); in capture_engine()
1670 kfree(ee); in capture_engine()
1671 ee = NULL; in capture_engine()
1674 return ee; in capture_engine()
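
capture_engine() allocates the node, tries to attach the active request (falling back to just the context), and if nothing useful was captured frees the node and returns NULL, which is the kfree(ee); ee = NULL; path visible above. A control-flow sketch with stand-in helpers:

/* Control-flow sketch of capture_engine(); dump_alloc() and add_request()
 * are hypothetical helpers, not the i915 API. */
#include <stdlib.h>

struct engine_dump {
	int have_payload;	/* stands in for the captured request/context */
};

static struct engine_dump *dump_alloc(void)
{
	return calloc(1, sizeof(struct engine_dump));
}

static int add_request(struct engine_dump *ee)
{
	ee->have_payload = 1;
	return 1;		/* pretend the request was recorded */
}

static struct engine_dump *capture_engine_sketch(int has_active_request)
{
	struct engine_dump *ee = dump_alloc();

	if (!ee)
		return NULL;

	if (has_active_request && add_request(ee))
		return ee;	/* captured something worth keeping */

	/* nothing captured: drop the node and report no dump */
	free(ee);
	return NULL;
}
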
1687 struct intel_engine_coredump *ee; in gt_record_engines() local
1692 ee = capture_engine(engine, compress, dump_flags); in gt_record_engines()
1693 if (!ee) in gt_record_engines()
1696 ee->hung = engine->mask & engine_mask; in gt_record_engines()
1698 gt->simulated |= ee->simulated; in gt_record_engines()
1699 if (ee->simulated) { in gt_record_engines()
1701 intel_guc_capture_free_node(ee); in gt_record_engines()
1702 kfree(ee); in gt_record_engines()
1706 ee->next = gt->engine; in gt_record_engines()
1707 gt->engine = ee; in gt_record_engines()
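
gt_record_engines() captures each engine in turn, marks whether it was part of the hung mask, discards simulated captures, and pushes the rest onto the front of gt->engine. A sketch of that loop with stand-in types:

/* Sketch of the gt_record_engines() loop; types and capture_one() are
 * stand-ins for the i915 structures and capture_engine(). */
#include <stdlib.h>

struct engine { unsigned int mask; };

struct engine_dump {
	unsigned int hung;
	int simulated;
	struct engine_dump *next;
};

struct gt_dump {
	int simulated;
	struct engine_dump *engine;
};

static struct engine_dump *capture_one(const struct engine *e)
{
	(void)e;	/* the real code snapshots this engine's state */
	return calloc(1, sizeof(struct engine_dump));	/* may fail */
}

static void record_engines(struct gt_dump *gt, const struct engine *engines,
			   size_t count, unsigned int hung_mask)
{
	for (size_t i = 0; i < count; i++) {
		struct engine_dump *ee = capture_one(&engines[i]);

		if (!ee)
			continue;

		ee->hung = engines[i].mask & hung_mask;

		gt->simulated |= ee->simulated;
		if (ee->simulated) {	/* test-induced error: keep no record */
			free(ee);
			continue;
		}

		/* push-front, same as ee->next = gt->engine; gt->engine = ee */
		ee->next = gt->engine;
		gt->engine = ee;
	}
}

Push-front is O(1) per engine and needs no tail pointer; the printer above simply walks the list in whatever order results.
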
1937 static u32 generate_ecode(const struct intel_engine_coredump *ee) in generate_ecode() argument
1945 return ee ? ee->ipehr ^ ee->instdone.instdone : 0; in generate_ecode()
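
generate_ecode() reduces the dump to a single fingerprint by XOR-ing IPEHR with the instdone snapshot, returning 0 when there is no engine dump. The same one-liner with stand-in types:

/* Sketch of the ecode computation with stand-in types. */
#include <stdint.h>

struct engine_dump {
	uint32_t ipehr;
	uint32_t instdone;
};

static uint32_t generate_ecode_sketch(const struct engine_dump *ee)
{
	return ee ? ee->ipehr ^ ee->instdone : 0;
}
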