Lines matching full:if (full-text search results over PowerPC kernel trap/machine-check handling code, apparently arch/powerpc/kernel/traps.c). Each entry gives the source line number, the matching line, and the enclosing function; the recurring bit-test pattern in the machine_check_* entries is sketched after the listing.

61 #if defined(CONFIG_DEBUGGER) || defined(CONFIG_KEXEC)
87 if (pmac_backlight) { in pmac_backlight_unblank()
111 if (debugger(regs)) in oops_begin()
119 if (!arch_spin_trylock(&die_lock)) { in oops_begin()
120 if (cpu == die_owner) in oops_begin()
129 if (machine_is(powermac)) in oops_begin()
143 if (!die_nest_count) in oops_end()
152 if (kexec_should_crash(current) || (TRAP(regs) == 0x100)) { in oops_end()
163 if (!signr) in oops_end()
168 * from panic() called below can race and corrupt it. If we in oops_end()
172 if (in_interrupt() || panic_on_oops || !current->pid || in oops_end()
177 if (in_interrupt()) in oops_end()
179 if (panic_on_oops) in oops_end()
201 if (notify_die(DIE_OOPS, str, regs, err, 255, SIGSEGV) == NOTIFY_STOP) in __die()
214 if (__die(str, regs, err)) in die()
236 if (!user_mode(regs)) { in _exception()
241 if (show_unhandled_signals && unhandled_signal(current, signr)) { in _exception()
257 /* See if any machine dependent calls */ in system_reset_exception()
258 if (ppc_md.system_reset_exception) { in system_reset_exception()
259 if (ppc_md.system_reset_exception(regs)) in system_reset_exception()
265 /* Must die if the interrupt is not recoverable */ in system_reset_exception()
266 if (!(regs->msr & MSR_RI)) in system_reset_exception()
275 * Check if the NIP corresponds to the address of a sync
290 if (((msr & 0xffff0000) == 0 || (msr & (0x80000 | 0x40000))) in check_io_access()
300 if (*nip == 0x60000000) /* nop */ in check_io_access()
302 else if (*nip == 0x4c00012c) /* isync */ in check_io_access()
304 if (*nip == 0x7c0004ac || (*nip >> 26) == 3) { in check_io_access()
354 #if defined(CONFIG_4xx)
359 if (reason & ESR_IMCP) { in machine_check_4xx()
374 if (reason & ESR_IMCP){ in machine_check_440A()
380 if (mcsr & MCSR_IB) in machine_check_440A()
382 if (mcsr & MCSR_DRB) in machine_check_440A()
384 if (mcsr & MCSR_DWB) in machine_check_440A()
386 if (mcsr & MCSR_TLBP) in machine_check_440A()
388 if (mcsr & MCSR_ICP){ in machine_check_440A()
392 if (mcsr & MCSR_DCSP) in machine_check_440A()
394 if (mcsr & MCSR_DCFP) in machine_check_440A()
396 if (mcsr & MCSR_IMPE) in machine_check_440A()
411 if (reason & ESR_IMCP) { in machine_check_47x()
418 if (mcsr & MCSR_IB) in machine_check_47x()
420 if (mcsr & MCSR_DRB) in machine_check_47x()
422 if (mcsr & MCSR_DWB) in machine_check_47x()
424 if (mcsr & MCSR_TLBP) in machine_check_47x()
426 if (mcsr & MCSR_ICP) { in machine_check_47x()
430 if (mcsr & MCSR_DCSP) in machine_check_47x()
432 if (mcsr & PPC47x_MCSR_GPR) in machine_check_47x()
434 if (mcsr & PPC47x_MCSR_FPR) in machine_check_47x()
436 if (mcsr & PPC47x_MCSR_IPR) in machine_check_47x()
451 if (reason & MCSR_LD) { in machine_check_e500mc()
453 if (recoverable == 1) in machine_check_e500mc()
460 if (reason & MCSR_MCP) in machine_check_e500mc()
463 if (reason & MCSR_ICPERR) { in machine_check_e500mc()
476 * if it wasn't due to an L1 parity error. in machine_check_e500mc()
481 if (reason & MCSR_DCPERR_MC) { in machine_check_e500mc()
489 if (!(mfspr(SPRN_L1CSR2) & L1CSR2_DCWS)) in machine_check_e500mc()
493 if (reason & MCSR_L2MMU_MHIT) { in machine_check_e500mc()
498 if (reason & MCSR_NMI) in machine_check_e500mc()
501 if (reason & MCSR_IF) { in machine_check_e500mc()
506 if (reason & MCSR_LD) { in machine_check_e500mc()
511 if (reason & MCSR_ST) { in machine_check_e500mc()
516 if (reason & MCSR_LDG) { in machine_check_e500mc()
521 if (reason & MCSR_TLBSYNC) in machine_check_e500mc()
524 if (reason & MCSR_BSL2_ERR) { in machine_check_e500mc()
529 if (reason & MCSR_MAV) { in machine_check_e500mc()
548 if (reason & MCSR_BUS_RBERR) { in machine_check_e500()
549 if (fsl_rio_mcheck_exception(regs)) in machine_check_e500()
556 if (reason & MCSR_MCP) in machine_check_e500()
558 if (reason & MCSR_ICPERR) in machine_check_e500()
560 if (reason & MCSR_DCP_PERR) in machine_check_e500()
562 if (reason & MCSR_DCPERR) in machine_check_e500()
564 if (reason & MCSR_BUS_IAERR) in machine_check_e500()
566 if (reason & MCSR_BUS_RAERR) in machine_check_e500()
568 if (reason & MCSR_BUS_WAERR) in machine_check_e500()
570 if (reason & MCSR_BUS_IBERR) in machine_check_e500()
572 if (reason & MCSR_BUS_RBERR) in machine_check_e500()
574 if (reason & MCSR_BUS_WBERR) in machine_check_e500()
576 if (reason & MCSR_BUS_IPERR) in machine_check_e500()
578 if (reason & MCSR_BUS_RPERR) in machine_check_e500()
596 if (reason & MCSR_MCP) in machine_check_e200()
598 if (reason & MCSR_CP_PERR) in machine_check_e200()
600 if (reason & MCSR_CPERR) in machine_check_e200()
602 if (reason & MCSR_EXCP_ERR) in machine_check_e200()
604 if (reason & MCSR_BUS_IRERR) in machine_check_e200()
606 if (reason & MCSR_BUS_DRERR) in machine_check_e200()
608 if (reason & MCSR_BUS_WRERR) in machine_check_e200()
657 /* See if any machine dependent calls. In theory, we would want in machine_check_exception()
658 * to call the CPU first, and call the ppc_md. one if the CPU in machine_check_exception()
663 if (ppc_md.machine_check_exception) in machine_check_exception()
665 else if (cur_cpu_spec->machine_check) in machine_check_exception()
668 if (recover > 0) in machine_check_exception()
671 #if defined(CONFIG_8xx) && defined(CONFIG_PCI) in machine_check_exception()
682 if (debugger_fault_handler(regs)) in machine_check_exception()
685 if (check_io_access(regs)) in machine_check_exception()
690 /* Must die if the interrupt is not recoverable */ in machine_check_exception()
691 if (!(regs->msr & MSR_RI)) in machine_check_exception()
710 if (notify_die(DIE_IABR_MATCH, "iabr_match", regs, 5, in instruction_breakpoint_exception()
713 if (debugger_iabr_match(regs)) in instruction_breakpoint_exception()
727 if (notify_die(DIE_SSTEP, "single_step", regs, 5, in single_step_exception()
730 if (debugger_sstep(regs)) in single_step_exception()
738 * check if the instruction was being single-stepped, and if so,
744 if (single_stepping(regs)) in emulate_single_step()
753 if ((fpscr & FPSCR_VE) && (fpscr & FPSCR_VX)) in __parse_fpscr()
757 else if ((fpscr & FPSCR_OE) && (fpscr & FPSCR_OX)) in __parse_fpscr()
761 else if ((fpscr & FPSCR_UE) && (fpscr & FPSCR_UX)) in __parse_fpscr()
765 else if ((fpscr & FPSCR_ZE) && (fpscr & FPSCR_ZX)) in __parse_fpscr()
769 else if ((fpscr & FPSCR_XE) && (fpscr & FPSCR_XX)) in __parse_fpscr()
789 * Return non-zero if we can't emulate, or -EFAULT if the associated
806 /* Early out if we are an invalid form of lswx */ in emulate_string_inst()
807 if ((instword & PPC_INST_STRING_MASK) == PPC_INST_LSWX) in emulate_string_inst()
808 if ((rT == rA) || (rT == NB_RB)) in emulate_string_inst()
835 if (get_user(val, (u8 __user *)EA)) in emulate_string_inst()
839 if (pos == 0) in emulate_string_inst()
846 if (put_user(val, (u8 __user *)EA)) in emulate_string_inst()
855 if (++pos == 4) { in emulate_string_inst()
857 if (++rT == 32) in emulate_string_inst()
904 if (!user_mode(regs) || (regs->msr & MSR_LE)) in emulate_instruction()
908 if (get_user(instword, (u32 __user *)(regs->nip))) in emulate_instruction()
912 if ((instword & PPC_INST_MFSPR_PVR_MASK) == PPC_INST_MFSPR_PVR) { in emulate_instruction()
920 if ((instword & PPC_INST_DCBA_MASK) == PPC_INST_DCBA) { in emulate_instruction()
926 if ((instword & PPC_INST_MCRXR_MASK) == PPC_INST_MCRXR) { in emulate_instruction()
937 if ((instword & PPC_INST_STRING_GEN_MASK) == PPC_INST_STRING) { in emulate_instruction()
943 if ((instword & PPC_INST_POPCNTB_MASK) == PPC_INST_POPCNTB) { in emulate_instruction()
949 if ((instword & PPC_INST_ISEL_MASK) == PPC_INST_ISEL) { in emulate_instruction()
956 if (((instword & PPC_INST_MFSPR_DSCR_MASK) == PPC_INST_MFSPR_DSCR) && in emulate_instruction()
964 if (((instword & PPC_INST_MTSPR_DSCR_MASK) == PPC_INST_MTSPR_DSCR) && in emulate_instruction()
987 /* We can now get here via a FP Unavailable exception if the core in program_check_exception()
990 if (reason & REASON_FP) { in program_check_exception()
995 if (reason & REASON_TRAP) { in program_check_exception()
998 if (debugger_bpt(regs)) in program_check_exception()
1002 if (notify_die(DIE_BPT, "breakpoint", regs, 5, 5, SIGTRAP) in program_check_exception()
1006 if (!(regs->msr & MSR_PR) && /* not user-mode */ in program_check_exception()
1022 * hardware people - not sure if it can happen on any illegal in program_check_exception()
1042 /* Try to emulate it if we should. */ in program_check_exception()
1043 if (reason & (REASON_ILLEGAL | REASON_PRIVILEGED)) { in program_check_exception()
1055 if (reason & REASON_PRIVILEGED) in program_check_exception()
1066 if (!(current->thread.align_ctl & PR_UNALIGN_SIGBUS)) in alignment_exception()
1069 if (fixed == 1) { in alignment_exception()
1076 if (fixed == -EFAULT) { in alignment_exception()
1083 if (user_mode(regs)) in alignment_exception()
1122 if (user_mode(regs)) { in altivec_unavailable_exception()
1136 if (user_mode(regs)) { in vsx_unavailable_exception()
1160 #if defined(CONFIG_MATH_EMULATION) || defined(CONFIG_8XX_MINIMAL_FPEMU) in SoftwareEmulation()
1166 if (!user_mode(regs)) { in SoftwareEmulation()
1173 if (errcode >= 0) in SoftwareEmulation()
1196 if (errcode >= 0) in SoftwareEmulation()
1224 if (debug_status & (DBSR_DAC1R | DBSR_DAC1W)) { in handle_debug()
1232 } else if (debug_status & (DBSR_DAC2R | DBSR_DAC2W)) { in handle_debug()
1237 } else if (debug_status & DBSR_IAC1) { in handle_debug()
1243 } else if (debug_status & DBSR_IAC2) { in handle_debug()
1248 } else if (debug_status & DBSR_IAC3) { in handle_debug()
1254 } else if (debug_status & DBSR_IAC4) { in handle_debug()
1262 * Check all other debug flags and see if that bit needs to be turned in handle_debug()
1265 if (DBCR_ACTIVE_EVENTS(current->thread.dbcr0, current->thread.dbcr1)) in handle_debug()
1271 if (changed & 0x01) in handle_debug()
1284 if (debug_status & DBSR_BT) { in DebugException()
1293 if (user_mode(regs)) { in DebugException()
1300 if (notify_die(DIE_SSTEP, "block_step", regs, 5, in DebugException()
1304 if (debugger_sstep(regs)) in DebugException()
1306 } else if (debug_status & DBSR_IC) { /* Instruction complete */ in DebugException()
1314 if (notify_die(DIE_SSTEP, "single_step", regs, 5, in DebugException()
1319 if (debugger_sstep(regs)) in DebugException()
1322 if (user_mode(regs)) { in DebugException()
1324 if (DBCR_ACTIVE_EVENTS(current->thread.dbcr0, in DebugException()
1338 #if !defined(CONFIG_TAU_INT)
1351 if (!user_mode(regs)) { in altivec_assist_exception()
1361 if (err == 0) { in altivec_assist_exception()
1367 if (err == -EFAULT) { in altivec_assist_exception()
1383 if (!user_mode(regs)) { in vsx_assist_exception()
1403 if (error_code & (ESR_DLK|ESR_ILK)) in CacheLockingException()
1423 if ((spefscr & SPEFSCR_FOVF) && (fpexc_mode & PR_FP_EXC_OVF)) { in SPEFloatingPointException()
1426 else if ((spefscr & SPEFSCR_FUNF) && (fpexc_mode & PR_FP_EXC_UND)) { in SPEFloatingPointException()
1429 else if ((spefscr & SPEFSCR_FDBZ) && (fpexc_mode & PR_FP_EXC_DIV)) in SPEFloatingPointException()
1431 else if ((spefscr & SPEFSCR_FINV) && (fpexc_mode & PR_FP_EXC_INV)) { in SPEFloatingPointException()
1434 else if ((spefscr & (SPEFSCR_FG | SPEFSCR_FX)) && (fpexc_mode & PR_FP_EXC_RES)) in SPEFloatingPointException()
1438 if (err == 0) { in SPEFloatingPointException()
1444 if (err == -EFAULT) { in SPEFloatingPointException()
1447 } else if (err == -EINVAL) { in SPEFloatingPointException()
1464 if (regs->msr & MSR_SPE) in SPEFloatingPointRoundException()
1470 if (err == 0) { in SPEFloatingPointRoundException()
1476 if (err == -EFAULT) { in SPEFloatingPointRoundException()
1479 } else if (err == -EINVAL) { in SPEFloatingPointRoundException()
1491 * We enter here if we get an unrecoverable exception, that is, one
1523 * We enter here if we discover during exception entry that we are
1585 if (!powerpc_debugfs_root) in ppc_warn_emulated_init()
1590 if (!dir) in ppc_warn_emulated_init()
1595 if (!d) in ppc_warn_emulated_init()
1601 if (!d) in ppc_warn_emulated_init()
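
Most of the matches above follow a single pattern: a machine-check or debug handler reads a syndrome register and tests it one status bit at a time, reporting each condition it recognizes (see the MCSR tests in machine_check_440A, machine_check_47x, machine_check_e500, and machine_check_e200). The standalone sketch below illustrates only that bit-decoding style; the MC_* bit names, positions, and messages are invented for the example and are not the kernel's MCSR definitions.

/* Illustrative sketch only: decode a machine-check "syndrome" word by
 * testing one status bit at a time and reporting what was found, the
 * way the machine_check_* handlers above walk the MCSR bits.
 * The MC_* bit names, positions, and messages are hypothetical. */
#include <stdio.h>

#define MC_IB   (1u << 0)  /* hypothetical: instruction-side bus error */
#define MC_DRB  (1u << 1)  /* hypothetical: data read bus error */
#define MC_DWB  (1u << 2)  /* hypothetical: data write bus error */
#define MC_TLBP (1u << 3)  /* hypothetical: TLB parity error */

/* Returns non-zero if at least one known condition was reported. */
static int decode_mcheck(unsigned int mcsr)
{
    int handled = 0;

    if (mcsr & MC_IB) {
        printf("Instruction read bus error\n");
        handled = 1;
    }
    if (mcsr & MC_DRB) {
        printf("Data read bus error\n");
        handled = 1;
    }
    if (mcsr & MC_DWB) {
        printf("Data write bus error\n");
        handled = 1;
    }
    if (mcsr & MC_TLBP) {
        printf("TLB parity error\n");
        handled = 1;
    }
    return handled;
}

int main(void)
{
    /* Example syndrome with two conditions set at once. */
    unsigned int mcsr = MC_DRB | MC_TLBP;

    return decode_mcheck(mcsr) ? 0 : 1;
}

In the listing above, machine_check_exception() makes a similar recover-or-die decision on the handler's result ("if (recover > 0)" and "Must die if the interrupt is not recoverable").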