Searched refs:_PAGE_PRIVILEGED (Results 1 – 7 of 7) sorted by relevance
177 VM_WARN_ON_ONCE(oldval & _PAGE_PRIVILEGED); in __pte_flags_need_flush()
178 VM_WARN_ON_ONCE(newval & _PAGE_PRIVILEGED); in __pte_flags_need_flush()
20 #define _PAGE_PRIVILEGED 0x00008 /* kernel access only */ macro
115 #define _PAGE_KERNEL_RW (_PAGE_PRIVILEGED | _PAGE_RW | _PAGE_DIRTY)
116 #define _PAGE_KERNEL_RO (_PAGE_PRIVILEGED | _PAGE_READ)
117 #define _PAGE_KERNEL_ROX (_PAGE_PRIVILEGED | _PAGE_READ | _PAGE_EXEC)
118 #define _PAGE_KERNEL_RWX (_PAGE_PRIVILEGED | _PAGE_DIRTY | _PAGE_RW | _PAGE_EXEC)
523 return !(pte_raw(pte) & cpu_to_be64(_PAGE_PRIVILEGED)); in pte_user()
697 if ((access & _PAGE_PRIVILEGED) != (ptev & _PAGE_PRIVILEGED)) in check_pte_access()
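The definition at line 20 and the hit in check_pte_access() show the core contract: _PAGE_PRIVILEGED marks a PTE as kernel-access-only, and a fault's access mask must agree with the PTE on that bit. The following is a minimal, self-contained sketch of that check, not the kernel implementation; only the _PAGE_PRIVILEGED value (0x00008) is taken from the hits above, while the other flag values and the check_access() helper are assumptions for illustration.

#include <stdbool.h>
#include <stdio.h>

/* Value from the hit above; the other flag values are illustrative. */
#define _PAGE_PRIVILEGED 0x00008UL /* kernel access only */
#define _PAGE_READ       0x00004UL /* illustrative */
#define _PAGE_RW         0x00002UL /* illustrative */

/*
 * Simplified model of the privilege test seen in check_pte_access():
 * the faulting access mask and the PTE must agree on _PAGE_PRIVILEGED,
 * so a user-mode access (bit clear) never matches a kernel-only PTE
 * (bit set), and vice versa.
 */
static bool check_access(unsigned long access, unsigned long ptev)
{
	if ((access & _PAGE_PRIVILEGED) != (ptev & _PAGE_PRIVILEGED))
		return false;
	return true;
}

int main(void)
{
	unsigned long kernel_pte = _PAGE_PRIVILEGED | _PAGE_READ | _PAGE_RW;
	unsigned long user_read  = _PAGE_READ;                    /* user-mode access */
	unsigned long kern_read  = _PAGE_READ | _PAGE_PRIVILEGED; /* kernel-mode access */

	printf("user access to kernel PTE:   %s\n",
	       check_access(user_read, kernel_pte) ? "allowed" : "denied");
	printf("kernel access to kernel PTE: %s\n",
	       check_access(kern_read, kernel_pte) ? "allowed" : "denied");
	return 0;
}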
14 .mask = _PAGE_PRIVILEGED,
567 if (pteflags & _PAGE_PRIVILEGED) { in htab_convert_pte_flags()
1964 * We set _PAGE_PRIVILEGED only when in DEFINE_INTERRUPT_HANDLER()
1967 * _PAGE_PRIVILEGED is NOT set in DEFINE_INTERRUPT_HANDLER()
1971 access |= _PAGE_PRIVILEGED; in DEFINE_INTERRUPT_HANDLER()
1973 access &= ~_PAGE_PRIVILEGED; in DEFINE_INTERRUPT_HANDLER()
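The comment fragments and the set/clear pair at lines 1964-1973 describe the policy used when building the access mask for a hash fault: _PAGE_PRIVILEGED is kept only for kernel-mode accesses to kernel space and cleared otherwise. The sketch below is an assumed, standalone restatement of that pattern; the user_mode/is_user_region parameters stand in for the handler's real checks and are not taken from the source.

#include <stdbool.h>
#include <stdio.h>

#define _PAGE_PRIVILEGED 0x00008UL /* kernel access only */
#define _PAGE_READ       0x00004UL /* illustrative */

/*
 * Sketch of the access-mask policy visible in the hits above: start
 * with _PAGE_PRIVILEGED set, then clear it when the access comes from
 * user mode or targets a user-region address, so the bit survives only
 * for kernel mode touching kernel space.
 */
static unsigned long build_access(bool user_mode, bool is_user_region)
{
	unsigned long access = _PAGE_READ;

	access |= _PAGE_PRIVILEGED;
	if (user_mode || is_user_region)
		access &= ~_PAGE_PRIVILEGED;

	return access;
}

int main(void)
{
	printf("kernel -> kernel: %#lx\n", build_access(false, false));
	printf("kernel -> user:   %#lx\n", build_access(false, true));
	printf("user   -> kernel: %#lx\n", build_access(true, false));
	return 0;
}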
295 if (pte & _PAGE_PRIVILEGED) { in kvmppc_mmu_radix_xlate()
301 if (!(pte & _PAGE_PRIVILEGED)) { in kvmppc_mmu_radix_xlate()
186 _PAGE_PRESENT | _PAGE_READ | _PAGE_PRIVILEGED, in __spu_trap_data_map()
176 access |= _PAGE_PRIVILEGED; in xsl_fault_handler_bh()