/arch/arc/kernel/unaligned.c
   53  goto fault; \
   68  goto fault; \
   95  goto fault; \
  128  goto fault; \
  162  fault: state->fault = 1;   in fixup_load()
  182  goto fault;   in fixup_store()
  194  fault: state->fault = 1;   in fixup_store()
  228  if (state.fault)   in misaligned_fixup()
  229  goto fault;   in misaligned_fixup()
  233  goto fault;   in misaligned_fixup()
  [all …]

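The unaligned.c hits above show the usual shape of an unaligned-access emulator: each partial access is attempted through a primitive that can fault, a failure branches to a local fault: label, and that label only records the failure in the emulation state (state->fault = 1) for the caller to test, as at line 228. A hypothetical, self-contained sketch of that shape follows; the struct and helper name are made up, and __get_user() stands in for ARC's inline-asm exception-table fixups.

/* Illustrative only: the struct and helper are invented for this sketch;
 * the real ARC code builds the accesses from inline asm with exception
 * table fixups rather than __get_user(). */
#include <linux/types.h>
#include <linux/uaccess.h>

struct fixup_state {		/* stand-in for the emulator's state */
	u32 value;
	int fault;
};

static void sketch_fixup_load(struct fixup_state *state,
			      const u8 __user *src)
{
	u8 byte;
	u32 val = 0;
	int i;

	for (i = 0; i < 4; i++) {
		if (__get_user(byte, src + i))	/* nonzero on a faulting access */
			goto fault;
		val |= (u32)byte << (8 * i);
	}
	state->value = val;
	return;

fault:
	state->fault = 1;	/* the caller checks state.fault and bails out */
}
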
/arch/s390/mm/fault.c
  348  static noinline void do_fault_error(struct pt_regs *regs, int fault)   in do_fault_error() argument
  352  switch (fault) {   in do_fault_error()
  358  si_code = (fault == VM_FAULT_BADMAP) ?   in do_fault_error()
  372  if (fault & VM_FAULT_OOM) {   in do_fault_error()
  377  } else if (fault & VM_FAULT_SIGSEGV) {   in do_fault_error()
  383  } else if (fault & VM_FAULT_SIGBUS) {   in do_fault_error()
  417  int fault;   in do_exception() local
  437  fault = VM_FAULT_BADCONTEXT;   in do_exception()
  457  fault = VM_FAULT_BADMAP;   in do_exception()
  466  fault = VM_FAULT_BADMAP;   in do_exception()
  [all …]

/arch/powerpc/mm/fault.c
  118  unsigned int fault)   in do_sigbus() argument
  134  if (fault & (VM_FAULT_HWPOISON|VM_FAULT_HWPOISON_LARGE)) {   in do_sigbus()
  140  if (fault & VM_FAULT_HWPOISON_LARGE)   in do_sigbus()
  141  lsb = hstate_index_to_shift(VM_FAULT_GET_HINDEX(fault));   in do_sigbus()
  142  if (fault & VM_FAULT_HWPOISON)   in do_sigbus()
  150  static int mm_fault_error(struct pt_regs *regs, unsigned long addr, int fault)   in mm_fault_error() argument
  162  if (!(fault & VM_FAULT_RETRY))   in mm_fault_error()
  171  if (!(fault & VM_FAULT_ERROR))   in mm_fault_error()
  175  if (fault & VM_FAULT_OOM) {   in mm_fault_error()
  188  if (fault & (VM_FAULT_SIGBUS|VM_FAULT_HWPOISON|VM_FAULT_HWPOISON_LARGE))   in mm_fault_error()
  [all …]

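The s390 do_fault_error() and powerpc do_sigbus()/mm_fault_error() hits above (sh has a similar mm_fault_error() further down) are the common factoring of the error path: a helper takes the value returned by handle_mm_fault() and turns the VM_FAULT_* error bits into an action. Below is a condensed, hypothetical sketch of that dispatch for the user-mode case, assuming the usual fault-handler context (<linux/mm.h>, <linux/sched.h>) and the two-argument force_sig() of kernels from this era; it is not any one architecture's exact code.

/* Illustrative error dispatch for the bits handle_mm_fault() can set.
 * Kernel-mode faults with an exception-table fixup are assumed to have
 * been filtered out before this point. */
static void sketch_mm_fault_error(struct pt_regs *regs, unsigned long addr,
				  unsigned int fault)
{
	if (fault & VM_FAULT_OOM) {
		/* Let the OOM killer pick a victim instead of killing
		 * the faulting task outright. */
		pagefault_out_of_memory();
	} else if (fault & VM_FAULT_SIGSEGV) {
		/* e.g. a refused stack expansion */
		force_sig(SIGSEGV, current);
	} else if (fault & (VM_FAULT_SIGBUS | VM_FAULT_HWPOISON |
			    VM_FAULT_HWPOISON_LARGE)) {
		/* Bad page or hardware-poisoned memory; the real helpers
		 * also encode the poisoned range in the siginfo. */
		force_sig(SIGBUS, current);
	}
}
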
/arch/m68k/mm/fault.c
   74  int fault;   in do_page_fault() local
  139  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  140  pr_debug("handle_mm_fault returns %d\n", fault);   in do_page_fault()
  142  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  145  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  146  if (fault & VM_FAULT_OOM)   in do_page_fault()
  148  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  150  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  161  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  165  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

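The m68k hits above, and most of the per-arch do_page_fault() listings that follow (arc, hexagon, microblaze, alpha, openrisc, avr32, xtensa, metag, parisc, cris, ia64, mips, mn10300), share the same tail after handle_mm_fault(): return early if a fatal signal arrived while the fault would be retried, dispatch the error bits, account a major or minor fault, and retry once with FAULT_FLAG_ALLOW_RETRY cleared. Here is a condensed, hypothetical sketch of that shared flow for the user-mode case, reusing the dispatch helper sketched after the powerpc entry; the function name and exact locking placement are illustrative.

/* Condensed sketch of the tail shared by the per-arch do_page_fault()
 * implementations listed here (user-mode case only); "retry" mirrors
 * each architecture's local retry label. */
static void sketch_fault_tail(struct pt_regs *regs, struct mm_struct *mm,
			      struct vm_area_struct *vma,
			      unsigned long address, unsigned int flags)
{
	int fault;

retry:
	fault = handle_mm_fault(mm, vma, address, flags);

	/* A fatal signal can interrupt a fault that would be retried;
	 * handle_mm_fault() has already dropped mmap_sem in that case,
	 * so just return and let the signal be delivered. */
	if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))
		return;

	if (unlikely(fault & VM_FAULT_ERROR)) {
		up_read(&mm->mmap_sem);
		sketch_mm_fault_error(regs, address, fault);	/* see above */
		return;
	}

	if (flags & FAULT_FLAG_ALLOW_RETRY) {
		if (fault & VM_FAULT_MAJOR)
			current->maj_flt++;	/* the fault needed I/O */
		else
			current->min_flt++;

		if (fault & VM_FAULT_RETRY) {
			/* Only one retry: drop ALLOW_RETRY, mark TRIED,
			 * retake mmap_sem and take the fault again. */
			flags &= ~FAULT_FLAG_ALLOW_RETRY;
			flags |= FAULT_FLAG_TRIED;
			down_read(&mm->mmap_sem);
			goto retry;
		}
	}

	up_read(&mm->mmap_sem);
}
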
/arch/arc/mm/fault.c
   61  int fault, ret;   in do_page_fault() local
  132  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  136  if ((fault & VM_FAULT_ERROR) && !(fault & VM_FAULT_RETRY))   in do_page_fault()
  142  if (likely(!(fault & VM_FAULT_ERROR))) {   in do_page_fault()
  145  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  150  if (fault & VM_FAULT_RETRY) {   in do_page_fault()
  162  if (fault & VM_FAULT_OOM)   in do_page_fault()
  164  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  166  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()

/arch/hexagon/mm/vm_fault.c
   54  int fault;   in do_page_fault() local
  104  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  106  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  110  if (likely(!(fault & VM_FAULT_ERROR))) {   in do_page_fault()
  112  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  116  if (fault & VM_FAULT_RETRY) {   in do_page_fault()
  133  if (fault & VM_FAULT_OOM) {   in do_page_fault()
  141  if (fault & VM_FAULT_SIGBUS) {   in do_page_fault()

/arch/microblaze/mm/fault.c
   94  int fault;   in do_page_fault() local
  219  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  221  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  224  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  225  if (fault & VM_FAULT_OOM)   in do_page_fault()
  227  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  229  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  235  if (unlikely(fault & VM_FAULT_MAJOR))   in do_page_fault()
  239  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

/arch/alpha/mm/fault.c
   90  int fault, si_code = SEGV_MAPERR;   in do_page_fault() local
  151  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  153  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  156  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  157  if (fault & VM_FAULT_OOM)   in do_page_fault()
  159  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  161  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  167  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  171  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

/arch/openrisc/mm/fault.c
   56  int fault;   in do_page_fault() local
  166  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  168  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  171  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  172  if (fault & VM_FAULT_OOM)   in do_page_fault()
  174  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  176  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  183  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  187  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

/arch/avr32/mm/fault.c
   66  int fault;   in do_page_fault() local
  137  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  139  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  142  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  143  if (fault & VM_FAULT_OOM)   in do_page_fault()
  145  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  147  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  153  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  157  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

/arch/unicore32/mm/fault.c
  174  int fault;   in __do_pf() local
  177  fault = VM_FAULT_BADMAP;   in __do_pf()
  189  fault = VM_FAULT_BADACCESS;   in __do_pf()
  197  fault = handle_mm_fault(mm, vma, addr & PAGE_MASK, flags);   in __do_pf()
  198  return fault;   in __do_pf()
  204  return fault;   in __do_pf()
  211  int fault, sig, code;   in do_pf() local
  254  fault = __do_pf(mm, addr, fsr, flags, tsk);   in do_pf()
  260  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_pf()
  263  if (!(fault & VM_FAULT_ERROR) && (flags & FAULT_FLAG_ALLOW_RETRY)) {   in do_pf()
  [all …]

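The unicore32 __do_pf() hits (like the arm __do_page_fault() ones further down and s390's do_exception() above) show one more convention: the arch-private helper reuses the fault return value for its own pseudo codes, VM_FAULT_BADMAP and VM_FAULT_BADACCESS, which the outer handler later maps to SEGV_MAPERR and SEGV_ACCERR. These are not generic mm flags; each of those architectures defines private values in its own fault.c. A simplified, hypothetical sketch of that split follows (the fsr/tsk arguments of the real helpers are dropped and the access check is reduced to a single write-permission test).

/* Illustrative only: the pseudo codes are private to each arch's fault.c,
 * with values chosen above the generic VM_FAULT_* bits. */
#define VM_FAULT_BADMAP		0x010000
#define VM_FAULT_BADACCESS	0x020000

static int sketch_do_pf(struct mm_struct *mm, unsigned long addr,
			unsigned int flags)
{
	struct vm_area_struct *vma;

	vma = find_vma(mm, addr);
	if (!vma || vma->vm_start > addr)
		return VM_FAULT_BADMAP;		/* no mapping covers addr; the
						 * real code would try stack
						 * expansion first */

	if (!(vma->vm_flags & VM_WRITE))	/* permission check, reduced to
						 * a single write test */
		return VM_FAULT_BADACCESS;

	/* Only a genuine fault reaches the generic mm code; the outer
	 * handler tells the pseudo codes apart from real VM_FAULT_* bits
	 * and raises SIGSEGV with SEGV_MAPERR or SEGV_ACCERR. */
	return handle_mm_fault(mm, vma, addr & PAGE_MASK, flags);
}
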
/arch/xtensa/mm/fault.c
   46  int fault;   in do_page_fault() local
  112  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  114  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  117  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  118  if (fault & VM_FAULT_OOM)   in do_page_fault()
  120  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  122  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  127  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  131  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

/arch/metag/mm/fault.c
   55  int fault;   in do_page_fault() local
  136  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  138  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  141  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  142  if (fault & VM_FAULT_OOM)   in do_page_fault()
  144  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  146  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  151  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  155  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

/arch/parisc/mm/fault.c
  208  int fault;   in do_page_fault() local
  247  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  249  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  252  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  258  if (fault & VM_FAULT_OOM)   in do_page_fault()
  260  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  262  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  267  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  271  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

/arch/cris/mm/fault.c
   60  int fault;   in do_page_fault() local
  171  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  173  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  176  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  177  if (fault & VM_FAULT_OOM)   in do_page_fault()
  179  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  181  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  187  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  191  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

/arch/mn10300/kernel/gdb-stub.c
  517  goto fault;   in gdbstub_single_step()
  523  goto fault;   in gdbstub_single_step()
  525  goto fault;   in gdbstub_single_step()
  528  goto fault;   in gdbstub_single_step()
  534  goto fault;   in gdbstub_single_step()
  537  goto fault;   in gdbstub_single_step()
  544  goto fault;   in gdbstub_single_step()
  552  goto fault;   in gdbstub_single_step()
  554  goto fault;   in gdbstub_single_step()
  564  goto fault;   in gdbstub_single_step()
  [all …]

/arch/ia64/mm/fault.c
   87  int fault;   in ia64_do_page_fault() local
  162  fault = handle_mm_fault(mm, vma, address, flags);   in ia64_do_page_fault()
  164  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in ia64_do_page_fault()
  167  if (unlikely(fault & VM_FAULT_ERROR)) {   in ia64_do_page_fault()
  173  if (fault & VM_FAULT_OOM) {   in ia64_do_page_fault()
  175  } else if (fault & VM_FAULT_SIGSEGV) {   in ia64_do_page_fault()
  177  } else if (fault & VM_FAULT_SIGBUS) {   in ia64_do_page_fault()
  185  if (fault & VM_FAULT_MAJOR)   in ia64_do_page_fault()
  189  if (fault & VM_FAULT_RETRY) {   in ia64_do_page_fault()

/arch/score/mm/fault.c
   52  int fault;   in do_page_fault() local
  113  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  114  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  115  if (fault & VM_FAULT_OOM)   in do_page_fault()
  117  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  119  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  123  if (fault & VM_FAULT_MAJOR)   in do_page_fault()

/arch/sh/mm/fault.c
  321  unsigned long address, unsigned int fault)   in mm_fault_error() argument
  328  if (!(fault & VM_FAULT_RETRY))   in mm_fault_error()
  335  if (!(fault & VM_FAULT_ERROR))   in mm_fault_error()
  338  if (fault & VM_FAULT_OOM) {   in mm_fault_error()
  354  if (fault & VM_FAULT_SIGBUS)   in mm_fault_error()
  356  else if (fault & VM_FAULT_SIGSEGV)   in mm_fault_error()
  404  int fault;   in do_page_fault() local
  489  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  491  if (unlikely(fault & (VM_FAULT_RETRY | VM_FAULT_ERROR)))   in do_page_fault()
  492  if (mm_fault_error(regs, error_code, address, fault))   in do_page_fault()
  [all …]

/arch/arm/mm/fault.c
  228  int fault;   in __do_page_fault() local
  231  fault = VM_FAULT_BADMAP;   in __do_page_fault()
  243  fault = VM_FAULT_BADACCESS;   in __do_page_fault()
  255  return fault;   in __do_page_fault()
  263  int fault, sig, code;   in do_page_fault() local
  312  fault = __do_page_fault(mm, addr, fsr, flags, tsk);   in do_page_fault()
  318  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current)) {   in do_page_fault()
  331  if (!(fault & VM_FAULT_ERROR) && flags & FAULT_FLAG_ALLOW_RETRY) {   in do_page_fault()
  332  if (fault & VM_FAULT_MAJOR) {   in do_page_fault()
  341  if (fault & VM_FAULT_RETRY) {   in do_page_fault()
  [all …]

/arch/mips/mm/fault.c
   47  int fault;   in __do_page_fault() local
  158  fault = handle_mm_fault(mm, vma, address, flags);   in __do_page_fault()
  160  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in __do_page_fault()
  164  if (unlikely(fault & VM_FAULT_ERROR)) {   in __do_page_fault()
  165  if (fault & VM_FAULT_OOM)   in __do_page_fault()
  167  else if (fault & VM_FAULT_SIGSEGV)   in __do_page_fault()
  169  else if (fault & VM_FAULT_SIGBUS)   in __do_page_fault()
  174  if (fault & VM_FAULT_MAJOR) {   in __do_page_fault()
  183  if (fault & VM_FAULT_RETRY) {   in __do_page_fault()

/arch/sparc/kernel/wuf.S
  257  LEON_PI( lda [%l4] ASI_LEON_MMUREGS, %g0) ! clear fault status
  258  SUN_PI_( lda [%l4] ASI_M_MMUREGS, %g0) ! clear fault status
  267  or %l5, 0x2, %l5 ! turn on no-fault bit
  283  andn %twin_tmp1, 0x2, %twin_tmp1 ! clear no-fault bit
  288  LEON_PI(lda [%twin_tmp2] ASI_LEON_MMUREGS, %g0) ! read fault address
  289  SUN_PI_(lda [%twin_tmp2] ASI_M_MMUREGS, %g0) ! read fault address
  292  LEON_PI(lda [%twin_tmp2] ASI_LEON_MMUREGS, %twin_tmp2) ! read fault status
  293  SUN_PI_(lda [%twin_tmp2] ASI_M_MMUREGS, %twin_tmp2) ! read fault status
  294  andcc %twin_tmp2, 0x2, %g0 ! did fault occur?

/arch/mn10300/mm/fault.c
  126  int fault;   in do_page_fault() local
  257  fault = handle_mm_fault(mm, vma, address, flags);   in do_page_fault()
  259  if ((fault & VM_FAULT_RETRY) && fatal_signal_pending(current))   in do_page_fault()
  262  if (unlikely(fault & VM_FAULT_ERROR)) {   in do_page_fault()
  263  if (fault & VM_FAULT_OOM)   in do_page_fault()
  265  else if (fault & VM_FAULT_SIGSEGV)   in do_page_fault()
  267  else if (fault & VM_FAULT_SIGBUS)   in do_page_fault()
  272  if (fault & VM_FAULT_MAJOR)   in do_page_fault()
  276  if (fault & VM_FAULT_RETRY) {   in do_page_fault()

/arch/tile/kernel/pmc.c
   25  int handle_perf_interrupt(struct pt_regs *regs, int fault)   in handle_perf_interrupt() argument
   30  panic("Unexpected PERF_COUNT interrupt %d\n", fault);   in handle_perf_interrupt()
   33  retval = perf_irq(regs, fault);   in handle_perf_interrupt()

/arch/mips/kernel/unaligned.c
  1277  goto fault;
  1288  goto fault;
  1316  goto fault;
  1329  goto fault;
  1342  goto fault;
  1357  goto fault;
  1370  goto fault;
  1394  goto fault;
  1413  goto fault;
  1432  goto fault;
  [all …]