/arch/nios2/kernel/misaligned.c
    72  unsigned int fault;  in handle_unaligned_c() local
    85  fault = 0;  in handle_unaligned_c()
    98  fault |= __get_user(d0, (u8 *)(addr+0));  in handle_unaligned_c()
    99  fault |= __get_user(d1, (u8 *)(addr+1));  in handle_unaligned_c()
    111  fault |= __put_user(d0, (u8 *)(addr+0));  in handle_unaligned_c()
    112  fault |= __put_user(d1, (u8 *)(addr+1));  in handle_unaligned_c()
    116  fault |= __get_user(d0, (u8 *)(addr+0));  in handle_unaligned_c()
    117  fault |= __get_user(d1, (u8 *)(addr+1));  in handle_unaligned_c()
    133  fault |= __put_user(d0, (u8 *)(addr+0));  in handle_unaligned_c()
    134  fault |= __put_user(d1, (u8 *)(addr+1));  in handle_unaligned_c()
    [all …]

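The misaligned.c hits above all share one idiom: the trap handler re-reads the operand a byte at a time through __get_user()/__put_user() and ORs each return value into a single fault flag, so one check at the end decides whether the emulation worked. Below is a minimal userspace sketch of that accumulation pattern; get_user_u8() is a hypothetical stand-in for the kernel accessor and little-endian byte order is assumed.

#include <stdint.h>
#include <stdio.h>
#include <errno.h>

/* Stand-in for the kernel's __get_user(): copy one byte from "user"
 * memory, returning 0 on success or -EFAULT on failure.  The failure
 * case is only simulated (a NULL source); real fault recovery needs
 * kernel exception-table support. */
static int get_user_u8(uint8_t *dst, const uint8_t *src)
{
	if (!src)
		return -EFAULT;
	*dst = *src;
	return 0;
}

/* Assemble a 16-bit value from two byte loads, ORing any failure into
 * "fault" the same way handle_unaligned_c() does. */
static int load_unaligned_u16(const uint8_t *addr, uint16_t *out)
{
	uint8_t d0, d1;
	int fault = 0;

	fault |= get_user_u8(&d0, addr + 0);
	fault |= get_user_u8(&d1, addr + 1);
	if (fault)
		return -EFAULT;

	*out = (uint16_t)d0 | ((uint16_t)d1 << 8);	/* little-endian assembly */
	return 0;
}

int main(void)
{
	uint8_t buf[3] = { 0x00, 0x34, 0x12 };
	uint16_t v;

	if (!load_unaligned_u16(buf + 1, &v))	/* odd address: unaligned */
		printf("0x%04x\n", v);		/* prints 0x1234 */
	return 0;
}
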
/arch/arc/kernel/unaligned.c
    50  goto fault; \
    65  goto fault; \
    92  goto fault; \
    125  goto fault; \
    159  fault: state->fault = 1;  in fixup_load()
    179  goto fault;  in fixup_store()
    191  fault: state->fault = 1;  in fixup_store()
    225  if (state.fault)  in misaligned_fixup()
    226  goto fault;  in misaligned_fixup()
    230  goto fault;  in misaligned_fixup()
    [all …]

/arch/mips/loongson64/cop2-ex.c
    76  goto fault;  in loongson_cu2_call()
    80  goto fault;  in loongson_cu2_call()
    92  goto fault;  in loongson_cu2_call()
    96  goto fault;  in loongson_cu2_call()
    118  goto fault;  in loongson_cu2_call()
    123  goto fault;  in loongson_cu2_call()
    135  goto fault;  in loongson_cu2_call()
    141  goto fault;  in loongson_cu2_call()
    165  goto fault;  in loongson_cu2_call()
    176  goto fault;  in loongson_cu2_call()
    [all …]

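Each cop2-ex.c hit is the same construct: every step of loongson_cu2_call() that can fail branches to one fault: label, so the error path (delivering the signal) is written exactly once; the same goto-fault shape recurs in several entries below. A self-contained sketch of that single-exit style, with a string copy and an emptiness check standing in for the real failure points:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical multi-step handler: every failing step branches to the
 * single "fault" label, mirroring the goto-fault layout of
 * loongson_cu2_call(). */
static int process(const char *text)
{
	char *copy = strdup(text);

	if (!copy)
		goto fault;
	if (copy[0] == '\0')		/* stand-in for a validation failure */
		goto fault;

	printf("processed: %s\n", copy);
	free(copy);
	return 0;

fault:
	free(copy);			/* free(NULL) is a defined no-op */
	return -1;
}

int main(void)
{
	printf("%d %d\n", process("hello"), process(""));
	return 0;
}
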
/arch/mips/kernel/unaligned.c
    170  goto fault;  in emulate_load_store_insn()
    179  goto fault;  in emulate_load_store_insn()
    204  goto fault;  in emulate_load_store_insn()
    217  goto fault;  in emulate_load_store_insn()
    230  goto fault;  in emulate_load_store_insn()
    245  goto fault;  in emulate_load_store_insn()
    258  goto fault;  in emulate_load_store_insn()
    283  goto fault;  in emulate_load_store_insn()
    302  goto fault;  in emulate_load_store_insn()
    321  goto fault;  in emulate_load_store_insn()
    [all …]

/arch/mips/kernel/r2300_fpu.S
    26  PTR 9b,fault; \
    32  PTR 9b,fault; \
    33  PTR 9b+4,fault; \
    126  .type fault, @function
    127  .ent fault
    128  fault: li v0, -EFAULT  label
    130  .end fault

/arch/x86/include/asm/virtext.h
    47  _ASM_EXTABLE(1b, %l[fault]) :::: fault);  in cpu_vmxoff()
    48  fault:  in cpu_vmxoff()
    127  _ASM_EXTABLE(1b, %l[fault])  in cpu_svm_disable()
    128  ::: "memory" : fault);  in cpu_svm_disable()
    129  fault:  in cpu_svm_disable()

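The virtext.h lines are the asm-goto variant of the same pattern: the inline assembly in cpu_vmxoff() and cpu_svm_disable() passes a C label as %l[fault], and _ASM_EXTABLE registers the instruction so that a CPU fault resumes at that label. The exception-table half needs the kernel, but the label-passing mechanics can be shown in userspace (GCC/Clang asm goto, x86 only); the jnz below merely simulates the faulting instruction:

#include <stdio.h>

/* Userspace sketch of "jump from inline asm to a C fault label".  The
 * kernel additionally wires the instruction into the exception table via
 * _ASM_EXTABLE(1b, %l[fault]) so a real trap lands on the label; here a
 * conditional branch stands in for the trap. */
static int try_op(int should_fail)
{
	asm goto("testl %0, %0\n\t"
		 "jnz %l[fault]"		/* take the fault path if nonzero */
		 : /* asm goto: no outputs */
		 : "r"(should_fail)
		 : "cc"
		 : fault);
	return 0;				/* fall-through: success */
fault:
	return -1;				/* reached only via the asm label */
}

int main(void)
{
	printf("ok=%d fault=%d\n", try_op(0), try_op(1));
	return 0;
}
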
/arch/powerpc/lib/checksum_32.S
    109  EX_TABLE(8 ## n ## 0b, fault); \
    110  EX_TABLE(8 ## n ## 1b, fault); \
    111  EX_TABLE(8 ## n ## 2b, fault); \
    112  EX_TABLE(8 ## n ## 3b, fault); \
    113  EX_TABLE(8 ## n ## 4b, fault); \
    114  EX_TABLE(8 ## n ## 5b, fault); \
    115  EX_TABLE(8 ## n ## 6b, fault); \
    116  EX_TABLE(8 ## n ## 7b, fault);
    243  fault:  label
    247  EX_TABLE(70b, fault);
    [all …]

/arch/m68k/mm/fault.c
    73  vm_fault_t fault;  in do_page_fault() local
    140  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    141  pr_debug("handle_mm_fault returns %x\n", fault);  in do_page_fault()
    143  if (fault_signal_pending(fault, regs))  in do_page_fault()
    146  if (unlikely(fault & VM_FAULT_ERROR)) {  in do_page_fault()
    147  if (fault & VM_FAULT_OOM)  in do_page_fault()
    149  else if (fault & VM_FAULT_SIGSEGV)  in do_page_fault()
    151  else if (fault & VM_FAULT_SIGBUS)  in do_page_fault()
    157  if (fault & VM_FAULT_RETRY) {  in do_page_fault()

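This m68k entry, like most of the mm/fault.c hits below, is the standard tail of a do_page_fault() handler: take the vm_fault_t returned by handle_mm_fault(), bail out if a signal is pending, decode the error bits into an OOM, SIGSEGV, or SIGBUS path, and loop on VM_FAULT_RETRY. A compilable sketch of just the decode step follows; the flag values and the VM_FAULT_ERROR mask are simplified stand-ins, the real definitions live in include/linux/mm_types.h.

#include <stdio.h>

/* Simplified stand-ins for the kernel's vm_fault_t bits; the real
 * definitions (and a wider VM_FAULT_ERROR mask) are in
 * include/linux/mm_types.h. */
#define VM_FAULT_OOM		0x01u
#define VM_FAULT_SIGBUS		0x02u
#define VM_FAULT_SIGSEGV	0x04u
#define VM_FAULT_RETRY		0x08u
#define VM_FAULT_ERROR		(VM_FAULT_OOM | VM_FAULT_SIGBUS | VM_FAULT_SIGSEGV)

/* Mirrors the if/else cascade the per-arch do_page_fault() handlers run
 * on the value returned by handle_mm_fault(). */
static const char *classify_fault(unsigned int fault)
{
	if (fault & VM_FAULT_ERROR) {
		if (fault & VM_FAULT_OOM)
			return "go to the OOM path";
		else if (fault & VM_FAULT_SIGSEGV)
			return "deliver SIGSEGV";
		else if (fault & VM_FAULT_SIGBUS)
			return "deliver SIGBUS";
	}
	if (fault & VM_FAULT_RETRY)
		return "retake the mmap lock and retry";
	return "handled";
}

int main(void)
{
	printf("%s\n", classify_fault(VM_FAULT_OOM));
	printf("%s\n", classify_fault(VM_FAULT_RETRY));
	printf("%s\n", classify_fault(0));
	return 0;
}
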
/arch/s390/mm/fault.c
    320  vm_fault_t fault)  in do_fault_error() argument
    324  switch (fault) {  in do_fault_error()
    333  si_code = (fault == VM_FAULT_BADMAP) ?  in do_fault_error()
    348  if (fault & VM_FAULT_OOM) {  in do_fault_error()
    353  } else if (fault & VM_FAULT_SIGSEGV) {  in do_fault_error()
    359  } else if (fault & VM_FAULT_SIGBUS) {  in do_fault_error()
    392  vm_fault_t fault;  in do_exception() local
    412  fault = VM_FAULT_BADCONTEXT;  in do_exception()
    418  fault = VM_FAULT_BADMAP;  in do_exception()
    446  fault = VM_FAULT_BADMAP;  in do_exception()
    [all …]

/arch/powerpc/mm/fault.c
    137  vm_fault_t fault)  in do_sigbus() argument
    144  if (fault & (VM_FAULT_HWPOISON|VM_FAULT_HWPOISON_LARGE)) {  in do_sigbus()
    150  if (fault & VM_FAULT_HWPOISON_LARGE)  in do_sigbus()
    151  lsb = hstate_index_to_shift(VM_FAULT_GET_HINDEX(fault));  in do_sigbus()
    152  if (fault & VM_FAULT_HWPOISON)  in do_sigbus()
    165  vm_fault_t fault)  in mm_fault_error() argument
    175  if (fault & VM_FAULT_OOM) {  in mm_fault_error()
    184  if (fault & (VM_FAULT_SIGBUS|VM_FAULT_HWPOISON|  in mm_fault_error()
    186  return do_sigbus(regs, addr, fault);  in mm_fault_error()
    187  else if (fault & VM_FAULT_SIGSEGV)  in mm_fault_error()
    [all …]

/arch/x86/hyperv/nested.c
    29  goto fault;  in hyperv_flush_guest_mapping()
    40  goto fault;  in hyperv_flush_guest_mapping()
    53  fault:  in hyperv_flush_guest_mapping()
    101  goto fault;  in hyperv_flush_guest_mapping_range()
    111  goto fault;  in hyperv_flush_guest_mapping_range()
    120  goto fault;  in hyperv_flush_guest_mapping_range()
    132  fault:  in hyperv_flush_guest_mapping_range()

/arch/hexagon/mm/vm_fault.c
    42  vm_fault_t fault;  in do_page_fault() local
    94  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    96  if (fault_signal_pending(fault, regs))  in do_page_fault()
    100  if (likely(!(fault & VM_FAULT_ERROR))) {  in do_page_fault()
    102  if (fault & VM_FAULT_RETRY) {  in do_page_fault()
    118  if (fault & VM_FAULT_OOM) {  in do_page_fault()
    126  if (fault & VM_FAULT_SIGBUS) {  in do_page_fault()

/arch/microblaze/mm/fault.c
    93  vm_fault_t fault;  in do_page_fault() local
    220  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    222  if (fault_signal_pending(fault, regs))  in do_page_fault()
    225  if (unlikely(fault & VM_FAULT_ERROR)) {  in do_page_fault()
    226  if (fault & VM_FAULT_OOM)  in do_page_fault()
    228  else if (fault & VM_FAULT_SIGSEGV)  in do_page_fault()
    230  else if (fault & VM_FAULT_SIGBUS)  in do_page_fault()
    236  if (fault & VM_FAULT_RETRY) {  in do_page_fault()

/arch/arc/mm/fault.c
    73  vm_fault_t fault = VM_FAULT_SIGSEGV; /* handle_mm_fault() output */  in do_page_fault() local
    134  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    137  if (fault_signal_pending(fault, regs)) {  in do_page_fault()
    146  if (unlikely((fault & VM_FAULT_RETRY) &&  in do_page_fault()
    159  if (likely(!(fault & VM_FAULT_ERROR)))  in do_page_fault()
    166  if (fault & VM_FAULT_OOM) {  in do_page_fault()
    171  if (fault & VM_FAULT_SIGBUS) {  in do_page_fault()

/arch/alpha/mm/fault.c
    92  vm_fault_t fault;  in do_page_fault() local
    153  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    155  if (fault_signal_pending(fault, regs))  in do_page_fault()
    158  if (unlikely(fault & VM_FAULT_ERROR)) {  in do_page_fault()
    159  if (fault & VM_FAULT_OOM)  in do_page_fault()
    161  else if (fault & VM_FAULT_SIGSEGV)  in do_page_fault()
    163  else if (fault & VM_FAULT_SIGBUS)  in do_page_fault()
    169  if (fault & VM_FAULT_RETRY) {  in do_page_fault()

/arch/parisc/mm/fault.c
    267  vm_fault_t fault = 0;  in do_page_fault() local
    307  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    309  if (fault_signal_pending(fault, regs))  in do_page_fault()
    312  if (unlikely(fault & VM_FAULT_ERROR)) {  in do_page_fault()
    318  if (fault & VM_FAULT_OOM)  in do_page_fault()
    320  else if (fault & VM_FAULT_SIGSEGV)  in do_page_fault()
    322  else if (fault & (VM_FAULT_SIGBUS|VM_FAULT_HWPOISON|  in do_page_fault()
    328  if (fault & VM_FAULT_RETRY) {  in do_page_fault()
    387  if (fault & (VM_FAULT_HWPOISON|VM_FAULT_HWPOISON_LARGE)) {  in do_page_fault()
    397  if (fault & VM_FAULT_HWPOISON_LARGE)  in do_page_fault()
    [all …]

/arch/riscv/mm/fault.c
    40  static inline void mm_fault_error(struct pt_regs *regs, unsigned long addr, vm_fault_t fault)  in mm_fault_error() argument
    42  if (fault & VM_FAULT_OOM) {  in mm_fault_error()
    53  } else if (fault & VM_FAULT_SIGBUS) {  in mm_fault_error()
    198  vm_fault_t fault;  in do_page_fault() local
    277  fault = handle_mm_fault(vma, addr, flags, regs);  in do_page_fault()
    284  if (fault_signal_pending(fault, regs))  in do_page_fault()
    287  if (unlikely((fault & VM_FAULT_RETRY) && (flags & FAULT_FLAG_ALLOW_RETRY))) {  in do_page_fault()
    300  if (unlikely(fault & VM_FAULT_ERROR)) {  in do_page_fault()
    301  mm_fault_error(regs, addr, fault);  in do_page_fault()

/arch/openrisc/mm/fault.c
    53  vm_fault_t fault;  in do_page_fault() local
    165  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    167  if (fault_signal_pending(fault, regs))  in do_page_fault()
    170  if (unlikely(fault & VM_FAULT_ERROR)) {  in do_page_fault()
    171  if (fault & VM_FAULT_OOM)  in do_page_fault()
    173  else if (fault & VM_FAULT_SIGSEGV)  in do_page_fault()
    175  else if (fault & VM_FAULT_SIGBUS)  in do_page_fault()
    182  if (fault & VM_FAULT_RETRY) {  in do_page_fault()

/arch/nios2/mm/fault.c
    50  vm_fault_t fault;  in do_page_fault() local
    137  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    139  if (fault_signal_pending(fault, regs))  in do_page_fault()
    142  if (unlikely(fault & VM_FAULT_ERROR)) {  in do_page_fault()
    143  if (fault & VM_FAULT_OOM)  in do_page_fault()
    145  else if (fault & VM_FAULT_SIGSEGV)  in do_page_fault()
    147  else if (fault & VM_FAULT_SIGBUS)  in do_page_fault()
    153  if (fault & VM_FAULT_RETRY) {  in do_page_fault()

/arch/ia64/mm/fault.c
    72  vm_fault_t fault;  in ia64_do_page_fault() local
    149  fault = handle_mm_fault(vma, address, flags, regs);  in ia64_do_page_fault()
    151  if (fault_signal_pending(fault, regs))  in ia64_do_page_fault()
    154  if (unlikely(fault & VM_FAULT_ERROR)) {  in ia64_do_page_fault()
    160  if (fault & VM_FAULT_OOM) {  in ia64_do_page_fault()
    162  } else if (fault & VM_FAULT_SIGSEGV) {  in ia64_do_page_fault()
    164  } else if (fault & VM_FAULT_SIGBUS) {  in ia64_do_page_fault()
    172  if (fault & VM_FAULT_RETRY) {  in ia64_do_page_fault()

/arch/xtensa/mm/fault.c
    44  vm_fault_t fault;  in do_page_fault() local
    113  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    115  if (fault_signal_pending(fault, regs)) {  in do_page_fault()
    121  if (unlikely(fault & VM_FAULT_ERROR)) {  in do_page_fault()
    122  if (fault & VM_FAULT_OOM)  in do_page_fault()
    124  else if (fault & VM_FAULT_SIGSEGV)  in do_page_fault()
    126  else if (fault & VM_FAULT_SIGBUS)  in do_page_fault()
    131  if (fault & VM_FAULT_RETRY) {  in do_page_fault()

/arch/sh/mm/fault.c
    316  unsigned long address, vm_fault_t fault)  in mm_fault_error() argument
    322  if (fault_signal_pending(fault, regs)) {  in mm_fault_error()
    329  if (!(fault & VM_FAULT_RETRY))  in mm_fault_error()
    332  if (!(fault & VM_FAULT_ERROR))  in mm_fault_error()
    335  if (fault & VM_FAULT_OOM) {  in mm_fault_error()
    349  if (fault & VM_FAULT_SIGBUS)  in mm_fault_error()
    351  else if (fault & VM_FAULT_SIGSEGV)  in mm_fault_error()
    399  vm_fault_t fault;  in do_page_fault() local
    484  fault = handle_mm_fault(vma, address, flags, regs);  in do_page_fault()
    486  if (unlikely(fault & (VM_FAULT_RETRY | VM_FAULT_ERROR)))  in do_page_fault()
    [all …]

/arch/csky/mm/fault.c
    54  int fault;  in do_page_fault() local
    153  fault = handle_mm_fault(vma, address, write ? FAULT_FLAG_WRITE : 0,  in do_page_fault()
    155  if (unlikely(fault & VM_FAULT_ERROR)) {  in do_page_fault()
    156  if (fault & VM_FAULT_OOM)  in do_page_fault()
    158  else if (fault & VM_FAULT_SIGBUS)  in do_page_fault()
    160  else if (fault & VM_FAULT_SIGSEGV)  in do_page_fault()

/arch/arm/mm/alignment.c
    222  goto fault; \
    243  goto fault; \
    275  goto fault; \
    317  goto fault; \
    384  fault:  in do_alignment_ldrhstrh()
    446  fault:  in do_alignment_ldrdstrd()
    482  fault:  in do_alignment_ldrstr()
    582  fault:  in do_alignment_ldmstm()
    772  int fault;  in alignment_get_arm() local
    775  fault = get_user(instr, ip);  in alignment_get_arm()
    [all …]

/arch/sparc/kernel/wuf.S
    258  LEON_PI( lda [%l4] ASI_LEON_MMUREGS, %g0)  ! clear fault status
    259  SUN_PI_( lda [%l4] ASI_M_MMUREGS, %g0)  ! clear fault status
    268  or %l5, 0x2, %l5  ! turn on no-fault bit
    284  andn %twin_tmp1, 0x2, %twin_tmp1  ! clear no-fault bit
    289  LEON_PI(lda [%twin_tmp2] ASI_LEON_MMUREGS, %g0)  ! read fault address
    290  SUN_PI_(lda [%twin_tmp2] ASI_M_MMUREGS, %g0)  ! read fault address
    293  LEON_PI(lda [%twin_tmp2] ASI_LEON_MMUREGS, %twin_tmp2)  ! read fault status
    294  SUN_PI_(lda [%twin_tmp2] ASI_M_MMUREGS, %twin_tmp2)  ! read fault status
    295  andcc %twin_tmp2, 0x2, %g0  ! did fault occur?