/arch/microblaze/kernel/

  asm-offsets.c
      93  DEFINE(TI_CPU_CONTEXT, offsetof(struct thread_info, cpu_context));  in main()
      98  DEFINE(CC_R1, offsetof(struct cpu_context, r1));  /* r1 */  in main()
      99  DEFINE(CC_R2, offsetof(struct cpu_context, r2));  in main()
     101  DEFINE(CC_R13, offsetof(struct cpu_context, r13));  in main()
     102  DEFINE(CC_R14, offsetof(struct cpu_context, r14));  in main()
     103  DEFINE(CC_R15, offsetof(struct cpu_context, r15));  in main()
     104  DEFINE(CC_R16, offsetof(struct cpu_context, r16));  in main()
     105  DEFINE(CC_R17, offsetof(struct cpu_context, r17));  in main()
     106  DEFINE(CC_R18, offsetof(struct cpu_context, r18));  in main()
     108  DEFINE(CC_R19, offsetof(struct cpu_context, r19));  in main()
     [all …]

  process.c
      64  memset(&ti->cpu_context, 0, sizeof(struct cpu_context));  in copy_thread()
      65  ti->cpu_context.r1 = (unsigned long)childregs;  in copy_thread()
      66  ti->cpu_context.r20 = (unsigned long)usp;  /* fn */  in copy_thread()
      67  ti->cpu_context.r19 = (unsigned long)arg;  in copy_thread()
      71  ti->cpu_context.msr = childregs->msr & ~MSR_IE;  in copy_thread()
      73  ti->cpu_context.r15 = (unsigned long)ret_from_kernel_thread - 8;  in copy_thread()
      80  memset(&ti->cpu_context, 0, sizeof(struct cpu_context));  in copy_thread()
      81  ti->cpu_context.r1 = (unsigned long)childregs;  in copy_thread()
      83  ti->cpu_context.msr = (unsigned long)childregs->msr;  in copy_thread()
     103  ti->cpu_context.msr = (childregs->msr|MSR_VM);  in copy_thread()
     [all …]
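
The microblaze copy_thread() hits above show the familiar kernel-thread setup: the saved context is zeroed, r1 is pointed at the child's register frame, the thread function and its argument are parked in callee-saved registers (r20/r19), and r15 is primed so the first switch into the child lands in ret_from_kernel_thread. The sketch below is a minimal, self-contained model of that branch; the structure, field list and parameter names are stand-ins chosen for illustration, not the real microblaze definitions.

    /* Minimal model of the kernel-thread branch of microblaze copy_thread()
     * (lines 64-73 above).  struct cpu_context_model is an illustrative
     * stand-in, not the layout from the arch/microblaze headers. */
    #include <string.h>

    struct cpu_context_model {
            unsigned long r1;   /* child's kernel stack / register frame */
            unsigned long r15;  /* where the first switch "returns" to   */
            unsigned long r19;  /* argument for the thread function      */
            unsigned long r20;  /* the thread function itself            */
            unsigned long msr;  /* saved machine status register         */
    };

    static void setup_kernel_thread_context(struct cpu_context_model *cc,
                                            unsigned long childregs,
                                            unsigned long fn,
                                            unsigned long arg,
                                            unsigned long parent_msr,
                                            unsigned long msr_ie_bit,
                                            unsigned long ret_from_kernel_thread)
    {
            memset(cc, 0, sizeof(*cc));
            cc->r1  = childregs;                    /* stack for the child        */
            cc->r20 = fn;                           /* "fn" in the listing        */
            cc->r19 = arg;
            cc->msr = parent_msr & ~msr_ie_bit;     /* enter with interrupts off  */
            cc->r15 = ret_from_kernel_thread - 8;   /* first switch lands in
                                                     * ret_from_kernel_thread     */
    }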

  unwind.c
     295  const struct cpu_context *cpu_context =  in microblaze_unwind()  [local]
     296  &thread_info->cpu_context;  in microblaze_unwind()
     300  cpu_context->r1,  in microblaze_unwind()
     301  cpu_context->r15, trace);  in microblaze_unwind()

/arch/avr32/include/asm/

  switch_to.h
      36  struct cpu_context;
      39  struct cpu_context *,
      40  struct cpu_context *);
      44  last = __switch_to(prev, &prev->thread.cpu_context + 1, \
      45  &next->thread.cpu_context); \
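
The switch_to.h lines above are the avr32 context-switch glue: __switch_to() takes the outgoing task, a pointer one element past its saved cpu_context, and the incoming task's cpu_context, and hands back the task that was actually running before the switch. The fragment below is a compile-only sketch of that wiring with stand-in types; the real __switch_to is assembly, and the "+ 1" is presumably there so it can store the outgoing registers downward from the end of the structure (an inference, since the listing is truncated).

    /* Compile-only model of the avr32 switch_to() wiring shown above.
     * All types are stand-ins; the real __switch_to lives in assembly. */
    struct cpu_context_model { unsigned long r[8], sr, pc, ksp; };

    struct task_model {
            struct { struct cpu_context_model cpu_context; } thread;
    };

    /* Records the hand-off only; the real routine saves into the outgoing
     * context, restores from the incoming one, and returns "last". */
    static struct task_model *__switch_to_model(struct task_model *prev,
                                                struct cpu_context_model *prev_ctx_end,
                                                struct cpu_context_model *next_ctx)
    {
            (void)prev_ctx_end;
            (void)next_ctx;
            return prev;
    }

    #define switch_to_model(prev, next, last)                                  \
            do {                                                               \
                    (last) = __switch_to_model((prev),                         \
                                               &(prev)->thread.cpu_context + 1,\
                                               &(next)->thread.cpu_context);   \
            } while (0)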

  processor.h
      98  struct cpu_context {  [struct]
     114  struct cpu_context cpu_context;  [argument]
     120  .cpu_context = { \
     142  #define thread_saved_pc(tsk) ((tsk)->thread.cpu_context.pc)
     153  #define KSTK_EIP(tsk) ((tsk)->thread.cpu_context.pc)
     154  #define KSTK_ESP(tsk) ((tsk)->thread.cpu_context.ksp)

/arch/mips/mm/

  tlb-r3k.c
      73  if (cpu_context(cpu, mm) != 0) {  in local_flush_tlb_mm()
      75  printk("[tlbmm<%lu>]", (unsigned long)cpu_context(cpu, mm));  in local_flush_tlb_mm()
      87  if (cpu_context(cpu, mm) != 0) {  in local_flush_tlb_range()
      92  cpu_context(cpu, mm) & ASID_MASK, start, end);  in local_flush_tlb_range()
      98  int newpid = cpu_context(cpu, mm) & ASID_MASK;  in local_flush_tlb_range()
     164  if (cpu_context(cpu, vma->vm_mm) != 0) {  in local_flush_tlb_page()
     169  printk("[tlbpage<%lu,0x%08lx>]", cpu_context(cpu, vma->vm_mm), page);  in local_flush_tlb_page()
     171  newpid = cpu_context(cpu, vma->vm_mm) & ASID_MASK;  in local_flush_tlb_page()
     205  if ((pid != (cpu_context(cpu, vma->vm_mm) & ASID_MASK)) || (cpu_context(cpu, vma->vm_mm) == 0)) {  in __update_tlb()
     207  (cpu_context(cpu, vma->vm_mm)), pid);  in __update_tlb()

  tlb-r8k.c
      57  if (cpu_context(cpu, mm) != 0)  in local_flush_tlb_mm()
      69  if (!cpu_context(cpu, mm))  in local_flush_tlb_range()
     155  if (!cpu_context(cpu, vma->vm_mm))  in local_flush_tlb_page()

/arch/arm/kernel/

  kgdb.c
      90  gdb_regs[_R4] = ti->cpu_context.r4;  in sleeping_thread_to_gdb_regs()
      91  gdb_regs[_R5] = ti->cpu_context.r5;  in sleeping_thread_to_gdb_regs()
      92  gdb_regs[_R6] = ti->cpu_context.r6;  in sleeping_thread_to_gdb_regs()
      93  gdb_regs[_R7] = ti->cpu_context.r7;  in sleeping_thread_to_gdb_regs()
      94  gdb_regs[_R8] = ti->cpu_context.r8;  in sleeping_thread_to_gdb_regs()
      95  gdb_regs[_R9] = ti->cpu_context.r9;  in sleeping_thread_to_gdb_regs()
      96  gdb_regs[_R10] = ti->cpu_context.sl;  in sleeping_thread_to_gdb_regs()
      97  gdb_regs[_FP] = ti->cpu_context.fp;  in sleeping_thread_to_gdb_regs()
      98  gdb_regs[_SPT] = ti->cpu_context.sp;  in sleeping_thread_to_gdb_regs()
      99  gdb_regs[_PC] = ti->cpu_context.pc;  in sleeping_thread_to_gdb_regs()
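
Only the callee-saved registers survive a context switch, which is presumably why sleeping_thread_to_gdb_regs() above copies exactly r4-r9, sl, fp, sp and pc out of the saved cpu_context and nothing else. A self-contained model of that mapping is sketched below; the register indices and struct layout are stand-ins, where the ARM originals are _R4.._R10, _FP, _SPT, _PC and struct cpu_context_save.

    /* Model of exporting a sleeping task's saved context to a debugger
     * register file.  Indices and layout are invented for illustration. */
    enum { REG_R4, REG_R5, REG_R6, REG_R7, REG_R8, REG_R9,
           REG_R10, REG_FP, REG_SP, REG_PC, REG_COUNT };

    struct saved_context_model {
            unsigned long r4, r5, r6, r7, r8, r9, sl, fp, sp, pc;
    };

    static void sleeping_context_to_regs(unsigned long gdb_regs[REG_COUNT],
                                         const struct saved_context_model *cc)
    {
            /* Only callee-saved state is valid for a switched-out task, so
             * only these slots can be filled in. */
            gdb_regs[REG_R4]  = cc->r4;
            gdb_regs[REG_R5]  = cc->r5;
            gdb_regs[REG_R6]  = cc->r6;
            gdb_regs[REG_R7]  = cc->r7;
            gdb_regs[REG_R8]  = cc->r8;
            gdb_regs[REG_R9]  = cc->r9;
            gdb_regs[REG_R10] = cc->sl;     /* sl is r10 on ARM */
            gdb_regs[REG_FP]  = cc->fp;
            gdb_regs[REG_SP]  = cc->sp;
            gdb_regs[REG_PC]  = cc->pc;
    }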

  xscale-cp0.c
      39  thread->cpu_context.extra[0] = 0;  in dsp_do()
      40  thread->cpu_context.extra[1] = 0;  in dsp_do()
      44  dsp_save_state(current_thread_info()->cpu_context.extra);  in dsp_do()
      45  dsp_load_state(thread->cpu_context.extra);  in dsp_do()

  process.c
     305  memset(&thread->cpu_context, 0, sizeof(struct cpu_context_save));  in copy_thread()
     324  thread->cpu_context.r4 = stk_sz;  in copy_thread()
     325  thread->cpu_context.r5 = stack_start;  in copy_thread()
     328  thread->cpu_context.pc = (unsigned long)ret_from_fork;  in copy_thread()
     329  thread->cpu_context.sp = (unsigned long)childregs;  in copy_thread()

/arch/avr32/kernel/

  process.c
     129  fp = tsk->thread.cpu_context.r7;  in show_trace_log_lvl()
     186  sp = tsk->thread.cpu_context.ksp;  in show_stack_log_lvl()
     290  p->thread.cpu_context.r0 = arg;  in copy_thread()
     291  p->thread.cpu_context.r1 = usp;  /* fn */  in copy_thread()
     292  p->thread.cpu_context.r2 = (unsigned long)syscall_return;  in copy_thread()
     293  p->thread.cpu_context.pc = (unsigned long)ret_from_kernel_thread;  in copy_thread()
     300  p->thread.cpu_context.pc = (unsigned long)ret_from_fork;  in copy_thread()
     303  p->thread.cpu_context.sr = MODE_SUPERVISOR | SR_GM;  in copy_thread()
     304  p->thread.cpu_context.ksp = (unsigned long)childregs;  in copy_thread()
     335  unsigned long fp = p->thread.cpu_context.r7;  in get_wchan()
     [all …]

/arch/mips/include/asm/

  mmu_context.h
      86  #define cpu_context(cpu, mm) ((mm)->context.asid[cpu])  [macro]
      87  #define cpu_asid(cpu, mm) (cpu_context((cpu), (mm)) & ASID_MASK)
     120  cpu_context(cpu, mm) = asid_cache(cpu) = asid;  in get_new_mmu_context()
     133  cpu_context(i, mm) = 0;  in init_new_context()
     153  if ((cpu_context(cpu, next) ^ asid_cache(cpu)) & ASID_VERSION_MASK)  in switch_mm()
     224  cpu_context(cpu, mm) = 0;  in drop_mmu_context()
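
On MIPS (and, with different spelling, on SH below) cpu_context(cpu, mm) is a per-CPU ASID slot in the mm: the low ASID_MASK bits are the hardware address-space ID and the upper bits carry a generation ("version") counter taken from asid_cache(cpu). switch_mm() only allocates a fresh ASID when the generations differ, and flush paths simply zero the slot to force that reallocation. The snippet below is a toy model of that check; the mask values, variable names and single-CPU layout are invented for illustration, not the MIPS constants.

    /* Toy model of the ASID/generation scheme implied by the mmu_context.h
     * lines above.  Call with cpu == 0 in this single-CPU model. */
    #define MODEL_ASID_MASK          0xffUL      /* low bits: hardware ASID */
    #define MODEL_ASID_VERSION_MASK  (~0xffUL)   /* high bits: generation   */
    #define MODEL_ASID_FIRST_VERSION 0x100UL

    struct mm_model { unsigned long asid[1]; };

    static unsigned long model_asid_cache = MODEL_ASID_FIRST_VERSION;

    /* Hand out the next ASID; when the low bits wrap, the real code flushes
     * the whole TLB before ASIDs get reused and restarts the counter if it
     * ever wraps completely. */
    static void model_get_new_mmu_context(struct mm_model *mm, int cpu)
    {
            unsigned long asid = model_asid_cache + 1;

            if (!(asid & MODEL_ASID_MASK)) {
                    /* ASID space exhausted for this generation: TLB flush
                     * would happen here. */
                    if (!asid)                       /* counter itself wrapped */
                            asid = MODEL_ASID_FIRST_VERSION;
            }
            mm->asid[cpu] = model_asid_cache = asid;
    }

    static void model_switch_mm(struct mm_model *next, int cpu)
    {
            /* Allocate a new ASID only when the generation has moved on
             * (a zeroed slot never matches, so it also takes this path). */
            if ((next->asid[cpu] ^ model_asid_cache) & MODEL_ASID_VERSION_MASK)
                    model_get_new_mmu_context(next, cpu);

            /* The hardware ASID to program is next->asid[cpu] & MODEL_ASID_MASK;
             * writing it into the MMU is not modeled here. */
    }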

/arch/sh/include/asm/

  mmu_context.h
      38  #define cpu_context(cpu, mm) ((mm)->context.id[cpu])  [macro]
      41  (cpu_context((cpu), (mm)) & MMU_CONTEXT_ASID_MASK)
      62  if (((cpu_context(cpu, mm) ^ asid) & MMU_CONTEXT_VERSION_MASK) == 0)  in get_mmu_context()
      90  cpu_context(cpu, mm) = asid_cache(cpu) = asid;  in get_mmu_context()
     103  cpu_context(i, mm) = NO_CONTEXT;  in init_new_context()

/arch/arm/include/asm/

  thread_info.h
      56  struct cpu_context_save cpu_context;  /* cpu context */  [member]
      98  ((unsigned long)(task_thread_info(tsk)->cpu_context.pc))
     100  ((unsigned long)(task_thread_info(tsk)->cpu_context.sp))
     104  ((unsigned long)(task_thread_info(tsk)->cpu_context.fp))
     107  ((unsigned long)(task_thread_info(tsk)->cpu_context.r7))

/arch/sh/mm/

  tlbflush_32.c
      19  if (vma->vm_mm && cpu_context(cpu, vma->vm_mm) != NO_CONTEXT) {  in local_flush_tlb_page()
      45  if (cpu_context(cpu, mm) != NO_CONTEXT) {  in local_flush_tlb_range()
      52  cpu_context(cpu, mm) = NO_CONTEXT;  in local_flush_tlb_range()
     112  if (cpu_context(cpu, mm) != NO_CONTEXT) {  in local_flush_tlb_mm()
     116  cpu_context(cpu, mm) = NO_CONTEXT;  in local_flush_tlb_mm()
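
The SH flush routines above (and tlbflush_64.c and cache-sh4.c below) use cpu_context as a cheap invalidation handle: if the mm has no context on this CPU there is nothing to flush, and for flushes that cover too much the code simply resets the slot to NO_CONTEXT so the next activation goes back through get_mmu_context() and picks up a fresh ASID. A small model of that lazy-flush decision is sketched below; NO_CONTEXT's value, the threshold and the helper names are stand-ins, not the SH definitions.

    /* Model of the lazy TLB-invalidation pattern visible above. */
    #define MODEL_NO_CONTEXT   0UL
    #define MODEL_FLUSH_LIMIT  64UL             /* entries worth flushing one by one */

    struct mm_flush_model { unsigned long context_id; };

    static void model_flush_one_entry(unsigned long addr) { (void)addr; }

    static void model_flush_tlb_range(struct mm_flush_model *mm,
                                      unsigned long start, unsigned long end,
                                      unsigned long page_size)
    {
            unsigned long pages;

            if (mm->context_id == MODEL_NO_CONTEXT)
                    return;                     /* never had a TLB footprint here */

            pages = (end - start) / page_size;
            if (pages > MODEL_FLUSH_LIMIT) {
                    /* Too much to flush entry by entry: forget the ASID and
                     * let the next activation allocate a fresh one. */
                    mm->context_id = MODEL_NO_CONTEXT;
                    return;
            }

            for (; start < end; start += page_size)
                    model_flush_one_entry(start);
    }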

  tlbflush_64.c
      89  if (cpu_context(cpu, mm) == NO_CONTEXT)  in local_flush_tlb_range()
     133  if (cpu_context(cpu, mm) == NO_CONTEXT)  in local_flush_tlb_mm()
     138  cpu_context(cpu, mm) = NO_CONTEXT;  in local_flush_tlb_mm()

  cache-sh4.c
     192  if (cpu_context(smp_processor_id(), mm) == NO_CONTEXT)  in sh4_flush_cache_mm()
     223  if (cpu_context(smp_processor_id(), vma->vm_mm) == NO_CONTEXT)  in sh4_flush_cache_page()
     286  if (cpu_context(smp_processor_id(), vma->vm_mm) == NO_CONTEXT)  in sh4_flush_cache_range()

/arch/unicore32/include/asm/

  thread_info.h
      74  struct cpu_context_save cpu_context;  /* cpu context */  [member]
     105  ((unsigned long)(task_thread_info(tsk)->cpu_context.pc))
     107  ((unsigned long)(task_thread_info(tsk)->cpu_context.sp))
     109  ((unsigned long)(task_thread_info(tsk)->cpu_context.fp))

  suspend.h
      21  struct cpu_context_save cpu_context;  /* cpu context */  [member]

/arch/arm64/include/asm/

  thread_info.h
     100  ((unsigned long)(tsk->thread.cpu_context.pc))
     102  ((unsigned long)(tsk->thread.cpu_context.sp))
     104  ((unsigned long)(tsk->thread.cpu_context.fp))

  processor.h
      65  struct cpu_context {  [struct]
      82  struct cpu_context cpu_context;  /* cpu context */  [argument]

/arch/unicore32/kernel/

  process.c
     230  memset(&thread->cpu_context, 0, sizeof(struct cpu_context_save));  in copy_thread()
     231  thread->cpu_context.sp = (unsigned long)childregs;  in copy_thread()
     233  thread->cpu_context.pc = (unsigned long)ret_from_kernel_thread;  in copy_thread()
     234  thread->cpu_context.r4 = stack_start;  in copy_thread()
     235  thread->cpu_context.r5 = stk_sz;  in copy_thread()
     238  thread->cpu_context.pc = (unsigned long)ret_from_fork;  in copy_thread()

/arch/microblaze/include/asm/

  thread_info.h
      31  struct cpu_context {  [struct]
      74  struct cpu_context cpu_context;  [member]

/arch/arm64/kernel/

  process.c
     316  memset(&p->thread.cpu_context, 0, sizeof(struct cpu_context));  in copy_thread()
     359  p->thread.cpu_context.x19 = stack_start;  in copy_thread()
     360  p->thread.cpu_context.x20 = stk_sz;  in copy_thread()
     362  p->thread.cpu_context.pc = (unsigned long)ret_from_fork;  in copy_thread()
     363  p->thread.cpu_context.sp = (unsigned long)childregs;  in copy_thread()
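
The arm64 lines above follow the same pattern as the ARM and unicore32 entries earlier: for a kernel thread, the thread function and its argument arrive via stack_start/stk_sz (as the /* fn */ comments in the microblaze and avr32 entries spell out) and land in the first two callee-saved registers, pc is pointed at ret_from_fork, and sp at the child's pt_regs, so the low-level switch code restores exactly that state on the first switch into the child. The sketch below condenses that setup; the field list written out here is reconstructed from how copy_thread uses it (callee-saved x19-x28 plus fp, sp, pc), not copied from the truncated processor.h entry above.

    /* Self-contained sketch of the arm64 kernel-thread setup above.  The
     * struct is an illustrative reconstruction, not the header definition. */
    #include <string.h>

    struct cpu_context_model {
            unsigned long x19, x20, x21, x22, x23, x24, x25, x26, x27, x28;
            unsigned long fp;
            unsigned long sp;
            unsigned long pc;
    };

    static void setup_kernel_thread(struct cpu_context_model *cc,
                                    unsigned long fn, unsigned long arg,
                                    unsigned long childregs,
                                    unsigned long ret_from_fork)
    {
            memset(cc, 0, sizeof(*cc));
            cc->x19 = fn;              /* stack_start doubles as fn here   */
            cc->x20 = arg;             /* stk_sz doubles as the argument   */
            cc->pc  = ret_from_fork;   /* first switch lands here          */
            cc->sp  = childregs;       /* child's pt_regs on its stack     */
    }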

/arch/mips/kernel/

  smp.c
     344  if (cpu != smp_processor_id() && cpu_context(cpu, mm))  in flush_tlb_mm()
     345  cpu_context(cpu, mm) = 0;  in flush_tlb_mm()
     383  if (cpu != smp_processor_id() && cpu_context(cpu, mm))  in flush_tlb_range()
     384  cpu_context(cpu, mm) = 0;  in flush_tlb_range()
     429  if (cpu != smp_processor_id() && cpu_context(cpu, vma->vm_mm))  in flush_tlb_page()
     430  cpu_context(cpu, vma->vm_mm) = 0;  in flush_tlb_page()
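
The smp.c lines above show the lazy branch of the MIPS SMP flush helpers: for every other CPU that has an ASID recorded for this mm, the slot is zeroed so that CPU is forced through get_new_mmu_context() the next time it switches to the mm, while the local CPU does its own local_flush_tlb_*() work (the listing does not show the IPI path these functions also have for mms currently running elsewhere). A toy model of that loop, reusing the invented ASID idea from the sketch further up and a stand-in CPU count, looks like this:

    /* Toy model of the "zero everyone else's context" half of the MIPS SMP
     * flush paths above.  MODEL_NR_CPUS and the helpers are stand-ins. */
    #define MODEL_NR_CPUS 4

    struct mm_model_smp { unsigned long asid[MODEL_NR_CPUS]; };

    static void model_local_flush_tlb_mm(struct mm_model_smp *mm, int cpu)
    {
            (void)mm; (void)cpu;       /* hardware TLB invalidation not modeled */
    }

    static void model_flush_tlb_mm(struct mm_model_smp *mm, int this_cpu)
    {
            int cpu;

            for (cpu = 0; cpu < MODEL_NR_CPUS; cpu++) {
                    /* Remote CPUs: drop the ASID so their next switch to this
                     * mm allocates a new one instead of reusing stale entries. */
                    if (cpu != this_cpu && mm->asid[cpu])
                            mm->asid[cpu] = 0;
            }

            /* The local CPU flushes its own TLB entries for this mm. */
            model_local_flush_tlb_mm(mm, this_cpu);
    }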