Searched refs:prev (Results 1 – 25 of 153) sorted by relevance


/arch/mips/include/asm/
switch_to.h
44 #define __mips_mt_fpaff_switch_to(prev) \ argument
46 struct thread_info *__prev_ti = task_thread_info(prev); \
50 (!(KSTK_STATUS(prev) & ST0_CU1))) { \
52 prev->cpus_allowed = prev->thread.user_cpus_allowed; \
58 #define __mips_mt_fpaff_switch_to(prev) do { (void) (prev); } while (0) argument
67 #define switch_to(prev, next, last) \ argument
69 __mips_mt_fpaff_switch_to(prev); \
71 __save_dsp(prev); \
73 (last) = resume(prev, next, task_thread_info(next)); \
76 #define finish_arch_switch(prev) \ argument
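The MIPS switch_to(prev, next, last) above follows the convention used by most architectures on this page: resume() switches stacks from prev to next and hands back a pointer to the task that was actually running before, which the caller stores in last. The kernel glue is assembly, but the prev/next hand-off can be illustrated in user space with POSIX ucontext; the sketch below is only an analogy (ctx_main, ctx_task and task_body are invented for illustration), not the kernel's resume().

/* User-space analogy of the prev/next hand-off in switch_to().
 * Hypothetical example; uses POSIX ucontext, not the kernel API. */
#include <stdio.h>
#include <ucontext.h>

static ucontext_t ctx_main, ctx_task;

static void task_body(void)
{
    printf("task: running, switching back to prev\n");
    /* swapcontext() saves the current ("prev") context and resumes "next". */
    swapcontext(&ctx_task, &ctx_main);
}

int main(void)
{
    static char stack[64 * 1024];

    getcontext(&ctx_task);
    ctx_task.uc_stack.ss_sp = stack;
    ctx_task.uc_stack.ss_size = sizeof(stack);
    ctx_task.uc_link = &ctx_main;        /* where to go if task_body returns */
    makecontext(&ctx_task, task_body, 0);

    printf("main: switching to task (prev = main)\n");
    swapcontext(&ctx_main, &ctx_task);   /* roughly: (last) = resume(prev, next, ...) */
    printf("main: resumed, control came back from task\n");
    return 0;
}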
/arch/x86/lib/
cmpxchg.c
12 u8 prev; in cmpxchg_386_u8() local
17 prev = *(u8 *)ptr; in cmpxchg_386_u8()
18 if (prev == old) in cmpxchg_386_u8()
21 return prev; in cmpxchg_386_u8()
27 u16 prev; in cmpxchg_386_u16() local
32 prev = *(u16 *)ptr; in cmpxchg_386_u16()
33 if (prev == old) in cmpxchg_386_u16()
36 return prev; in cmpxchg_386_u16()
42 u32 prev; in cmpxchg_386_u32() local
47 prev = *(u32 *)ptr; in cmpxchg_386_u32()
[all …]
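The cmpxchg_386_u8/u16/u32 helpers above are the fallback for 80386 CPUs without a CMPXCHG instruction: load the current value into prev, store the new value only if prev equals the expected old value, and return prev either way (the real functions run with local interrupts disabled so the sequence is atomic on a uniprocessor). A minimal user-space sketch of the same calling convention, using a GCC/Clang atomic builtin instead of the interrupt trick, could look like the following; cmpxchg_u32_sketch is an invented name.

/* Sketch of the cmpxchg calling convention: return the previous value,
 * and perform the store only when that previous value matched `old`.
 * Hypothetical helper; uses __atomic builtins rather than the
 * interrupt-disable trick the 386 fallback relies on. */
#include <stdint.h>
#include <stdio.h>

static uint32_t cmpxchg_u32_sketch(uint32_t *ptr, uint32_t old, uint32_t new)
{
    uint32_t expected = old;

    /* On success *ptr becomes `new`; on failure `expected` is reloaded with
     * the current value. Either way it holds what *ptr contained before. */
    __atomic_compare_exchange_n(ptr, &expected, new, 0,
                                __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
    return expected;
}

int main(void)
{
    uint32_t v = 5;
    uint32_t prev;

    prev = cmpxchg_u32_sketch(&v, 5, 7);   /* succeeds: prev == old */
    printf("prev=%u v=%u\n", prev, v);     /* prev=5 v=7 */

    prev = cmpxchg_u32_sketch(&v, 5, 9);   /* fails: prev != old, v untouched */
    printf("prev=%u v=%u\n", prev, v);     /* prev=7 v=7 */
    return 0;
}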
/arch/mn10300/include/asm/
switch_to.h
22 #define switch_fpu(prev, next) \ argument
24 if ((prev)->thread.fpu_flags & THREAD_HAS_FPU) { \
25 (prev)->thread.fpu_flags &= ~THREAD_HAS_FPU; \
26 (prev)->thread.uregs->epsw &= ~EPSW_FE; \
27 fpu_save(&(prev)->thread.fpu_state); \
31 #define switch_fpu(prev, next) do {} while (0) argument
36 struct task_struct *__switch_to(struct thread_struct *prev,
40 #define switch_to(prev, next, last) \ argument
42 switch_fpu(prev, next); \
44 (last) = __switch_to(&(prev)->thread, &(next)->thread, (prev)); \
/arch/ia64/include/asm/
switch_to.h
34 extern void ia64_account_on_switch (struct task_struct *prev, struct task_struct *next);
51 #define __switch_to(prev,next,last) do { \ argument
52 IA64_ACCOUNT_ON_SWITCH(prev, next); \
53 if (IA64_HAS_EXTRA_STATE(prev)) \
54 ia64_save_extra(prev); \
68 # define switch_to(prev,next,last) do { \ argument
69 if (ia64_psr(task_pt_regs(prev))->mfh && ia64_is_local_fpu_owner(prev)) { \
70 ia64_psr(task_pt_regs(prev))->mfh = 0; \
71 (prev)->thread.flags |= IA64_THREAD_FPH_VALID; \
72 __ia64_save_fpu((prev)->thread.fph); \
[all …]
/arch/powerpc/include/asm/
cmpxchg.h
18 unsigned long prev; in __xchg_u32() local
27 : "=&r" (prev), "+m" (*(volatile unsigned int *)p) in __xchg_u32()
31 return prev; in __xchg_u32()
43 unsigned long prev; in __xchg_u32_local() local
50 : "=&r" (prev), "+m" (*(volatile unsigned int *)p) in __xchg_u32_local()
54 return prev; in __xchg_u32_local()
61 unsigned long prev; in __xchg_u64() local
70 : "=&r" (prev), "+m" (*(volatile unsigned long *)p) in __xchg_u64()
74 return prev; in __xchg_u64()
80 unsigned long prev; in __xchg_u64_local() local
[all …]
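In the powerpc __xchg_u32/__xchg_u64 helpers above, prev is the output of an lwarx/stwcx. (or ldarx/stdcx.) retry loop, so each function atomically installs the new value and returns whatever the location held before. The same "swap and report the previous value" contract can be sketched portably; swap_u32_sketch below is an illustrative name, not a kernel interface.

/* Sketch of the xchg contract: atomically store a new value and return
 * the previous one. Hypothetical helper; on powerpc the real version is
 * an inline-asm lwarx/stwcx. retry loop. */
#include <stdint.h>
#include <stdio.h>

static uint32_t swap_u32_sketch(uint32_t *ptr, uint32_t val)
{
    return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
}

int main(void)
{
    uint32_t word = 0xdead;
    uint32_t prev = swap_u32_sketch(&word, 0xbeef);

    printf("prev=%#x now=%#x\n", prev, word);   /* prev=0xdead now=0xbeef */
    return 0;
}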
mmu_context.h
20 extern void switch_mmu_context(struct mm_struct *prev, struct mm_struct *next);
43 static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next, in switch_mm() argument
59 if (prev == next) in switch_mm()
64 if (prev->context.acop || next->context.acop) in switch_mm()
86 switch_mmu_context(prev, next); in switch_mm()
97 static inline void activate_mm(struct mm_struct *prev, struct mm_struct *next) in activate_mm() argument
102 switch_mm(prev, next, current); in activate_mm()
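In switch_mm() and activate_mm() above (and in the frv, hexagon and x86 variants later in these results), prev and next are address spaces rather than tasks: the function returns early when prev == next and otherwise moves the CPU over to next's context. A toy sketch of that early-exit shape, with invented mm_sketch/switch_mm_sketch names:

/* Sketch of the switch_mm() fast path: when prev and next are the same
 * address space, there is nothing to reload. Hypothetical structures;
 * the real function reloads page tables, updates mm_cpumask(), etc. */
#include <stdio.h>

struct mm_sketch {
    int id;                 /* stand-in for page-table root, context id, ... */
};

static void switch_mm_sketch(struct mm_sketch *prev, struct mm_sketch *next)
{
    if (prev == next)
        return;             /* same mm: skip the expensive work */
    printf("leaving mm %d, loading mm %d\n", prev->id, next->id);
    /* ...reload page tables, update per-CPU bookkeeping... */
}

int main(void)
{
    struct mm_sketch a = { .id = 1 }, b = { .id = 2 };

    switch_mm_sketch(&a, &a);   /* fast path, prints nothing */
    switch_mm_sketch(&a, &b);   /* slow path */
    return 0;
}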
/arch/tile/include/asm/
switch_to.h
40 #define switch_to(prev, next, last) ((last) = _switch_to((prev), (next))) argument
41 extern struct task_struct *_switch_to(struct task_struct *prev,
45 extern struct task_struct *__switch_to(struct task_struct *prev,
57 #define finish_arch_switch(prev) do { \ argument
58 if (unlikely((prev)->state == TASK_DEAD)) \
60 ((prev)->pid << _SIM_CONTROL_OPERATOR_BITS)); \
72 struct task_struct *sim_notify_fork(struct task_struct *prev);
/arch/s390/include/asm/
switch_to.h
79 #define switch_to(prev,next,last) do { \ argument
80 if (prev->mm) { \
81 save_fp_regs(&prev->thread.fp_regs); \
82 save_access_regs(&prev->thread.acrs[0]); \
89 prev = __switch_to(prev,next); \
95 #define finish_arch_switch(prev) do { \ argument
97 account_vtime(prev, current); \
cmpxchg.h
96 unsigned long addr, prev, tmp; in __cmpxchg() local
116 : "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr) in __cmpxchg()
120 return prev >> shift; in __cmpxchg()
137 : "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr) in __cmpxchg()
141 return prev >> shift; in __cmpxchg()
145 : "=&d" (prev), "=Q" (*(int *) ptr) in __cmpxchg()
148 return prev; in __cmpxchg()
153 : "=&d" (prev), "=Q" (*(long *) ptr) in __cmpxchg()
156 return prev; in __cmpxchg()
/arch/um/os-Linux/
irq.c
82 struct irq_fd **prev; in os_free_irq_by_cb() local
85 prev = &active_fds; in os_free_irq_by_cb()
86 while (*prev != NULL) { in os_free_irq_by_cb()
87 if ((*test)(*prev, arg)) { in os_free_irq_by_cb()
88 struct irq_fd *old_fd = *prev; in os_free_irq_by_cb()
90 (pollfds[i].fd != (*prev)->fd)) { in os_free_irq_by_cb()
94 (*prev)->fd, pollfds[i].fd); in os_free_irq_by_cb()
107 *last_irq_ptr2 = prev; in os_free_irq_by_cb()
109 *prev = (*prev)->next; in os_free_irq_by_cb()
115 prev = &(*prev)->next; in os_free_irq_by_cb()
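Here prev is neither a task nor an old value but a struct irq_fd ** cursor: os_free_irq_by_cb() walks the active_fds singly linked list through the address of each next pointer, so a matching entry is unlinked with a single *prev assignment and no special case for the list head. A self-contained sketch of that traversal pattern (node and remove_matching are invented names):

/* Pointer-to-pointer list walk, as in os_free_irq_by_cb(): `prev` holds
 * the address of the link that points at the current node, so unlinking
 * is `*prev = (*prev)->next` with no head-vs-middle special case.
 * Hypothetical types and names, for illustration only. */
#include <stdio.h>
#include <stdlib.h>

struct node {
    int fd;
    struct node *next;
};

static void remove_matching(struct node **head, int fd)
{
    struct node **prev = head;

    while (*prev != NULL) {
        if ((*prev)->fd == fd) {
            struct node *old = *prev;

            *prev = old->next;   /* unlink */
            free(old);
            continue;            /* re-check the node now at *prev */
        }
        prev = &(*prev)->next;
    }
}

int main(void)
{
    struct node *head = NULL;

    for (int fd = 3; fd >= 1; fd--) {    /* build list 1 -> 2 -> 3 */
        struct node *n = malloc(sizeof(*n));
        n->fd = fd;
        n->next = head;
        head = n;
    }
    remove_matching(&head, 2);
    for (struct node *n = head; n; n = n->next)
        printf("fd=%d\n", n->fd);        /* prints fd=1 then fd=3 */
    return 0;
}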
/arch/sh/include/asm/
switch_to_32.h
63 struct task_struct *__switch_to(struct task_struct *prev,
69 #define switch_to(prev, next, last) \ argument
79 if (is_dsp_enabled(prev)) \
80 __save_dsp(prev); \
82 __ts1 = (u32 *)&prev->thread.sp; \
83 __ts2 = (u32 *)&prev->thread.pc; \
84 __ts4 = (u32 *)prev; \
128 #define finish_arch_switch(prev) \ argument
130 if (is_dsp_enabled(prev)) \
131 __restore_dsp(prev); \
switch_to_64.h
19 struct task_struct *sh64_switch_to(struct task_struct *prev,
24 #define switch_to(prev,next,last) \ argument
30 last = sh64_switch_to(prev, &prev->thread, next, \
/arch/blackfin/include/asm/
switch_to.h
21 asmlinkage struct task_struct *resume(struct task_struct *prev, struct task_struct *next);
24 #define switch_to(prev,next,last) \ argument
26 memcpy (&task_thread_info(prev)->l1_task_info, L1_SCRATCH_TASK_INFO, \
30 (last) = resume (prev, next); \
33 #define switch_to(prev, next, last) \ argument
35 (last) = resume(prev, next); \
/arch/x86/include/asm/
switch_to.h
5 struct task_struct *__switch_to(struct task_struct *prev,
31 #define switch_to(prev, next, last) \ argument
55 : [prev_sp] "=m" (prev->thread.sp), \
56 [prev_ip] "=m" (prev->thread.ip), \
70 [prev] "a" (prev), \
104 #define switch_to(prev, next, last) \ argument
118 : [next] "S" (next), [prev] "D" (prev), \
mmu_context.h
12 static inline void paravirt_activate_mm(struct mm_struct *prev, in paravirt_activate_mm() argument
33 static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next, in switch_mm() argument
38 if (likely(prev != next)) { in switch_mm()
49 cpumask_clear_cpu(cpu, mm_cpumask(prev)); in switch_mm()
54 if (unlikely(prev->context.ldt != next->context.ldt)) in switch_mm()
74 #define activate_mm(prev, next) \ argument
76 paravirt_activate_mm((prev), (next)); \
77 switch_mm((prev), (next), NULL); \
cmpxchg_32.h
27 u64 prev = *ptr; in set_64bit() local
32 : "=m" (*ptr), "+A" (prev) in set_64bit()
52 u64 prev; in __cmpxchg64() local
54 : "=A" (prev), in __cmpxchg64()
60 return prev; in __cmpxchg64()
65 u64 prev; in __cmpxchg64_local() local
67 : "=A" (prev), in __cmpxchg64_local()
73 return prev; in __cmpxchg64_local()
/arch/frv/include/asm/
switch_to.h
25 struct task_struct *prev);
27 #define switch_to(prev, next, last) \ argument
29 (prev)->thread.sched_lr = \
31 (last) = __switch_to(&(prev)->thread, &(next)->thread, (prev)); \
mmu_context.h
35 #define switch_mm(prev, next, tsk) \ argument
37 if (prev != next) \
38 change_mm_context(&prev->context, &next->context, next->pgd); \
41 #define activate_mm(prev, next) \ argument
43 change_mm_context(&prev->context, &next->context, next->pgd); \
/arch/alpha/include/asm/
xchg.h
139 unsigned long prev, tmp, cmp, addr64; in ____cmpxchg() local
157 : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64) in ____cmpxchg()
160 return prev; in ____cmpxchg()
166 unsigned long prev, tmp, cmp, addr64; in ____cmpxchg() local
184 : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64) in ____cmpxchg()
187 return prev; in ____cmpxchg()
193 unsigned long prev, cmp; in ____cmpxchg() local
207 : "=&r"(prev), "=&r"(cmp), "=m"(*m) in ____cmpxchg()
210 return prev; in ____cmpxchg()
216 unsigned long prev, cmp; in ____cmpxchg() local
[all …]
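The Alpha ____cmpxchg() variants above are load-locked/store-conditional loops: the conditional store can fail if the reservation is lost, so the asm branches back and tries again. The portable analogue is a weak compare-exchange wrapped in a loop; atomic_add_sketch below is an invented name using C11 atomics, not the Alpha inline asm.

/* Retry-loop pattern analogous to a load-locked/store-conditional
 * sequence: a weak CAS may fail spuriously, so it is always looped.
 * Hypothetical helper using C11 atomics. */
#include <stdatomic.h>
#include <stdio.h>

static unsigned long atomic_add_sketch(_Atomic unsigned long *ptr, unsigned long inc)
{
    unsigned long prev = atomic_load(ptr);

    /* On failure `prev` is reloaded with the current value and we retry,
     * much like branching back to the load-locked when the store fails. */
    while (!atomic_compare_exchange_weak(ptr, &prev, prev + inc))
        ;
    return prev;    /* value seen before our update landed */
}

int main(void)
{
    _Atomic unsigned long counter = 40;
    unsigned long prev = atomic_add_sketch(&counter, 2);

    printf("prev=%lu now=%lu\n", prev, atomic_load(&counter));  /* prev=40 now=42 */
    return 0;
}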
/arch/avr32/include/asm/
switch_to.h
18 #define finish_arch_switch(prev) \ argument
20 ocd_write(PID, prev->pid); \
39 #define switch_to(prev, next, last) \ argument
41 last = __switch_to(prev, &prev->thread.cpu_context + 1, \
/arch/parisc/lib/
bitops.c
62 unsigned long prev; in __cmpxchg_u64() local
65 if ((prev = *ptr) == old) in __cmpxchg_u64()
68 return prev; in __cmpxchg_u64()
75 unsigned int prev; in __cmpxchg_u32() local
78 if ((prev = *ptr) == old) in __cmpxchg_u32()
81 return (unsigned long)prev; in __cmpxchg_u32()
/arch/m32r/include/asm/
switch_to.h
28 #define switch_to(prev, next, last) do { \ argument
44 : "0" (prev), \
45 "r" (&(prev->thread.sp)), "r" (&(next->thread.sp)), \
46 "r" (&(prev->thread.lr)), "r" (&(next->thread.lr)) \
/arch/c6x/include/asm/
switch_to.h
20 asmlinkage void *__switch_to(struct thread_struct *prev,
24 #define switch_to(prev, next, last) \ argument
27 (last) = __switch_to(&(prev)->thread, \
28 &(next)->thread, (prev)); \
/arch/hexagon/include/asm/
mmu_context.h
66 static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next, in switch_mm() argument
75 if (next->context.generation < prev->context.generation) { in switch_mm()
79 next->context.generation = prev->context.generation; in switch_mm()
88 static inline void activate_mm(struct mm_struct *prev, struct mm_struct *next) in activate_mm() argument
93 switch_mm(prev, next, current_thread_info()->task); in activate_mm()
/arch/powerpc/platforms/cell/spufs/
switch.c
1791 static int quiece_spu(struct spu_state *prev, struct spu *spu) in quiece_spu() argument
1803 if (check_spu_isolate(prev, spu)) { /* Step 2. */ in quiece_spu()
1806 disable_interrupts(prev, spu); /* Step 3. */ in quiece_spu()
1807 set_watchdog_timer(prev, spu); /* Step 4. */ in quiece_spu()
1808 inhibit_user_access(prev, spu); /* Step 5. */ in quiece_spu()
1809 if (check_spu_isolate(prev, spu)) { /* Step 6. */ in quiece_spu()
1812 set_switch_pending(prev, spu); /* Step 7. */ in quiece_spu()
1813 save_mfc_cntl(prev, spu); /* Step 8. */ in quiece_spu()
1814 save_spu_runcntl(prev, spu); /* Step 9. */ in quiece_spu()
1815 save_mfc_sr1(prev, spu); /* Step 10. */ in quiece_spu()
[all …]
