Lines Matching refs:thread
85 if (tsk == current && tsk->thread.regs && in check_if_tm_restore_required()
86 MSR_TM_ACTIVE(tsk->thread.regs->msr) && in check_if_tm_restore_required()
88 tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr; in check_if_tm_restore_required()
155 msr = tsk->thread.regs->msr; in __giveup_fpu()
161 tsk->thread.regs->msr = msr; in __giveup_fpu()
180 if (tsk->thread.regs) { in flush_fp_to_thread()
190 if (tsk->thread.regs->msr & MSR_FP) { in flush_fp_to_thread()
214 if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) { in enable_kernel_fp()
223 if(!msr_tm_active(cpumsr) && msr_tm_active(current->thread.regs->msr)) in enable_kernel_fp()
231 if (tsk->thread.load_fp || msr_tm_active(tsk->thread.regs->msr)) { in restore_fp()
232 load_fp_state(&current->thread.fp_state); in restore_fp()
233 current->thread.load_fp++; in restore_fp()
250 msr = tsk->thread.regs->msr; in __giveup_altivec()
256 tsk->thread.regs->msr = msr; in __giveup_altivec()
277 if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) { in enable_kernel_altivec()
286 if(!msr_tm_active(cpumsr) && msr_tm_active(current->thread.regs->msr)) in enable_kernel_altivec()
299 if (tsk->thread.regs) { in flush_altivec_to_thread()
301 if (tsk->thread.regs->msr & MSR_VEC) { in flush_altivec_to_thread()
313 (tsk->thread.load_vec || msr_tm_active(tsk->thread.regs->msr))) { in restore_altivec()
314 load_vr_state(&tsk->thread.vr_state); in restore_altivec()
315 tsk->thread.used_vr = 1; in restore_altivec()
316 tsk->thread.load_vec++; in restore_altivec()
330 if (tsk->thread.regs->msr & MSR_FP) in __giveup_vsx()
332 if (tsk->thread.regs->msr & MSR_VEC) in __giveup_vsx()
334 tsk->thread.regs->msr &= ~MSR_VSX; in __giveup_vsx()
348 if (tsk->thread.regs->msr & MSR_FP) in save_vsx()
350 if (tsk->thread.regs->msr & MSR_VEC) in save_vsx()
362 if (current->thread.regs && in enable_kernel_vsx()
363 (current->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP))) { in enable_kernel_vsx()
372 if(!msr_tm_active(cpumsr) && msr_tm_active(current->thread.regs->msr)) in enable_kernel_vsx()
374 if (current->thread.regs->msr & MSR_FP) in enable_kernel_vsx()
376 if (current->thread.regs->msr & MSR_VEC) in enable_kernel_vsx()
385 if (tsk->thread.regs) { in flush_vsx_to_thread()
387 if (tsk->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP)) { in flush_vsx_to_thread()
399 tsk->thread.used_vsr = 1; in restore_vsx()
427 if (current->thread.regs && (current->thread.regs->msr & MSR_SPE)) { in enable_kernel_spe()
436 if (tsk->thread.regs) { in flush_spe_to_thread()
438 if (tsk->thread.regs->msr & MSR_SPE) { in flush_spe_to_thread()
440 tsk->thread.spefscr = mfspr(SPRN_SPEFSCR); in flush_spe_to_thread()
476 if (!tsk->thread.regs) in giveup_all()
479 usermsr = tsk->thread.regs->msr; in giveup_all()
513 !current->thread.load_fp && !loadvec(current->thread)) in restore_math()
524 msr |= MSR_FP | current->thread.fpexc_mode; in restore_math()
543 if (!tsk->thread.regs) in save_all()
546 usermsr = tsk->thread.regs->msr; in save_all()
575 if (tsk->thread.regs) { in flush_all_to_thread()
581 if (tsk->thread.regs->msr & MSR_SPE) in flush_all_to_thread()
582 tsk->thread.spefscr = mfspr(SPRN_SPEFSCR); in flush_all_to_thread()
596 current->thread.trap_nr = signal_code; in do_send_trap()
614 current->thread.trap_nr = TRAP_HWBKPT; in do_break()
640 static void set_debug_reg_defaults(struct thread_struct *thread) in set_debug_reg_defaults() argument
642 thread->debug.iac1 = thread->debug.iac2 = 0; in set_debug_reg_defaults()
644 thread->debug.iac3 = thread->debug.iac4 = 0; in set_debug_reg_defaults()
646 thread->debug.dac1 = thread->debug.dac2 = 0; in set_debug_reg_defaults()
648 thread->debug.dvc1 = thread->debug.dvc2 = 0; in set_debug_reg_defaults()
650 thread->debug.dbcr0 = 0; in set_debug_reg_defaults()
655 thread->debug.dbcr1 = DBCR1_IAC1US | DBCR1_IAC2US | in set_debug_reg_defaults()
661 thread->debug.dbcr2 = DBCR2_DAC1US | DBCR2_DAC2US; in set_debug_reg_defaults()
663 thread->debug.dbcr1 = 0; in set_debug_reg_defaults()
701 if ((current->thread.debug.dbcr0 & DBCR0_IDM) in switch_booke_debug_regs()
708 static void set_debug_reg_defaults(struct thread_struct *thread) in set_debug_reg_defaults() argument
710 thread->hw_brk.address = 0; in set_debug_reg_defaults()
711 thread->hw_brk.type = 0; in set_debug_reg_defaults()
712 set_breakpoint(&thread->hw_brk); in set_debug_reg_defaults()
819 return tsk && tsk->thread.regs && (tsk->thread.regs->msr & MSR_TM); in tm_enabled()
862 giveup_all(container_of(thr, struct task_struct, thread)); in tm_reclaim_thread()
870 tm_reclaim_thread(&current->thread, current_thread_info(), cause); in tm_reclaim_current()
885 struct thread_struct *thr = &tsk->thread; in tm_reclaim_task()
913 extern void __tm_recheckpoint(struct thread_struct *thread,
916 void tm_recheckpoint(struct thread_struct *thread, in tm_recheckpoint() argument
921 if (!(thread->regs->msr & MSR_TM)) in tm_recheckpoint()
934 tm_restore_sprs(thread); in tm_recheckpoint()
936 __tm_recheckpoint(thread, orig_msr); in tm_recheckpoint()
959 if (!MSR_TM_ACTIVE(new->thread.regs->msr)){ in tm_recheckpoint_new_task()
960 tm_restore_sprs(&new->thread); in tm_recheckpoint_new_task()
963 msr = new->thread.ckpt_regs.msr; in tm_recheckpoint_new_task()
967 new->pid, new->thread.regs->msr, msr); in tm_recheckpoint_new_task()
969 tm_recheckpoint(&new->thread, msr); in tm_recheckpoint_new_task()
976 new->thread.regs->msr &= ~(MSR_FP | MSR_VEC | MSR_VSX); in tm_recheckpoint_new_task()
991 prev->thread.load_tm++; in __switch_to_tm()
993 if (!MSR_TM_ACTIVE(prev->thread.regs->msr) && prev->thread.load_tm == 0) in __switch_to_tm()
994 prev->thread.regs->msr &= ~MSR_TM; in __switch_to_tm()
1029 msr_diff = current->thread.ckpt_regs.msr & ~regs->msr; in restore_tm_state()
1034 current->thread.load_fp = 1; in restore_tm_state()
1037 current->thread.load_vec = 1; in restore_tm_state()
1139 new_thread = &new->thread; in __switch_to()
1140 old_thread = &current->thread; in __switch_to()
1169 switch_booke_debug_regs(&new->thread.debug); in __switch_to()
1176 if (unlikely(!hw_brk_match(this_cpu_ptr(&current_brk), &new->thread.hw_brk))) in __switch_to()
1177 __set_breakpoint(&new->thread.hw_brk); in __switch_to()
1185 save_sprs(&prev->thread); in __switch_to()
1217 if (current_thread_info()->task->thread.regs) in __switch_to()
1218 restore_math(current_thread_info()->task->thread.regs); in __switch_to()
1411 set_debug_reg_defaults(&current->thread); in flush_thread()
1462 p->thread.ksp_vsid = sp_vsid; in setup_ksp_vsid()
1500 p->thread.regs = NULL; /* no user register state */ in copy_thread()
1510 p->thread.regs = childregs; in copy_thread()
1538 p->thread.ksp = sp; in copy_thread()
1540 p->thread.ksp_limit = (unsigned long)task_stack_page(p) + in copy_thread()
1544 p->thread.ptrace_bps[0] = NULL; in copy_thread()
1547 p->thread.fp_save_area = NULL; in copy_thread()
1549 p->thread.vr_save_area = NULL; in copy_thread()
1556 p->thread.dscr_inherit = current->thread.dscr_inherit; in copy_thread()
1557 p->thread.dscr = mfspr(SPRN_DSCR); in copy_thread()
1560 p->thread.ppr = INIT_PPR; in copy_thread()
1579 if (!current->thread.regs) { in start_thread()
1581 current->thread.regs = regs - 1; in start_thread()
1661 current->thread.used_vsr = 0; in start_thread()
1663 current->thread.load_fp = 0; in start_thread()
1664 memset(&current->thread.fp_state, 0, sizeof(current->thread.fp_state)); in start_thread()
1665 current->thread.fp_save_area = NULL; in start_thread()
1667 memset(&current->thread.vr_state, 0, sizeof(current->thread.vr_state)); in start_thread()
1668 current->thread.vr_state.vscr.u[3] = 0x00010000; /* Java mode disabled */ in start_thread()
1669 current->thread.vr_save_area = NULL; in start_thread()
1670 current->thread.vrsave = 0; in start_thread()
1671 current->thread.used_vr = 0; in start_thread()
1672 current->thread.load_vec = 0; in start_thread()
1675 memset(current->thread.evr, 0, sizeof(current->thread.evr)); in start_thread()
1676 current->thread.acc = 0; in start_thread()
1677 current->thread.spefscr = 0; in start_thread()
1678 current->thread.used_spe = 0; in start_thread()
1681 current->thread.tm_tfhar = 0; in start_thread()
1682 current->thread.tm_texasr = 0; in start_thread()
1683 current->thread.tm_tfiar = 0; in start_thread()
1684 current->thread.load_tm = 0; in start_thread()
1694 struct pt_regs *regs = tsk->thread.regs; in set_fpexc_mode()
1715 tsk->thread.spefscr_last = mfspr(SPRN_SPEFSCR); in set_fpexc_mode()
1716 tsk->thread.fpexc_mode = val & in set_fpexc_mode()
1734 tsk->thread.fpexc_mode = __pack_fe01(val); in set_fpexc_mode()
1737 | tsk->thread.fpexc_mode; in set_fpexc_mode()
1745 if (tsk->thread.fpexc_mode & PR_FP_EXC_SW_ENABLE) in get_fpexc_mode()
1760 tsk->thread.spefscr_last = mfspr(SPRN_SPEFSCR); in get_fpexc_mode()
1761 val = tsk->thread.fpexc_mode; in get_fpexc_mode()
1768 val = __unpack_fe01(tsk->thread.fpexc_mode); in get_fpexc_mode()
1774 struct pt_regs *regs = tsk->thread.regs; in set_endian()
1795 struct pt_regs *regs = tsk->thread.regs; in get_endian()
1818 tsk->thread.align_ctl = val; in set_unalign_ctl()
1824 return put_user(tsk->thread.align_ctl, (unsigned int __user *)adr); in get_unalign_ctl()
1873 sp = p->thread.ksp; in get_wchan()
1910 sp = tsk->thread.ksp; in show_stack()