Lines Matching refs:thread

96 if (tsk == current && tsk->thread.regs && in check_if_tm_restore_required()
97 MSR_TM_ACTIVE(tsk->thread.regs->msr) && in check_if_tm_restore_required()
99 regs_set_return_msr(&tsk->thread.ckpt_regs, in check_if_tm_restore_required()
100 tsk->thread.regs->msr); in check_if_tm_restore_required()
161 msr = tsk->thread.regs->msr; in __giveup_fpu()
165 regs_set_return_msr(tsk->thread.regs, msr); in __giveup_fpu()
184 if (tsk->thread.regs) { in flush_fp_to_thread()
194 if (tsk->thread.regs->msr & MSR_FP) { in flush_fp_to_thread()
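flush_fp_to_thread() (source lines 184-194) shows a guard that repeats below for Altivec, VSX and SPE: skip tasks with no user registers, and only save the facility's state when the matching MSR bit says the registers are live on the CPU. Here is a standalone sketch of that shape, with hypothetical mock types, a placeholder MSR_FP value and a stubbed giveup_fpu(); the kernel's preemption handling around this check is elided.

#include <stdio.h>

#define MSR_FP 0x2000UL /* placeholder value, not the real powerpc MSR bit */

/* Hypothetical mock structures standing in for the kernel's. */
struct pt_regs { unsigned long msr; };
struct thread_struct { struct pt_regs *regs; };
struct task_struct { struct thread_struct thread; };

/* Stub: in the kernel this saves the live FP registers into the thread
 * struct and clears MSR_FP so the FPU is faulted back in on next use. */
static void giveup_fpu(struct task_struct *tsk)
{
	tsk->thread.regs->msr &= ~MSR_FP;
}

static void flush_fp_to_thread(struct task_struct *tsk)
{
	if (tsk->thread.regs) {                  /* kernel threads have no user regs */
		if (tsk->thread.regs->msr & MSR_FP)
			giveup_fpu(tsk);         /* FP state is live, push it out */
	}
}

int main(void)
{
	struct pt_regs regs = { .msr = MSR_FP };
	struct task_struct t = { .thread = { .regs = &regs } };

	flush_fp_to_thread(&t);
	printf("msr after flush = %#lx\n", t.thread.regs->msr);
	return 0;
}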
218 if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) { in enable_kernel_fp()
228 MSR_TM_ACTIVE(current->thread.regs->msr)) in enable_kernel_fp()
244 msr = tsk->thread.regs->msr; in __giveup_altivec()
248 regs_set_return_msr(tsk->thread.regs, msr); in __giveup_altivec()
269 if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) { in enable_kernel_altivec()
279 MSR_TM_ACTIVE(current->thread.regs->msr)) in enable_kernel_altivec()
292 if (tsk->thread.regs) { in flush_altivec_to_thread()
294 if (tsk->thread.regs->msr & MSR_VEC) { in flush_altivec_to_thread()
307 unsigned long msr = tsk->thread.regs->msr; in __giveup_vsx()
339 if (current->thread.regs && in enable_kernel_vsx()
340 (current->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP))) { in enable_kernel_vsx()
350 MSR_TM_ACTIVE(current->thread.regs->msr)) in enable_kernel_vsx()
359 if (tsk->thread.regs) { in flush_vsx_to_thread()
361 if (tsk->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP)) { in flush_vsx_to_thread()
388 if (current->thread.regs && (current->thread.regs->msr & MSR_SPE)) { in enable_kernel_spe()
397 if (tsk->thread.regs) { in flush_spe_to_thread()
399 if (tsk->thread.regs->msr & MSR_SPE) { in flush_spe_to_thread()
401 tsk->thread.spefscr = mfspr(SPRN_SPEFSCR); in flush_spe_to_thread()
430 if (!tsk->thread.regs) in giveup_all()
435 usermsr = tsk->thread.regs->msr; in giveup_all()
459 if (current->thread.load_fp) { in should_restore_fp()
460 current->thread.load_fp++; in should_restore_fp()
468 load_fp_state(&current->thread.fp_state); in do_restore_fp()
478 if (cpu_has_feature(CPU_FTR_ALTIVEC) && (current->thread.load_vec)) { in should_restore_altivec()
479 current->thread.load_vec++; in should_restore_altivec()
487 load_vr_state(&current->thread.vr_state); in do_restore_altivec()
488 current->thread.used_vr = 1; in do_restore_altivec()
504 current->thread.used_vsr = 1; in do_restore_vsx()
552 fpexc_mode = current->thread.fpexc_mode; in restore_math()
572 if (!tsk->thread.regs) in save_all()
575 usermsr = tsk->thread.regs->msr; in save_all()
598 if (tsk->thread.regs) { in flush_all_to_thread()
602 if (tsk->thread.regs->msr & MSR_SPE) in flush_all_to_thread()
603 tsk->thread.spefscr = mfspr(SPRN_SPEFSCR); in flush_all_to_thread()
616 current->thread.trap_nr = TRAP_HWBKPT; in do_send_trap()
643 current->thread.hw_brk[0] = null_brk; in do_break_handler()
644 current->thread.hw_brk[0].flags |= HW_BRK_FLAG_DISABLED; in do_break_handler()
652 info = &current->thread.hw_brk[i]; in do_break_handler()
658 current->thread.hw_brk[i] = null_brk; in do_break_handler()
659 current->thread.hw_brk[i].flags |= HW_BRK_FLAG_DISABLED; in do_break_handler()
666 current->thread.trap_nr = TRAP_HWBKPT; in DEFINE_INTERRUPT_HANDLER()
695 static void set_debug_reg_defaults(struct thread_struct *thread) in set_debug_reg_defaults() argument
697 thread->debug.iac1 = thread->debug.iac2 = 0; in set_debug_reg_defaults()
699 thread->debug.iac3 = thread->debug.iac4 = 0; in set_debug_reg_defaults()
701 thread->debug.dac1 = thread->debug.dac2 = 0; in set_debug_reg_defaults()
703 thread->debug.dvc1 = thread->debug.dvc2 = 0; in set_debug_reg_defaults()
705 thread->debug.dbcr0 = 0; in set_debug_reg_defaults()
710 thread->debug.dbcr1 = DBCR1_IAC1US | DBCR1_IAC2US | in set_debug_reg_defaults()
716 thread->debug.dbcr2 = DBCR2_DAC1US | DBCR2_DAC2US; in set_debug_reg_defaults()
718 thread->debug.dbcr1 = 0; in set_debug_reg_defaults()
756 if ((current->thread.debug.dbcr0 & DBCR0_IDM) in switch_booke_debug_regs()
770 static void set_debug_reg_defaults(struct thread_struct *thread) in set_debug_reg_defaults() argument
776 thread->hw_brk[i] = null_brk; in set_debug_reg_defaults()
778 set_breakpoint(i, &thread->hw_brk[i]); in set_debug_reg_defaults()
801 &new->thread.hw_brk[i]))) in switch_hw_breakpoint()
804 __set_breakpoint(i, &new->thread.hw_brk[i]); in switch_hw_breakpoint()
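switch_hw_breakpoint() (source lines 801-804) reads as a per-slot loop: compare the incoming task's saved breakpoint against the one currently programmed and reprogram the slot only when they differ. The following is a hypothetical standalone sketch of that loop; the slot count, the match helper and the arch_hw_breakpoint layout are all mocked rather than the kernel's definitions.

#include <stdio.h>
#include <string.h>
#include <stdbool.h>

#define NR_WP_SLOTS 2 /* hypothetical fixed slot count */

/* Mock per-slot breakpoint description. */
struct arch_hw_breakpoint { unsigned long address; unsigned long type; };

struct thread_struct { struct arch_hw_breakpoint hw_brk[NR_WP_SLOTS]; };
struct task_struct { struct thread_struct thread; };

/* What the hardware is currently programmed with (per CPU in the kernel). */
static struct arch_hw_breakpoint current_brk[NR_WP_SLOTS];

static bool hw_brk_match(const struct arch_hw_breakpoint *a,
			 const struct arch_hw_breakpoint *b)
{
	return memcmp(a, b, sizeof(*a)) == 0;
}

static void __set_breakpoint(int i, const struct arch_hw_breakpoint *brk)
{
	current_brk[i] = *brk; /* stand-in for writing the debug registers */
	printf("slot %d reprogrammed to %#lx\n", i, brk->address);
}

static void switch_hw_breakpoint(struct task_struct *new_task)
{
	for (int i = 0; i < NR_WP_SLOTS; i++) {
		if (hw_brk_match(&current_brk[i], &new_task->thread.hw_brk[i]))
			continue; /* unchanged slot, skip the register write */
		__set_breakpoint(i, &new_task->thread.hw_brk[i]);
	}
}

int main(void)
{
	struct task_struct next = { 0 };

	next.thread.hw_brk[1].address = 0x1000;
	switch_hw_breakpoint(&next);
	return 0;
}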
901 return tsk && tsk->thread.regs && (tsk->thread.regs->msr & MSR_TM); in tm_enabled()
924 giveup_all(container_of(thr, struct task_struct, thread)); in tm_reclaim_thread()
951 tm_reclaim_thread(&current->thread, cause); in tm_reclaim_current()
966 struct thread_struct *thr = &tsk->thread; in tm_reclaim_task()
996 extern void __tm_recheckpoint(struct thread_struct *thread);
998 void tm_recheckpoint(struct thread_struct *thread) in tm_recheckpoint() argument
1002 if (!(thread->regs->msr & MSR_TM)) in tm_recheckpoint()
1015 tm_restore_sprs(thread); in tm_recheckpoint()
1017 __tm_recheckpoint(thread); in tm_recheckpoint()
1038 if (!MSR_TM_ACTIVE(new->thread.regs->msr)){ in tm_recheckpoint_new_task()
1039 tm_restore_sprs(&new->thread); in tm_recheckpoint_new_task()
1044 new->pid, new->thread.regs->msr); in tm_recheckpoint_new_task()
1046 tm_recheckpoint(&new->thread); in tm_recheckpoint_new_task()
1053 new->thread.regs->msr &= ~(MSR_FP | MSR_VEC | MSR_VSX); in tm_recheckpoint_new_task()
1068 prev->thread.load_tm++; in __switch_to_tm()
1070 if (!MSR_TM_ACTIVE(prev->thread.regs->msr) && prev->thread.load_tm == 0) in __switch_to_tm()
1071 prev->thread.regs->msr &= ~MSR_TM; in __switch_to_tm()
1106 msr_diff = current->thread.ckpt_regs.msr & ~regs->msr; in restore_tm_state()
1111 current->thread.load_fp = 1; in restore_tm_state()
1114 current->thread.load_vec = 1; in restore_tm_state()
1213 new_thread = &new->thread; in __switch_to()
1214 old_thread = &current->thread; in __switch_to()
1242 switch_booke_debug_regs(&new->thread.debug); in __switch_to()
1257 save_sprs(&prev->thread); in __switch_to()
1314 if (current->thread.regs) in __switch_to()
1315 restore_math(current->thread.regs); in __switch_to()
1545 set_debug_reg_defaults(&current->thread); in flush_thread()
1560 if (!current->thread.regs) { in arch_setup_new_exec()
1562 current->thread.regs = regs - 1; in arch_setup_new_exec()
1566 current->thread.regs->amr = default_amr; in arch_setup_new_exec()
1567 current->thread.regs->iamr = default_iamr; in arch_setup_new_exec()
1613 if (t->thread.tidr) in set_thread_tidr()
1616 t->thread.tidr = (u16)task_pid_nr(t); in set_thread_tidr()
1617 mtspr(SPRN_TIDR, t->thread.tidr); in set_thread_tidr()
1672 p->thread.ksp_vsid = sp_vsid; in setup_ksp_vsid()
1715 p->thread.regs = NULL; /* no user register state */ in copy_thread()
1724 p->thread.regs = childregs; in copy_thread()
1755 p->thread.ksp = sp; in copy_thread()
1758 p->thread.ptrace_bps[i] = NULL; in copy_thread()
1762 p->thread.fp_save_area = NULL; in copy_thread()
1765 p->thread.vr_save_area = NULL; in copy_thread()
1768 p->thread.kuap = KUAP_NONE; in copy_thread()
1775 p->thread.dscr_inherit = current->thread.dscr_inherit; in copy_thread()
1776 p->thread.dscr = mfspr(SPRN_DSCR); in copy_thread()
1781 p->thread.tidr = 0; in copy_thread()
1882 current->thread.used_vsr = 0; in start_thread()
1884 current->thread.load_slb = 0; in start_thread()
1885 current->thread.load_fp = 0; in start_thread()
1887 memset(&current->thread.fp_state, 0, sizeof(current->thread.fp_state)); in start_thread()
1888 current->thread.fp_save_area = NULL; in start_thread()
1891 memset(&current->thread.vr_state, 0, sizeof(current->thread.vr_state)); in start_thread()
1892 current->thread.vr_state.vscr.u[3] = 0x00010000; /* Java mode disabled */ in start_thread()
1893 current->thread.vr_save_area = NULL; in start_thread()
1894 current->thread.vrsave = 0; in start_thread()
1895 current->thread.used_vr = 0; in start_thread()
1896 current->thread.load_vec = 0; in start_thread()
1899 memset(current->thread.evr, 0, sizeof(current->thread.evr)); in start_thread()
1900 current->thread.acc = 0; in start_thread()
1901 current->thread.spefscr = 0; in start_thread()
1902 current->thread.used_spe = 0; in start_thread()
1905 current->thread.tm_tfhar = 0; in start_thread()
1906 current->thread.tm_texasr = 0; in start_thread()
1907 current->thread.tm_tfiar = 0; in start_thread()
1908 current->thread.load_tm = 0; in start_thread()
1918 struct pt_regs *regs = tsk->thread.regs; in set_fpexc_mode()
1939 tsk->thread.spefscr_last = mfspr(SPRN_SPEFSCR); in set_fpexc_mode()
1940 tsk->thread.fpexc_mode = val & in set_fpexc_mode()
1956 tsk->thread.fpexc_mode = __pack_fe01(val); in set_fpexc_mode()
1959 | tsk->thread.fpexc_mode); in set_fpexc_mode()
1968 if (tsk->thread.fpexc_mode & PR_FP_EXC_SW_ENABLE) { in get_fpexc_mode()
1983 tsk->thread.spefscr_last = mfspr(SPRN_SPEFSCR); in get_fpexc_mode()
1984 val = tsk->thread.fpexc_mode; in get_fpexc_mode()
1989 val = __unpack_fe01(tsk->thread.fpexc_mode); in get_fpexc_mode()
1996 struct pt_regs *regs = tsk->thread.regs; in set_endian()
2017 struct pt_regs *regs = tsk->thread.regs; in get_endian()
2040 tsk->thread.align_ctl = val; in set_unalign_ctl()
2046 return put_user(tsk->thread.align_ctl, (unsigned int __user *)adr); in get_unalign_ctl()
2119 sp = p->thread.ksp; in ___get_wchan()
2174 sp = tsk->thread.ksp; in show_stack()