/arch/sh/kernel/ |
D | irq.c
     60  struct thread_info tinfo;                                member
     87  irqctx->tinfo.task = curctx->tinfo.task;                 in handle_one_irq()
     88  irqctx->tinfo.previous_sp = current_stack_pointer;       in handle_one_irq()
     94  irqctx->tinfo.preempt_count =                            in handle_one_irq()
     95  (irqctx->tinfo.preempt_count & ~SOFTIRQ_MASK) |          in handle_one_irq()
     96  (curctx->tinfo.preempt_count & SOFTIRQ_MASK);            in handle_one_irq()
    126  irqctx->tinfo.task = NULL;                               in irq_ctx_init()
    127  irqctx->tinfo.exec_domain = NULL;                        in irq_ctx_init()
    128  irqctx->tinfo.cpu = cpu;                                 in irq_ctx_init()
    129  irqctx->tinfo.preempt_count = HARDIRQ_OFFSET;            in irq_ctx_init()
    [all …]
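The matches above show the usual separate-IRQ-stack arrangement: each IRQ stack begins with its own struct thread_info, and before the handler runs on it, handle_one_irq() copies over the current task pointer, records the interrupted stack pointer, and carries the softirq bits of preempt_count across (lines 87-96). A minimal user-space model of that hand-off follows; the types, mask value and union layout are illustrative stand-ins, not the kernel's definitions.

/* Simplified model of the hand-off at lines 87-96; all names and
 * constants below are illustrative stand-ins. */
#include <stddef.h>

#define SOFTIRQ_MASK 0x0000ff00u        /* stand-in for the real mask */

struct task_struct;                     /* opaque in this sketch */

struct thread_info {
        struct task_struct *task;
        unsigned long previous_sp;
        unsigned int preempt_count;
};

union irq_ctx {                         /* IRQ stack, thread_info at its base */
        struct thread_info tinfo;
        unsigned long stack[1024];
};

static void enter_irq_ctx(union irq_ctx *irqctx, struct thread_info *curctx,
                          unsigned long cur_sp)
{
        /* Let code on the IRQ stack still find the current task ... */
        irqctx->tinfo.task = curctx->task;
        /* ... remember where the task stack was left ... */
        irqctx->tinfo.previous_sp = cur_sp;
        /* ... and keep only the softirq part of the old preempt count. */
        irqctx->tinfo.preempt_count =
                (irqctx->tinfo.preempt_count & ~SOFTIRQ_MASK) |
                (curctx->preempt_count & SOFTIRQ_MASK);
}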
|
D | dumpstack.c
     57  struct thread_info *tinfo, int *graph)                   in print_ftrace_graph_addr() argument
     59  struct task_struct *task = tinfo->task;                  in print_ftrace_graph_addr()
     80  struct thread_info *tinfo, int *graph)                   in print_ftrace_graph_addr() argument
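print_ftrace_graph_addr() exists because the function graph tracer rewrites return addresses on the stack to point at its own trampoline; a stack dumper therefore has to reach through tinfo->task to the per-task list of saved real return addresses. A rough user-space model of that lookup follows; trampoline_addr, ret_stack and the index handling are hypothetical stand-ins, not the kernel's fields.

/* Rough model only: when a stack slot holds the tracer's trampoline,
 * substitute the real return address saved per task.  All names here
 * are hypothetical stand-ins. */
#include <stdio.h>

struct ret_entry { unsigned long ret; };

struct task_struct {
        int curr_ret;                      /* index of newest saved entry */
        struct ret_entry ret_stack[64];    /* saved real return addresses */
};

struct thread_info { struct task_struct *task; };

static const unsigned long trampoline_addr = 0xdeadbeefUL;   /* stand-in */

static void print_graph_addr(unsigned long addr, struct thread_info *tinfo,
                             int *graph)
{
        struct task_struct *task = tinfo->task;

        if (addr != trampoline_addr || !task)
                return;

        /* The graph counter walks back through the saved entries as more
         * trampoline slots are met further down the stack. */
        addr = task->ret_stack[task->curr_ret - *graph].ret;
        printf("  real return: %#lx\n", addr);
        (*graph)++;
}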
|
/arch/x86/kernel/ |
D | irq_32.c
     62  struct thread_info tinfo;                                    member
    100  irqctx->tinfo.task = curctx->tinfo.task;                     in execute_on_irq_stack()
    101  irqctx->tinfo.previous_esp = current_stack_pointer;          in execute_on_irq_stack()
    104  irqctx->tinfo.preempt_count = curctx->tinfo.preempt_count;   in execute_on_irq_stack()
    132  memset(&irqctx->tinfo, 0, sizeof(struct thread_info));       in irq_ctx_init()
    133  irqctx->tinfo.cpu = cpu;                                     in irq_ctx_init()
    134  irqctx->tinfo.preempt_count = HARDIRQ_OFFSET;                in irq_ctx_init()
    135  irqctx->tinfo.addr_limit = MAKE_MM_SEG(0);                   in irq_ctx_init()
    142  memset(&irqctx->tinfo, 0, sizeof(struct thread_info));       in irq_ctx_init()
    143  irqctx->tinfo.cpu = cpu;                                     in irq_ctx_init()
    [all …]
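On 32-bit x86 the per-CPU IRQ stacks each get a zeroed thread_info of their own when irq_ctx_init() runs: the hardirq one is pinned with HARDIRQ_OFFSET in preempt_count and its addr_limit is set so user-space access fails from that context, and a second context (the softirq stack) gets the same basic treatment at lines 142-143. A sketch of that initialisation, assuming stand-in types, array sizes and values:

/* Sketch of the per-CPU setup suggested by lines 132-143; the array
 * sizes, the HARDIRQ_OFFSET value and the addr_limit handling are
 * stand-ins. */
#include <string.h>

#define HARDIRQ_OFFSET 0x00010000u      /* stand-in value */
#define NR_CPUS 64

struct thread_info {
        unsigned int cpu;
        unsigned int preempt_count;
        unsigned long addr_limit;
};

union irq_ctx {
        struct thread_info tinfo;
        unsigned long stack[2048];
};

static union irq_ctx hardirq_ctx[NR_CPUS];
static union irq_ctx softirq_ctx[NR_CPUS];

static void irq_ctx_init_model(unsigned int cpu)
{
        union irq_ctx *irqctx = &hardirq_ctx[cpu];

        memset(&irqctx->tinfo, 0, sizeof(struct thread_info));
        irqctx->tinfo.cpu = cpu;
        irqctx->tinfo.preempt_count = HARDIRQ_OFFSET;  /* always "in hardirq" */
        irqctx->tinfo.addr_limit = 0;                  /* stand-in for MAKE_MM_SEG(0) */

        irqctx = &softirq_ctx[cpu];
        memset(&irqctx->tinfo, 0, sizeof(struct thread_info));
        irqctx->tinfo.cpu = cpu;                       /* softirq stack: count stays 0 */
}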
|
D | dumpstack.c
     38  struct thread_info *tinfo, int *graph)                           in print_ftrace_graph_addr() argument
     47  task = tinfo->task;                                              in print_ftrace_graph_addr()
     64  struct thread_info *tinfo, int *graph)                           in print_ftrace_graph_addr() argument
     75  static inline int valid_stack_ptr(struct thread_info *tinfo,     in valid_stack_ptr() argument
     78  void *t = tinfo;                                                  in valid_stack_ptr()
     89  print_context_stack(struct thread_info *tinfo,                   in print_context_stack() argument
     96  while (valid_stack_ptr(tinfo, stack, sizeof(*stack), end)) {     in print_context_stack()
    108  print_ftrace_graph_addr(addr, data, ops, tinfo, graph);          in print_context_stack()
    117  print_context_stack_bp(struct thread_info *tinfo,                in print_context_stack_bp() argument
    125  while (valid_stack_ptr(tinfo, ret_addr, sizeof(*ret_addr), end)) {   in print_context_stack_bp()
    [all …]
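Both walkers above lean on valid_stack_ptr(): a candidate pointer only counts if it (and the object read through it) still lies inside the THREAD_SIZE region that starts at the thread_info. print_context_stack() then steps through the stack one word at a time and reports anything that looks like kernel text. A reduced, compilable model of that loop; THREAD_SIZE, is_text_address() and the print format are stand-ins.

/* Reduced model of the bounds check and the word-by-word scan; the
 * predicate and the sizes are illustrative stand-ins. */
#include <stdio.h>

#define THREAD_SIZE 8192                /* stand-in stack size */

struct thread_info { int cpu; };        /* sits at the stack's base */

static int is_text_address(unsigned long addr)   /* stand-in check */
{
        return addr >= 0xc0000000UL;
}

/* A pointer is usable only while it stays inside the thread_info/stack
 * region and a whole object of 'size' bytes can still be read there. */
static int valid_stack_ptr(struct thread_info *tinfo, void *p,
                           unsigned int size)
{
        unsigned long base = (unsigned long)tinfo;
        unsigned long ptr = (unsigned long)p;

        return ptr > base && ptr <= base + THREAD_SIZE - size;
}

static void print_context_stack_model(struct thread_info *tinfo,
                                      unsigned long *stack)
{
        while (valid_stack_ptr(tinfo, stack, sizeof(*stack))) {
                unsigned long addr = *stack;

                if (is_text_address(addr))
                        printf("  [<%08lx>]\n", addr);
                stack++;
        }
}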
|
D | dumpstack_64.c
    121  struct thread_info *tinfo;                                        in dump_trace() local
    144  tinfo = task_thread_info(task);                                   in dump_trace()
    155  bp = ops->walk_stack(tinfo, stack, bp, ops,                       in dump_trace()
    174  bp = ops->walk_stack(tinfo, stack, bp,                            in dump_trace()
    193  bp = ops->walk_stack(tinfo, stack, bp, ops, data, NULL, &graph);  in dump_trace()
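dump_trace() fetches the task's thread_info once and then hands each stack region in turn (exception stacks, the IRQ stack, finally the process stack) to the ops->walk_stack callback; the callback returns an updated frame pointer so the walk can resume where the previous region left off. A trimmed sketch of driving such a callback across two regions; the signature is a reduced stand-in, not the kernel's stacktrace_ops.

/* Trimmed illustration of driving a pluggable walker across two stack
 * regions; the callback signature is a reduced stand-in. */
#include <stddef.h>

struct thread_info { int cpu; };

typedef unsigned long (*walk_stack_fn)(struct thread_info *tinfo,
                                       unsigned long *stack,
                                       unsigned long bp,
                                       unsigned long *end,
                                       int *graph);

static unsigned long dump_trace_model(struct thread_info *tinfo,
                                      unsigned long *irq_stack,
                                      unsigned long *irq_stack_end,
                                      unsigned long *task_stack,
                                      unsigned long bp,
                                      walk_stack_fn walk)
{
        int graph = 0;

        /* IRQ stack first: the walker stops at irq_stack_end and hands
         * back the frame pointer to continue from. */
        bp = walk(tinfo, irq_stack, bp, irq_stack_end, &graph);

        /* Then the process stack, with no end marker (run to the top). */
        return walk(tinfo, task_stack, bp, NULL, &graph);
}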
|
D | ptrace.c
    187  struct thread_info *tinfo;                   in kernel_stack_pointer() local
    192  tinfo = (struct thread_info *)context;       in kernel_stack_pointer()
    193  if (tinfo->previous_esp)                     in kernel_stack_pointer()
    194  return tinfo->previous_esp;                  in kernel_stack_pointer()
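kernel_stack_pointer() relies on every kernel stack having a thread_info at its base: masking any address inside the stack with ~(THREAD_SIZE - 1) lands on it, and if the 32-bit IRQ entry code recorded previous_esp there, that value is the interrupted task's real stack pointer. A condensed sketch of that recovery, assuming a power-of-two THREAD_SIZE and a trimmed thread_info:

/* Condensed model of the previous_esp recovery; THREAD_SIZE and the
 * trimmed thread_info are stand-ins. */
#define THREAD_SIZE 8192                /* must be a power of two */

struct thread_info {
        unsigned long previous_esp;     /* saved by the IRQ stack switch */
};

static unsigned long kernel_stack_pointer_model(unsigned long addr_in_frame)
{
        /* Round down to the base of whatever stack the frame lives on. */
        unsigned long context = addr_in_frame & ~(THREAD_SIZE - 1);
        struct thread_info *tinfo = (struct thread_info *)context;

        if (tinfo->previous_esp)        /* we came in on an IRQ stack */
                return tinfo->previous_esp;

        return addr_in_frame;           /* already on the task stack */
}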
|
/arch/metag/kernel/ |
D | irq.c
     20  struct thread_info tinfo;                            member
     81  irqctx->tinfo.task = curctx->tinfo.task;             in do_IRQ()
     87  irqctx->tinfo.preempt_count =                        in do_IRQ()
     88  (irqctx->tinfo.preempt_count & ~SOFTIRQ_MASK) |      in do_IRQ()
     89  (curctx->tinfo.preempt_count & SOFTIRQ_MASK);        in do_IRQ()
    134  irqctx->tinfo.task = NULL;                           in irq_ctx_init()
    135  irqctx->tinfo.exec_domain = NULL;                    in irq_ctx_init()
    136  irqctx->tinfo.cpu = cpu;                             in irq_ctx_init()
    137  irqctx->tinfo.preempt_count = HARDIRQ_OFFSET;        in irq_ctx_init()
    138  irqctx->tinfo.addr_limit = MAKE_MM_SEG(0);           in irq_ctx_init()
    [all …]
|
/arch/x86/um/ |
D | sysrq_32.c
     38  static inline int valid_stack_ptr(struct thread_info *tinfo, void *p)        in valid_stack_ptr() argument
     40  return p > (void *)tinfo &&                                                  in valid_stack_ptr()
     41  p < (void *)tinfo + THREAD_SIZE - 3;                                         in valid_stack_ptr()
     45  static inline unsigned long print_context_stack(struct thread_info *tinfo,   in print_context_stack() argument
     51  while (valid_stack_ptr(tinfo, (void *)ebp)) {                                in print_context_stack()
     59  while (valid_stack_ptr(tinfo, stack)) {                                      in print_context_stack()
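The UML variant shows the two classic walks side by side: when frame pointers are available, each frame starts with the caller's saved %ebp and the return address sits just above it, so following ebp = *ebp climbs one frame per step (line 51); otherwise every word of the stack is scanned (line 59). A small model of the frame-chain loop, assuming a 32-bit frame layout; print_symbol() and THREAD_SIZE are stand-ins.

/* Small model of the %ebp chain walk; 32-bit frame layout assumed,
 * print_symbol() and THREAD_SIZE are stand-ins. */
#include <stdio.h>
#include <stdint.h>

#define THREAD_SIZE 8192

struct thread_info { int cpu; };

static int valid_stack_ptr(struct thread_info *tinfo, void *p)
{
        uintptr_t base = (uintptr_t)tinfo;
        uintptr_t ptr = (uintptr_t)p;

        return ptr > base && ptr < base + THREAD_SIZE - 3;
}

static void print_symbol(unsigned long addr)
{
        printf("  [<%08lx>]\n", addr);
}

static void walk_frames(struct thread_info *tinfo, unsigned long ebp)
{
        while (valid_stack_ptr(tinfo, (void *)ebp)) {
                /* The return address lives right above the saved frame pointer. */
                print_symbol(*(unsigned long *)(ebp + sizeof(unsigned long)));
                /* The saved frame pointer points at the caller's frame. */
                ebp = *(unsigned long *)ebp;
        }
}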
|
/arch/avr32/kernel/ |
D | process.c
    111  static inline int valid_stack_ptr(struct thread_info *tinfo, unsigned long p)   in valid_stack_ptr() argument
    113  return (p > (unsigned long)tinfo)                                               in valid_stack_ptr()
    114  && (p < (unsigned long)tinfo + THREAD_SIZE - 3);                                in valid_stack_ptr()
    122  struct thread_info *tinfo;                                                      in show_trace_log_lvl() local
    136  tinfo = task_thread_info(tsk);                                                  in show_trace_log_lvl()
    138  while (valid_stack_ptr(tinfo, fp)) {                                            in show_trace_log_lvl()
    182  struct thread_info *tinfo;                                                      in show_stack_log_lvl() local
    188  sp = (unsigned long)&tinfo;                                                     in show_stack_log_lvl()
    193  tinfo = task_thread_info(tsk);                                                  in show_stack_log_lvl()
    195  if (valid_stack_ptr(tinfo, sp)) {                                               in show_stack_log_lvl()
    [all …]
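One detail worth calling out is line 188: when show_stack_log_lvl() is asked to dump the current stack without being handed a stack pointer, it simply takes the address of one of its own local variables, which by definition lives on the stack being dumped. In miniature:

/* The "use a local's address as the stack pointer" trick from line 188,
 * in isolation.  The walk itself is elided. */
static void dump_current_stack(void)
{
        unsigned long anchor;                        /* lives on this stack */
        unsigned long sp = (unsigned long)&anchor;   /* good enough as an SP */

        /* ... walk upward from sp, bounds-checked against thread_info ... */
        (void)sp;
}

Since the stack grows downwards, the callers' frames all sit above that local, so scanning upward from its address covers the live part of the stack.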
|
/arch/avr32/oprofile/ |
D | backtrace.c
     28  static inline int valid_stack_ptr(struct thread_info *tinfo, unsigned long p)   in valid_stack_ptr() argument
     30  return (p > (unsigned long)tinfo)                                               in valid_stack_ptr()
     31  && (p < (unsigned long)tinfo + THREAD_SIZE - 3);                                in valid_stack_ptr()
|
/arch/x86/include/asm/ |
D | stacktrace.h
     17  typedef unsigned long (*walk_stack_t)(struct thread_info *tinfo,
     26  print_context_stack(struct thread_info *tinfo,
     32  print_context_stack_bp(struct thread_info *tinfo,
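stacktrace.h is where the walker becomes a pluggable callback: walk_stack_t is the function-pointer type dump_trace() is parameterised with, and print_context_stack() / print_context_stack_bp() are the two implementations that satisfy it (word scan vs. frame-pointer chain). The typedef is truncated in the listing above, so the sketch below only shows the shape of such an interface with a reduced, stand-in parameter list.

/* Shape of a pluggable stack-walker interface; the parameter list is a
 * reduced stand-in for the truncated typedef shown at line 17. */
struct thread_info;

typedef unsigned long (*walk_stack_t)(struct thread_info *tinfo,
                                      unsigned long *stack,
                                      unsigned long bp,
                                      int *graph);

unsigned long print_context_stack(struct thread_info *tinfo,
                                  unsigned long *stack, unsigned long bp,
                                  int *graph);

unsigned long print_context_stack_bp(struct thread_info *tinfo,
                                     unsigned long *stack, unsigned long bp,
                                     int *graph);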
|
/arch/openrisc/kernel/ |
D | traps.c
     44  static inline int valid_stack_ptr(struct thread_info *tinfo, void *p)   in valid_stack_ptr() argument
     46  return p > (void *)tinfo && p < (void *)tinfo + THREAD_SIZE - 3;        in valid_stack_ptr()
|