/linux/tools/perf/util/

unwind-libdw.c
    218  return entry(pc, ui) || !(--ui->max_stack) ?  in frame_callback()
    225  int max_stack)  in unwind__get_entries() argument
    233  .max_stack = max_stack,  in unwind__get_entries()
    241  ui = zalloc(sizeof(ui_buf) + sizeof(ui_buf.entries[0]) * max_stack);  in unwind__get_entries()
    265  if (err && ui->max_stack != max_stack)  in unwind__get_entries()

unwind-libunwind-local.c
    642  void *arg, int max_stack)  in get_entries() argument
    645  unw_word_t ips[max_stack];  in get_entries()
    661  if (max_stack - 1 > 0) {  in get_entries()
    672  while (!ret && (unw_step(&c) > 0) && i < max_stack) {  in get_entries()
    688  max_stack = i;  in get_entries()
    694  for (i = 0; i < max_stack && !ret; i++) {  in get_entries()
    698  j = max_stack - i - 1;  in get_entries()
    707  struct perf_sample *data, int max_stack)  in _unwind__get_entries() argument
    718  if (max_stack <= 0)  in _unwind__get_entries()
    721  return get_entries(&ui, cb, arg, max_stack);  in _unwind__get_entries()

unwind.h
    26  struct perf_sample *data, int max_stack);
    32  struct perf_sample *data, int max_stack);
    68  int max_stack __maybe_unused)  in unwind__get_entries()

unwind-libunwind.c
    83  struct perf_sample *data, int max_stack)  in unwind__get_entries() argument
    86  return thread->maps->unwind_libunwind_ops->get_entries(cb, arg, thread, data, max_stack);  in unwind__get_entries()

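The unwind-libunwind-local.c hits above show the core pattern on the perf tools side: get_entries() collects at most max_stack instruction pointers, stepping the libunwind cursor only while i < max_stack, and _unwind__get_entries() bails out early when max_stack <= 0. Below is a minimal, self-contained sketch of that bounded-walk shape using plain libunwind (link with -lunwind); collect_ip() and get_entries_sketch() are illustrative names rather than perf's real callback machinery, and the register/ordering handling of the real code is omitted.

/*
 * Hypothetical stand-alone example, not perf code.  Build with:
 *   cc -std=c99 -o unwind_sketch unwind_sketch.c -lunwind
 */
#define UNW_LOCAL_ONLY
#include <libunwind.h>
#include <stdio.h>

#define MAX_IPS 128

/* Stand-in for perf's unwind entry callback. */
static int collect_ip(unw_word_t ip, void *arg)
{
	(void)arg;
	printf("ip: %#lx\n", (unsigned long)ip);
	return 0;				/* non-zero would abort the walk */
}

static int get_entries_sketch(int (*cb)(unw_word_t, void *), void *arg,
			      int max_stack)
{
	unw_context_t uc;
	unw_cursor_t c;
	unw_word_t ips[MAX_IPS];
	int i = 0, ret = 0;

	/* Mirrors the "if (max_stack <= 0)" bail-out in _unwind__get_entries(). */
	if (max_stack <= 0 || max_stack > MAX_IPS)
		return -1;

	unw_getcontext(&uc);
	unw_init_local(&c, &uc);

	/* Step at most max_stack frames, as in the "i < max_stack" guard above. */
	while (!ret && unw_step(&c) > 0 && i < max_stack) {
		unw_get_reg(&c, UNW_REG_IP, &ips[i]);
		i++;
	}

	/* Hand the collected IPs to the callback (the real code reorders them). */
	for (int j = 0; j < i && !ret; j++)
		ret = cb(ips[j], arg);

	return ret;
}

int main(void)
{
	return get_entries_sketch(collect_ip, NULL, 16);
}

Bounding both the stepping loop and the delivery loop by the same max_stack is what lets callers such as thread__resolve_callchain_unwind() assume that no more than max_stack entries ever reach the callchain cursor.
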
unwind-libdw.h
    21  int max_stack;  member

evsel_config.h
    41  int max_stack;  member

callchain.h
    104  u16 max_stack;  member
    252  int max_stack);

top.h
    34  int max_stack;  member

machine.c
    2614  int max_stack,  in resolve_lbr_callchain_sample() argument
    2719  int max_stack)  in thread__resolve_callchain_sample() argument
    2737  root_al, max_stack,  in thread__resolve_callchain_sample()
    2762  int nr = min(max_stack, (int)branch->nr);  in thread__resolve_callchain_sample()
    2931  int max_stack)  in thread__resolve_callchain_unwind() argument
    2944  thread, sample, max_stack);  in thread__resolve_callchain_unwind()
    2953  int max_stack)  in thread__resolve_callchain() argument
    2963  max_stack);  in thread__resolve_callchain()
    2968  max_stack);  in thread__resolve_callchain()
    2972  max_stack);  in thread__resolve_callchain()
    [all …]

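machine.c is where a sample's raw callchain data is turned into resolved entries, and max_stack caps every path through it; the hit at line 2762 shows the branch-stack case, where only min(max_stack, branch->nr) recorded branches are walked. Below is a small self-contained model of just that clamp; struct branch_stack_sketch, add_entry() and resolve_branch_stack() are simplified stand-ins rather than perf's real types, and the symbol resolution and LBR ordering done by the real code are left out.

#include <inttypes.h>
#include <stdio.h>

struct branch_stack_sketch {
	uint64_t nr;			/* number of from/to pairs recorded by the PMU */
	uint64_t from[32];
	uint64_t to[32];
};

/* Stands in for appending an entry to perf's callchain cursor. */
static void add_entry(uint64_t ip)
{
	printf("  %#" PRIx64 "\n", ip);
}

static void resolve_branch_stack(const struct branch_stack_sketch *branch, int max_stack)
{
	/* Same clamp as "int nr = min(max_stack, (int)branch->nr)". */
	int nr = max_stack < (int)branch->nr ? max_stack : (int)branch->nr;

	for (int i = 0; i < nr; i++) {
		add_entry(branch->to[i]);
		add_entry(branch->from[i]);
	}
}

int main(void)
{
	struct branch_stack_sketch bs = {
		.nr = 3,
		.from = { 0x401000, 0x401100, 0x401200 },
		.to   = { 0x402000, 0x402100, 0x402200 },
	};

	resolve_branch_stack(&bs, 2);	/* max_stack = 2 caps the walk at two pairs */
	return 0;
}

The same max_stack value also flows on into thread__resolve_callchain_unwind() (hits at 2931/2944), so the branch-stack and DWARF/libunwind paths obey one depth limit.
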
evsel.c
    838  attr->sample_max_stack = param->max_stack;  in __evsel__config_callchain()
    924  int max_stack = 0;  in evsel__apply_config_terms() local
    964  max_stack = term->val.max_stack;  in evsel__apply_config_terms()
    999  if ((callgraph_buf != NULL) || (dump_size > 0) || max_stack) {  in evsel__apply_config_terms()
    1002  if (max_stack) {  in evsel__apply_config_terms()
    1003  param.max_stack = max_stack;  in evsel__apply_config_terms()

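The evsel.c hits show how a per-event "max-stack" term travels into the kernel ABI: evsel__apply_config_terms() copies term->val.max_stack into a local, stores it in a callchain parameter block, and __evsel__config_callchain() finally writes it to perf_event_attr.sample_max_stack. A minimal sketch of that plumbing follows; callchain_param_sketch and config_callchain() are invented stand-ins for perf's internal types, while perf_event_attr, sample_max_stack and PERF_SAMPLE_CALLCHAIN come from the kernel UAPI header.

#include <linux/perf_event.h>
#include <stdio.h>

struct callchain_param_sketch {
	int max_stack;			/* mirrors the max_stack member in callchain.h */
};

static void config_callchain(struct perf_event_attr *attr,
			     const struct callchain_param_sketch *param)
{
	/* Same assignment as evsel.c:838. */
	attr->sample_max_stack = param->max_stack;
	attr->sample_type |= PERF_SAMPLE_CALLCHAIN;
}

int main(void)
{
	struct perf_event_attr attr = { .size = sizeof(attr) };
	struct callchain_param_sketch param = { 0 };
	int term_max_stack = 8;		/* value of a hypothetical "max-stack=8" event term */

	if (term_max_stack)		/* only override when the term was actually given */
		param.max_stack = term_max_stack;

	config_callchain(&attr, &param);
	printf("sample_max_stack = %u\n", (unsigned int)attr.sample_max_stack);
	return 0;
}
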
machine.h
    194  int max_stack);

/linux/arch/mips/kernel/

perf_event.c
    36  if (entry->nr >= entry->max_stack)  in save_raw_perf_callchain()
    60  if (entry->nr >= entry->max_stack)  in perf_callchain_kernel()

/linux/kernel/events/

callchain.c
    181  u32 max_stack, bool crosstask, bool add_mark)  in get_perf_callchain() argument
    192  ctx.max_stack = max_stack;  in get_perf_callchain()

/linux/arch/arm64/kernel/

perf_callchain.c
    118  while (entry->nr < entry->max_stack &&  in perf_callchain_user()
    128  while ((entry->nr < entry->max_stack) &&  in perf_callchain_user()

/linux/arch/riscv/kernel/

perf_callchain.c
    69  while (fp && !(fp & 0x3) && entry->nr < entry->max_stack)  in perf_callchain_user()

/linux/arch/arm/kernel/

perf_callchain.c
    79  while ((entry->nr < entry->max_stack) &&  in perf_callchain_user()

/linux/arch/csky/kernel/

perf_callchain.c
    106  while (fp && !(fp & 0x3) && entry->nr < entry->max_stack)  in perf_callchain_user()

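The arm, arm64, riscv and csky walkers above all share one shape: follow the user-space frame-pointer chain, but only while entry->nr is below entry->max_stack (and while the frame pointer still looks sane, e.g. non-NULL and word-aligned). The sketch below reproduces that loop over a hand-built, in-memory frame chain; struct frame and struct entry_ctx are simplified models, not the real perf_callchain_entry_ctx or any architecture's actual frame layout, and all the __user access and pagefault handling is omitted.

#include <stdint.h>
#include <stdio.h>

struct frame {			/* toy frame record: saved fp + return address */
	uintptr_t fp;
	uintptr_t lr;
};

struct entry_ctx {		/* models nr/max_stack from the real entry context */
	unsigned int nr;
	unsigned int max_stack;
	uintptr_t ips[64];
};

static void walk_user_stack(struct entry_ctx *entry, uintptr_t fp)
{
	/* Same shape as "while (fp && !(fp & 0x3) && entry->nr < entry->max_stack)". */
	while (fp && !(fp & 0x3) && entry->nr < entry->max_stack) {
		const struct frame *f = (const struct frame *)fp;

		entry->ips[entry->nr++] = f->lr;	/* record the return address */
		fp = f->fp;				/* follow the chain to the caller */
	}
}

int main(void)
{
	/* Three hand-built frames linked through their fp fields. */
	struct frame f2 = { .fp = 0,              .lr = 0x4003 };
	struct frame f1 = { .fp = (uintptr_t)&f2, .lr = 0x4002 };
	struct frame f0 = { .fp = (uintptr_t)&f1, .lr = 0x4001 };
	struct entry_ctx entry = { .max_stack = 2 };	/* capture at most two frames */

	walk_user_stack(&entry, (uintptr_t)&f0);

	for (unsigned int i = 0; i < entry.nr; i++)
		printf("frame %u: %#lx\n", i, (unsigned long)entry.ips[i]);
	return 0;
}

With max_stack = 2, only the two innermost frames are stored even though three are linked; that truncation is the whole point of the max_stack bound in these walkers.
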
/linux/drivers/net/ethernet/netronome/nfp/bpf/

verifier.c
    767  unsigned int max_stack;  in nfp_bpf_finalize() local
    795  max_stack = nn_readb(nn, NFP_NET_CFG_BPF_STACK_SZ) * 64;  in nfp_bpf_finalize()
    797  if (nfp_prog->stack_size > max_stack) {  in nfp_bpf_finalize()
    799  nfp_prog->stack_size, max_stack);  in nfp_bpf_finalize()

offload.c
    500  unsigned int max_stack, max_prog_len;  in nfp_net_bpf_load() local
    510  max_stack = nn_readb(nn, NFP_NET_CFG_BPF_STACK_SZ) * 64;  in nfp_net_bpf_load()
    511  if (nfp_prog->stack_size > max_stack) {  in nfp_net_bpf_load()

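In the NFP BPF offload code max_stack is not a callchain depth at all: both verifier.c and offload.c read a one-byte firmware register (NFP_NET_CFG_BPF_STACK_SZ), scale it by 64 to get the stack budget in bytes, and refuse the program if its stack usage exceeds that. A toy model of the check follows; read_cfg_byte() is an invented stand-in for nn_readb(nn, NFP_NET_CFG_BPF_STACK_SZ), and the value it reports here is made up.

#include <stdio.h>

static unsigned int read_cfg_byte(void)
{
	return 4;			/* pretend the firmware reports 4 * 64 = 256 bytes */
}

static int check_stack_budget(unsigned int prog_stack_size)
{
	unsigned int max_stack = read_cfg_byte() * 64;	/* same scaling as the driver */

	if (prog_stack_size > max_stack) {
		fprintf(stderr, "stack too large: program %uB > FW stack %uB\n",
			prog_stack_size, max_stack);
		return -1;
	}
	return 0;
}

int main(void)
{
	return check_stack_budget(512) ? 1 : 0;	/* 512 > 256, so this is rejected */
}
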
/linux/tools/perf/

builtin-report.c
    89  int max_stack;  member
    315  ret = hist_entry_iter__add(&iter, &al, rep->max_stack, rep);  in process_sample_event()
    1175  .max_stack = PERF_MAX_STACK_DEPTH,  in cmd_report()
    1235  OPT_INTEGER(0, "max-stack", &report.max_stack,  in cmd_report()
    1394  (int)itrace_synth_opts.callchain_sz > report.max_stack)  in cmd_report()
    1395  report.max_stack = itrace_synth_opts.callchain_sz;  in cmd_report()

builtin-trace.c
    165  unsigned int max_stack;  member
    2406  int max_stack = evsel->core.attr.sample_max_stack ?  local
    2408  trace->max_stack;
    2414  err = thread__resolve_callchain(al.thread, cursor, evsel, sample, NULL, NULL, max_stack);
    4093  evsel->core.attr.sample_max_stack = trace->max_stack;
    4782  .max_stack = UINT_MAX,
    4850  OPT_UINTEGER(0, "max-stack", &trace.max_stack,
    5031  if (trace.max_stack == UINT_MAX) {
    5032  trace.max_stack = input_name ? PERF_MAX_STACK_DEPTH : sysctl__max_stack();

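builtin-trace.c treats UINT_MAX as "no --max-stack given": the option defaults to UINT_MAX at line 4782, and lines 5031-5032 replace it with PERF_MAX_STACK_DEPTH when replaying a perf.data file, or with the kernel.perf_event_max_stack sysctl value when tracing live. Here is a minimal model of that fallback; read_sysctl_max_stack() is a hypothetical stand-in for perf's sysctl__max_stack() helper, and PERF_MAX_STACK_DEPTH is the UAPI default of 127.

#include <limits.h>
#include <stdio.h>

#define PERF_MAX_STACK_DEPTH	127	/* same value as the perf UAPI default */

static unsigned int read_sysctl_max_stack(void)
{
	unsigned int val = PERF_MAX_STACK_DEPTH;
	FILE *f = fopen("/proc/sys/kernel/perf_event_max_stack", "r");

	if (f) {
		if (fscanf(f, "%u", &val) != 1)
			val = PERF_MAX_STACK_DEPTH;
		fclose(f);
	}
	return val;
}

int main(int argc, char **argv)
{
	const char *input_name = argc > 1 ? argv[1] : NULL;	/* perf.data path, if any */
	unsigned int max_stack = UINT_MAX;			/* "unset" sentinel */

	if (max_stack == UINT_MAX)
		max_stack = input_name ? PERF_MAX_STACK_DEPTH : read_sysctl_max_stack();

	printf("effective max_stack: %u\n", max_stack);
	return 0;
}
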
/linux/arch/powerpc/perf/

callchain_64.c
    109  while (entry->nr < entry->max_stack) {  in perf_callchain_user_64()

callchain_32.c
    147  while (entry->nr < entry->max_stack) {  in perf_callchain_user_32()

/linux/arch/xtensa/kernel/

perf_event.c
    332  xtensa_backtrace_kernel(regs, entry->max_stack,  in perf_callchain_kernel()
    339  xtensa_backtrace_user(regs, entry->max_stack,  in perf_callchain_user()

/linux/include/linux/

perf_event.h
    70  u32 max_stack;  member
    1262  u32 max_stack, bool crosstask, bool add_mark);
    1264  extern int get_callchain_buffers(int max_stack);
    1287  if (ctx->nr < ctx->max_stack && !ctx->contexts_maxed) {  in perf_callchain_store()

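On the kernel side, perf_event.h and kernel/events/callchain.c close the loop: get_perf_callchain() copies its max_stack argument into the per-entry context (callchain.c:192), and perf_callchain_store() only appends while ctx->nr is below ctx->max_stack and no context marker has maxed out (perf_event.h:1287). The sketch below is a trimmed userspace model of just that bounding logic; the structs stand in for perf_callchain_entry and perf_callchain_entry_ctx, and the fake-IP loop replaces the real kernel/user stack walkers.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct callchain_entry {		/* models perf_callchain_entry */
	uint64_t nr;
	uint64_t ip[128];
};

struct callchain_entry_ctx {		/* models perf_callchain_entry_ctx */
	struct callchain_entry *entry;
	uint32_t max_stack;
	uint32_t nr;
	bool contexts_maxed;
};

static void callchain_store(struct callchain_entry_ctx *ctx, uint64_t ip)
{
	/* Same guard as perf_event.h:1287. */
	if (ctx->nr < ctx->max_stack && !ctx->contexts_maxed) {
		ctx->entry->ip[ctx->entry->nr++] = ip;
		ctx->nr++;
	}
}

static void get_callchain(struct callchain_entry *entry, uint32_t max_stack)
{
	struct callchain_entry_ctx ctx = {
		.entry = entry,
		.max_stack = max_stack,	/* the assignment seen at callchain.c:192 */
	};

	/* Stand-in for the kernel/user walkers; try to store more IPs than allowed. */
	for (uint64_t ip = 0x1000; ip < 0x1000 + 16; ip++)
		callchain_store(&ctx, ip);
}

int main(void)
{
	struct callchain_entry entry = { 0 };

	get_callchain(&entry, 4);	/* only 4 of the 16 candidate IPs are kept */
	printf("stored %llu ips\n", (unsigned long long)entry.nr);
	return 0;
}
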