| /linux/tools/perf/util/ |
| unwind-libdw.c |
|   259  return entry(pc, ui) || !(--ui->max_stack) ?  in frame_callback()
|   266  int max_stack,  in unwind__get_entries() argument
|   275  .max_stack = max_stack,  in unwind__get_entries()
|   285  ui = zalloc(sizeof(ui_buf) + sizeof(ui_buf.entries[0]) * max_stack);  in unwind__get_entries()
|   309  if (err && ui->max_stack != max_stack)  in unwind__get_entries()
|
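The unwind-libdw.c matches above show how perf's DWARF unwinder stops collecting frames once the requested depth is reached: the per-frame callback returns non-zero either when the entry callback fails or when the per-unwind budget `--ui->max_stack` hits zero. Below is a minimal, self-contained sketch of that counting pattern only; `struct unwind_ctx` and `frame_cb` are hypothetical names, not perf's or libdw's API.

```c
#include <stdio.h>

/* Hypothetical context mirroring the "decrement a per-unwind budget"
 * idea from frame_callback() in unwind-libdw.c; not perf's real types. */
struct unwind_ctx {
	int max_stack;			/* frames still allowed */
	int nr;				/* frames collected so far */
	unsigned long ips[128];
};

/* Record one frame; return non-zero to tell the walker to stop. */
static int frame_cb(unsigned long pc, struct unwind_ctx *ctx)
{
	ctx->ips[ctx->nr++] = pc;
	/* Stop as soon as the caller's budget is exhausted. */
	return !(--ctx->max_stack);
}

int main(void)
{
	struct unwind_ctx ctx = { .max_stack = 3 };
	/* Fake "frames" standing in for a real DWARF walk. */
	unsigned long fake_pcs[] = { 0x1000, 0x2000, 0x3000, 0x4000, 0x5000 };

	for (unsigned int i = 0; i < 5; i++)
		if (frame_cb(fake_pcs[i], &ctx))
			break;

	printf("collected %d frames\n", ctx.nr);	/* prints 3 */
	return 0;
}
```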
| unwind.h |
|   26  struct perf_sample *data, int max_stack, bool best_effort);
|   37  struct perf_sample *data, int max_stack,
|   66  int max_stack __maybe_unused,  in unwind__get_entries()
|
| unwind-libunwind-local.c |
|   735  void *arg, int max_stack)  in get_entries() argument
|   739  unw_word_t ips[max_stack];  in get_entries()
|   755  if (max_stack - 1 > 0) {  in get_entries()
|   766  while (!ret && (unw_step(&c) > 0) && i < max_stack) {  in get_entries()
|   782  max_stack = i;  in get_entries()
|   788  for (i = 0; i < max_stack && !ret; i++) {  in get_entries()
|   792  j = max_stack - i - 1;  in get_entries()
|   801  struct perf_sample *data, int max_stack,  in _unwind__get_entries() argument
|   814  if (max_stack <= 0)  in _unwind__get_entries()
|   817  return get_entries(&ui, cb, arg, max_stack);  in _unwind__get_entries()
|
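In get_entries() above, the unw_step() loop is bounded by max_stack and at most that many instruction pointers are recorded. perf itself unwinds a remote thread's stack through custom address-space accessors; the sketch below only illustrates the bounded stepping loop, using libunwind's local API on the current thread (build with -lunwind).

```c
#define UNW_LOCAL_ONLY
#include <libunwind.h>
#include <stdio.h>

/* Walk the current thread's stack, storing at most max_stack IPs.
 * Returns the number of frames captured. */
static int capture_stack(unw_word_t *ips, int max_stack)
{
	unw_context_t uc;
	unw_cursor_t cursor;
	int i = 0;

	unw_getcontext(&uc);
	unw_init_local(&cursor, &uc);

	/* Same shape as perf's loop: stop on error or when max_stack is hit. */
	while (i < max_stack && unw_step(&cursor) > 0) {
		unw_word_t ip;

		unw_get_reg(&cursor, UNW_REG_IP, &ip);
		ips[i++] = ip;
	}
	return i;
}

int main(void)
{
	unw_word_t ips[16];
	int n = capture_stack(ips, 16);

	for (int i = 0; i < n; i++)
		printf("#%d  %#lx\n", i, (unsigned long)ips[i]);
	return 0;
}
```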
| unwind-libunwind.c |
|   84  struct perf_sample *data, int max_stack,  in unwind__get_entries() argument
|   90  return ops->get_entries(cb, arg, thread, data, max_stack, best_effort);  in unwind__get_entries()
|
| bpf_lock_contention.c |
|   193  bpf_map__set_value_size(skel->maps.stacks, con->max_stack * sizeof(u64));  in lock_contention_prepare()
|   205  bpf_map__set_value_size(skel->maps.stack_buf, con->max_stack * sizeof(u64));  in lock_contention_prepare()
|   207  con->max_stack * sizeof(u64));  in lock_contention_prepare()
|   211  skel->rodata->max_stack = con->max_stack;  in lock_contention_prepare()
|   633  idx < con->max_stack - 1)  in lock_contention_get_name()
|   662  size_t stack_size = con->max_stack * sizeof(*stack_trace);  in pop_owner_stack_trace()
|   720  size_t stack_size = con->max_stack * sizeof(*stack_trace);  in lock_contention_read()
|   767  if (!match_callstack_filter(machine, stack_trace, con->max_stack)) {  in lock_contention_read()
|
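lock_contention_prepare() above sizes the BPF map values as con->max_stack * sizeof(u64) before the skeleton is loaded, so the stack-trace buffers exactly fit the requested depth. Below is a hedged libbpf sketch of that resize-before-load step; the object file name "lock_contention.bpf.o" and map name "stacks" are placeholders, and perf uses a generated skeleton with fuller error handling.

```c
#include <bpf/libbpf.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	int max_stack = 16;			/* desired callstack depth */
	struct bpf_object *obj;
	struct bpf_map *stacks;

	/* Hypothetical object file; error handling simplified. */
	obj = bpf_object__open_file("lock_contention.bpf.o", NULL);
	if (!obj)
		return 1;

	stacks = bpf_object__find_map_by_name(obj, "stacks");
	if (!stacks)
		return 1;

	/* Value size must be set before load, as in lock_contention_prepare(). */
	if (bpf_map__set_value_size(stacks, max_stack * sizeof(uint64_t)))
		return 1;

	if (bpf_object__load(obj))
		return 1;

	printf("stacks value size: %u bytes\n", bpf_map__value_size(stacks));
	bpf_object__close(obj);
	return 0;
}
```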
| callchain.h |
|   104  u16 max_stack;  member
|   254  int max_stack);
|   317  struct perf_sample *sample, int max_stack,
|
| unwind-libdw.h |  21  int max_stack;  member
|
| machine.h |
|   194  int max_stack,
|   203  int max_stack)  in thread__resolve_callchain() argument
|   211  max_stack,  in thread__resolve_callchain()
|
| evsel_config.h |  43  int max_stack;  member
|
| top.h |  34  int max_stack;  member
|
| lock-contention.h |  154  int max_stack;  member
|
| machine.c |
|   2594  int max_stack,  in resolve_lbr_callchain_sample() argument
|   2600  int chain_nr = min(max_stack, (int)chain->nr), i;  in resolve_lbr_callchain_sample()
|   2713  int max_stack,  in thread__resolve_callchain_sample() argument
|   2733  root_al, max_stack,  in thread__resolve_callchain_sample()
|   2759  int nr = min(max_stack, (int)branch->nr);  in thread__resolve_callchain_sample()
|   2828  i < chain_nr && nr_entries < max_stack; i++) {  in thread__resolve_callchain_sample()
|   2961  int max_stack, bool symbols)  in thread__resolve_callchain_unwind() argument
|   2977  thread, sample, max_stack, false);  in thread__resolve_callchain_unwind()
|   2986  int max_stack,  in __thread__resolve_callchain() argument
|   3000  max_stack, symbols);  in __thread__resolve_callchain()
|   [all …]
|
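The machine.c matches above clamp the number of entries taken from a sampled callchain to max_stack, e.g. `chain_nr = min(max_stack, (int)chain->nr)`. A small self-contained illustration of that clamp follows; the `struct ip_callchain` layout mirrors the PERF_SAMPLE_CALLCHAIN format (a count followed by that many ips), while `resolve_chain()` is only a stand-in for perf's resolution logic.

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Matches the PERF_SAMPLE_CALLCHAIN layout: a count followed by ips. */
struct ip_callchain {
	uint64_t nr;
	uint64_t ips[];
};

/* Copy at most max_stack entries out of a sampled chain. */
static int resolve_chain(const struct ip_callchain *chain,
			 uint64_t *out, int max_stack)
{
	int nr = chain->nr < (uint64_t)max_stack ? (int)chain->nr : max_stack;

	for (int i = 0; i < nr; i++)
		out[i] = chain->ips[i];
	return nr;
}

int main(void)
{
	struct ip_callchain *chain;
	uint64_t out[3];
	int n;

	chain = malloc(sizeof(*chain) + 5 * sizeof(uint64_t));
	chain->nr = 5;
	for (int i = 0; i < 5; i++)
		chain->ips[i] = 0x1000 + i;

	n = resolve_chain(chain, out, 3);
	printf("resolved %d of %llu entries\n",
	       n, (unsigned long long)chain->nr);	/* resolved 3 of 5 */
	free(chain);
	return 0;
}
```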
| kwork.h |  234  unsigned int max_stack;  member
|
| /linux/arch/mips/kernel/ |
| perf_event.c |
|   36  if (entry->nr >= entry->max_stack)  in save_raw_perf_callchain()
|   60  if (entry->nr >= entry->max_stack)  in perf_callchain_kernel()
|
| /linux/kernel/events/ |
| callchain.c |
|   221  u32 max_stack, bool crosstask, bool add_mark)  in get_perf_callchain() argument
|   236  ctx.max_stack = max_stack;  in get_perf_callchain()
|
| /linux/tools/perf/util/bpf_skel/ |
| lock_contention.bpf.c |
|   191  const volatile int max_stack;  variable
|   560  for (i = 0; i < max_stack; i++)  in contention_begin()
|   570  bpf_get_task_stack(task, buf, max_stack * sizeof(unsigned long), 0);  in contention_begin()
|   705  for (i = 0; i < (u32)max_stack; i++)  in contention_end()
|   726  max_stack * sizeof(unsigned long), 0);  in contention_end()
|
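On the BPF side, lock_contention.bpf.c declares `const volatile int max_stack`, which the loader patches (skel->rodata->max_stack) so the program can bound its loops and size the bpf_get_task_stack() read without overrunning its buffer. The sketch below shows only that pattern under stated assumptions: the tracepoint, map name and program name are made up, it is not perf's actual program, and it needs a generated vmlinux.h plus a kernel recent enough to provide bpf_get_task_stack() and bpf_get_current_task_btf().

```c
// SPDX-License-Identifier: GPL-2.0
/* Hypothetical sketch of a rodata-bounded stack read, in the style of
 * lock_contention.bpf.c; attach point and map are illustrative only. */
#include "vmlinux.h"
#include <bpf/bpf_helpers.h>

#define MAX_STACK_LIMIT 64

typedef __u64 stack_trace_t[MAX_STACK_LIMIT];

/* Patched from user space before load, like skel->rodata->max_stack. */
const volatile int max_stack = 8;

struct {
	__uint(type, BPF_MAP_TYPE_PERCPU_ARRAY);
	__uint(max_entries, 1);
	__type(key, __u32);
	__type(value, stack_trace_t);
} stack_buf SEC(".maps");

SEC("tracepoint/sched/sched_switch")
int bounded_stack(void *ctx)
{
	__u32 zero = 0;
	__u64 *buf = bpf_map_lookup_elem(&stack_buf, &zero);

	/* Keep the read within both the rodata bound and the buffer size. */
	if (!buf || max_stack <= 0 || max_stack > MAX_STACK_LIMIT)
		return 0;

	/* Read at most max_stack kernel frames of the current task. */
	bpf_get_task_stack(bpf_get_current_task_btf(), buf,
			   max_stack * sizeof(__u64), 0);
	return 0;
}

char LICENSE[] SEC("license") = "GPL";
```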
| /linux/arch/arm/kernel/ |
| perf_callchain.c |  74  while ((entry->nr < entry->max_stack) &&  in perf_callchain_user()
|
| /linux/arch/csky/kernel/ |
| perf_callchain.c |  102  while (fp && !(fp & 0x3) && entry->nr < entry->max_stack)  in perf_callchain_user()
|
| /linux/arch/powerpc/perf/ |
| callchain_64.c |  82  while (entry->nr < entry->max_stack) {  in perf_callchain_user_64()
|
| callchain_32.c |  147  while (entry->nr < entry->max_stack) {  in perf_callchain_user_32()
|
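The arm, csky and powerpc perf_callchain_user() matches above all share the same shape: walk saved frame pointers and stop as soon as entry->nr reaches entry->max_stack. Here is a self-contained user-space illustration of that bounded frame-pointer walk; the "saved fp, return address" frame layout and the struct names are simplifications, and real arch code reads frames with __get_user() plus extra sanity checks.

```c
#include <stdint.h>
#include <stdio.h>

/* Minimal stand-ins for perf_callchain_entry{,_ctx}. */
struct callchain_entry {
	unsigned int nr;
	unsigned int max_stack;
	uint64_t ip[32];
};

/* "saved fp, return address" pair, as many ABIs lay it out. */
struct stack_frame {
	const struct stack_frame *fp;
	uint64_t ret_addr;
};

static void walk_frames(struct callchain_entry *entry,
			const struct stack_frame *fp)
{
	/* Stop on a NULL frame or once max_stack entries are stored. */
	while (fp && entry->nr < entry->max_stack) {
		entry->ip[entry->nr++] = fp->ret_addr;
		fp = fp->fp;
	}
}

int main(void)
{
	/* Fake three-deep chain: leaf -> mid -> root. */
	struct stack_frame root = { NULL,  0x400100 };
	struct stack_frame mid  = { &root, 0x400200 };
	struct stack_frame leaf = { &mid,  0x400300 };
	struct callchain_entry entry = { .nr = 0, .max_stack = 2 };

	walk_frames(&entry, &leaf);
	for (unsigned int i = 0; i < entry.nr; i++)
		printf("ip[%u] = %#llx\n", i, (unsigned long long)entry.ip[i]);
	/* Only 2 of the 3 frames are recorded because max_stack == 2. */
	return 0;
}
```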
| /linux/drivers/net/ethernet/netronome/nfp/bpf/ |
| verifier.c |
|   767  unsigned int max_stack;  in nfp_bpf_finalize() local
|   795  max_stack = nn_readb(nn, NFP_NET_CFG_BPF_STACK_SZ) * 64;  in nfp_bpf_finalize()
|   797  if (nfp_prog->stack_size > max_stack) {  in nfp_bpf_finalize()
|   799  nfp_prog->stack_size, max_stack);  in nfp_bpf_finalize()
|
| offload.c |
|   501  unsigned int max_stack, max_prog_len;  in nfp_net_bpf_load() local
|   511  max_stack = nn_readb(nn, NFP_NET_CFG_BPF_STACK_SZ) * 64;  in nfp_net_bpf_load()
|   512  if (nfp_prog->stack_size > max_stack) {  in nfp_net_bpf_load()
|
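In the nfp driver matches above, max_stack is a hardware limit rather than a callchain depth: the NFP_NET_CFG_BPF_STACK_SZ register holds the available stack size in 64-byte units, and offloaded programs whose stack_size exceeds that value times 64 are rejected. A tiny sketch of just that arithmetic check follows; nn_readb() and the register are NFP-specific, so the register value here (2, i.e. 128 bytes) is only an example.

```c
#include <stdbool.h>
#include <stdio.h>

/* The stack-size register is in 64-byte units, as in nfp_bpf_finalize(). */
static bool nfp_stack_fits(unsigned int prog_stack_size,
			   unsigned char stack_sz_reg)
{
	unsigned int max_stack = (unsigned int)stack_sz_reg * 64;

	return prog_stack_size <= max_stack;
}

int main(void)
{
	/* e.g. the register reads back 2 -> 128 bytes of stack available */
	printf("96B prog:  %s\n", nfp_stack_fits(96, 2)  ? "ok" : "too large");
	printf("256B prog: %s\n", nfp_stack_fits(256, 2) ? "ok" : "too large");
	return 0;
}
```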
| /linux/arch/xtensa/kernel/ |
| perf_event.c |
|   341  xtensa_backtrace_kernel(regs, entry->max_stack,  in perf_callchain_kernel()
|   348  xtensa_backtrace_user(regs, entry->max_stack,  in perf_callchain_user()
|
| /linux/tools/perf/ |
| builtin-report.c |
|   102  int max_stack;  member
|   340  ret = hist_entry_iter__add(&iter, &al, rep->max_stack, rep);  in process_sample_event()
|   1320  .max_stack = PERF_MAX_STACK_DEPTH,  in cmd_report()
|   1384  OPT_INTEGER(0, "max-stack", &report.max_stack,  in cmd_report()
|   1577  (int)itrace_synth_opts.callchain_sz > report.max_stack)  in cmd_report()
|   1578  report.max_stack = itrace_synth_opts.callchain_sz;  in cmd_report()
|
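cmd_report() above defaults report.max_stack to PERF_MAX_STACK_DEPTH, exposes it as --max-stack through OPT_INTEGER, and later raises it if the AUX-trace synthesized callchain size (itrace_synth_opts.callchain_sz) is larger. The sketch below reproduces that "default, override, adjust" flow with plain getopt_long as an assumption, since perf itself uses its libsubcmd option parser; PERF_MAX_STACK_DEPTH is 127 in the perf UAPI header.

```c
#include <getopt.h>
#include <stdio.h>
#include <stdlib.h>

#define PERF_MAX_STACK_DEPTH 127	/* from include/uapi/linux/perf_event.h */

int main(int argc, char **argv)
{
	int max_stack = PERF_MAX_STACK_DEPTH;	/* default, as in cmd_report() */
	int itrace_callchain_sz = 0;		/* e.g. parsed from --itrace */
	static const struct option opts[] = {
		{ "max-stack", required_argument, NULL, 'm' },
		{ 0 }
	};
	int c;

	while ((c = getopt_long(argc, argv, "", opts, NULL)) != -1) {
		if (c == 'm')
			max_stack = atoi(optarg);
	}

	/* Mirror cmd_report(): a larger synthesized callchain wins. */
	if (itrace_callchain_sz > max_stack)
		max_stack = itrace_callchain_sz;

	printf("using max_stack = %d\n", max_stack);
	return 0;
}
```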
| /linux/include/linux/ |
| perf_event.h |
|   66  u32 max_stack;  member
|   1723  u32 max_stack, bool crosstask, bool add_mark);
|   1724  extern int get_callchain_buffers(int max_stack);
|   1748  if (ctx->nr < ctx->max_stack && !ctx->contexts_maxed) {  in perf_callchain_store()
|
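perf_callchain_store() in perf_event.h, matched above, is where the kernel enforces max_stack when the arch backtracers record frames: an ip is appended only while ctx->nr is below ctx->max_stack and the context limit has not been hit. Below is a user-space mock of that guard; the struct names echo the kernel's perf_callchain_entry_ctx but the code is illustrative only, not the kernel implementation.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct callchain_entry {
	uint64_t nr;
	uint64_t ip[8];
};

/* Mock of perf_callchain_entry_ctx: entry buffer plus bookkeeping. */
struct callchain_ctx {
	struct callchain_entry *entry;
	uint32_t max_stack;
	uint32_t nr;
	bool contexts_maxed;
};

/* Store one ip, but only while the max_stack budget allows it. */
static void callchain_store(struct callchain_ctx *ctx, uint64_t ip)
{
	if (ctx->nr < ctx->max_stack && !ctx->contexts_maxed) {
		struct callchain_entry *entry = ctx->entry;

		entry->ip[entry->nr++] = ip;
		ctx->nr++;
	}
}

int main(void)
{
	struct callchain_entry entry = { 0 };
	struct callchain_ctx ctx = { .entry = &entry, .max_stack = 4 };

	/* Try to store 10 ips; only max_stack of them are kept. */
	for (uint64_t ip = 0x1000; ip < 0x1000 + 10 * 8; ip += 8)
		callchain_store(&ctx, ip);

	printf("stored %llu of 10 ips\n", (unsigned long long)entry.nr);  /* 4 */
	return 0;
}
```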