/linux/tools/testing/selftests/bpf/progs/
verifier_bounds_deduction_non_const.c
    13   call %[bpf_ktime_get_ns]; \   in deducing_bounds_from_non_const_1()
    24   : __imm(bpf_ktime_get_ns)   in deducing_bounds_from_non_const_1()
    34   call %[bpf_ktime_get_ns]; \   in deducing_bounds_from_non_const_2()
    45   : __imm(bpf_ktime_get_ns)   in deducing_bounds_from_non_const_2()
    55   call %[bpf_ktime_get_ns]; \   in deducing_bounds_from_non_const_3()
    67   : __imm(bpf_ktime_get_ns)   in deducing_bounds_from_non_const_3()
    77   call %[bpf_ktime_get_ns]; \   in deducing_bounds_from_non_const_4()
    89   : __imm(bpf_ktime_get_ns)   in deducing_bounds_from_non_const_4()
    99   call %[bpf_ktime_get_ns]; \   in deducing_bounds_from_non_const_5()
    110  : __imm(bpf_ktime_get_ns)   in deducing_bounds_from_non_const_5()
    [all...]

verifier_bounds_mix_sign_unsign.c
    22   call %[bpf_ktime_get_ns]; \   in signed_and_unsigned_positive_bounds()
    41   : __imm(bpf_ktime_get_ns),   in signed_and_unsigned_positive_bounds()
    54   call %[bpf_ktime_get_ns]; \   in checks_mixing_signed_and_unsigned()
    73   : __imm(bpf_ktime_get_ns),   in checks_mixing_signed_and_unsigned()
    86   call %[bpf_ktime_get_ns]; \   in signed_and_unsigned_variant_2()
    107  : __imm(bpf_ktime_get_ns),   in signed_and_unsigned_variant_2()
    120  call %[bpf_ktime_get_ns]; \   in signed_and_unsigned_variant_3()
    140  : __imm(bpf_ktime_get_ns),   in signed_and_unsigned_variant_3()
    152  call %[bpf_ktime_get_ns]; \   in signed_and_unsigned_variant_4()
    171  : __imm(bpf_ktime_get_ns),   in signed_and_unsigned_variant_4()
    [all...]

verifier_scalar_ids.c
    32   "call %[bpf_ktime_get_ns];"   in __flag()
    46   : __imm(bpf_ktime_get_ns)   in __flag()
    64   "call %[bpf_ktime_get_ns];"   in __flag()
    79   : __imm(bpf_ktime_get_ns)   in __flag()
    97   "call %[bpf_ktime_get_ns];"   in __flag()
    112  : __imm(bpf_ktime_get_ns)   in __flag()
    131  "call %[bpf_ktime_get_ns];"   in __flag()
    149  : __imm(bpf_ktime_get_ns)   in __flag()
    201  "call %[bpf_ktime_get_ns];"   in __flag()
    209  : __imm(bpf_ktime_get_ns)   in __flag()
    [all...]

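In the verifier selftests listed above, bpf_ktime_get_ns() is not used for timing at all: the tests are __naked functions containing BPF assembly, and calling the helper is simply a convenient way to put an unknown 64-bit scalar into r0 for the bounds- and ID-tracking scenarios being exercised. The __imm(bpf_ktime_get_ns) operand binds the symbolic name used inside the asm string to the helper. Below is a minimal sketch of that convention, assuming the usual selftest header bpf_misc.h; the program name and annotations are illustrative, not taken from any of the files listed here.

/* Hedged sketch of the selftest inline-asm convention; not copied from the
 * listed files. bpf_misc.h provides __description, __success, __naked,
 * __imm and __clobber_all.
 */
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>
#include "bpf_misc.h"

SEC("socket")
__description("example: bounds check on an unknown scalar")
__success
__naked void unknown_scalar_example(void)
{
	/* r0 becomes an unknown u64 that the verifier must range-track */
	asm volatile ("					\
	call %[bpf_ktime_get_ns];			\
	if r0 > 42 goto l0_%=;				\
l0_%=:	r0 = 0;						\
	exit;						\
"	:
	: __imm(bpf_ktime_get_ns)
	: __clobber_all);
}

char _license[] SEC("license") = "GPL";
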
exceptions.c
    160  return bpf_ktime_get_ns();   in subprog()
    167  return bpf_ktime_get_ns();   in throwing_subprog()
    172  return bpf_ktime_get_ns();   in global_subprog()
    179  return bpf_ktime_get_ns();   in throwing_global_subprog()
    335  u64 time = bpf_ktime_get_ns();   in exception_assert_range()
    344  u64 time = bpf_ktime_get_ns();   in exception_assert_range_with()
    353  u64 time = bpf_ktime_get_ns();   in exception_bad_assert_range()
    362  u64 time = bpf_ktime_get_ns();   in exception_bad_assert_range_with()

verifier_reg_equal.c
    13   call %[bpf_ktime_get_ns]; \   in subreg_equality_1()
    29   : __imm(bpf_ktime_get_ns)   in subreg_equality_1()
    39   call %[bpf_ktime_get_ns]; \   in subreg_equality_2()
    54   : __imm(bpf_ktime_get_ns)   in subreg_equality_2()

verifier_search_pruning.c
    148  call %[bpf_ktime_get_ns]; \   in should_be_verified_nop_operation()
    156  : __imm(bpf_ktime_get_ns),   in should_be_verified_nop_operation()
    184  l2_%=: call %[bpf_ktime_get_ns]; \   in be_verified_invalid_stack_access()
    188  : __imm(bpf_ktime_get_ns),   in be_verified_invalid_stack_access()
    320  call %[bpf_ktime_get_ns]; \   in __flag()
    323  call %[bpf_ktime_get_ns]; \   in __flag()
    336  : __imm(bpf_ktime_get_ns)   in __flag()

bpf_hashmap_full_update_bench.c
    35   u64 start_time = bpf_ktime_get_ns();   in benchmark()
    38   percpu_time[cpu & 255] = bpf_ktime_get_ns() - start_time;   in benchmark()

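The *_bench.c programs in this directory share the timing idiom visible in the snippet above: read bpf_ktime_get_ns() once before the measured work, read it again afterwards, and store the difference in a per-CPU slot for user space to aggregate. A minimal sketch of that idiom follows; the map layout, attach point and do_work() placeholder are assumptions, not taken from the benchmarks themselves.

/* Hedged sketch of the start/delta benchmark idiom; names are placeholders. */
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

#define CPU_MASK 255

/* elapsed nanoseconds, one slot per CPU, read back from user space */
__u64 percpu_elapsed[CPU_MASK + 1];

static int do_work(void)
{
	return 0;	/* stand-in for the map operations being measured */
}

SEC("tp/syscalls/sys_enter_getpgid")
int benchmark(void *ctx)
{
	__u32 cpu = bpf_get_smp_processor_id();
	__u64 start_time = bpf_ktime_get_ns();

	do_work();

	/* bpf_ktime_get_ns() is monotonic, so the difference is elapsed ns */
	percpu_elapsed[cpu & CPU_MASK] = bpf_ktime_get_ns() - start_time;
	return 0;
}

char _license[] SEC("license") = "GPL";
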
local_storage_rcu_tasks_trace_bench.c
    45   current_gp_start = bpf_ktime_get_ns();   in pregp_step()
    60   __sync_add_and_fetch(&gp_times, bpf_ktime_get_ns() - current_gp_start);   in postgp()

verifier_spill_fill.c
    797   call %[bpf_ktime_get_ns]; \   in spill_32bit_range_track()
    818   : __imm(bpf_ktime_get_ns)   in spill_32bit_range_track()
    1115  "call %[bpf_ktime_get_ns];"   in __flag()
    1123  "call %[bpf_ktime_get_ns];"   in __flag()
    1131  : __imm(bpf_ktime_get_ns)   in __flag()
    1152  "call %[bpf_ktime_get_ns];"   in __flag()
    1160  "call %[bpf_ktime_get_ns];"   in __flag()
    1170  : __imm(bpf_ktime_get_ns)   in __flag()
    1187  "call %[bpf_ktime_get_ns];"   in __flag()
    1190  "call %[bpf_ktime_get_ns];"   in __flag()
    [all...]

bpf_hashmap_lookup.c
    58   start_time = bpf_ktime_get_ns();   in benchmark()
    60   percpu_times[cpu & CPU_MASK][times_index] = bpf_ktime_get_ns() - start_time;   in benchmark()

res_spin_lock.c
    119  time_beg = bpf_ktime_get_ns();   in res_spin_lock_test_held_lock_max()
    140  time = bpf_ktime_get_ns() - time_beg;   in res_spin_lock_test_held_lock_max()

verifier_iterating_callbacks.c
    679  call %[bpf_ktime_get_ns]; \   in check_add_const()
    696  : __imm(bpf_ktime_get_ns),   in check_add_const()
    710  "call %[bpf_ktime_get_ns];"   in check_add_const_3regs()
    722  : __imm(bpf_ktime_get_ns),   in check_add_const_3regs()
    737  "call %[bpf_ktime_get_ns];"   in check_add_const_3regs_2if()
    753  : __imm(bpf_ktime_get_ns),   in check_add_const_3regs_2if()
    765  "call %[bpf_ktime_get_ns];"   in __flag()
    767  "call %[bpf_ktime_get_ns];"   in __flag()
    769  "call %[bpf_ktime_get_ns];"   in __flag()
    781  : __imm(bpf_ktime_get_ns),   in __flag()
    [all...]

verifier_gotol.c
    15   call %[bpf_ktime_get_ns]; \   in gotol_small_imm()
    28   : __imm(bpf_ktime_get_ns)   in gotol_small_imm()

xdping_kern.c
    114  recvtime = bpf_ktime_get_ns();   in xdping_client()
    148  pinginfo->start = bpf_ktime_get_ns();   in xdping_client()

net_timestamping.c
    120  u64 timestamp = bpf_ktime_get_ns();   in bpf_test_delay()
    193  u64 timestamp = bpf_ktime_get_ns();   in BPF_PROG()

/linux/tools/perf/util/bpf_skel/
func_latency.bpf.c
    138  now = bpf_ktime_get_ns();   in BPF_PROG()
    158  update_latency(bpf_ktime_get_ns() - *start);   in BPF_PROG()
    174  now = bpf_ktime_get_ns();   in BPF_PROG()
    194  update_latency(bpf_ktime_get_ns() - *start);   in BPF_PROG()

kwork_top.bpf.c
    188  __u64 ts = bpf_ktime_get_ns();   in on_switch()
    209  __u64 ts = bpf_ktime_get_ns();   in on_irq_handler_entry()
    245  __u64 ts = bpf_ktime_get_ns();   in on_irq_handler_exit()
    281  __u64 ts = bpf_ktime_get_ns();   in on_softirq_entry()
    317  __u64 ts = bpf_ktime_get_ns();   in on_softirq_exit()

syscall_summary.bpf.c
    113  st.timestamp = bpf_ktime_get_ns();   in sys_enter()
    146  delta = bpf_ktime_get_ns() - st->timestamp;   in sys_exit()

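All three perf skeletons above use the helper the same way: stash a start timestamp in a map keyed by the current task (or per syscall) on the entry probe, then on the exit probe subtract it from a fresh reading to get a latency. Below is a minimal, self-contained sketch of that two-probe pattern; the kprobe target do_nanosleep and the map sizing are illustrative, and the real skeletons feed the delta into histograms rather than printing it.

/* Hedged sketch of the entry/exit latency pattern; target and sizes are
 * illustrative, not taken from the skeletons above.
 */
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct {
	__uint(type, BPF_MAP_TYPE_HASH);
	__uint(max_entries, 10240);
	__type(key, __u32);		/* thread id */
	__type(value, __u64);		/* entry timestamp, ns */
} start SEC(".maps");

SEC("kprobe/do_nanosleep")
int func_begin(void *ctx)
{
	__u32 tid = (__u32)bpf_get_current_pid_tgid();
	__u64 now = bpf_ktime_get_ns();

	bpf_map_update_elem(&start, &tid, &now, BPF_ANY);
	return 0;
}

SEC("kretprobe/do_nanosleep")
int func_end(void *ctx)
{
	__u32 tid = (__u32)bpf_get_current_pid_tgid();
	__u64 *ts = bpf_map_lookup_elem(&start, &tid);

	if (ts) {
		__u64 delta = bpf_ktime_get_ns() - *ts;

		bpf_map_delete_elem(&start, &tid);
		/* a real tool would bucket delta into a histogram here */
		bpf_printk("latency: %llu ns", delta);
	}
	return 0;
}

char _license[] SEC("license") = "GPL";
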
/linux/samples/bpf/
cpustat_kern.c
    139  *cts = bpf_ktime_get_ns();   in bpf_prog1()
    143  cur_ts = bpf_ktime_get_ns();   in bpf_prog1()
    238  *pts = bpf_ktime_get_ns();   in bpf_prog2()
    242  cur_ts = bpf_ktime_get_ns();   in bpf_prog2()

lathist_kern.c
    35   *ts = bpf_ktime_get_ns();   in bpf_prog1()
    83   cur_ts = bpf_ktime_get_ns();   in bpf_prog2()

tracex3.bpf.c
    29   u64 val = bpf_ktime_get_ns();   in bpf_prog1()
    73   u64 cur_time = bpf_ktime_get_ns();   in bpf_prog2()

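tracex3 (and lathist above it) add one step on top of the timestamp-delta pattern: the delta is folded into a log2-bucketed histogram so user space can print a latency distribution. The sketch below shows only that bucketing step, under assumed names; obtaining start_ns works like the entry/exit sketch shown after the perf entries.

/* Hedged sketch of log2 latency bucketing; map name and size are placeholders,
 * and account_latency() is meant to be called from an exit-side program.
 */
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

#define SLOTS 36

struct {
	__uint(type, BPF_MAP_TYPE_ARRAY);
	__uint(max_entries, SLOTS);
	__type(key, __u32);
	__type(value, __u64);
} lat_hist SEC(".maps");

static __u32 log2_slot(__u64 delta_ns)
{
	__u32 slot = 0;

	/* slot ~= floor(log2(delta_ns)), clamped to the last bucket; the loop
	 * is bounded by SLOTS, which keeps the verifier happy
	 */
	while (delta_ns > 1 && slot < SLOTS - 1) {
		delta_ns >>= 1;
		slot++;
	}
	return slot;
}

static void account_latency(__u64 start_ns)
{
	__u64 delta = bpf_ktime_get_ns() - start_ns;
	__u32 slot = log2_slot(delta);
	__u64 *cnt = bpf_map_lookup_elem(&lat_hist, &slot);

	if (cnt)
		__sync_fetch_and_add(cnt, 1);
}
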
offwaketime.bpf.c
    122  ts = bpf_ktime_get_ns();
    132  delta = bpf_ktime_get_ns() - *tsp;

hbm_kern.h
    138  qdp->lasttime = bpf_ktime_get_ns();   in hbm_init_vqueue()
    148  curtime = bpf_ktime_get_ns();   in hbm_init_edt_vqueue()

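hbm_kern.h uses the clock differently: bpf_ktime_get_ns() supplies the elapsed time that replenishes a byte "credit" for a virtual queue, so egress traffic can be paced against a configured rate. A minimal sketch of that credit idea follows; the struct, rates and attach point are assumptions rather than the sample's actual definitions, and concurrent updates are left unserialized here.

/* Hedged sketch of a rate-limiting "virtual queue" replenished from
 * bpf_ktime_get_ns(); names and rates are illustrative, and a real version
 * would need to serialize updates.
 */
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

#define RATE_BYTES_PER_SEC	(1 * 1024 * 1024ULL)	/* 1 MiB/s */
#define MAX_CREDIT_BYTES	(100 * 1024LL)		/* burst allowance */
#define NSEC_PER_SEC		1000000000ULL

struct vqueue {
	__u64 lasttime;		/* ns timestamp of the last credit update */
	__s64 credit;		/* bytes currently allowed to pass */
};

struct {
	__uint(type, BPF_MAP_TYPE_ARRAY);
	__uint(max_entries, 1);
	__type(key, __u32);
	__type(value, struct vqueue);
} queue_state SEC(".maps");

SEC("cgroup_skb/egress")
int rate_limit(struct __sk_buff *skb)
{
	__u32 key = 0;
	struct vqueue *q = bpf_map_lookup_elem(&queue_state, &key);
	__u64 now, delta;

	if (!q)
		return 1;	/* allow on lookup failure */

	now = bpf_ktime_get_ns();
	delta = now - q->lasttime;
	if (delta > NSEC_PER_SEC)	/* first packet or long idle: cap catch-up */
		delta = NSEC_PER_SEC;
	q->lasttime = now;

	/* accrue credit for the elapsed time, bounded to limit bursts */
	q->credit += (__s64)(delta * RATE_BYTES_PER_SEC / NSEC_PER_SEC);
	if (q->credit > MAX_CREDIT_BYTES)
		q->credit = MAX_CREDIT_BYTES;

	q->credit -= skb->len;
	return q->credit >= 0 ? 1 : 0;	/* 1 = pass, 0 = drop */
}

char _license[] SEC("license") = "GPL";
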
tracex4.bpf.c
    46   .val = bpf_ktime_get_ns(),   in bpf_prog2()

/linux/tools/bpf/runqslower/
runqslower.bpf.c
    41   *ptr = bpf_ktime_get_ns();   in trace_enqueue()
    90   delta_us = (bpf_ktime_get_ns() - *tsp) / 1000;   in handle__sched_switch()

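runqslower records the enqueue time per task in trace_enqueue() and, at the sched_switch end, converts the delta to microseconds before comparing it against the user-configured threshold; the unit conversion is the only detail this entry adds over the patterns above. A small hedged fragment of just that check (min_us stands in for the tool's threshold; the tracepoint plumbing is elided):

/* Hedged fragment, not a complete program: ns -> us conversion plus the
 * reporting threshold used at the sched_switch end of a runqslower-style tool.
 */
#include <linux/types.h>
#include <bpf/bpf_helpers.h>

static int should_report(__u64 enqueue_ts_ns, __u64 min_us)
{
	__u64 delta_us = (bpf_ktime_get_ns() - enqueue_ts_ns) / 1000;

	/* min_us == 0 means report everything; otherwise skip fast wakeups */
	return !min_us || delta_us > min_us;
}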