/Linux-v5.10/kernel/trace/

trace.c
    752:  static u64 buffer_ftrace_now(struct array_buffer *buf, int cpu)   in buffer_ftrace_now()
    768:  return buffer_ftrace_now(&global_trace.array_buffer, cpu);   in ftrace_now()
    945:  if (tr->array_buffer.buffer)   in tracer_tracing_on()
    946:  ring_buffer_record_on(tr->array_buffer.buffer);   in tracer_tracing_on()
   1014:  buffer = global_trace.array_buffer.buffer;   in __trace_puts()
   1067:  buffer = global_trace.array_buffer.buffer;   in __trace_bputs()
   1196:  static int resize_buffer_duplicate_size(struct array_buffer *trace_buf,
   1197:  struct array_buffer *size_buf, int cpu_id);
   1198:  static void set_buffer_entries(struct array_buffer *buf, unsigned long val);
   1208:  &tr->array_buffer, RING_BUFFER_ALL_CPUS);   in tracing_alloc_snapshot_instance()
   [all …]

trace_selftest.c
     26:  static int trace_test_buffer_cpu(struct array_buffer *buf, int cpu)   in trace_test_buffer_cpu()
     63:  static int __maybe_unused trace_test_buffer(struct array_buffer *buf, unsigned long *count)   in trace_test_buffer()
    365:  ret = trace_test_buffer(&tr->array_buffer, &count);   in trace_selftest_startup_dynamic_tracing()
    386:  ret = trace_test_buffer(&tr->array_buffer, &count);   in trace_selftest_startup_dynamic_tracing()
    690:  ret = trace_test_buffer(&tr->array_buffer, &count);   in trace_selftest_startup_function()
    776:  tracing_reset_online_cpus(&tr->array_buffer);   in trace_selftest_startup_function_graph()
    798:  ret = trace_test_buffer(&tr->array_buffer, &count);   in trace_selftest_startup_function_graph()
    856:  ret = trace_test_buffer(&tr->array_buffer, NULL);   in trace_selftest_startup_irqsoff()
    918:  ret = trace_test_buffer(&tr->array_buffer, NULL);   in trace_selftest_startup_preemptoff()
    984:  ret = trace_test_buffer(&tr->array_buffer, NULL);   in trace_selftest_startup_preemptirqsoff()
   [all …]

trace_kdb.c
     46:  ring_buffer_read_prepare(iter.array_buffer->buffer,   in ftrace_dump_buf()
     54:  ring_buffer_read_prepare(iter.array_buffer->buffer,   in ftrace_dump_buf()
    127:  atomic_inc(&per_cpu_ptr(iter.array_buffer->data, cpu)->disabled);   in kdb_ftdump()
    142:  atomic_dec(&per_cpu_ptr(iter.array_buffer->data, cpu)->disabled);   in kdb_ftdump()

trace_sched_wakeup.c
     85:  *data = per_cpu_ptr(tr->array_buffer.data, cpu);   in func_prolog_preempt_disable()
    381:  struct trace_buffer *buffer = tr->array_buffer.buffer;   in tracing_sched_switch_trace()
    411:  struct trace_buffer *buffer = tr->array_buffer.buffer;   in tracing_sched_wakeup_trace()
    462:  disabled = atomic_inc_return(&per_cpu_ptr(wakeup_trace->array_buffer.data, cpu)->disabled);   in probe_wakeup_sched_switch()
    474:  data = per_cpu_ptr(wakeup_trace->array_buffer.data, wakeup_cpu);   in probe_wakeup_sched_switch()
    497:  atomic_dec(&per_cpu_ptr(wakeup_trace->array_buffer.data, cpu)->disabled);   in probe_wakeup_sched_switch()
    516:  tracing_reset_online_cpus(&tr->array_buffer);   in wakeup_reset()
    554:  disabled = atomic_inc_return(&per_cpu_ptr(wakeup_trace->array_buffer.data, cpu)->disabled);   in probe_wakeup()
    586:  data = per_cpu_ptr(wakeup_trace->array_buffer.data, wakeup_cpu);   in probe_wakeup()
    601:  atomic_dec(&per_cpu_ptr(wakeup_trace->array_buffer.data, cpu)->disabled);   in probe_wakeup()

trace_mmiotrace.c
     35:  tracing_reset_online_cpus(&tr->array_buffer);   in mmio_reset_data()
    125:  unsigned long over = ring_buffer_overruns(iter->array_buffer->buffer);   in count_overruns()
    300:  struct trace_buffer *buffer = tr->array_buffer.buffer;   in __trace_mmiotrace_rw()
    321:  struct trace_array_cpu *data = per_cpu_ptr(tr->array_buffer.data, smp_processor_id());   in mmio_trace_rw()
    330:  struct trace_buffer *buffer = tr->array_buffer.buffer;   in __trace_mmiotrace_map()
    354:  data = per_cpu_ptr(tr->array_buffer.data, smp_processor_id());   in mmio_trace_mapping()

trace_irqsoff.c
    125:  *data = per_cpu_ptr(tr->array_buffer.data, cpu);   in func_prolog_dec()
    170:  tracing_reset_online_cpus(&irqsoff_trace->array_buffer);   in irqsoff_display_graph()
    385:  data = per_cpu_ptr(tr->array_buffer.data, cpu);   in start_critical_timing()
    423:  data = per_cpu_ptr(tr->array_buffer.data, cpu);   in stop_critical_timing()

trace_functions.c
    110:  tr->array_buffer.cpu = get_cpu();   in function_trace_init()
    127:  tracing_reset_online_cpus(&tr->array_buffer);   in function_trace_start()
    152:  data = per_cpu_ptr(tr->array_buffer.data, cpu);   in function_trace_call()
    201:  data = per_cpu_ptr(tr->array_buffer.data, cpu);   in function_stack_trace_call()

trace_functions_graph.c
    104:  struct trace_buffer *buffer = tr->array_buffer.buffer;   in __trace_graph_entry()
    174:  data = per_cpu_ptr(tr->array_buffer.data, cpu);   in trace_graph_entry()
    224:  struct trace_buffer *buffer = tr->array_buffer.buffer;   in __trace_graph_return()
    255:  data = per_cpu_ptr(tr->array_buffer.data, cpu);   in trace_graph_return()
    447:  ring_buffer_consume(iter->array_buffer->buffer, iter->cpu,   in get_return_for_leaf()
    449:  event = ring_buffer_peek(iter->array_buffer->buffer, iter->cpu,   in get_return_for_leaf()
    506:  usecs = iter->ts - iter->array_buffer->time_start;   in print_graph_rel_time()

trace_branch.c
     58:  data = this_cpu_ptr(tr->array_buffer.data);   in probe_likely_condition()
     63:  buffer = tr->array_buffer.buffer;   in probe_likely_condition()

trace.h
    194:  struct array_buffer {   (struct)
    291:  struct array_buffer array_buffer;   (member)
    304:  struct array_buffer max_buffer;
    746:  void tracing_reset_online_cpus(struct array_buffer *buf);
   1142:  return this_cpu_read(tr->array_buffer.data->ftrace_ignore_pid) !=   in ftrace_trace_task()

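The trace.h hits are the definition side of this identifier: line 194 opens the struct array_buffer definition, line 291 embeds one in struct trace_array, and line 304 adds a second max_buffer instance. The sketch below is reconstructed only from the field accesses seen elsewhere in these results (.buffer, .data, ->time_start, .cpu, and the trace_array_cpu fields disabled / ignore_pid / ftrace_ignore_pid); it is not the verbatim kernel/trace/trace.h definition, the field types are inferred from usage, and members that never appear in this listing are omitted.

/*
 * Partial reconstruction from the hits above; kernel/trace/trace.h:194 is
 * the authoritative definition.  Types are inferred, unseen members omitted.
 */
#include <linux/atomic.h>
#include <linux/percpu.h>
#include <linux/types.h>
#include <linux/ring_buffer.h>		/* struct trace_buffer */

struct trace_array_cpu {
	atomic_t	disabled;		/* inc/dec'd around the wakeup, irqsoff and kdb probes */
	bool		ignore_pid;		/* written by the trace_events.c pid-filter probes */
	int		ftrace_ignore_pid;	/* type assumed; compared with FTRACE_PID_TRACE in ftrace.c */
	/* ... */
};

struct array_buffer {
	struct trace_buffer		*buffer;	/* handle passed to the ring_buffer_*() calls above */
	struct trace_array_cpu __percpu	*data;		/* per-CPU state, reached via per_cpu_ptr()/this_cpu_ptr() */
	u64				time_start;	/* subtracted from iter->ts for relative timestamps */
	int				cpu;		/* set with get_cpu() in function_trace_init() */
};

struct trace_array {
	/* ... */
	struct array_buffer	array_buffer;	/* trace.h:291 */
	struct array_buffer	max_buffer;	/* trace.h:304, max-latency/snapshot copy */
	/* ... */
};
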
trace_events.c
    245:  data = this_cpu_ptr(tr->array_buffer.data);   in trace_event_ignore_this_pid()
    571:  this_cpu_write(tr->array_buffer.data->ignore_pid, ret ||   in event_filter_pid_sched_switch_probe_pre()
    587:  this_cpu_write(tr->array_buffer.data->ignore_pid,   in event_filter_pid_sched_switch_probe_post()
    599:  if (!this_cpu_read(tr->array_buffer.data->ignore_pid))   in event_filter_pid_sched_wakeup_probe_pre()
    605:  this_cpu_write(tr->array_buffer.data->ignore_pid,   in event_filter_pid_sched_wakeup_probe_pre()
    617:  if (this_cpu_read(tr->array_buffer.data->ignore_pid))   in event_filter_pid_sched_wakeup_probe_post()
    624:  this_cpu_write(tr->array_buffer.data->ignore_pid,   in event_filter_pid_sched_wakeup_probe_post()
    667:  per_cpu_ptr(tr->array_buffer.data, cpu)->ignore_pid = false;   in __ftrace_clear_event_pids()
   1680:  this_cpu_write(tr->array_buffer.data->ignore_pid,   in ignore_task_cpu()

trace_syscalls.c
    328:  buffer = tr->array_buffer.buffer;   in ftrace_syscall_enter()
    374:  buffer = tr->array_buffer.buffer;   in ftrace_syscall_exit()

trace_hwlat.c
    108:  struct trace_buffer *buffer = tr->array_buffer.buffer;   in trace_hwlat_sample()

trace_events_synth.c
    465:  buffer = trace_file->tr->array_buffer.buffer;   in trace_event_raw_event_synth()
   1479:  trace_state->buffer = file->tr->array_buffer.buffer;   in __synth_event_trace_start()

ftrace.c
    149:  pid = this_cpu_read(tr->array_buffer.data->ftrace_ignore_pid);   in ftrace_pid_func()
   7068:  this_cpu_write(tr->array_buffer.data->ftrace_ignore_pid,   in ftrace_filter_pid_sched_switch_probe()
   7071:  this_cpu_write(tr->array_buffer.data->ftrace_ignore_pid,   in ftrace_filter_pid_sched_switch_probe()
   7137:  per_cpu_ptr(tr->array_buffer.data, cpu)->ftrace_ignore_pid = FTRACE_PID_TRACE;   in clear_ftrace_pids()
   7336:  this_cpu_write(tr->array_buffer.data->ftrace_ignore_pid,   in ignore_task_cpu()
   7339:  this_cpu_write(tr->array_buffer.data->ftrace_ignore_pid,   in ignore_task_cpu()

blktrace.c
     81:  buffer = blk_tr->array_buffer.buffer;   in trace_note()
    254:  buffer = blk_tr->array_buffer.buffer;   in __blk_add_trace()

trace_output.c
    541:  unsigned long long abs_ts = iter->ts - iter->array_buffer->time_start;   in lat_print_timestamp()

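Taken together, the kernel/trace/ hits above repeat one pattern: a tracer resolves the per-CPU trace_array_cpu with per_cpu_ptr(tr->array_buffer.data, cpu) (or this_cpu_ptr), uses its disabled counter as an enable/recursion guard, and writes events into tr->array_buffer.buffer. A minimal sketch of that pattern follows; example_record() is a hypothetical helper rather than a kernel function, and the actual event reserve/commit calls are elided.

/*
 * Hypothetical helper sketching the pattern shared by probe_wakeup(),
 * probe_wakeup_sched_switch(), start_critical_timing() and friends above.
 */
#include <linux/atomic.h>
#include <linux/percpu.h>
#include <linux/smp.h>

static void example_record(struct trace_array *tr)
{
	struct trace_buffer *buffer = tr->array_buffer.buffer;	/* ring buffer */
	struct trace_array_cpu *data;
	long disabled;
	int cpu;

	cpu = raw_smp_processor_id();
	data = per_cpu_ptr(tr->array_buffer.data, cpu);		/* per-CPU state */

	/* same guard the wakeup/irqsoff probes use: record only if this
	 * CPU was not already inside the tracer */
	disabled = atomic_inc_return(&data->disabled);
	if (disabled != 1)
		goto out;

	/* ... an event would be reserved, filled and committed on 'buffer' ... */
	(void)buffer;

out:
	atomic_dec(&data->disabled);
}
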
/Linux-v5.10/include/linux/

trace_events.h
     14:  struct array_buffer;
     82:  struct array_buffer *array_buffer;   (member)

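trace_events.h only forward-declares struct array_buffer (line 14) and stores a pointer to one as a member (line 82, presumably inside struct trace_iterator); the full definition stays private to kernel/trace/trace.h. Readers such as lat_print_timestamp() and print_graph_rel_time() in the listing above go through that pointer to compute relative timestamps. A minimal sketch of that calculation, assuming nanosecond u64 timestamps; rel_ts() is a hypothetical name, and the helper would only build inside kernel/trace/ where trace.h is visible.

/*
 * Sketch only: the relative-timestamp math seen in lat_print_timestamp()
 * and print_graph_rel_time().  Assumes the private kernel/trace/trace.h is
 * in scope, since linux/trace_events.h only forward-declares array_buffer.
 */
#include <linux/trace_events.h>		/* struct trace_iterator */

static u64 rel_ts(struct trace_iterator *iter)
{
	/* event timestamp relative to the buffer's recorded start time */
	return iter->ts - iter->array_buffer->time_start;
}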