/*
 * Copyright (c) 2012 The Chromium OS Authors.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */
6*4882a593Smuzhiyun
7*4882a593Smuzhiyun #include <common.h>
8*4882a593Smuzhiyun #include <mapmem.h>
9*4882a593Smuzhiyun #include <trace.h>
10*4882a593Smuzhiyun #include <asm/io.h>
11*4882a593Smuzhiyun #include <asm/sections.h>
12*4882a593Smuzhiyun
DECLARE_GLOBAL_DATA_PTR;

/*
 * Enable/init flags for tracing. Placed in .data rather than BSS —
 * presumably so they hold valid values before BSS is cleared during
 * early init; TODO(review): confirm against the board init sequence.
 */
static char trace_enabled __attribute__((section(".data")));
static char trace_inited __attribute__((section(".data")));
17*4882a593Smuzhiyun
/* The header block at the start of the trace memory area */
struct trace_hdr {
	int func_count;		/* Total number of function call sites */
	u64 call_count;		/* Total number of tracked function calls */
	u64 untracked_count;	/* Total number of untracked function calls */
	int funcs_used;		/* Total number of functions used */

	/*
	 * Call count for each function. This is indexed by the word offset
	 * of the function from gd->relocaddr
	 */
	uintptr_t *call_accum;

	/* Function trace list */
	struct trace_call *ftrace;	/* The function call records */
	ulong ftrace_size;	/* Num. of ftrace records we have space for */
	ulong ftrace_count;	/* Num. of ftrace records written */
	ulong ftrace_too_deep_count;	/* Functions that were too deep */

	int depth;		/* Current function-call nesting depth */
	int depth_limit;	/* Calls nested deeper than this are dropped */
	int max_depth;		/* Deepest nesting level observed */
};
41*4882a593Smuzhiyun
42*4882a593Smuzhiyun static struct trace_hdr *hdr; /* Pointer to start of trace buffer */
43*4882a593Smuzhiyun
44*4882a593Smuzhiyun static inline uintptr_t __attribute__((no_instrument_function))
func_ptr_to_num(void * func_ptr)45*4882a593Smuzhiyun func_ptr_to_num(void *func_ptr)
46*4882a593Smuzhiyun {
47*4882a593Smuzhiyun uintptr_t offset = (uintptr_t)func_ptr;
48*4882a593Smuzhiyun
49*4882a593Smuzhiyun #ifdef CONFIG_SANDBOX
50*4882a593Smuzhiyun offset -= (uintptr_t)&_init;
51*4882a593Smuzhiyun #else
52*4882a593Smuzhiyun if (gd->flags & GD_FLG_RELOC)
53*4882a593Smuzhiyun offset -= gd->relocaddr;
54*4882a593Smuzhiyun else
55*4882a593Smuzhiyun offset -= CONFIG_SYS_TEXT_BASE;
56*4882a593Smuzhiyun #endif
57*4882a593Smuzhiyun return offset / FUNC_SITE_SIZE;
58*4882a593Smuzhiyun }
59*4882a593Smuzhiyun
add_ftrace(void * func_ptr,void * caller,ulong flags)60*4882a593Smuzhiyun static void __attribute__((no_instrument_function)) add_ftrace(void *func_ptr,
61*4882a593Smuzhiyun void *caller, ulong flags)
62*4882a593Smuzhiyun {
63*4882a593Smuzhiyun if (hdr->depth > hdr->depth_limit) {
64*4882a593Smuzhiyun hdr->ftrace_too_deep_count++;
65*4882a593Smuzhiyun return;
66*4882a593Smuzhiyun }
67*4882a593Smuzhiyun if (hdr->ftrace_count < hdr->ftrace_size) {
68*4882a593Smuzhiyun struct trace_call *rec = &hdr->ftrace[hdr->ftrace_count];
69*4882a593Smuzhiyun
70*4882a593Smuzhiyun rec->func = func_ptr_to_num(func_ptr);
71*4882a593Smuzhiyun rec->caller = func_ptr_to_num(caller);
72*4882a593Smuzhiyun rec->flags = flags | (timer_get_us() & FUNCF_TIMESTAMP_MASK);
73*4882a593Smuzhiyun }
74*4882a593Smuzhiyun hdr->ftrace_count++;
75*4882a593Smuzhiyun }
76*4882a593Smuzhiyun
add_textbase(void)77*4882a593Smuzhiyun static void __attribute__((no_instrument_function)) add_textbase(void)
78*4882a593Smuzhiyun {
79*4882a593Smuzhiyun if (hdr->ftrace_count < hdr->ftrace_size) {
80*4882a593Smuzhiyun struct trace_call *rec = &hdr->ftrace[hdr->ftrace_count];
81*4882a593Smuzhiyun
82*4882a593Smuzhiyun rec->func = CONFIG_SYS_TEXT_BASE;
83*4882a593Smuzhiyun rec->caller = 0;
84*4882a593Smuzhiyun rec->flags = FUNCF_TEXTBASE;
85*4882a593Smuzhiyun }
86*4882a593Smuzhiyun hdr->ftrace_count++;
87*4882a593Smuzhiyun }
88*4882a593Smuzhiyun
89*4882a593Smuzhiyun /**
90*4882a593Smuzhiyun * This is called on every function entry
91*4882a593Smuzhiyun *
92*4882a593Smuzhiyun * We add to our tally for this function and add to the list of called
93*4882a593Smuzhiyun * functions.
94*4882a593Smuzhiyun *
95*4882a593Smuzhiyun * @param func_ptr Pointer to function being entered
96*4882a593Smuzhiyun * @param caller Pointer to function which called this function
97*4882a593Smuzhiyun */
__cyg_profile_func_enter(void * func_ptr,void * caller)98*4882a593Smuzhiyun void __attribute__((no_instrument_function)) __cyg_profile_func_enter(
99*4882a593Smuzhiyun void *func_ptr, void *caller)
100*4882a593Smuzhiyun {
101*4882a593Smuzhiyun if (trace_enabled) {
102*4882a593Smuzhiyun int func;
103*4882a593Smuzhiyun
104*4882a593Smuzhiyun add_ftrace(func_ptr, caller, FUNCF_ENTRY);
105*4882a593Smuzhiyun func = func_ptr_to_num(func_ptr);
106*4882a593Smuzhiyun if (func < hdr->func_count) {
107*4882a593Smuzhiyun hdr->call_accum[func]++;
108*4882a593Smuzhiyun hdr->call_count++;
109*4882a593Smuzhiyun } else {
110*4882a593Smuzhiyun hdr->untracked_count++;
111*4882a593Smuzhiyun }
112*4882a593Smuzhiyun hdr->depth++;
113*4882a593Smuzhiyun if (hdr->depth > hdr->depth_limit)
114*4882a593Smuzhiyun hdr->max_depth = hdr->depth;
115*4882a593Smuzhiyun }
116*4882a593Smuzhiyun }
117*4882a593Smuzhiyun
118*4882a593Smuzhiyun /**
119*4882a593Smuzhiyun * This is called on every function exit
120*4882a593Smuzhiyun *
121*4882a593Smuzhiyun * We do nothing here.
122*4882a593Smuzhiyun *
123*4882a593Smuzhiyun * @param func_ptr Pointer to function being entered
124*4882a593Smuzhiyun * @param caller Pointer to function which called this function
125*4882a593Smuzhiyun */
__cyg_profile_func_exit(void * func_ptr,void * caller)126*4882a593Smuzhiyun void __attribute__((no_instrument_function)) __cyg_profile_func_exit(
127*4882a593Smuzhiyun void *func_ptr, void *caller)
128*4882a593Smuzhiyun {
129*4882a593Smuzhiyun if (trace_enabled) {
130*4882a593Smuzhiyun add_ftrace(func_ptr, caller, FUNCF_EXIT);
131*4882a593Smuzhiyun hdr->depth--;
132*4882a593Smuzhiyun }
133*4882a593Smuzhiyun }
134*4882a593Smuzhiyun
135*4882a593Smuzhiyun /**
136*4882a593Smuzhiyun * Produce a list of called functions
137*4882a593Smuzhiyun *
138*4882a593Smuzhiyun * The information is written into the supplied buffer - a header followed
139*4882a593Smuzhiyun * by a list of function records.
140*4882a593Smuzhiyun *
141*4882a593Smuzhiyun * @param buff Buffer to place list into
142*4882a593Smuzhiyun * @param buff_size Size of buffer
143*4882a593Smuzhiyun * @param needed Returns size of buffer needed, which may be
144*4882a593Smuzhiyun * greater than buff_size if we ran out of space.
145*4882a593Smuzhiyun * @return 0 if ok, -1 if space was exhausted
146*4882a593Smuzhiyun */
trace_list_functions(void * buff,int buff_size,unsigned int * needed)147*4882a593Smuzhiyun int trace_list_functions(void *buff, int buff_size, unsigned int *needed)
148*4882a593Smuzhiyun {
149*4882a593Smuzhiyun struct trace_output_hdr *output_hdr = NULL;
150*4882a593Smuzhiyun void *end, *ptr = buff;
151*4882a593Smuzhiyun int func;
152*4882a593Smuzhiyun int upto;
153*4882a593Smuzhiyun
154*4882a593Smuzhiyun end = buff ? buff + buff_size : NULL;
155*4882a593Smuzhiyun
156*4882a593Smuzhiyun /* Place some header information */
157*4882a593Smuzhiyun if (ptr + sizeof(struct trace_output_hdr) < end)
158*4882a593Smuzhiyun output_hdr = ptr;
159*4882a593Smuzhiyun ptr += sizeof(struct trace_output_hdr);
160*4882a593Smuzhiyun
161*4882a593Smuzhiyun /* Add information about each function */
162*4882a593Smuzhiyun for (func = upto = 0; func < hdr->func_count; func++) {
163*4882a593Smuzhiyun int calls = hdr->call_accum[func];
164*4882a593Smuzhiyun
165*4882a593Smuzhiyun if (!calls)
166*4882a593Smuzhiyun continue;
167*4882a593Smuzhiyun
168*4882a593Smuzhiyun if (ptr + sizeof(struct trace_output_func) < end) {
169*4882a593Smuzhiyun struct trace_output_func *stats = ptr;
170*4882a593Smuzhiyun
171*4882a593Smuzhiyun stats->offset = func * FUNC_SITE_SIZE;
172*4882a593Smuzhiyun stats->call_count = calls;
173*4882a593Smuzhiyun upto++;
174*4882a593Smuzhiyun }
175*4882a593Smuzhiyun ptr += sizeof(struct trace_output_func);
176*4882a593Smuzhiyun }
177*4882a593Smuzhiyun
178*4882a593Smuzhiyun /* Update the header */
179*4882a593Smuzhiyun if (output_hdr) {
180*4882a593Smuzhiyun output_hdr->rec_count = upto;
181*4882a593Smuzhiyun output_hdr->type = TRACE_CHUNK_FUNCS;
182*4882a593Smuzhiyun }
183*4882a593Smuzhiyun
184*4882a593Smuzhiyun /* Work out how must of the buffer we used */
185*4882a593Smuzhiyun *needed = ptr - buff;
186*4882a593Smuzhiyun if (ptr > end)
187*4882a593Smuzhiyun return -1;
188*4882a593Smuzhiyun return 0;
189*4882a593Smuzhiyun }
190*4882a593Smuzhiyun
trace_list_calls(void * buff,int buff_size,unsigned * needed)191*4882a593Smuzhiyun int trace_list_calls(void *buff, int buff_size, unsigned *needed)
192*4882a593Smuzhiyun {
193*4882a593Smuzhiyun struct trace_output_hdr *output_hdr = NULL;
194*4882a593Smuzhiyun void *end, *ptr = buff;
195*4882a593Smuzhiyun int rec, upto;
196*4882a593Smuzhiyun int count;
197*4882a593Smuzhiyun
198*4882a593Smuzhiyun end = buff ? buff + buff_size : NULL;
199*4882a593Smuzhiyun
200*4882a593Smuzhiyun /* Place some header information */
201*4882a593Smuzhiyun if (ptr + sizeof(struct trace_output_hdr) < end)
202*4882a593Smuzhiyun output_hdr = ptr;
203*4882a593Smuzhiyun ptr += sizeof(struct trace_output_hdr);
204*4882a593Smuzhiyun
205*4882a593Smuzhiyun /* Add information about each call */
206*4882a593Smuzhiyun count = hdr->ftrace_count;
207*4882a593Smuzhiyun if (count > hdr->ftrace_size)
208*4882a593Smuzhiyun count = hdr->ftrace_size;
209*4882a593Smuzhiyun for (rec = upto = 0; rec < count; rec++) {
210*4882a593Smuzhiyun if (ptr + sizeof(struct trace_call) < end) {
211*4882a593Smuzhiyun struct trace_call *call = &hdr->ftrace[rec];
212*4882a593Smuzhiyun struct trace_call *out = ptr;
213*4882a593Smuzhiyun
214*4882a593Smuzhiyun out->func = call->func * FUNC_SITE_SIZE;
215*4882a593Smuzhiyun out->caller = call->caller * FUNC_SITE_SIZE;
216*4882a593Smuzhiyun out->flags = call->flags;
217*4882a593Smuzhiyun upto++;
218*4882a593Smuzhiyun }
219*4882a593Smuzhiyun ptr += sizeof(struct trace_call);
220*4882a593Smuzhiyun }
221*4882a593Smuzhiyun
222*4882a593Smuzhiyun /* Update the header */
223*4882a593Smuzhiyun if (output_hdr) {
224*4882a593Smuzhiyun output_hdr->rec_count = upto;
225*4882a593Smuzhiyun output_hdr->type = TRACE_CHUNK_CALLS;
226*4882a593Smuzhiyun }
227*4882a593Smuzhiyun
228*4882a593Smuzhiyun /* Work out how must of the buffer we used */
229*4882a593Smuzhiyun *needed = ptr - buff;
230*4882a593Smuzhiyun if (ptr > end)
231*4882a593Smuzhiyun return -1;
232*4882a593Smuzhiyun return 0;
233*4882a593Smuzhiyun }
234*4882a593Smuzhiyun
/* Print basic information about tracing */
void trace_print_stats(void)
{
	ulong count;

#ifndef FTRACE
	puts("Warning: make U-Boot with FTRACE to enable function instrumenting.\n");
	puts("You will likely get zeroed data here\n");
#endif
	if (!trace_inited) {
		printf("Trace is disabled\n");
		return;
	}
	/* Overall tallies */
	print_grouped_ull(hdr->func_count, 10);
	puts(" function sites\n");
	print_grouped_ull(hdr->call_count, 10);
	puts(" function calls\n");
	print_grouped_ull(hdr->untracked_count, 10);
	puts(" untracked function calls\n");
	/* Records beyond ftrace_size were counted but not stored */
	count = min(hdr->ftrace_count, hdr->ftrace_size);
	print_grouped_ull(count, 10);
	puts(" traced function calls");
	if (hdr->ftrace_count > hdr->ftrace_size) {
		printf(" (%lu dropped due to overflow)",
		       hdr->ftrace_count - hdr->ftrace_size);
	}
	puts("\n");
	printf("%15d maximum observed call depth\n", hdr->max_depth);
	printf("%15d call depth limit\n", hdr->depth_limit);
	print_grouped_ull(hdr->ftrace_too_deep_count, 10);
	puts(" calls not traced due to depth\n");
}
267*4882a593Smuzhiyun
trace_set_enabled(int enabled)268*4882a593Smuzhiyun void __attribute__((no_instrument_function)) trace_set_enabled(int enabled)
269*4882a593Smuzhiyun {
270*4882a593Smuzhiyun trace_enabled = enabled != 0;
271*4882a593Smuzhiyun }
272*4882a593Smuzhiyun
/**
 * Init the tracing system ready for use, and enable it
 *
 * The buffer is laid out as: struct trace_hdr, then the per-function
 * call-count array, then the timed function-call records.
 *
 * @param buff	Pointer to trace buffer
 * @param buff_size	Size of trace buffer
 * @return 0 if ok, -1 if the buffer is too small or tracing was already
 *	enabled without CONFIG_TRACE_EARLY
 */
int __attribute__((no_instrument_function)) trace_init(void *buff,
		size_t buff_size)
{
	/* One call site per FUNC_SITE_SIZE bytes of U-Boot code */
	ulong func_count = gd->mon_len / FUNC_SITE_SIZE;
	size_t needed;
	int was_disabled = !trace_enabled;

	if (!was_disabled) {
#ifdef CONFIG_TRACE_EARLY
		char *end;
		ulong used;

		/*
		 * Copy over the early trace data if we have it. Disable
		 * tracing while we are doing this.
		 */
		trace_enabled = 0;
		hdr = map_sysmem(CONFIG_TRACE_EARLY_ADDR,
				 CONFIG_TRACE_EARLY_SIZE);
		/*
		 * NOTE(review): ftrace_count is not clamped to ftrace_size
		 * here, so 'used' could exceed the early buffer if it
		 * overflowed — verify against trace_early_init() sizing
		 */
		end = (char *)&hdr->ftrace[hdr->ftrace_count];
		used = end - (char *)hdr;
		printf("trace: copying %08lx bytes of early data from %x to %08lx\n",
		       used, CONFIG_TRACE_EARLY_ADDR,
		       (ulong)map_to_sysmem(buff));
		memcpy(buff, hdr, used);
#else
		puts("trace: already enabled\n");
		return -1;
#endif
	}
	hdr = (struct trace_hdr *)buff;
	/* Header plus one accumulator slot per call site */
	needed = sizeof(*hdr) + func_count * sizeof(uintptr_t);
	if (needed > buff_size) {
		printf("trace: buffer size %zd bytes: at least %zd needed\n",
		       buff_size, needed);
		return -1;
	}

	/* Keep any copied early-trace data; zero only on a fresh start */
	if (was_disabled)
		memset(hdr, '\0', needed);
	hdr->func_count = func_count;
	hdr->call_accum = (uintptr_t *)(hdr + 1);

	/* Use any remaining space for the timed function trace */
	hdr->ftrace = (struct trace_call *)(buff + needed);
	hdr->ftrace_size = (buff_size - needed) / sizeof(*hdr->ftrace);
	add_textbase();

	puts("trace: enabled\n");
	hdr->depth_limit = 15;
	trace_enabled = 1;
	trace_inited = 1;
	return 0;
}
333*4882a593Smuzhiyun
#ifdef CONFIG_TRACE_EARLY
/**
 * Init tracing before relocation, using a fixed early buffer
 *
 * Uses the buffer at CONFIG_TRACE_EARLY_ADDR; trace_init() later copies
 * the collected records into the final buffer.
 *
 * @return 0 if ok (or if tracing is already enabled), -1 if the early
 *	buffer is too small
 */
int __attribute__((no_instrument_function)) trace_early_init(void)
{
	/* One call site per FUNC_SITE_SIZE bytes of U-Boot code */
	ulong func_count = gd->mon_len / FUNC_SITE_SIZE;
	size_t buff_size = CONFIG_TRACE_EARLY_SIZE;
	size_t needed;

	/* We can ignore additional calls to this function */
	if (trace_enabled)
		return 0;

	hdr = map_sysmem(CONFIG_TRACE_EARLY_ADDR, CONFIG_TRACE_EARLY_SIZE);
	/* Header plus one accumulator slot per call site */
	needed = sizeof(*hdr) + func_count * sizeof(uintptr_t);
	if (needed > buff_size) {
		printf("trace: buffer size is %zd bytes, at least %zd needed\n",
		       buff_size, needed);
		return -1;
	}

	memset(hdr, '\0', needed);
	hdr->call_accum = (uintptr_t *)(hdr + 1);
	hdr->func_count = func_count;

	/* Use any remaining space for the timed function trace */
	hdr->ftrace = (struct trace_call *)((char *)hdr + needed);
	hdr->ftrace_size = (buff_size - needed) / sizeof(*hdr->ftrace);
	add_textbase();
	/* Allow deep nesting early on; trace_init() sets a lower limit */
	hdr->depth_limit = 200;
	printf("trace: early enable at %08x\n", CONFIG_TRACE_EARLY_ADDR);

	trace_enabled = 1;
	return 0;
}
#endif
368