1
2
3
4
5
6#include <common.h>
7#include <mapmem.h>
8#include <time.h>
9#include <trace.h>
10#include <asm/global_data.h>
11#include <asm/io.h>
12#include <asm/sections.h>
13
14DECLARE_GLOBAL_DATA_PTR;
15
16static char trace_enabled __section(".data");
17static char trace_inited __section(".data");
18
19
/* Header at the start of the trace buffer, followed by the data arrays */
struct trace_hdr {
	int func_count;		/* Total number of function call sites */
	u64 call_count;		/* Total calls seen for tracked functions */
	u64 untracked_count;	/* Calls to functions outside the text area */
	int funcs_used;		/* Number of functions actually called */

	/*
	 * Call count for each function site, indexed by
	 * func_ptr_to_num(); array of func_count entries living
	 * directly after this header
	 */
	uintptr_t *call_accum;

	/* Ring of individual entry/exit records (see add_ftrace()) */
	struct trace_call *ftrace;
	ulong ftrace_size;	/* Capacity of ftrace[] in records */
	ulong ftrace_count;	/* Records seen; may exceed size (dropped) */
	ulong ftrace_too_deep_count;	/* Calls dropped by depth limit */

	int depth;		/* Current call-nesting depth */
	int depth_limit;	/* Calls deeper than this are not recorded */
	int max_depth;		/* Deepest nesting observed */
};
42
43static struct trace_hdr *hdr;
44
45static inline uintptr_t __attribute__((no_instrument_function))
46 func_ptr_to_num(void *func_ptr)
47{
48 uintptr_t offset = (uintptr_t)func_ptr;
49
50#ifdef CONFIG_SANDBOX
51 offset -= (uintptr_t)&_init;
52#else
53 if (gd->flags & GD_FLG_RELOC)
54 offset -= gd->relocaddr;
55 else
56 offset -= CONFIG_SYS_TEXT_BASE;
57#endif
58 return offset / FUNC_SITE_SIZE;
59}
60
#if defined(CONFIG_EFI_LOADER) && (defined(CONFIG_ARM) || defined(CONFIG_RISCV))

/*
 * U-Boot's copy of the global data pointer, saved here so the trace
 * hooks can switch to it while an EFI payload (which uses the gd
 * register for its own purposes on ARM/RISC-V) is running.
 */
static volatile gd_t *trace_gd;

/*
 * trace_save_gd() - remember the current U-Boot global data pointer
 */
static void __attribute__((no_instrument_function)) trace_save_gd(void)
{
	trace_gd = gd;
}

/*
 * trace_swap_gd() - exchange the active gd with the saved one
 *
 * Called on entry to and exit from each trace hook so that the tracer
 * always runs with a valid U-Boot gd, even when invoked from EFI code
 * that has replaced the gd register.  Calling it twice restores the
 * original pointer.
 */
static void __attribute__((no_instrument_function)) trace_swap_gd(void)
{
	volatile gd_t *temp_gd = trace_gd;

	trace_gd = gd;
	set_gd(temp_gd);
}

#else

/* No gd register takeover on this configuration, so nothing to save */
static void __attribute__((no_instrument_function)) trace_save_gd(void)
{
}

static void __attribute__((no_instrument_function)) trace_swap_gd(void)
{
}

#endif
103
104static void __attribute__((no_instrument_function)) add_ftrace(void *func_ptr,
105 void *caller, ulong flags)
106{
107 if (hdr->depth > hdr->depth_limit) {
108 hdr->ftrace_too_deep_count++;
109 return;
110 }
111 if (hdr->ftrace_count < hdr->ftrace_size) {
112 struct trace_call *rec = &hdr->ftrace[hdr->ftrace_count];
113
114 rec->func = func_ptr_to_num(func_ptr);
115 rec->caller = func_ptr_to_num(caller);
116 rec->flags = flags | (timer_get_us() & FUNCF_TIMESTAMP_MASK);
117 }
118 hdr->ftrace_count++;
119}
120
121static void __attribute__((no_instrument_function)) add_textbase(void)
122{
123 if (hdr->ftrace_count < hdr->ftrace_size) {
124 struct trace_call *rec = &hdr->ftrace[hdr->ftrace_count];
125
126 rec->func = CONFIG_SYS_TEXT_BASE;
127 rec->caller = 0;
128 rec->flags = FUNCF_TEXTBASE;
129 }
130 hdr->ftrace_count++;
131}
132
133
134
135
136
137
138
139
140
141
142void __attribute__((no_instrument_function)) __cyg_profile_func_enter(
143 void *func_ptr, void *caller)
144{
145 if (trace_enabled) {
146 int func;
147
148 trace_swap_gd();
149 add_ftrace(func_ptr, caller, FUNCF_ENTRY);
150 func = func_ptr_to_num(func_ptr);
151 if (func < hdr->func_count) {
152 hdr->call_accum[func]++;
153 hdr->call_count++;
154 } else {
155 hdr->untracked_count++;
156 }
157 hdr->depth++;
158 if (hdr->depth > hdr->depth_limit)
159 hdr->max_depth = hdr->depth;
160 trace_swap_gd();
161 }
162}
163
164
165
166
167
168
169
170void __attribute__((no_instrument_function)) __cyg_profile_func_exit(
171 void *func_ptr, void *caller)
172{
173 if (trace_enabled) {
174 trace_swap_gd();
175 add_ftrace(func_ptr, caller, FUNCF_EXIT);
176 hdr->depth--;
177 trace_swap_gd();
178 }
179}
180
181
182
183
184
185
186
187
188
189
190
191
192
193int trace_list_functions(void *buff, size_t buff_size, size_t *needed)
194{
195 struct trace_output_hdr *output_hdr = NULL;
196 void *end, *ptr = buff;
197 size_t func;
198 size_t upto;
199
200 end = buff ? buff + buff_size : NULL;
201
202
203 if (ptr + sizeof(struct trace_output_hdr) < end)
204 output_hdr = ptr;
205 ptr += sizeof(struct trace_output_hdr);
206
207
208 for (func = upto = 0; func < hdr->func_count; func++) {
209 size_t calls = hdr->call_accum[func];
210
211 if (!calls)
212 continue;
213
214 if (ptr + sizeof(struct trace_output_func) < end) {
215 struct trace_output_func *stats = ptr;
216
217 stats->offset = func * FUNC_SITE_SIZE;
218 stats->call_count = calls;
219 upto++;
220 }
221 ptr += sizeof(struct trace_output_func);
222 }
223
224
225 if (output_hdr) {
226 output_hdr->rec_count = upto;
227 output_hdr->type = TRACE_CHUNK_FUNCS;
228 }
229
230
231 *needed = ptr - buff;
232 if (ptr > end)
233 return -ENOSPC;
234
235 return 0;
236}
237
238
239
240
241
242
243
244
245
246
247
248
249
250int trace_list_calls(void *buff, size_t buff_size, size_t *needed)
251{
252 struct trace_output_hdr *output_hdr = NULL;
253 void *end, *ptr = buff;
254 size_t rec, upto;
255 size_t count;
256
257 end = buff ? buff + buff_size : NULL;
258
259
260 if (ptr + sizeof(struct trace_output_hdr) < end)
261 output_hdr = ptr;
262 ptr += sizeof(struct trace_output_hdr);
263
264
265 count = hdr->ftrace_count;
266 if (count > hdr->ftrace_size)
267 count = hdr->ftrace_size;
268 for (rec = upto = 0; rec < count; rec++) {
269 if (ptr + sizeof(struct trace_call) < end) {
270 struct trace_call *call = &hdr->ftrace[rec];
271 struct trace_call *out = ptr;
272
273 out->func = call->func * FUNC_SITE_SIZE;
274 out->caller = call->caller * FUNC_SITE_SIZE;
275 out->flags = call->flags;
276 upto++;
277 }
278 ptr += sizeof(struct trace_call);
279 }
280
281
282 if (output_hdr) {
283 output_hdr->rec_count = upto;
284 output_hdr->type = TRACE_CHUNK_CALLS;
285 }
286
287
288 *needed = ptr - buff;
289 if (ptr > end)
290 return -ENOSPC;
291
292 return 0;
293}
294
295
296
297
/* trace_print_stats() - print basic information about tracing to stdout */
void trace_print_stats(void)
{
	ulong count;

#ifndef FTRACE
	puts("Warning: make U-Boot with FTRACE to enable function instrumenting.\n");
	puts("You will likely get zeroed data here\n");
#endif
	if (!trace_inited) {
		printf("Trace is disabled\n");
		return;
	}
	print_grouped_ull(hdr->func_count, 10);
	puts(" function sites\n");
	print_grouped_ull(hdr->call_count, 10);
	puts(" function calls\n");
	print_grouped_ull(hdr->untracked_count, 10);
	puts(" untracked function calls\n");
	/* ftrace_count keeps counting past the buffer end, so clamp it */
	count = min(hdr->ftrace_count, hdr->ftrace_size);
	print_grouped_ull(count, 10);
	puts(" traced function calls");
	if (hdr->ftrace_count > hdr->ftrace_size) {
		printf(" (%lu dropped due to overflow)",
		       hdr->ftrace_count - hdr->ftrace_size);
	}
	puts("\n");
	printf("%15d maximum observed call depth\n", hdr->max_depth);
	printf("%15d call depth limit\n", hdr->depth_limit);
	print_grouped_ull(hdr->ftrace_too_deep_count, 10);
	puts(" calls not traced due to depth\n");
}
329
330void __attribute__((no_instrument_function)) trace_set_enabled(int enabled)
331{
332 trace_enabled = enabled != 0;
333}
334
335
336
337
338
339
340
341
/*
 * trace_init() - initialise the (final) trace system, ready for use
 *
 * @buff: Pointer to the trace buffer
 * @buff_size: Size of the trace buffer in bytes
 * Return: 0 on success, -ENOSPC if the buffer is too small for the
 *	header and accumulator array, -EALREADY if tracing is already
 *	running without CONFIG_TRACE_EARLY
 */
int __attribute__((no_instrument_function)) trace_init(void *buff,
						       size_t buff_size)
{
	/* One accumulator slot per possible call site in the image */
	ulong func_count = gd->mon_len / FUNC_SITE_SIZE;
	size_t needed;
	int was_disabled = !trace_enabled;

	trace_save_gd();

	if (!was_disabled) {
#ifdef CONFIG_TRACE_EARLY
		char *end;
		ulong used;

		/*
		 * Early tracing is already running: pause it, then copy
		 * the header plus the valid part of the record array
		 * from the early buffer into the new one so no data is
		 * lost across the hand-over.
		 */
		trace_enabled = 0;
		hdr = map_sysmem(CONFIG_TRACE_EARLY_ADDR,
				 CONFIG_TRACE_EARLY_SIZE);
		/* Only min(count, size) records were actually stored */
		end = (char *)&hdr->ftrace[min(hdr->ftrace_count,
					       hdr->ftrace_size)];
		used = end - (char *)hdr;
		printf("trace: copying %08lx bytes of early data from %x to %08lx\n",
		       used, CONFIG_TRACE_EARLY_ADDR,
		       (ulong)map_to_sysmem(buff));
		memcpy(buff, hdr, used);
#else
		puts("trace: already enabled\n");
		return -EALREADY;
#endif
	}
	hdr = (struct trace_hdr *)buff;
	needed = sizeof(*hdr) + func_count * sizeof(uintptr_t);
	if (needed > buff_size) {
		printf("trace: buffer size %zd bytes: at least %zd needed\n",
		       buff_size, needed);
		return -ENOSPC;
	}

	/* Keep counts copied from the early buffer; zero otherwise */
	if (was_disabled)
		memset(hdr, '\0', needed);
	hdr->func_count = func_count;
	hdr->call_accum = (uintptr_t *)(hdr + 1);

	/* Use any remaining space for the timed function trace */
	hdr->ftrace = (struct trace_call *)(buff + needed);
	hdr->ftrace_size = (buff_size - needed) / sizeof(*hdr->ftrace);
	add_textbase();

	puts("trace: enabled\n");
	hdr->depth_limit = CONFIG_TRACE_CALL_DEPTH_LIMIT;
	trace_enabled = 1;
	trace_inited = 1;

	return 0;
}
400
#ifdef CONFIG_TRACE_EARLY
/*
 * trace_early_init() - start up tracing using an early, fixed buffer
 *
 * Uses the buffer at CONFIG_TRACE_EARLY_ADDR so tracing can begin
 * before relocation; trace_init() later copies the data across.
 *
 * Return: 0 on success, -ENOSPC if the early buffer is too small for
 *	the header and accumulator array
 */
int __attribute__((no_instrument_function)) trace_early_init(void)
{
	/* One accumulator slot per possible call site in the image */
	ulong func_count = gd->mon_len / FUNC_SITE_SIZE;
	size_t buff_size = CONFIG_TRACE_EARLY_SIZE;
	size_t needed;

	/* Nothing to do if tracing is already running */
	if (trace_enabled)
		return 0;

	hdr = map_sysmem(CONFIG_TRACE_EARLY_ADDR, CONFIG_TRACE_EARLY_SIZE);
	needed = sizeof(*hdr) + func_count * sizeof(uintptr_t);
	if (needed > buff_size) {
		printf("trace: buffer size is %zd bytes, at least %zd needed\n",
		       buff_size, needed);
		return -ENOSPC;
	}

	memset(hdr, '\0', needed);
	hdr->call_accum = (uintptr_t *)(hdr + 1);
	hdr->func_count = func_count;

	/* Use any remaining space for the timed function trace */
	hdr->ftrace = (struct trace_call *)((char *)hdr + needed);
	hdr->ftrace_size = (buff_size - needed) / sizeof(*hdr->ftrace);
	add_textbase();
	hdr->depth_limit = CONFIG_TRACE_EARLY_CALL_DEPTH_LIMIT;
	printf("trace: early enable at %08x\n", CONFIG_TRACE_EARLY_ADDR);

	trace_enabled = 1;

	return 0;
}
#endif
441