GitHub Repository: torvalds/linux
Path: blob/master/tools/objtool/check.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015-2017 Josh Poimboeuf <[email protected]>
 */

#include <string.h>
#include <stdlib.h>
#include <inttypes.h>
#include <sys/mman.h>

#include <objtool/builtin.h>
#include <objtool/cfi.h>
#include <objtool/arch.h>
#include <objtool/check.h>
#include <objtool/special.h>
#include <objtool/warn.h>
#include <objtool/endianness.h>

#include <linux/objtool_types.h>
#include <linux/hashtable.h>
#include <linux/kernel.h>
#include <linux/static_call_types.h>
#include <linux/string.h>

struct alternative {
	struct alternative *next;
	struct instruction *insn;
};

static unsigned long nr_cfi, nr_cfi_reused, nr_cfi_cache;

static struct cfi_init_state initial_func_cfi;
static struct cfi_state init_cfi;
static struct cfi_state func_cfi;
static struct cfi_state force_undefined_cfi;

struct instruction *find_insn(struct objtool_file *file,
			      struct section *sec, unsigned long offset)
{
	struct instruction *insn;

	hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) {
		if (insn->sec == sec && insn->offset == offset)
			return insn;
	}

	return NULL;
}

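/*
 * Instructions are stored in contiguous chunks of INSN_CHUNK_SIZE entries
 * (see decode_instructions()): within a chunk the next instruction is
 * simply insn + 1, and only at a chunk boundary does the lookup fall back
 * to the instruction hash.
 */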
struct instruction *next_insn_same_sec(struct objtool_file *file,
				       struct instruction *insn)
{
	if (insn->idx == INSN_CHUNK_MAX)
		return find_insn(file, insn->sec, insn->offset + insn->len);

	insn++;
	if (!insn->len)
		return NULL;

	return insn;
}

static struct instruction *next_insn_same_func(struct objtool_file *file,
					       struct instruction *insn)
{
	struct instruction *next = next_insn_same_sec(file, insn);
	struct symbol *func = insn_func(insn);

	if (!func)
		return NULL;

	if (next && insn_func(next) == func)
		return next;

	/* Check if we're already in the subfunction: */
	if (func == func->cfunc)
		return NULL;

	/* Move to the subfunction: */
	return find_insn(file, func->cfunc->sec, func->cfunc->offset);
}

static struct instruction *prev_insn_same_sec(struct objtool_file *file,
					      struct instruction *insn)
{
	if (insn->idx == 0) {
		if (insn->prev_len)
			return find_insn(file, insn->sec, insn->offset - insn->prev_len);
		return NULL;
	}

	return insn - 1;
}

static struct instruction *prev_insn_same_sym(struct objtool_file *file,
					      struct instruction *insn)
{
	struct instruction *prev = prev_insn_same_sec(file, insn);

	if (prev && insn_func(prev) == insn_func(insn))
		return prev;

	return NULL;
}

#define for_each_insn(file, insn)					\
	for (struct section *__sec, *__fake = (struct section *)1;	\
	     __fake; __fake = NULL)					\
		for_each_sec(file, __sec)				\
			sec_for_each_insn(file, __sec, insn)

#define func_for_each_insn(file, func, insn)				\
	for (insn = find_insn(file, func->sec, func->offset);		\
	     insn;							\
	     insn = next_insn_same_func(file, insn))

#define sym_for_each_insn(file, sym, insn)				\
	for (insn = find_insn(file, sym->sec, sym->offset);		\
	     insn && insn->offset < sym->offset + sym->len;		\
	     insn = next_insn_same_sec(file, insn))

#define sym_for_each_insn_continue_reverse(file, sym, insn)		\
	for (insn = prev_insn_same_sec(file, insn);			\
	     insn && insn->offset >= sym->offset;			\
	     insn = prev_insn_same_sec(file, insn))

#define sec_for_each_insn_from(file, insn)				\
	for (; insn; insn = next_insn_same_sec(file, insn))

#define sec_for_each_insn_continue(file, insn)				\
	for (insn = next_insn_same_sec(file, insn); insn;		\
	     insn = next_insn_same_sec(file, insn))

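/*
 * The one-shot outer loop in for_each_insn() exists only to declare __sec
 * (and the __fake sentinel) in C99 style; it iterates exactly once.
 *
 * Illustrative use of the iterators above (not part of the original file):
 *
 *	struct instruction *insn;
 *
 *	func_for_each_insn(file, func, insn) {
 *		if (insn->type == INSN_RETURN)
 *			break;
 *	}
 */
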
static inline struct symbol *insn_call_dest(struct instruction *insn)
{
	if (insn->type == INSN_JUMP_DYNAMIC ||
	    insn->type == INSN_CALL_DYNAMIC)
		return NULL;

	return insn->_call_dest;
}

static inline struct reloc *insn_jump_table(struct instruction *insn)
{
	if (insn->type == INSN_JUMP_DYNAMIC ||
	    insn->type == INSN_CALL_DYNAMIC)
		return insn->_jump_table;

	return NULL;
}

static inline unsigned long insn_jump_table_size(struct instruction *insn)
{
	if (insn->type == INSN_JUMP_DYNAMIC ||
	    insn->type == INSN_CALL_DYNAMIC)
		return insn->_jump_table_size;

	return 0;
}

static bool is_jump_table_jump(struct instruction *insn)
{
	struct alt_group *alt_group = insn->alt_group;

	if (insn_jump_table(insn))
		return true;

	/* Retpoline alternative for a jump table? */
	return alt_group && alt_group->orig_group &&
	       insn_jump_table(alt_group->orig_group->first_insn);
}

static bool is_sibling_call(struct instruction *insn)
{
	/*
	 * Assume only STT_FUNC calls have jump-tables.
	 */
	if (insn_func(insn)) {
		/* An indirect jump is either a sibling call or a jump to a table. */
		if (insn->type == INSN_JUMP_DYNAMIC)
			return !is_jump_table_jump(insn);
	}

	/* add_jump_destinations() sets insn_call_dest(insn) for sibling calls. */
	return (is_static_jump(insn) && insn_call_dest(insn));
}

/*
 * Checks if a string ends with another.
 */
static bool str_ends_with(const char *s, const char *sub)
{
	const int slen = strlen(s);
	const int sublen = strlen(sub);

	if (sublen > slen)
		return 0;

	return !memcmp(s + slen - sublen, sub, sublen);
}

/*
 * Checks if a function is a Rust "noreturn" one.
 */
static bool is_rust_noreturn(const struct symbol *func)
{
	/*
	 * If it does not start with "_R", then it is not a Rust symbol.
	 */
	if (strncmp(func->name, "_R", 2))
		return false;

	/*
	 * These are just heuristics -- we do not control the precise symbol
	 * name, due to the crate disambiguators (which depend on the compiler)
	 * as well as changes to the source code itself between versions (since
	 * these come from the Rust standard library).
	 */
	return str_ends_with(func->name, "_4core5sliceSp15copy_from_slice17len_mismatch_fail") ||
	       str_ends_with(func->name, "_4core6option13unwrap_failed") ||
	       str_ends_with(func->name, "_4core6result13unwrap_failed") ||
	       str_ends_with(func->name, "_4core9panicking5panic") ||
	       str_ends_with(func->name, "_4core9panicking9panic_fmt") ||
	       str_ends_with(func->name, "_4core9panicking14panic_explicit") ||
	       str_ends_with(func->name, "_4core9panicking14panic_nounwind") ||
	       str_ends_with(func->name, "_4core9panicking18panic_bounds_check") ||
	       str_ends_with(func->name, "_4core9panicking18panic_nounwind_fmt") ||
	       str_ends_with(func->name, "_4core9panicking19assert_failed_inner") ||
	       str_ends_with(func->name, "_4core9panicking30panic_null_pointer_dereference") ||
	       str_ends_with(func->name, "_4core9panicking36panic_misaligned_pointer_dereference") ||
	       str_ends_with(func->name, "_7___rustc17rust_begin_unwind") ||
	       strstr(func->name, "_4core9panicking13assert_failed") ||
	       strstr(func->name, "_4core9panicking11panic_const24panic_const_") ||
	       (strstr(func->name, "_4core5slice5index") &&
		strstr(func->name, "slice_") &&
		str_ends_with(func->name, "_fail"));
}

/*
 * This checks to see if the given function is a "noreturn" function.
 *
 * For global functions which are outside the scope of this object file, we
 * have to keep a manual list of them.
 *
 * For local functions, we have to detect them manually by simply looking for
 * the lack of a return instruction.
 */
static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
				int recursion)
{
	int i;
	struct instruction *insn;
	bool empty = true;

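	/*
	 * Build global_noreturns[] from noreturns.h via an X-macro: each
	 * NORETURN(func) entry there expands to the stringified function
	 * name.
	 */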
#define NORETURN(func) __stringify(func),
	static const char * const global_noreturns[] = {
#include "noreturns.h"
	};
#undef NORETURN

	if (!func)
		return false;

	if (func->bind == STB_GLOBAL || func->bind == STB_WEAK) {
		if (is_rust_noreturn(func))
			return true;

		for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
			if (!strcmp(func->name, global_noreturns[i]))
				return true;
	}

	if (func->bind == STB_WEAK)
		return false;

	if (!func->len)
		return false;

	insn = find_insn(file, func->sec, func->offset);
	if (!insn || !insn_func(insn))
		return false;

	func_for_each_insn(file, func, insn) {
		empty = false;

		if (insn->type == INSN_RETURN)
			return false;
	}

	if (empty)
		return false;

	/*
	 * A function can have a sibling call instead of a return. In that
	 * case, the function's dead-end status depends on whether the target
	 * of the sibling call returns.
	 */
	func_for_each_insn(file, func, insn) {
		if (is_sibling_call(insn)) {
			struct instruction *dest = insn->jump_dest;

			if (!dest)
				/* sibling call to another file */
				return false;

			/* local sibling call */
			if (recursion == 5) {
				/*
				 * Infinite recursion: two functions have
				 * sibling calls to each other. This is a very
				 * rare case. It means they aren't dead ends.
				 */
				return false;
			}

			return __dead_end_function(file, insn_func(dest), recursion+1);
		}
	}

	return true;
}

static bool dead_end_function(struct objtool_file *file, struct symbol *func)
{
	return __dead_end_function(file, func, 0);
}

static void init_cfi_state(struct cfi_state *cfi)
{
	int i;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		cfi->regs[i].base = CFI_UNDEFINED;
		cfi->vals[i].base = CFI_UNDEFINED;
	}
	cfi->cfa.base = CFI_UNDEFINED;
	cfi->drap_reg = CFI_UNDEFINED;
	cfi->drap_offset = -1;
}

static void init_insn_state(struct objtool_file *file, struct insn_state *state,
			    struct section *sec)
{
	memset(state, 0, sizeof(*state));
	init_cfi_state(&state->cfi);

	if (opts.noinstr && sec)
		state->noinstr = sec->noinstr;
}

static struct cfi_state *cfi_alloc(void)
{
	struct cfi_state *cfi = calloc(1, sizeof(struct cfi_state));
	if (!cfi) {
		ERROR_GLIBC("calloc");
		exit(1);
	}
	nr_cfi++;
	return cfi;
}

static int cfi_bits;
static struct hlist_head *cfi_hash;

static inline bool cficmp(struct cfi_state *cfi1, struct cfi_state *cfi2)
{
	return memcmp((void *)cfi1 + sizeof(cfi1->hash),
		      (void *)cfi2 + sizeof(cfi2->hash),
		      sizeof(struct cfi_state) - sizeof(struct hlist_node));
}

static inline u32 cfi_key(struct cfi_state *cfi)
{
	return jhash((void *)cfi + sizeof(cfi->hash),
		     sizeof(*cfi) - sizeof(cfi->hash), 0);
}

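/*
 * Note that cficmp() and cfi_key() above both skip the leading 'hash'
 * member, so the hlist linkage never influences cfi_state equality or its
 * hash value.
 */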
static struct cfi_state *cfi_hash_find_or_add(struct cfi_state *cfi)
{
	struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)];
	struct cfi_state *obj;

	hlist_for_each_entry(obj, head, hash) {
		if (!cficmp(cfi, obj)) {
			nr_cfi_cache++;
			return obj;
		}
	}

	obj = cfi_alloc();
	*obj = *cfi;
	hlist_add_head(&obj->hash, head);

	return obj;
}

static void cfi_hash_add(struct cfi_state *cfi)
{
	struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)];

	hlist_add_head(&cfi->hash, head);
}

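/*
 * Allocate the cfi hash table with at least 2^10 buckets (more for larger
 * objects, per ilog2(size)), backed by an anonymous private mapping.
 */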
static void *cfi_hash_alloc(unsigned long size)
{
	cfi_bits = max(10, ilog2(size));
	cfi_hash = mmap(NULL, sizeof(struct hlist_head) << cfi_bits,
			PROT_READ|PROT_WRITE,
			MAP_PRIVATE|MAP_ANON, -1, 0);
	if (cfi_hash == (void *)-1L) {
		ERROR_GLIBC("mmap fail cfi_hash");
		cfi_hash = NULL;
	} else if (opts.stats) {
		printf("cfi_bits: %d\n", cfi_bits);
	}

	return cfi_hash;
}

static unsigned long nr_insns;
static unsigned long nr_insns_visited;

/*
 * Call the arch-specific instruction decoder for all the instructions and add
 * them to the global instruction list.
 */
static int decode_instructions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	unsigned long offset;
	struct instruction *insn;
	int ret;

	for_each_sec(file, sec) {
		struct instruction *insns = NULL;
		u8 prev_len = 0;
		u8 idx = 0;

		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
			continue;

		if (strcmp(sec->name, ".altinstr_replacement") &&
		    strcmp(sec->name, ".altinstr_aux") &&
		    strncmp(sec->name, ".discard.", 9))
			sec->text = true;

		if (!strcmp(sec->name, ".noinstr.text") ||
		    !strcmp(sec->name, ".entry.text") ||
		    !strcmp(sec->name, ".cpuidle.text") ||
		    !strncmp(sec->name, ".text..__x86.", 13))
			sec->noinstr = true;

		/*
		 * .init.text code is run before userspace and thus doesn't
		 * strictly need retpolines, except for modules, which are
		 * loaded late and very much do need retpolines in their
		 * .init.text.
		 */
		if (!strcmp(sec->name, ".init.text") && !opts.module)
			sec->init = true;

		for (offset = 0; offset < sec->sh.sh_size; offset += insn->len) {
			if (!insns || idx == INSN_CHUNK_MAX) {
				insns = calloc(sizeof(*insn), INSN_CHUNK_SIZE);
				if (!insns) {
					ERROR_GLIBC("calloc");
					return -1;
				}
				idx = 0;
			} else {
				idx++;
			}
			insn = &insns[idx];
			insn->idx = idx;

			INIT_LIST_HEAD(&insn->call_node);
			insn->sec = sec;
			insn->offset = offset;
			insn->prev_len = prev_len;

			ret = arch_decode_instruction(file, sec, offset,
						      sec->sh.sh_size - offset,
						      insn);
			if (ret)
				return ret;

			prev_len = insn->len;

			/*
			 * By default, "ud2" is a dead end unless otherwise
			 * annotated, because GCC 7 inserts it for certain
			 * divide-by-zero cases.
			 */
			if (insn->type == INSN_BUG)
				insn->dead_end = true;

			hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset));
			nr_insns++;
		}

		sec_for_each_sym(sec, func) {
			if (func->type != STT_NOTYPE && func->type != STT_FUNC)
				continue;

			if (func->offset == sec->sh.sh_size) {
				/* Heuristic: likely an "end" symbol */
				if (func->type == STT_NOTYPE)
					continue;
				ERROR("%s(): STT_FUNC at end of section", func->name);
				return -1;
			}

			if (func->embedded_insn || func->alias != func)
				continue;

			if (!find_insn(file, sec, func->offset)) {
				ERROR("%s(): can't find starting instruction", func->name);
				return -1;
			}

			sym_for_each_insn(file, func, insn) {
				insn->sym = func;
				if (func->type == STT_FUNC &&
				    insn->type == INSN_ENDBR &&
				    list_empty(&insn->call_node)) {
					if (insn->offset == func->offset) {
						list_add_tail(&insn->call_node, &file->endbr_list);
						file->nr_endbr++;
					} else {
						file->nr_endbr_int++;
					}
				}
			}
		}
	}

	if (opts.stats)
		printf("nr_insns: %lu\n", nr_insns);

	return 0;
}

/*
 * Read the pv_ops[] .data table to find the static initialized values.
 */
static int add_pv_ops(struct objtool_file *file, const char *symname)
{
	struct symbol *sym, *func;
	unsigned long off, end;
	struct reloc *reloc;
	int idx;

	sym = find_symbol_by_name(file->elf, symname);
	if (!sym)
		return 0;

	off = sym->offset;
	end = off + sym->len;
	for (;;) {
		reloc = find_reloc_by_dest_range(file->elf, sym->sec, off, end - off);
		if (!reloc)
			break;

		idx = (reloc_offset(reloc) - sym->offset) / sizeof(unsigned long);

		func = reloc->sym;
		if (func->type == STT_SECTION)
			func = find_symbol_by_offset(reloc->sym->sec,
						     reloc_addend(reloc));
		if (!func) {
			ERROR_FUNC(reloc->sym->sec, reloc_addend(reloc),
				   "can't find func at %s[%d]", symname, idx);
			return -1;
		}

		if (objtool_pv_add(file, idx, func))
			return -1;

		off = reloc_offset(reloc) + 1;
		if (off > end)
			break;
	}

	return 0;
}

/*
 * Allocate and initialize file->pv_ops[].
 */
static int init_pv_ops(struct objtool_file *file)
{
	static const char *pv_ops_tables[] = {
		"pv_ops",
		"xen_cpu_ops",
		"xen_irq_ops",
		"xen_mmu_ops",
		NULL,
	};
	const char *pv_ops;
	struct symbol *sym;
	int idx, nr, ret;

	if (!opts.noinstr)
		return 0;

	file->pv_ops = NULL;

	sym = find_symbol_by_name(file->elf, "pv_ops");
	if (!sym)
		return 0;

	nr = sym->len / sizeof(unsigned long);
	file->pv_ops = calloc(sizeof(struct pv_state), nr);
	if (!file->pv_ops) {
		ERROR_GLIBC("calloc");
		return -1;
	}

	for (idx = 0; idx < nr; idx++)
		INIT_LIST_HEAD(&file->pv_ops[idx].targets);

	for (idx = 0; (pv_ops = pv_ops_tables[idx]); idx++) {
		ret = add_pv_ops(file, pv_ops);
		if (ret)
			return ret;
	}

	return 0;
}

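/*
 * Each struct static_call_site consists of two 4-byte fields, 'addr' and
 * 'key' (see static_call_types.h), which is why two relocations are
 * created per table entry below and the key reloc lands at entry offset
 * +4.
 */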
static int create_static_call_sections(struct objtool_file *file)
{
	struct static_call_site *site;
	struct section *sec;
	struct instruction *insn;
	struct symbol *key_sym;
	char *key_name, *tmp;
	int idx;

	sec = find_section_by_name(file->elf, ".static_call_sites");
	if (sec) {
		INIT_LIST_HEAD(&file->static_call_list);
		WARN("file already has .static_call_sites section, skipping");
		return 0;
	}

	if (list_empty(&file->static_call_list))
		return 0;

	idx = 0;
	list_for_each_entry(insn, &file->static_call_list, call_node)
		idx++;

	sec = elf_create_section_pair(file->elf, ".static_call_sites",
				      sizeof(*site), idx, idx * 2);
	if (!sec)
		return -1;

	/* Allow modules to modify the low bits of static_call_site::key */
	sec->sh.sh_flags |= SHF_WRITE;

	idx = 0;
	list_for_each_entry(insn, &file->static_call_list, call_node) {

		/* populate reloc for 'addr' */
		if (!elf_init_reloc_text_sym(file->elf, sec,
					     idx * sizeof(*site), idx * 2,
					     insn->sec, insn->offset))
			return -1;

		/* find key symbol */
		key_name = strdup(insn_call_dest(insn)->name);
		if (!key_name) {
			ERROR_GLIBC("strdup");
			return -1;
		}
		if (strncmp(key_name, STATIC_CALL_TRAMP_PREFIX_STR,
			    STATIC_CALL_TRAMP_PREFIX_LEN)) {
			ERROR("static_call: trampoline name malformed: %s", key_name);
			return -1;
		}
		tmp = key_name + STATIC_CALL_TRAMP_PREFIX_LEN - STATIC_CALL_KEY_PREFIX_LEN;
		memcpy(tmp, STATIC_CALL_KEY_PREFIX_STR, STATIC_CALL_KEY_PREFIX_LEN);
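		/*
		 * Illustrative example (prefix values per
		 * static_call_types.h): a trampoline named "__SCT__foo"
		 * has just been rewritten in place to the key name
		 * "__SCK__foo".
		 */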

		key_sym = find_symbol_by_name(file->elf, tmp);
		if (!key_sym) {
			if (!opts.module) {
				ERROR("static_call: can't find static_call_key symbol: %s", tmp);
				return -1;
			}

			/*
			 * For modules, the key might not be exported, which
			 * means the module can make static calls but isn't
			 * allowed to change them.
			 *
			 * In that case we temporarily set the key to be the
			 * trampoline address. This is fixed up in
			 * static_call_add_module().
			 */
			key_sym = insn_call_dest(insn);
		}

		/* populate reloc for 'key' */
		if (!elf_init_reloc_data_sym(file->elf, sec,
					     idx * sizeof(*site) + 4,
					     (idx * 2) + 1, key_sym,
					     is_sibling_call(insn) * STATIC_CALL_SITE_TAIL))
			return -1;

		idx++;
	}

	return 0;
}

static int create_retpoline_sites_sections(struct objtool_file *file)
{
	struct instruction *insn;
	struct section *sec;
	int idx;

	sec = find_section_by_name(file->elf, ".retpoline_sites");
	if (sec) {
		WARN("file already has .retpoline_sites, skipping");
		return 0;
	}

	idx = 0;
	list_for_each_entry(insn, &file->retpoline_call_list, call_node)
		idx++;

	if (!idx)
		return 0;

	sec = elf_create_section_pair(file->elf, ".retpoline_sites",
				      sizeof(int), idx, idx);
	if (!sec)
		return -1;

	idx = 0;
	list_for_each_entry(insn, &file->retpoline_call_list, call_node) {

		if (!elf_init_reloc_text_sym(file->elf, sec,
					     idx * sizeof(int), idx,
					     insn->sec, insn->offset))
			return -1;

		idx++;
	}

	return 0;
}

static int create_return_sites_sections(struct objtool_file *file)
{
	struct instruction *insn;
	struct section *sec;
	int idx;

	sec = find_section_by_name(file->elf, ".return_sites");
	if (sec) {
		WARN("file already has .return_sites, skipping");
		return 0;
	}

	idx = 0;
	list_for_each_entry(insn, &file->return_thunk_list, call_node)
		idx++;

	if (!idx)
		return 0;

	sec = elf_create_section_pair(file->elf, ".return_sites",
				      sizeof(int), idx, idx);
	if (!sec)
		return -1;

	idx = 0;
	list_for_each_entry(insn, &file->return_thunk_list, call_node) {

		if (!elf_init_reloc_text_sym(file->elf, sec,
					     idx * sizeof(int), idx,
					     insn->sec, insn->offset))
			return -1;

		idx++;
	}

	return 0;
}

static int create_ibt_endbr_seal_sections(struct objtool_file *file)
{
	struct instruction *insn;
	struct section *sec;
	int idx;

	sec = find_section_by_name(file->elf, ".ibt_endbr_seal");
	if (sec) {
		WARN("file already has .ibt_endbr_seal, skipping");
		return 0;
	}

	idx = 0;
	list_for_each_entry(insn, &file->endbr_list, call_node)
		idx++;

	if (opts.stats) {
		printf("ibt: ENDBR at function start: %d\n", file->nr_endbr);
		printf("ibt: ENDBR inside functions: %d\n", file->nr_endbr_int);
		printf("ibt: superfluous ENDBR: %d\n", idx);
	}

	if (!idx)
		return 0;

	sec = elf_create_section_pair(file->elf, ".ibt_endbr_seal",
				      sizeof(int), idx, idx);
	if (!sec)
		return -1;

	idx = 0;
	list_for_each_entry(insn, &file->endbr_list, call_node) {

		int *site = (int *)sec->data->d_buf + idx;
		struct symbol *sym = insn->sym;
		*site = 0;

		if (opts.module && sym && sym->type == STT_FUNC &&
		    insn->offset == sym->offset &&
		    (!strcmp(sym->name, "init_module") ||
		     !strcmp(sym->name, "cleanup_module"))) {
			ERROR("%s(): Magic init_module() function name is deprecated, use module_init(fn) instead",
			      sym->name);
			return -1;
		}

		if (!elf_init_reloc_text_sym(file->elf, sec,
					     idx * sizeof(int), idx,
					     insn->sec, insn->offset))
			return -1;

		idx++;
	}

	return 0;
}

static int create_cfi_sections(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *sym;
	int idx;

	sec = find_section_by_name(file->elf, ".cfi_sites");
	if (sec) {
		INIT_LIST_HEAD(&file->call_list);
		WARN("file already has .cfi_sites section, skipping");
		return 0;
	}

	idx = 0;
	for_each_sym(file, sym) {
		if (sym->type != STT_FUNC)
			continue;

		if (strncmp(sym->name, "__cfi_", 6))
			continue;

		idx++;
	}

	sec = elf_create_section_pair(file->elf, ".cfi_sites",
				      sizeof(unsigned int), idx, idx);
	if (!sec)
		return -1;

	idx = 0;
	for_each_sym(file, sym) {
		if (sym->type != STT_FUNC)
			continue;

		if (strncmp(sym->name, "__cfi_", 6))
			continue;

		if (!elf_init_reloc_text_sym(file->elf, sec,
					     idx * sizeof(unsigned int), idx,
					     sym->sec, sym->offset))
			return -1;

		idx++;
	}

	return 0;
}

static int create_mcount_loc_sections(struct objtool_file *file)
{
	size_t addr_size = elf_addr_size(file->elf);
	struct instruction *insn;
	struct section *sec;
	int idx;

	sec = find_section_by_name(file->elf, "__mcount_loc");
	if (sec) {
		INIT_LIST_HEAD(&file->mcount_loc_list);
		WARN("file already has __mcount_loc section, skipping");
		return 0;
	}

	if (list_empty(&file->mcount_loc_list))
		return 0;

	idx = 0;
	list_for_each_entry(insn, &file->mcount_loc_list, call_node)
		idx++;

	sec = elf_create_section_pair(file->elf, "__mcount_loc", addr_size,
				      idx, idx);
	if (!sec)
		return -1;

	sec->sh.sh_addralign = addr_size;

	idx = 0;
	list_for_each_entry(insn, &file->mcount_loc_list, call_node) {

		struct reloc *reloc;

		reloc = elf_init_reloc_text_sym(file->elf, sec, idx * addr_size, idx,
						insn->sec, insn->offset);
		if (!reloc)
			return -1;

		set_reloc_type(file->elf, reloc, addr_size == 8 ? R_ABS64 : R_ABS32);

		idx++;
	}

	return 0;
}

static int create_direct_call_sections(struct objtool_file *file)
{
	struct instruction *insn;
	struct section *sec;
	int idx;

	sec = find_section_by_name(file->elf, ".call_sites");
	if (sec) {
		INIT_LIST_HEAD(&file->call_list);
		WARN("file already has .call_sites section, skipping");
		return 0;
	}

	if (list_empty(&file->call_list))
		return 0;

	idx = 0;
	list_for_each_entry(insn, &file->call_list, call_node)
		idx++;

	sec = elf_create_section_pair(file->elf, ".call_sites",
				      sizeof(unsigned int), idx, idx);
	if (!sec)
		return -1;

	idx = 0;
	list_for_each_entry(insn, &file->call_list, call_node) {

		if (!elf_init_reloc_text_sym(file->elf, sec,
					     idx * sizeof(unsigned int), idx,
					     insn->sec, insn->offset))
			return -1;

		idx++;
	}

	return 0;
}

/*
 * Warnings shouldn't be reported for ignored functions.
 */
static int add_ignores(struct objtool_file *file)
{
	struct section *rsec;
	struct symbol *func;
	struct reloc *reloc;

	rsec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
	if (!rsec)
		return 0;

	for_each_reloc(rsec, reloc) {
		switch (reloc->sym->type) {
		case STT_FUNC:
			func = reloc->sym;
			break;

		case STT_SECTION:
			func = find_func_by_offset(reloc->sym->sec, reloc_addend(reloc));
			if (!func)
				continue;
			break;

		default:
			ERROR("unexpected relocation symbol type in %s: %d",
			      rsec->name, reloc->sym->type);
			return -1;
		}

		func->ignore = true;
		if (func->cfunc)
			func->cfunc->ignore = true;
	}

	return 0;
}

/*
 * This is a whitelist of functions that are allowed to be called with AC set.
 * The list is meant to be minimal and only contains compiler instrumentation
 * ABI and a few functions used to implement *_{to,from}_user() functions.
 *
 * These functions must not directly change AC, but may PUSHF/POPF.
 */
static const char *uaccess_safe_builtin[] = {
	/* KASAN */
	"kasan_report",
	"kasan_check_range",
	/* KASAN out-of-line */
	"__asan_loadN_noabort",
	"__asan_load1_noabort",
	"__asan_load2_noabort",
	"__asan_load4_noabort",
	"__asan_load8_noabort",
	"__asan_load16_noabort",
	"__asan_storeN_noabort",
	"__asan_store1_noabort",
	"__asan_store2_noabort",
	"__asan_store4_noabort",
	"__asan_store8_noabort",
	"__asan_store16_noabort",
	"__kasan_check_read",
	"__kasan_check_write",
	/* KASAN in-line */
	"__asan_report_load_n_noabort",
	"__asan_report_load1_noabort",
	"__asan_report_load2_noabort",
	"__asan_report_load4_noabort",
	"__asan_report_load8_noabort",
	"__asan_report_load16_noabort",
	"__asan_report_store_n_noabort",
	"__asan_report_store1_noabort",
	"__asan_report_store2_noabort",
	"__asan_report_store4_noabort",
	"__asan_report_store8_noabort",
	"__asan_report_store16_noabort",
	/* KCSAN */
	"__kcsan_check_access",
	"__kcsan_mb",
	"__kcsan_wmb",
	"__kcsan_rmb",
	"__kcsan_release",
	"kcsan_found_watchpoint",
	"kcsan_setup_watchpoint",
	"kcsan_check_scoped_accesses",
	"kcsan_disable_current",
	"kcsan_enable_current_nowarn",
	/* KCSAN/TSAN */
	"__tsan_func_entry",
	"__tsan_func_exit",
	"__tsan_read_range",
	"__tsan_write_range",
	"__tsan_read1",
	"__tsan_read2",
	"__tsan_read4",
	"__tsan_read8",
	"__tsan_read16",
	"__tsan_write1",
	"__tsan_write2",
	"__tsan_write4",
	"__tsan_write8",
	"__tsan_write16",
	"__tsan_read_write1",
	"__tsan_read_write2",
	"__tsan_read_write4",
	"__tsan_read_write8",
	"__tsan_read_write16",
	"__tsan_volatile_read1",
	"__tsan_volatile_read2",
	"__tsan_volatile_read4",
	"__tsan_volatile_read8",
	"__tsan_volatile_read16",
	"__tsan_volatile_write1",
	"__tsan_volatile_write2",
	"__tsan_volatile_write4",
	"__tsan_volatile_write8",
	"__tsan_volatile_write16",
	"__tsan_atomic8_load",
	"__tsan_atomic16_load",
	"__tsan_atomic32_load",
	"__tsan_atomic64_load",
	"__tsan_atomic8_store",
	"__tsan_atomic16_store",
	"__tsan_atomic32_store",
	"__tsan_atomic64_store",
	"__tsan_atomic8_exchange",
	"__tsan_atomic16_exchange",
	"__tsan_atomic32_exchange",
	"__tsan_atomic64_exchange",
	"__tsan_atomic8_fetch_add",
	"__tsan_atomic16_fetch_add",
	"__tsan_atomic32_fetch_add",
	"__tsan_atomic64_fetch_add",
	"__tsan_atomic8_fetch_sub",
	"__tsan_atomic16_fetch_sub",
	"__tsan_atomic32_fetch_sub",
	"__tsan_atomic64_fetch_sub",
	"__tsan_atomic8_fetch_and",
	"__tsan_atomic16_fetch_and",
	"__tsan_atomic32_fetch_and",
	"__tsan_atomic64_fetch_and",
	"__tsan_atomic8_fetch_or",
	"__tsan_atomic16_fetch_or",
	"__tsan_atomic32_fetch_or",
	"__tsan_atomic64_fetch_or",
	"__tsan_atomic8_fetch_xor",
	"__tsan_atomic16_fetch_xor",
	"__tsan_atomic32_fetch_xor",
	"__tsan_atomic64_fetch_xor",
	"__tsan_atomic8_fetch_nand",
	"__tsan_atomic16_fetch_nand",
	"__tsan_atomic32_fetch_nand",
	"__tsan_atomic64_fetch_nand",
	"__tsan_atomic8_compare_exchange_strong",
	"__tsan_atomic16_compare_exchange_strong",
	"__tsan_atomic32_compare_exchange_strong",
	"__tsan_atomic64_compare_exchange_strong",
	"__tsan_atomic8_compare_exchange_weak",
	"__tsan_atomic16_compare_exchange_weak",
	"__tsan_atomic32_compare_exchange_weak",
	"__tsan_atomic64_compare_exchange_weak",
	"__tsan_atomic8_compare_exchange_val",
	"__tsan_atomic16_compare_exchange_val",
	"__tsan_atomic32_compare_exchange_val",
	"__tsan_atomic64_compare_exchange_val",
	"__tsan_atomic_thread_fence",
	"__tsan_atomic_signal_fence",
	"__tsan_unaligned_read16",
	"__tsan_unaligned_write16",
	/* KCOV */
	"write_comp_data",
	"check_kcov_mode",
	"__sanitizer_cov_trace_pc",
	"__sanitizer_cov_trace_const_cmp1",
	"__sanitizer_cov_trace_const_cmp2",
	"__sanitizer_cov_trace_const_cmp4",
	"__sanitizer_cov_trace_const_cmp8",
	"__sanitizer_cov_trace_cmp1",
	"__sanitizer_cov_trace_cmp2",
	"__sanitizer_cov_trace_cmp4",
	"__sanitizer_cov_trace_cmp8",
	"__sanitizer_cov_trace_switch",
	/* KMSAN */
	"kmsan_copy_to_user",
	"kmsan_disable_current",
	"kmsan_enable_current",
	"kmsan_report",
	"kmsan_unpoison_entry_regs",
	"kmsan_unpoison_memory",
	"__msan_chain_origin",
	"__msan_get_context_state",
	"__msan_instrument_asm_store",
	"__msan_metadata_ptr_for_load_1",
	"__msan_metadata_ptr_for_load_2",
	"__msan_metadata_ptr_for_load_4",
	"__msan_metadata_ptr_for_load_8",
	"__msan_metadata_ptr_for_load_n",
	"__msan_metadata_ptr_for_store_1",
	"__msan_metadata_ptr_for_store_2",
	"__msan_metadata_ptr_for_store_4",
	"__msan_metadata_ptr_for_store_8",
	"__msan_metadata_ptr_for_store_n",
	"__msan_poison_alloca",
	"__msan_warning",
	/* UBSAN */
	"ubsan_type_mismatch_common",
	"__ubsan_handle_type_mismatch",
	"__ubsan_handle_type_mismatch_v1",
	"__ubsan_handle_shift_out_of_bounds",
	"__ubsan_handle_load_invalid_value",
	/* KSTACK_ERASE */
	"__sanitizer_cov_stack_depth",
	/* TRACE_BRANCH_PROFILING */
	"ftrace_likely_update",
	/* STACKPROTECTOR */
	"__stack_chk_fail",
	/* misc */
	"csum_partial_copy_generic",
	"copy_mc_fragile",
	"copy_mc_fragile_handle_tail",
	"copy_mc_enhanced_fast_string",
	"rep_stos_alternative",
	"rep_movs_alternative",
	"__copy_user_nocache",
	NULL
};

static void add_uaccess_safe(struct objtool_file *file)
{
	struct symbol *func;
	const char **name;

	if (!opts.uaccess)
		return;

	for (name = uaccess_safe_builtin; *name; name++) {
		func = find_symbol_by_name(file->elf, *name);
		if (!func)
			continue;

		func->uaccess_safe = true;
	}
}

/*
 * Symbols that replace INSN_CALL_DYNAMIC; every (tail) call to such a symbol
 * will be added to the .retpoline_sites section.
 */
__weak bool arch_is_retpoline(struct symbol *sym)
{
	return false;
}

/*
 * Symbols that replace INSN_RETURN; every (tail) call to such a symbol
 * will be added to the .return_sites section.
 */
__weak bool arch_is_rethunk(struct symbol *sym)
{
	return false;
}

/*
 * Symbols that are embedded inside other instructions, because sometimes crazy
 * code exists. These are mostly ignored for validation purposes.
 */
__weak bool arch_is_embedded_insn(struct symbol *sym)
{
	return false;
}

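/*
 * Negative lookups are cached via insn->no_reloc, so repeated queries for
 * relocation-less instructions stay cheap.
 */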
static struct reloc *insn_reloc(struct objtool_file *file, struct instruction *insn)
{
	struct reloc *reloc;

	if (insn->no_reloc)
		return NULL;

	if (!file)
		return NULL;

	reloc = find_reloc_by_dest_range(file->elf, insn->sec,
					 insn->offset, insn->len);
	if (!reloc) {
		insn->no_reloc = 1;
		return NULL;
	}

	return reloc;
}

static void remove_insn_ops(struct instruction *insn)
{
	struct stack_op *op, *next;

	for (op = insn->stack_ops; op; op = next) {
		next = op->next;
		free(op);
	}
	insn->stack_ops = NULL;
}

static int annotate_call_site(struct objtool_file *file,
			      struct instruction *insn, bool sibling)
{
	struct reloc *reloc = insn_reloc(file, insn);
	struct symbol *sym = insn_call_dest(insn);

	if (!sym)
		sym = reloc->sym;

	if (sym->static_call_tramp) {
		list_add_tail(&insn->call_node, &file->static_call_list);
		return 0;
	}

	if (sym->retpoline_thunk) {
		list_add_tail(&insn->call_node, &file->retpoline_call_list);
		return 0;
	}

	/*
	 * Many compilers cannot disable KCOV or sanitizer calls with a
	 * function attribute, so they need a little help: NOP out any such
	 * calls in noinstr text.
	 */
	if (opts.hack_noinstr && insn->sec->noinstr && sym->profiling_func) {
		if (reloc)
			set_reloc_type(file->elf, reloc, R_NONE);

		if (elf_write_insn(file->elf, insn->sec,
				   insn->offset, insn->len,
				   sibling ? arch_ret_insn(insn->len)
					   : arch_nop_insn(insn->len))) {
			return -1;
		}

		insn->type = sibling ? INSN_RETURN : INSN_NOP;

		if (sibling) {
			/*
			 * We've replaced the tail-call JMP insn by two new
			 * insns: RET; INT3, except we only have a single
			 * struct insn here. Mark it retpoline_safe to avoid
			 * the SLS warning, instead of adding another insn.
			 */
			insn->retpoline_safe = true;
		}

		return 0;
	}

	if (opts.mcount && sym->fentry) {
		if (sibling)
			WARN_INSN(insn, "tail call to __fentry__ !?!?");
		if (opts.mnop) {
			if (reloc)
				set_reloc_type(file->elf, reloc, R_NONE);

			if (elf_write_insn(file->elf, insn->sec,
					   insn->offset, insn->len,
					   arch_nop_insn(insn->len))) {
				return -1;
			}

			insn->type = INSN_NOP;
		}

		list_add_tail(&insn->call_node, &file->mcount_loc_list);
		return 0;
	}

	if (insn->type == INSN_CALL && !insn->sec->init &&
	    !insn->_call_dest->embedded_insn)
		list_add_tail(&insn->call_node, &file->call_list);

	if (!sibling && dead_end_function(file, sym))
		insn->dead_end = true;

	return 0;
}

static int add_call_dest(struct objtool_file *file, struct instruction *insn,
			 struct symbol *dest, bool sibling)
{
	insn->_call_dest = dest;
	if (!dest)
		return 0;

	/*
	 * Whatever stack impact regular CALLs have, should be undone
	 * by the RETURN of the called function.
	 *
	 * Annotated intra-function calls retain the stack_ops but
	 * are converted to JUMP, see read_intra_function_calls().
	 */
	remove_insn_ops(insn);

	return annotate_call_site(file, insn, sibling);
}

static int add_retpoline_call(struct objtool_file *file, struct instruction *insn)
{
	/*
	 * Retpoline calls/jumps are really dynamic calls/jumps in disguise,
	 * so convert them accordingly.
	 */
	switch (insn->type) {
	case INSN_CALL:
		insn->type = INSN_CALL_DYNAMIC;
		break;
	case INSN_JUMP_UNCONDITIONAL:
		insn->type = INSN_JUMP_DYNAMIC;
		break;
	case INSN_JUMP_CONDITIONAL:
		insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
		break;
	default:
		return 0;
	}

	insn->retpoline_safe = true;

	/*
	 * Whatever stack impact regular CALLs have, should be undone
	 * by the RETURN of the called function.
	 *
	 * Annotated intra-function calls retain the stack_ops but
	 * are converted to JUMP, see read_intra_function_calls().
	 */
	remove_insn_ops(insn);

	return annotate_call_site(file, insn, false);
}

static void add_return_call(struct objtool_file *file, struct instruction *insn, bool add)
{
	/*
	 * Return thunk tail calls are really just returns in disguise,
	 * so convert them accordingly.
	 */
	insn->type = INSN_RETURN;
	insn->retpoline_safe = true;

	if (add)
		list_add_tail(&insn->call_node, &file->return_thunk_list);
}

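/*
 * With IBT, a function starts with an ENDBR instruction; a direct CALL/JMP
 * may legitimately target the byte just past it, which is treated below as
 * the function start as well.
 */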
static bool is_first_func_insn(struct objtool_file *file,
			       struct instruction *insn, struct symbol *sym)
{
	if (insn->offset == sym->offset)
		return true;

	/* Allow direct CALL/JMP past ENDBR */
	if (opts.ibt) {
		struct instruction *prev = prev_insn_same_sym(file, insn);

		if (prev && prev->type == INSN_ENDBR &&
		    insn->offset == sym->offset + prev->len)
			return true;
	}

	return false;
}

/*
 * A sibling call is a tail-call to another symbol -- to differentiate from a
 * recursive tail-call which is to the same symbol.
 */
static bool jump_is_sibling_call(struct objtool_file *file,
				 struct instruction *from, struct instruction *to)
{
	struct symbol *fs = from->sym;
	struct symbol *ts = to->sym;

	/* Not a sibling call if from/to a symbol hole */
	if (!fs || !ts)
		return false;

	/* Not a sibling call if not targeting the start of a symbol. */
	if (!is_first_func_insn(file, to, ts))
		return false;

	/* Disallow sibling calls into STT_NOTYPE */
	if (ts->type == STT_NOTYPE)
		return false;

	/* Must not be self to be a sibling */
	return fs->pfunc != ts->pfunc;
}

/*
 * Find the destination instructions for all jumps.
 */
static int add_jump_destinations(struct objtool_file *file)
{
	struct instruction *insn, *jump_dest;
	struct reloc *reloc;
	struct section *dest_sec;
	unsigned long dest_off;
	int ret;

	for_each_insn(file, insn) {
		struct symbol *func = insn_func(insn);

		if (insn->jump_dest) {
			/*
			 * handle_group_alt() may have previously set
			 * 'jump_dest' for some alternatives.
			 */
			continue;
		}
		if (!is_static_jump(insn))
			continue;

		reloc = insn_reloc(file, insn);
		if (!reloc) {
			dest_sec = insn->sec;
			dest_off = arch_jump_destination(insn);
		} else if (reloc->sym->type == STT_SECTION) {
			dest_sec = reloc->sym->sec;
			dest_off = arch_dest_reloc_offset(reloc_addend(reloc));
		} else if (reloc->sym->retpoline_thunk) {
			ret = add_retpoline_call(file, insn);
			if (ret)
				return ret;
			continue;
		} else if (reloc->sym->return_thunk) {
			add_return_call(file, insn, true);
			continue;
		} else if (func) {
			/*
			 * External sibling call or internal sibling call with
			 * STT_FUNC reloc.
			 */
			ret = add_call_dest(file, insn, reloc->sym, true);
			if (ret)
				return ret;
			continue;
		} else if (reloc->sym->sec->idx) {
			dest_sec = reloc->sym->sec;
			dest_off = reloc->sym->sym.st_value +
				   arch_dest_reloc_offset(reloc_addend(reloc));
		} else {
			/* non-func asm code jumping to another file */
			continue;
		}

		jump_dest = find_insn(file, dest_sec, dest_off);
		if (!jump_dest) {
			struct symbol *sym = find_symbol_by_offset(dest_sec, dest_off);

			/*
			 * This is a special case for retbleed_untrain_ret().
			 * It jumps to __x86_return_thunk(), but objtool
			 * can't find the thunk's starting RET
			 * instruction, because the RET is also in the
			 * middle of another instruction. Objtool only
			 * knows about the outer instruction.
			 */
			if (sym && sym->embedded_insn) {
				add_return_call(file, insn, false);
				continue;
			}

			/*
			 * GCOV/KCOV dead code can jump to the end of the
			 * function/section.
			 */
			if (file->ignore_unreachables && func &&
			    dest_sec == insn->sec &&
			    dest_off == func->offset + func->len)
				continue;

			ERROR_INSN(insn, "can't find jump dest instruction at %s+0x%lx",
				   dest_sec->name, dest_off);
			return -1;
		}

		/*
		 * An intra-TU jump in retpoline.o might not have a relocation
		 * for its jump dest, in which case the above
		 * add_{retpoline,return}_call() didn't happen.
		 */
		if (jump_dest->sym && jump_dest->offset == jump_dest->sym->offset) {
			if (jump_dest->sym->retpoline_thunk) {
				ret = add_retpoline_call(file, insn);
				if (ret)
					return ret;
				continue;
			}
			if (jump_dest->sym->return_thunk) {
				add_return_call(file, insn, true);
				continue;
			}
		}

		/*
		 * Cross-function jump.
		 */
		if (func && insn_func(jump_dest) && func != insn_func(jump_dest)) {

			/*
			 * For GCC 8+, create parent/child links for any cold
			 * subfunctions. This is _mostly_ redundant with a
			 * similar initialization in read_symbols().
			 *
			 * If a function has aliases, we want the *first* such
			 * function in the symbol table to be the subfunction's
			 * parent. In that case we overwrite the
			 * initialization done in read_symbols().
			 *
			 * However this code can't completely replace the
			 * read_symbols() code because this doesn't detect the
			 * case where the parent function's only reference to a
			 * subfunction is through a jump table.
			 */
			if (!strstr(func->name, ".cold") &&
			    strstr(insn_func(jump_dest)->name, ".cold")) {
				func->cfunc = insn_func(jump_dest);
				insn_func(jump_dest)->pfunc = func;
			}
		}

		if (jump_is_sibling_call(file, insn, jump_dest)) {
			/*
			 * Internal sibling call without reloc or with
			 * STT_SECTION reloc.
			 */
			ret = add_call_dest(file, insn, insn_func(jump_dest), true);
			if (ret)
				return ret;
			continue;
		}

		insn->jump_dest = jump_dest;
	}

	return 0;
}

static struct symbol *find_call_destination(struct section *sec, unsigned long offset)
{
	struct symbol *call_dest;

	call_dest = find_func_by_offset(sec, offset);
	if (!call_dest)
		call_dest = find_symbol_by_offset(sec, offset);

	return call_dest;
}

/*
 * Find the destination instructions for all calls.
 */
static int add_call_destinations(struct objtool_file *file)
{
	struct instruction *insn;
	unsigned long dest_off;
	struct symbol *dest;
	struct reloc *reloc;
	int ret;

	for_each_insn(file, insn) {
		struct symbol *func = insn_func(insn);
		if (insn->type != INSN_CALL)
			continue;

		reloc = insn_reloc(file, insn);
		if (!reloc) {
			dest_off = arch_jump_destination(insn);
			dest = find_call_destination(insn->sec, dest_off);

			ret = add_call_dest(file, insn, dest, false);
			if (ret)
				return ret;

			if (func && func->ignore)
				continue;

			if (!insn_call_dest(insn)) {
				ERROR_INSN(insn, "unannotated intra-function call");
				return -1;
			}

			if (func && insn_call_dest(insn)->type != STT_FUNC) {
				ERROR_INSN(insn, "unsupported call to non-function");
				return -1;
			}

		} else if (reloc->sym->type == STT_SECTION) {
			dest_off = arch_dest_reloc_offset(reloc_addend(reloc));
			dest = find_call_destination(reloc->sym->sec, dest_off);
			if (!dest) {
				ERROR_INSN(insn, "can't find call dest symbol at %s+0x%lx",
					   reloc->sym->sec->name, dest_off);
				return -1;
			}

			ret = add_call_dest(file, insn, dest, false);
			if (ret)
				return ret;

		} else if (reloc->sym->retpoline_thunk) {
			ret = add_retpoline_call(file, insn);
			if (ret)
				return ret;

		} else {
			ret = add_call_dest(file, insn, reloc->sym, false);
			if (ret)
				return ret;
		}
	}

	return 0;
}

/*
 * The .alternatives section requires some extra special care over and above
 * other special sections because alternatives are patched in place.
 */
static int handle_group_alt(struct objtool_file *file,
			    struct special_alt *special_alt,
			    struct instruction *orig_insn,
			    struct instruction **new_insn)
{
	struct instruction *last_new_insn = NULL, *insn, *nop = NULL;
	struct alt_group *orig_alt_group, *new_alt_group;
	unsigned long dest_off;

	orig_alt_group = orig_insn->alt_group;
	if (!orig_alt_group) {
		struct instruction *last_orig_insn = NULL;

		orig_alt_group = calloc(1, sizeof(*orig_alt_group));
		if (!orig_alt_group) {
			ERROR_GLIBC("calloc");
			return -1;
		}
		orig_alt_group->cfi = calloc(special_alt->orig_len,
					     sizeof(struct cfi_state *));
		if (!orig_alt_group->cfi) {
			ERROR_GLIBC("calloc");
			return -1;
		}

		insn = orig_insn;
		sec_for_each_insn_from(file, insn) {
			if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
				break;

			insn->alt_group = orig_alt_group;
			last_orig_insn = insn;
		}
		orig_alt_group->orig_group = NULL;
		orig_alt_group->first_insn = orig_insn;
		orig_alt_group->last_insn = last_orig_insn;
		orig_alt_group->nop = NULL;
		orig_alt_group->ignore = orig_insn->ignore_alts;
	} else {
		if (orig_alt_group->last_insn->offset + orig_alt_group->last_insn->len -
		    orig_alt_group->first_insn->offset != special_alt->orig_len) {
			ERROR_INSN(orig_insn, "weirdly overlapping alternative! %ld != %d",
				   orig_alt_group->last_insn->offset +
				   orig_alt_group->last_insn->len -
				   orig_alt_group->first_insn->offset,
				   special_alt->orig_len);
			return -1;
		}
	}

	new_alt_group = calloc(1, sizeof(*new_alt_group));
	if (!new_alt_group) {
		ERROR_GLIBC("calloc");
		return -1;
	}

	if (special_alt->new_len < special_alt->orig_len) {
		/*
		 * Insert a fake nop at the end to make the replacement
		 * alt_group the same size as the original. This is needed to
		 * allow propagate_alt_cfi() to do its magic. When the last
		 * instruction affects the stack, the instruction after it (the
		 * nop) will propagate the new state to the shared CFI array.
		 */
		nop = calloc(1, sizeof(*nop));
		if (!nop) {
			ERROR_GLIBC("calloc");
			return -1;
		}
		memset(nop, 0, sizeof(*nop));

		nop->sec = special_alt->new_sec;
		nop->offset = special_alt->new_off + special_alt->new_len;
		nop->len = special_alt->orig_len - special_alt->new_len;
		nop->type = INSN_NOP;
		nop->sym = orig_insn->sym;
		nop->alt_group = new_alt_group;
	}

	if (!special_alt->new_len) {
		*new_insn = nop;
		goto end;
	}

	insn = *new_insn;
	sec_for_each_insn_from(file, insn) {
		struct reloc *alt_reloc;

		if (insn->offset >= special_alt->new_off + special_alt->new_len)
			break;

		last_new_insn = insn;

		insn->sym = orig_insn->sym;
		insn->alt_group = new_alt_group;

		/*
		 * Since alternative replacement code is copy/pasted by the
		 * kernel after applying relocations, generally such code can't
		 * have relative-address relocation references to outside the
		 * .altinstr_replacement section, unless the arch's
		 * alternatives code can adjust the relative offsets
		 * accordingly.
		 */
		alt_reloc = insn_reloc(file, insn);
		if (alt_reloc && arch_pc_relative_reloc(alt_reloc) &&
		    !arch_support_alt_relocation(special_alt, insn, alt_reloc)) {

			ERROR_INSN(insn, "unsupported relocation in alternatives section");
			return -1;
		}

		if (!is_static_jump(insn))
			continue;

		if (!insn->immediate)
			continue;

		dest_off = arch_jump_destination(insn);
		if (dest_off == special_alt->new_off + special_alt->new_len) {
			insn->jump_dest = next_insn_same_sec(file, orig_alt_group->last_insn);
			if (!insn->jump_dest) {
				ERROR_INSN(insn, "can't find alternative jump destination");
				return -1;
			}
		}
	}

	if (!last_new_insn) {
		ERROR_FUNC(special_alt->new_sec, special_alt->new_off,
			   "can't find last new alternative instruction");
		return -1;
	}

end:
	new_alt_group->orig_group = orig_alt_group;
	new_alt_group->first_insn = *new_insn;
	new_alt_group->last_insn = last_new_insn;
	new_alt_group->nop = nop;
	new_alt_group->ignore = (*new_insn)->ignore_alts;
	new_alt_group->cfi = orig_alt_group->cfi;
	return 0;
}

/*
 * A jump table entry can either convert a nop to a jump or a jump to a nop.
 * If the original instruction is a jump, make the alt entry an effective nop
 * by just skipping the original instruction.
 */
static int handle_jump_alt(struct objtool_file *file,
			   struct special_alt *special_alt,
			   struct instruction *orig_insn,
			   struct instruction **new_insn)
{
	if (orig_insn->type != INSN_JUMP_UNCONDITIONAL &&
	    orig_insn->type != INSN_NOP) {

		ERROR_INSN(orig_insn, "unsupported instruction at jump label");
		return -1;
	}

	if (opts.hack_jump_label && special_alt->key_addend & 2) {
		struct reloc *reloc = insn_reloc(file, orig_insn);

		if (reloc)
			set_reloc_type(file->elf, reloc, R_NONE);

		if (elf_write_insn(file->elf, orig_insn->sec,
				   orig_insn->offset, orig_insn->len,
				   arch_nop_insn(orig_insn->len))) {
			return -1;
		}

		orig_insn->type = INSN_NOP;
	}

	if (orig_insn->type == INSN_NOP) {
		if (orig_insn->len == 2)
			file->jl_nop_short++;
		else
			file->jl_nop_long++;

		return 0;
	}

	if (orig_insn->len == 2)
		file->jl_short++;
	else
		file->jl_long++;

	*new_insn = next_insn_same_sec(file, orig_insn);
	return 0;
}

/*
 * Read all the special sections which have alternate instructions which can be
 * patched in or redirected to at runtime. Each instruction having alternate
 * instruction(s) has them added to its insn->alts list, which will be
 * traversed in validate_branch().
 */
static int add_special_section_alts(struct objtool_file *file)
{
	struct list_head special_alts;
	struct instruction *orig_insn, *new_insn;
	struct special_alt *special_alt, *tmp;
	struct alternative *alt;
	int ret;

	if (special_get_alts(file->elf, &special_alts))
		return -1;

	list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {

		orig_insn = find_insn(file, special_alt->orig_sec,
				      special_alt->orig_off);
		if (!orig_insn) {
			ERROR_FUNC(special_alt->orig_sec, special_alt->orig_off,
				   "special: can't find orig instruction");
			return -1;
		}

		new_insn = NULL;
		if (!special_alt->group || special_alt->new_len) {
			new_insn = find_insn(file, special_alt->new_sec,
					     special_alt->new_off);
			if (!new_insn) {
				ERROR_FUNC(special_alt->new_sec, special_alt->new_off,
					   "special: can't find new instruction");
				return -1;
			}
		}

		if (special_alt->group) {
			if (!special_alt->orig_len) {
				ERROR_INSN(orig_insn, "empty alternative entry");
				continue;
			}

			ret = handle_group_alt(file, special_alt, orig_insn,
					       &new_insn);
			if (ret)
				return ret;

		} else if (special_alt->jump_or_nop) {
			ret = handle_jump_alt(file, special_alt, orig_insn,
					      &new_insn);
			if (ret)
				return ret;
		}

		alt = calloc(1, sizeof(*alt));
		if (!alt) {
			ERROR_GLIBC("calloc");
			return -1;
		}

		alt->insn = new_insn;
		alt->next = orig_insn->alts;
		orig_insn->alts = alt;

		list_del(&special_alt->list);
		free(special_alt);
	}

	if (opts.stats) {
		printf("jl\\\tNOP\tJMP\n");
		printf("short:\t%ld\t%ld\n", file->jl_nop_short, file->jl_short);
		printf("long:\t%ld\t%ld\n", file->jl_nop_long, file->jl_long);
	}

	return 0;
}

__weak unsigned long arch_jump_table_sym_offset(struct reloc *reloc, struct reloc *table)
{
	return reloc->sym->offset + reloc_addend(reloc);
}

static int add_jump_table(struct objtool_file *file, struct instruction *insn)
{
	unsigned long table_size = insn_jump_table_size(insn);
	struct symbol *pfunc = insn_func(insn)->pfunc;
	struct reloc *table = insn_jump_table(insn);
	struct instruction *dest_insn;
	unsigned int prev_offset = 0;
	struct reloc *reloc = table;
	struct alternative *alt;
	unsigned long sym_offset;

	/*
	 * Each @reloc is a switch table relocation which points to the target
	 * instruction.
	 */
	for_each_reloc_from(table->sec, reloc) {

		/* Check for the end of the table: */
		if (table_size && reloc_offset(reloc) - reloc_offset(table) >= table_size)
			break;
		if (reloc != table && is_jump_table(reloc))
			break;

		/* Make sure the table entries are consecutive: */
		if (prev_offset && reloc_offset(reloc) != prev_offset + arch_reloc_size(reloc))
			break;

		sym_offset = arch_jump_table_sym_offset(reloc, table);

		/* Detect function pointers from contiguous objects: */
		if (reloc->sym->sec == pfunc->sec && sym_offset == pfunc->offset)
			break;

		/*
		 * Clang sometimes leaves dangling unused jump table entries
		 * which point to the end of the function. Ignore them.
		 */
		if (reloc->sym->sec == pfunc->sec &&
		    sym_offset == pfunc->offset + pfunc->len)
			goto next;

		dest_insn = find_insn(file, reloc->sym->sec, sym_offset);
		if (!dest_insn)
			break;

		/* Make sure the destination is in the same function: */
		if (!insn_func(dest_insn) || insn_func(dest_insn)->pfunc != pfunc)
			break;

		alt = calloc(1, sizeof(*alt));
		if (!alt) {
			ERROR_GLIBC("calloc");
			return -1;
		}

		alt->insn = dest_insn;
		alt->next = insn->alts;
		insn->alts = alt;
next:
		prev_offset = reloc_offset(reloc);
	}

	if (!prev_offset) {
		ERROR_INSN(insn, "can't find switch jump table");
		return -1;
	}

	return 0;
}

/*
 * find_jump_table() - Given a dynamic jump, find the switch jump table
 * associated with it.
 */
static void find_jump_table(struct objtool_file *file, struct symbol *func,
			    struct instruction *insn)
{
	struct reloc *table_reloc;
	struct instruction *dest_insn, *orig_insn = insn;
	unsigned long table_size;
	unsigned long sym_offset;

	/*
	 * Backward search using the @first_jump_src links; these help avoid
	 * much of the 'in between' code, which could otherwise confuse us.
	 */
	for (;
	     insn && insn_func(insn) && insn_func(insn)->pfunc == func;
	     insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {

		if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
			break;

		/* allow small jumps within the range */
		if (insn->type == INSN_JUMP_UNCONDITIONAL &&
		    insn->jump_dest &&
		    (insn->jump_dest->offset <= insn->offset ||
		     insn->jump_dest->offset > orig_insn->offset))
			break;

		table_reloc = arch_find_switch_table(file, insn, &table_size);
		if (!table_reloc)
			continue;

		sym_offset = table_reloc->sym->offset + reloc_addend(table_reloc);

		dest_insn = find_insn(file, table_reloc->sym->sec, sym_offset);
		if (!dest_insn || !insn_func(dest_insn) || insn_func(dest_insn)->pfunc != func)
			continue;

		set_jump_table(table_reloc);
		orig_insn->_jump_table = table_reloc;
		orig_insn->_jump_table_size = table_size;

		break;
	}
}

/*
2107
* First pass: Mark the head of each jump table so that in the next pass,
2108
* we know when a given jump table ends and the next one starts.
2109
*/
2110
static void mark_func_jump_tables(struct objtool_file *file,
2111
struct symbol *func)
2112
{
2113
struct instruction *insn, *last = NULL;
2114
2115
func_for_each_insn(file, func, insn) {
2116
if (!last)
2117
last = insn;
2118
2119
/*
2120
* Store back-pointers for unconditional forward jumps such
2121
* that find_jump_table() can back-track using those and
2122
* avoid some potentially confusing code.
2123
*/
2124
if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
2125
insn->offset > last->offset &&
2126
insn->jump_dest->offset > insn->offset &&
2127
!insn->jump_dest->first_jump_src) {
2128
2129
insn->jump_dest->first_jump_src = insn;
2130
last = insn->jump_dest;
2131
}
2132
2133
if (insn->type != INSN_JUMP_DYNAMIC)
2134
continue;
2135
2136
find_jump_table(file, func, insn);
2137
}
2138
}
2139
2140
static int add_func_jump_tables(struct objtool_file *file,
2141
struct symbol *func)
2142
{
2143
struct instruction *insn;
2144
int ret;
2145
2146
func_for_each_insn(file, func, insn) {
2147
if (!insn_jump_table(insn))
2148
continue;
2149
2150
ret = add_jump_table(file, insn);
2151
if (ret)
2152
return ret;
2153
}
2154
2155
return 0;
2156
}
2157
2158
/*
2159
* For some switch statements, gcc generates a jump table in the .rodata
2160
* section which contains a list of addresses within the function to jump to.
2161
* This finds these jump tables and adds them to the insn->alts lists.
2162
*/
2163
static int add_jump_table_alts(struct objtool_file *file)
2164
{
2165
struct symbol *func;
2166
int ret;
2167
2168
if (!file->rodata)
2169
return 0;
2170
2171
for_each_sym(file, func) {
2172
if (func->type != STT_FUNC)
2173
continue;
2174
2175
mark_func_jump_tables(file, func);
2176
ret = add_func_jump_tables(file, func);
2177
if (ret)
2178
return ret;
2179
}
2180
2181
return 0;
2182
}
2183
2184
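/*
 * Initialize a CFI state to what it looks like at function entry: CFA and
 * callee-saved registers at their architecture-defined initial positions.
 */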
static void set_func_state(struct cfi_state *state)
{
	state->cfa = initial_func_cfi.cfa;
	memcpy(&state->regs, &initial_func_cfi.regs,
	       CFI_NUM_REGS * sizeof(struct cfi_reg));
	state->stack_size = initial_func_cfi.cfa.offset;
	state->type = UNWIND_HINT_TYPE_CALL;
}

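/*
 * Read the UNWIND_HINT annotations from the .discard.unwind_hints section
 * and apply them to the instructions they point at.
 */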
static int read_unwind_hints(struct objtool_file *file)
{
	struct cfi_state cfi = init_cfi;
	struct section *sec;
	struct unwind_hint *hint;
	struct instruction *insn;
	struct reloc *reloc;
	unsigned long offset;
	int i;

	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
	if (!sec)
		return 0;

	if (!sec->rsec) {
		ERROR("missing .rela.discard.unwind_hints section");
		return -1;
	}

	if (sec->sh.sh_size % sizeof(struct unwind_hint)) {
		ERROR("struct unwind_hint size mismatch");
		return -1;
	}

	file->hints = true;

	for (i = 0; i < sec->sh.sh_size / sizeof(struct unwind_hint); i++) {
		hint = (struct unwind_hint *)sec->data->d_buf + i;

		reloc = find_reloc_by_dest(file->elf, sec, i * sizeof(*hint));
		if (!reloc) {
			ERROR("can't find reloc for unwind_hints[%d]", i);
			return -1;
		}

		if (reloc->sym->type == STT_SECTION) {
			offset = reloc_addend(reloc);
		} else if (reloc->sym->local_label) {
			offset = reloc->sym->offset;
		} else {
			ERROR("unexpected relocation symbol type in %s", sec->rsec->name);
			return -1;
		}

		insn = find_insn(file, reloc->sym->sec, offset);
		if (!insn) {
			ERROR("can't find insn for unwind_hints[%d]", i);
			return -1;
		}

		insn->hint = true;

		if (hint->type == UNWIND_HINT_TYPE_UNDEFINED) {
			insn->cfi = &force_undefined_cfi;
			continue;
		}

		if (hint->type == UNWIND_HINT_TYPE_SAVE) {
			insn->hint = false;
			insn->save = true;
			continue;
		}

		if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
			insn->restore = true;
			continue;
		}

		if (hint->type == UNWIND_HINT_TYPE_REGS_PARTIAL) {
			struct symbol *sym = find_symbol_by_offset(insn->sec, insn->offset);

			if (sym && sym->bind == STB_GLOBAL) {
				if (opts.ibt && insn->type != INSN_ENDBR && !insn->noendbr) {
					ERROR_INSN(insn, "UNWIND_HINT_IRET_REGS without ENDBR");
					return -1;
				}
			}
		}

		if (hint->type == UNWIND_HINT_TYPE_FUNC) {
			insn->cfi = &func_cfi;
			continue;
		}

		if (insn->cfi)
			cfi = *(insn->cfi);

		if (arch_decode_hint_reg(hint->sp_reg, &cfi.cfa.base)) {
			ERROR_INSN(insn, "unsupported unwind_hint sp base reg %d", hint->sp_reg);
			return -1;
		}

		cfi.cfa.offset = bswap_if_needed(file->elf, hint->sp_offset);
		cfi.type = hint->type;
		cfi.signal = hint->signal;

		insn->cfi = cfi_hash_find_or_add(&cfi);
	}

	return 0;
}

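/*
 * For each entry in the .discard.annotate_insn section, look up the
 * annotated instruction and hand it, along with the annotation type, to the
 * given callback.
 */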
static int read_annotate(struct objtool_file *file,
			 int (*func)(struct objtool_file *file, int type, struct instruction *insn))
{
	struct section *sec;
	struct instruction *insn;
	struct reloc *reloc;
	uint64_t offset;
	int type, ret;

	sec = find_section_by_name(file->elf, ".discard.annotate_insn");
	if (!sec)
		return 0;

	if (!sec->rsec)
		return 0;

	if (sec->sh.sh_entsize != 8) {
		static bool warned = false;
		if (!warned && opts.verbose) {
			WARN("%s: dodgy linker, sh_entsize != 8", sec->name);
			warned = true;
		}
		sec->sh.sh_entsize = 8;
	}

	for_each_reloc(sec->rsec, reloc) {
		type = *(u32 *)(sec->data->d_buf + (reloc_idx(reloc) * sec->sh.sh_entsize) + 4);
		type = bswap_if_needed(file->elf, type);

		offset = reloc->sym->offset + reloc_addend(reloc);
		insn = find_insn(file, reloc->sym->sec, offset);

		if (!insn) {
			ERROR("bad .discard.annotate_insn entry: %d of type %d", reloc_idx(reloc), type);
			return -1;
		}

		ret = func(file, type, insn);
		if (ret < 0)
			return ret;
	}

	return 0;
}

static int __annotate_early(struct objtool_file *file, int type, struct instruction *insn)
{
	switch (type) {

	/* Must be before add_special_section_alts() */
	case ANNOTYPE_IGNORE_ALTS:
		insn->ignore_alts = true;
		break;

	/*
	 * Must be before read_unwind_hints() since that needs insn->noendbr.
	 */
	case ANNOTYPE_NOENDBR:
		insn->noendbr = 1;
		break;

	default:
		break;
	}

	return 0;
}

static int __annotate_ifc(struct objtool_file *file, int type, struct instruction *insn)
{
	unsigned long dest_off;

	if (type != ANNOTYPE_INTRA_FUNCTION_CALL)
		return 0;

	if (insn->type != INSN_CALL) {
		ERROR_INSN(insn, "intra_function_call not a direct call");
		return -1;
	}

	/*
	 * Treat intra-function CALLs as JMPs, but with a stack_op.
	 * See add_call_destinations(), which strips stack_ops from
	 * normal CALLs.
	 */
	insn->type = INSN_JUMP_UNCONDITIONAL;

	dest_off = arch_jump_destination(insn);
	insn->jump_dest = find_insn(file, insn->sec, dest_off);
	if (!insn->jump_dest) {
		ERROR_INSN(insn, "can't find call dest at %s+0x%lx",
			   insn->sec->name, dest_off);
		return -1;
	}

	return 0;
}

static int __annotate_late(struct objtool_file *file, int type, struct instruction *insn)
{
	switch (type) {
	case ANNOTYPE_NOENDBR:
		/* early */
		break;

	case ANNOTYPE_RETPOLINE_SAFE:
		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC &&
		    insn->type != INSN_RETURN &&
		    insn->type != INSN_NOP) {
			ERROR_INSN(insn, "retpoline_safe hint not an indirect jump/call/ret/nop");
			return -1;
		}

		insn->retpoline_safe = true;
		break;

	case ANNOTYPE_INSTR_BEGIN:
		insn->instr++;
		break;

	case ANNOTYPE_INSTR_END:
		insn->instr--;
		break;

	case ANNOTYPE_UNRET_BEGIN:
		insn->unret = 1;
		break;

	case ANNOTYPE_IGNORE_ALTS:
		/* early */
		break;

	case ANNOTYPE_INTRA_FUNCTION_CALL:
		/* ifc */
		break;

	case ANNOTYPE_REACHABLE:
		insn->dead_end = false;
		break;

	default:
		ERROR_INSN(insn, "Unknown annotation type: %d", type);
		return -1;
	}

	return 0;
}

/*
 * Return true if name matches an instrumentation function, where calls to that
 * function from noinstr code can safely be removed, but compilers won't do so.
 */
static bool is_profiling_func(const char *name)
{
	/*
	 * Many compilers cannot disable KCOV with a function attribute.
	 */
	if (!strncmp(name, "__sanitizer_cov_", 16))
		return true;

	return false;
}

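/*
 * Derive objtool-relevant symbol properties (local label, static call
 * trampoline, retpoline/return thunk, ftrace entry, profiling helper) from
 * symbol names and bindings.
 */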
static int classify_symbols(struct objtool_file *file)
{
	struct symbol *func;

	for_each_sym(file, func) {
		if (func->type == STT_NOTYPE && strstarts(func->name, ".L"))
			func->local_label = true;

		if (func->bind != STB_GLOBAL)
			continue;

		if (!strncmp(func->name, STATIC_CALL_TRAMP_PREFIX_STR,
			     strlen(STATIC_CALL_TRAMP_PREFIX_STR)))
			func->static_call_tramp = true;

		if (arch_is_retpoline(func))
			func->retpoline_thunk = true;

		if (arch_is_rethunk(func))
			func->return_thunk = true;

		if (arch_is_embedded_insn(func))
			func->embedded_insn = true;

		if (arch_ftrace_match(func->name))
			func->fentry = true;

		if (is_profiling_func(func->name))
			func->profiling_func = true;
	}

	return 0;
}

static void mark_rodata(struct objtool_file *file)
{
	struct section *sec;
	bool found = false;

	/*
	 * Search for the following rodata sections, each of which can
	 * potentially contain jump tables:
	 *
	 * - .rodata: can contain GCC switch tables
	 * - .rodata.<func>: same, if -fdata-sections is being used
	 * - .data.rel.ro.c_jump_table: contains C annotated jump tables
	 *
	 * .rodata.str1.* sections are ignored; they don't contain jump tables.
	 */
	for_each_sec(file, sec) {
		if ((!strncmp(sec->name, ".rodata", 7) &&
		     !strstr(sec->name, ".str1.")) ||
		    !strncmp(sec->name, ".data.rel.ro", 12)) {
			sec->rodata = true;
			found = true;
		}
	}

	file->rodata = found;
}

static int decode_sections(struct objtool_file *file)
{
	int ret;

	mark_rodata(file);

	ret = init_pv_ops(file);
	if (ret)
		return ret;

	/*
	 * Must be before add_{jump,call}_destinations().
	 */
	ret = classify_symbols(file);
	if (ret)
		return ret;

	ret = decode_instructions(file);
	if (ret)
		return ret;

	ret = add_ignores(file);
	if (ret)
		return ret;

	add_uaccess_safe(file);

	ret = read_annotate(file, __annotate_early);
	if (ret)
		return ret;

	/*
	 * Must be before add_jump_destinations(), which depends on 'func'
	 * being set for alternatives, to enable proper sibling call detection.
	 */
	if (opts.stackval || opts.orc || opts.uaccess || opts.noinstr) {
		ret = add_special_section_alts(file);
		if (ret)
			return ret;
	}

	ret = add_jump_destinations(file);
	if (ret)
		return ret;

	/*
	 * Must be before add_call_destinations(); it changes INSN_CALL to
	 * INSN_JUMP.
	 */
	ret = read_annotate(file, __annotate_ifc);
	if (ret)
		return ret;

	ret = add_call_destinations(file);
	if (ret)
		return ret;

	ret = add_jump_table_alts(file);
	if (ret)
		return ret;

	ret = read_unwind_hints(file);
	if (ret)
		return ret;

	/*
	 * Must be after add_call_destinations() such that it can override
	 * dead_end_function() marks.
	 */
	ret = read_annotate(file, __annotate_late);
	if (ret)
		return ret;

	return 0;
}

static bool is_special_call(struct instruction *insn)
{
	if (insn->type == INSN_CALL) {
		struct symbol *dest = insn_call_dest(insn);

		if (!dest)
			return false;

		if (dest->fentry || dest->embedded_insn)
			return true;
	}

	return false;
}

static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
{
	struct cfi_state *cfi = &state->cfi;
	int i;

	if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
		return true;

	if (cfi->cfa.offset != initial_func_cfi.cfa.offset)
		return true;

	if (cfi->stack_size != initial_func_cfi.cfa.offset)
		return true;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
		    cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
			return true;
	}

	return false;
}

static bool check_reg_frame_pos(const struct cfi_reg *reg,
				int expected_offset)
{
	return reg->base == CFI_CFA &&
	       reg->offset == expected_offset;
}

static bool has_valid_stack_frame(struct insn_state *state)
{
	struct cfi_state *cfi = &state->cfi;

	if (cfi->cfa.base == CFI_BP &&
	    check_reg_frame_pos(&cfi->regs[CFI_BP], -cfi->cfa.offset) &&
	    check_reg_frame_pos(&cfi->regs[CFI_RA], -cfi->cfa.offset + 8))
		return true;

	if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
		return true;

	return false;
}

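/*
 * For UNWIND_HINT_TYPE_REGS{,_PARTIAL} regions the register layout comes
 * from the hint itself; only the CFA offset needs tracking across pushes,
 * pops and stack pointer adjustments.
 */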
static int update_cfi_state_regs(struct instruction *insn,
				 struct cfi_state *cfi,
				 struct stack_op *op)
{
	struct cfi_reg *cfa = &cfi->cfa;

	if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT)
		return 0;

	/* push */
	if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
		cfa->offset += 8;

	/* pop */
	if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
		cfa->offset -= 8;

	/* add immediate to sp */
	if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
	    op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
		cfa->offset -= op->src.offset;

	return 0;
}

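/*
 * save_reg() records where a callee-saved register got stored;
 * restore_reg() resets it to its function-entry state once it's reloaded.
 */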
static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset)
{
	if (arch_callee_saved_reg(reg) &&
	    cfi->regs[reg].base == CFI_UNDEFINED) {
		cfi->regs[reg].base = base;
		cfi->regs[reg].offset = offset;
	}
}

static void restore_reg(struct cfi_state *cfi, unsigned char reg)
{
	cfi->regs[reg].base = initial_func_cfi.regs[reg].base;
	cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset;
}

/*
 * A note about DRAP stack alignment:
 *
 * GCC has the concept of a DRAP register, which is used to help keep track of
 * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
 * register.  The typical DRAP pattern is:
 *
 *   4c 8d 54 24 08		lea    0x8(%rsp),%r10
 *   48 83 e4 c0		and    $0xffffffffffffffc0,%rsp
 *   41 ff 72 f8		pushq  -0x8(%r10)
 *   55				push   %rbp
 *   48 89 e5			mov    %rsp,%rbp
 *				(more pushes)
 *   41 52			push   %r10
 *				...
 *   41 5a			pop    %r10
 *				(more pops)
 *   5d				pop    %rbp
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * There are some variations in the epilogues, like:
 *
 *   5b				pop    %rbx
 *   41 5a			pop    %r10
 *   41 5c			pop    %r12
 *   41 5d			pop    %r13
 *   41 5e			pop    %r14
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * and:
 *
 *   4c 8b 55 e8		mov    -0x18(%rbp),%r10
 *   48 8b 5d e0		mov    -0x20(%rbp),%rbx
 *   4c 8b 65 f0		mov    -0x10(%rbp),%r12
 *   4c 8b 6d f8		mov    -0x8(%rbp),%r13
 *   c9				leaveq
 *   49 8d 62 f8		lea    -0x8(%r10),%rsp
 *   c3				retq
 *
 * Sometimes r13 is used as the DRAP register, in which case it's saved and
 * restored beforehand:
 *
 *   41 55			push   %r13
 *   4c 8d 6c 24 10		lea    0x10(%rsp),%r13
 *   48 83 e4 f0		and    $0xfffffffffffffff0,%rsp
 *				...
 *   49 8d 65 f0		lea    -0x10(%r13),%rsp
 *   41 5d			pop    %r13
 *   c3				retq
 */
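/* Apply a single stack_op to the CFI state, keeping the CFA consistent. */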
static int update_cfi_state(struct instruction *insn,
			    struct instruction *next_insn,
			    struct cfi_state *cfi, struct stack_op *op)
{
	struct cfi_reg *cfa = &cfi->cfa;
	struct cfi_reg *regs = cfi->regs;

	/* ignore UNWIND_HINT_UNDEFINED regions */
	if (cfi->force_undefined)
		return 0;

	/* stack operations don't make sense with an undefined CFA */
	if (cfa->base == CFI_UNDEFINED) {
		if (insn_func(insn)) {
			WARN_INSN(insn, "undefined stack state");
			return 1;
		}
		return 0;
	}

	if (cfi->type == UNWIND_HINT_TYPE_REGS ||
	    cfi->type == UNWIND_HINT_TYPE_REGS_PARTIAL)
		return update_cfi_state_regs(insn, cfi, op);

	switch (op->dest.type) {

	case OP_DEST_REG:
		switch (op->src.type) {

		case OP_SRC_REG:
			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
			    cfa->base == CFI_SP &&
			    check_reg_frame_pos(&regs[CFI_BP], -cfa->offset)) {

				/* mov %rsp, %rbp */
				cfa->base = op->dest.reg;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP &&
				 op->dest.reg == CFI_BP && cfi->drap) {

				/* drap: mov %rsp, %rbp */
				regs[CFI_BP].base = CFI_BP;
				regs[CFI_BP].offset = -cfi->stack_size;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/*
				 * mov %rsp, %reg
				 *
				 * This is needed for the rare case where GCC
				 * does:
				 *
				 *   mov    %rsp, %rax
				 *   ...
				 *   mov    %rax, %rsp
				 */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = -cfi->stack_size;
			}

			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
				 (cfa->base == CFI_BP || cfa->base == cfi->drap_reg)) {

				/*
				 * mov %rbp, %rsp
				 *
				 * Restore the original stack pointer (Clang).
				 */
				cfi->stack_size = -cfi->regs[CFI_BP].offset;
			}

			else if (op->dest.reg == cfa->base) {

				/* mov %reg, %rsp */
				if (cfa->base == CFI_SP &&
				    cfi->vals[op->src.reg].base == CFI_CFA) {

					/*
					 * This is needed for the rare case
					 * where GCC does something dumb like:
					 *
					 *   lea    0x8(%rsp), %rcx
					 *   ...
					 *   mov    %rcx, %rsp
					 */
					cfa->offset = -cfi->vals[op->src.reg].offset;
					cfi->stack_size = cfa->offset;

				} else if (cfa->base == CFI_SP &&
					   cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
					   cfi->vals[op->src.reg].offset == cfa->offset) {

					/*
					 * Stack swizzle:
					 *
					 * 1: mov %rsp, (%[tos])
					 * 2: mov %[tos], %rsp
					 *    ...
					 * 3: pop %rsp
					 *
					 * Where:
					 *
					 * 1 - places a pointer to the previous
					 *     stack at the Top-of-Stack of the
					 *     new stack.
					 *
					 * 2 - switches to the new stack.
					 *
					 * 3 - pops the Top-of-Stack to restore
					 *     the original stack.
					 *
					 * Note: we set base to SP_INDIRECT
					 * here and preserve offset. Therefore
					 * when the unwinder reaches ToS it
					 * will dereference SP and then add the
					 * offset to find the next frame, IOW:
					 * (%rsp) + offset.
					 */
					cfa->base = CFI_SP_INDIRECT;

				} else {
					cfa->base = CFI_UNDEFINED;
					cfa->offset = 0;
				}
			}

			else if (op->dest.reg == CFI_SP &&
				 cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
				 cfi->vals[op->src.reg].offset == cfa->offset) {

				/*
				 * The same stack swizzle case 2) as above. But
				 * because we can't change cfa->base, case 3)
				 * will become a regular POP. Pretend we're a
				 * PUSH so things don't go unbalanced.
				 */
				cfi->stack_size += 8;
			}

			break;

		case OP_SRC_ADD:
			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {

				/* add imm, %rsp */
				cfi->stack_size -= op->src.offset;
				if (cfa->base == CFI_SP)
					cfa->offset -= op->src.offset;
				break;
			}

			if (op->dest.reg == CFI_BP && op->src.reg == CFI_SP &&
			    insn->sym->frame_pointer) {
				/* addi.d fp,sp,imm on LoongArch */
				if (cfa->base == CFI_SP && cfa->offset == op->src.offset) {
					cfa->base = CFI_BP;
					cfa->offset = 0;
				}
				break;
			}

			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
				/* addi.d sp,fp,imm on LoongArch */
				if (cfa->base == CFI_BP && cfa->offset == 0) {
					if (insn->sym->frame_pointer) {
						cfa->base = CFI_SP;
						cfa->offset = -op->src.offset;
					}
				} else {
					/* lea disp(%rbp), %rsp */
					cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
				}
				break;
			}

			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/* drap: lea disp(%rsp), %drap */
				cfi->drap_reg = op->dest.reg;

				/*
				 * lea disp(%rsp), %reg
				 *
				 * This is needed for the rare case where GCC
				 * does something dumb like:
				 *
				 *   lea    0x8(%rsp), %rcx
				 *   ...
				 *   mov    %rcx, %rsp
				 */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = \
					-cfi->stack_size + op->src.offset;

				break;
			}

			if (cfi->drap && op->dest.reg == CFI_SP &&
			    op->src.reg == cfi->drap_reg) {

				/* drap: lea disp(%drap), %rsp */
				cfa->base = CFI_SP;
				cfa->offset = cfi->stack_size = -op->src.offset;
				cfi->drap_reg = CFI_UNDEFINED;
				cfi->drap = false;
				break;
			}

			if (op->dest.reg == cfi->cfa.base && !(next_insn && next_insn->hint)) {
				WARN_INSN(insn, "unsupported stack register modification");
				return -1;
			}

			break;

		case OP_SRC_AND:
			if (op->dest.reg != CFI_SP ||
			    (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
			    (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
				WARN_INSN(insn, "unsupported stack pointer realignment");
				return -1;
			}

			if (cfi->drap_reg != CFI_UNDEFINED) {
				/* drap: and imm, %rsp */
				cfa->base = cfi->drap_reg;
				cfa->offset = cfi->stack_size = 0;
				cfi->drap = true;
			}

			/*
			 * Older versions of GCC (4.8ish) realign the stack
			 * without DRAP, with a frame pointer.
			 */

			break;

		case OP_SRC_POP:
		case OP_SRC_POPF:
			if (op->dest.reg == CFI_SP && cfa->base == CFI_SP_INDIRECT) {

				/* pop %rsp; # restore from a stack swizzle */
				cfa->base = CFI_SP;
				break;
			}

			if (!cfi->drap && op->dest.reg == cfa->base) {

				/* pop %rbp */
				cfa->base = CFI_SP;
			}

			if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
			    op->dest.reg == cfi->drap_reg &&
			    cfi->drap_offset == -cfi->stack_size) {

				/* drap: pop %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;

			} else if (cfi->stack_size == -regs[op->dest.reg].offset) {

				/* pop %reg */
				restore_reg(cfi, op->dest.reg);
			}

			cfi->stack_size -= 8;
			if (cfa->base == CFI_SP)
				cfa->offset -= 8;

			break;

		case OP_SRC_REG_INDIRECT:
			if (!cfi->drap && op->dest.reg == cfa->base &&
			    op->dest.reg == CFI_BP) {

				/* mov disp(%rsp), %rbp */
				cfa->base = CFI_SP;
				cfa->offset = cfi->stack_size;
			}

			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == cfi->drap_offset) {

				/* drap: mov disp(%rbp), %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;
			}

			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == regs[op->dest.reg].offset) {

				/* drap: mov disp(%rbp), %reg */
				restore_reg(cfi, op->dest.reg);

			} else if (op->src.reg == cfa->base &&
				   op->src.offset == regs[op->dest.reg].offset + cfa->offset) {

				/* mov disp(%rbp), %reg */
				/* mov disp(%rsp), %reg */
				restore_reg(cfi, op->dest.reg);

			} else if (op->src.reg == CFI_SP &&
				   op->src.offset == regs[op->dest.reg].offset + cfi->stack_size) {

				/* mov disp(%rsp), %reg */
				restore_reg(cfi, op->dest.reg);
			}

			break;

		default:
			WARN_INSN(insn, "unknown stack-related instruction");
			return -1;
		}

		break;

	case OP_DEST_PUSH:
	case OP_DEST_PUSHF:
		cfi->stack_size += 8;
		if (cfa->base == CFI_SP)
			cfa->offset += 8;

		if (op->src.type != OP_SRC_REG)
			break;

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: push %drap */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = -cfi->stack_size;

				/* save drap so we know when to restore it */
				cfi->drap_offset = -cfi->stack_size;

			} else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {

				/* drap: push %rbp */
				cfi->stack_size = 0;

			} else {

				/* drap: push %reg */
				save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
			}

		} else {

			/* push %reg */
			save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
		}

		/* detect when asm code uses rbp as a scratch register */
		if (opts.stackval && insn_func(insn) && op->src.reg == CFI_BP &&
		    cfa->base != CFI_BP)
			cfi->bp_scratch = true;
		break;

	case OP_DEST_REG_INDIRECT:

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: mov %drap, disp(%rbp) */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = op->dest.offset;

				/* save drap offset so we know when to restore it */
				cfi->drap_offset = op->dest.offset;
			} else {

				/* drap: mov reg, disp(%rbp) */
				save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
			}

		} else if (op->dest.reg == cfa->base) {

			/* mov reg, disp(%rbp) */
			/* mov reg, disp(%rsp) */
			save_reg(cfi, op->src.reg, CFI_CFA,
				 op->dest.offset - cfi->cfa.offset);

		} else if (op->dest.reg == CFI_SP) {

			/* mov reg, disp(%rsp) */
			save_reg(cfi, op->src.reg, CFI_CFA,
				 op->dest.offset - cfi->stack_size);

		} else if (op->src.reg == CFI_SP && op->dest.offset == 0) {

			/* mov %rsp, (%reg); # setup a stack swizzle. */
			cfi->vals[op->dest.reg].base = CFI_SP_INDIRECT;
			cfi->vals[op->dest.reg].offset = cfa->offset;
		}

		break;

	case OP_DEST_MEM:
		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
			WARN_INSN(insn, "unknown stack-related memory operation");
			return -1;
		}

		/* pop mem */
		cfi->stack_size -= 8;
		if (cfa->base == CFI_SP)
			cfa->offset -= 8;

		break;

	default:
		WARN_INSN(insn, "unknown stack-related instruction");
		return -1;
	}

	return 0;
}

/*
 * The stack layouts of alternatives instructions can sometimes diverge when
 * they have stack modifications.  That's fine as long as the potential stack
 * layouts don't conflict at any given potential instruction boundary.
 *
 * Flatten the CFIs of the different alternative code streams (both original
 * and replacement) into a single shared CFI array which can be used to detect
 * conflicts and nicely feed a linear array of ORC entries to the unwinder.
 */
static int propagate_alt_cfi(struct objtool_file *file, struct instruction *insn)
{
	struct cfi_state **alt_cfi;
	int group_off;

	if (!insn->alt_group)
		return 0;

	if (!insn->cfi) {
		WARN("CFI missing");
		return -1;
	}

	alt_cfi = insn->alt_group->cfi;
	group_off = insn->offset - insn->alt_group->first_insn->offset;

	if (!alt_cfi[group_off]) {
		alt_cfi[group_off] = insn->cfi;
	} else {
		if (cficmp(alt_cfi[group_off], insn->cfi)) {
			struct alt_group *orig_group = insn->alt_group->orig_group ?: insn->alt_group;
			struct instruction *orig = orig_group->first_insn;
			WARN_INSN(orig, "stack layout conflict in alternatives: %s",
				  offstr(insn->sec, insn->offset));
			return -1;
		}
	}

	return 0;
}

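/*
 * Apply all of an instruction's stack_ops to the CFI state.  For PUSHF/POPF
 * inside alternatives, also maintain a small shift-register stack of the
 * uaccess state, since flags saved by PUSHF may be restored by POPF.
 */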
static int handle_insn_ops(struct instruction *insn,
			   struct instruction *next_insn,
			   struct insn_state *state)
{
	struct stack_op *op;
	int ret;

	for (op = insn->stack_ops; op; op = op->next) {

		ret = update_cfi_state(insn, next_insn, &state->cfi, op);
		if (ret)
			return ret;

		if (!opts.uaccess || !insn->alt_group)
			continue;

		if (op->dest.type == OP_DEST_PUSHF) {
			if (!state->uaccess_stack) {
				state->uaccess_stack = 1;
			} else if (state->uaccess_stack >> 31) {
				WARN_INSN(insn, "PUSHF stack exhausted");
				return 1;
			}
			state->uaccess_stack <<= 1;
			state->uaccess_stack |= state->uaccess;
		}

		if (op->src.type == OP_SRC_POPF) {
			if (state->uaccess_stack) {
				state->uaccess = state->uaccess_stack & 1;
				state->uaccess_stack >>= 1;
				if (state->uaccess_stack == 1)
					state->uaccess_stack = 0;
			}
		}
	}

	return 0;
}

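/*
 * Compare the CFI state previously recorded for an instruction with the
 * state reached along the current path; warn on the first mismatch found.
 */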
static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
{
	struct cfi_state *cfi1 = insn->cfi;
	int i;

	if (!cfi1) {
		WARN("CFI missing");
		return false;
	}

	if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {

		WARN_INSN(insn, "stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
			  cfi1->cfa.base, cfi1->cfa.offset,
			  cfi2->cfa.base, cfi2->cfa.offset);
		return false;

	}

	if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
		for (i = 0; i < CFI_NUM_REGS; i++) {

			if (!memcmp(&cfi1->regs[i], &cfi2->regs[i], sizeof(struct cfi_reg)))
				continue;

			WARN_INSN(insn, "stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
				  i, cfi1->regs[i].base, cfi1->regs[i].offset,
				  i, cfi2->regs[i].base, cfi2->regs[i].offset);
		}
		return false;
	}

	if (cfi1->type != cfi2->type) {

		WARN_INSN(insn, "stack state mismatch: type1=%d type2=%d",
			  cfi1->type, cfi2->type);
		return false;
	}

	if (cfi1->drap != cfi2->drap ||
	    (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
	    (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {

		WARN_INSN(insn, "stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
			  cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
			  cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);
		return false;
	}

	return true;
}

static inline bool func_uaccess_safe(struct symbol *func)
{
	if (func)
		return func->uaccess_safe;

	return false;
}

static inline const char *call_dest_name(struct instruction *insn)
{
	static char pvname[19];
	struct reloc *reloc;
	int idx;

	if (insn_call_dest(insn))
		return insn_call_dest(insn)->name;

	reloc = insn_reloc(NULL, insn);
	if (reloc && !strcmp(reloc->sym->name, "pv_ops")) {
		idx = (reloc_addend(reloc) / sizeof(void *));
		snprintf(pvname, sizeof(pvname), "pv_ops[%d]", idx);
		return pvname;
	}

	return "{dynamic}";
}

static bool pv_call_dest(struct objtool_file *file, struct instruction *insn)
{
	struct symbol *target;
	struct reloc *reloc;
	int idx;

	reloc = insn_reloc(file, insn);
	if (!reloc || strcmp(reloc->sym->name, "pv_ops"))
		return false;

	idx = (arch_dest_reloc_offset(reloc_addend(reloc)) / sizeof(void *));

	if (file->pv_ops[idx].clean)
		return true;

	file->pv_ops[idx].clean = true;

	list_for_each_entry(target, &file->pv_ops[idx].targets, pv_target) {
		if (!target->sec->noinstr) {
			WARN("pv_ops[%d]: %s", idx, target->name);
			file->pv_ops[idx].clean = false;
		}
	}

	return file->pv_ops[idx].clean;
}

static inline bool noinstr_call_dest(struct objtool_file *file,
				     struct instruction *insn,
				     struct symbol *func)
{
	/*
	 * We can't deal with indirect function calls at present;
	 * assume they're instrumented.
	 */
	if (!func) {
		if (file->pv_ops)
			return pv_call_dest(file, insn);

		return false;
	}

	/*
	 * If the symbol is from a noinstr section, we're good.
	 */
	if (func->sec->noinstr)
		return true;

	/*
	 * If the symbol is a static_call trampoline, we can't tell.
	 */
	if (func->static_call_tramp)
		return true;

	/*
	 * The __ubsan_handle_*() calls are like WARN(), they only happen when
	 * something 'BAD' happened.  At the risk of taking the machine down,
	 * let them proceed to get the message out.
	 */
	if (!strncmp(func->name, "__ubsan_handle_", 15))
		return true;

	return false;
}

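/*
 * A call is only valid in the current context: noinstr code must not call
 * out to instrumented functions, and calls with UACCESS or DF enabled are
 * flagged.
 */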
static int validate_call(struct objtool_file *file,
			 struct instruction *insn,
			 struct insn_state *state)
{
	if (state->noinstr && state->instr <= 0 &&
	    !noinstr_call_dest(file, insn, insn_call_dest(insn))) {
		WARN_INSN(insn, "call to %s() leaves .noinstr.text section", call_dest_name(insn));
		return 1;
	}

	if (state->uaccess && !func_uaccess_safe(insn_call_dest(insn))) {
		WARN_INSN(insn, "call to %s() with UACCESS enabled", call_dest_name(insn));
		return 1;
	}

	if (state->df) {
		WARN_INSN(insn, "call to %s() with DF set", call_dest_name(insn));
		return 1;
	}

	return 0;
}

static int validate_sibling_call(struct objtool_file *file,
				 struct instruction *insn,
				 struct insn_state *state)
{
	if (insn_func(insn) && has_modified_stack_frame(insn, state)) {
		WARN_INSN(insn, "sibling call from callable instruction with modified stack frame");
		return 1;
	}

	return validate_call(file, insn, state);
}

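/*
 * A return must leave no residue: instrumentation counts, UACCESS/DF state
 * and the stack frame all have to be back at their function-entry values.
 */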
static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
{
	if (state->noinstr && state->instr > 0) {
		WARN_INSN(insn, "return with instrumentation enabled");
		return 1;
	}

	if (state->uaccess && !func_uaccess_safe(func)) {
		WARN_INSN(insn, "return with UACCESS enabled");
		return 1;
	}

	if (!state->uaccess && func_uaccess_safe(func)) {
		WARN_INSN(insn, "return with UACCESS disabled from a UACCESS-safe function");
		return 1;
	}

	if (state->df) {
		WARN_INSN(insn, "return with DF set");
		return 1;
	}

	if (func && has_modified_stack_frame(insn, state)) {
		WARN_INSN(insn, "return with modified stack frame");
		return 1;
	}

	if (state->cfi.bp_scratch) {
		WARN_INSN(insn, "BP used as a scratch register");
		return 1;
	}

	return 0;
}

static struct instruction *next_insn_to_validate(struct objtool_file *file,
						 struct instruction *insn)
{
	struct alt_group *alt_group = insn->alt_group;

	/*
	 * Simulate the fact that alternatives are patched in-place.  When the
	 * end of a replacement alt_group is reached, redirect objtool flow to
	 * the end of the original alt_group.
	 *
	 * insn->alts->insn -> alt_group->first_insn
	 *                     ...
	 *                     alt_group->last_insn
	 *                     [alt_group->nop] -> next(orig_group->last_insn)
	 */
	if (alt_group) {
		if (alt_group->nop) {
			/* ->nop implies ->orig_group */
			if (insn == alt_group->last_insn)
				return alt_group->nop;
			if (insn == alt_group->nop)
				goto next_orig;
		}
		if (insn == alt_group->last_insn && alt_group->orig_group)
			goto next_orig;
	}

	return next_insn_same_sec(file, insn);

next_orig:
	return next_insn_same_sec(file, alt_group->orig_group->last_insn);
}

static bool skip_alt_group(struct instruction *insn)
{
	struct instruction *alt_insn = insn->alts ? insn->alts->insn : NULL;

	/* ANNOTATE_IGNORE_ALTERNATIVE */
	if (insn->alt_group && insn->alt_group->ignore)
		return true;

	/*
	 * For NOP patched with CLAC/STAC, only follow the latter to avoid
	 * impossible code paths combining patched CLAC with unpatched STAC
	 * or vice versa.
	 *
	 * ANNOTATE_IGNORE_ALTERNATIVE could have been used here, but Linus
	 * requested not to do that to avoid hurting .s file readability
	 * around CLAC/STAC alternative sites.
	 */

	if (!alt_insn)
		return false;

	/* Don't override ASM_{CLAC,STAC}_UNSAFE */
	if (alt_insn->alt_group && alt_insn->alt_group->ignore)
		return false;

	return alt_insn->type == INSN_CLAC || alt_insn->type == INSN_STAC;
}

/*
 * Follow the branch starting at the given instruction, and recursively follow
 * any other branches (jumps).  Meanwhile, track the frame pointer state at
 * each instruction and validate all the rules described in
 * tools/objtool/Documentation/objtool.txt.
 */
static int validate_branch(struct objtool_file *file, struct symbol *func,
			   struct instruction *insn, struct insn_state state)
{
	struct alternative *alt;
	struct instruction *next_insn, *prev_insn = NULL;
	struct section *sec;
	u8 visited;
	int ret;

	if (func && func->ignore)
		return 0;

	sec = insn->sec;

	while (1) {
		next_insn = next_insn_to_validate(file, insn);

		if (func && insn_func(insn) && func != insn_func(insn)->pfunc) {
			/* Ignore KCFI type preambles, which always fall through */
			if (!strncmp(func->name, "__cfi_", 6) ||
			    !strncmp(func->name, "__pfx_", 6) ||
			    !strncmp(func->name, "__pi___cfi_", 11) ||
			    !strncmp(func->name, "__pi___pfx_", 11))
				return 0;

			if (file->ignore_unreachables)
				return 0;

			WARN("%s() falls through to next function %s()",
			     func->name, insn_func(insn)->name);
			func->warned = 1;

			return 1;
		}

		visited = VISITED_BRANCH << state.uaccess;
		if (insn->visited & VISITED_BRANCH_MASK) {
			if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
				return 1;

			if (insn->visited & visited)
				return 0;
		} else {
			nr_insns_visited++;
		}

		if (state.noinstr)
			state.instr += insn->instr;

		if (insn->hint) {
			if (insn->restore) {
				struct instruction *save_insn, *i;

				i = insn;
				save_insn = NULL;

				sym_for_each_insn_continue_reverse(file, func, i) {
					if (i->save) {
						save_insn = i;
						break;
					}
				}

				if (!save_insn) {
					WARN_INSN(insn, "no corresponding CFI save for CFI restore");
					return 1;
				}

				if (!save_insn->visited) {
					/*
					 * If the restore hint insn is at the
					 * beginning of a basic block and was
					 * branched to from elsewhere, and the
					 * save insn hasn't been visited yet,
					 * defer following this branch for now.
					 * It will be seen later via the
					 * straight-line path.
					 */
					if (!prev_insn)
						return 0;

					WARN_INSN(insn, "objtool isn't smart enough to handle this CFI save/restore combo");
					return 1;
				}

				insn->cfi = save_insn->cfi;
				nr_cfi_reused++;
			}

			state.cfi = *insn->cfi;
		} else {
			/* XXX track if we actually changed state.cfi */

			if (prev_insn && !cficmp(prev_insn->cfi, &state.cfi)) {
				insn->cfi = prev_insn->cfi;
				nr_cfi_reused++;
			} else {
				insn->cfi = cfi_hash_find_or_add(&state.cfi);
			}
		}

		insn->visited |= visited;

		if (propagate_alt_cfi(file, insn))
			return 1;

		if (insn->alts) {
			for (alt = insn->alts; alt; alt = alt->next) {
				ret = validate_branch(file, func, alt->insn, state);
				if (ret) {
					BT_INSN(insn, "(alt)");
					return ret;
				}
			}
		}

		if (skip_alt_group(insn))
			return 0;

		if (handle_insn_ops(insn, next_insn, &state))
			return 1;

		switch (insn->type) {

		case INSN_RETURN:
			return validate_return(func, insn, &state);

		case INSN_CALL:
		case INSN_CALL_DYNAMIC:
			ret = validate_call(file, insn, &state);
			if (ret)
				return ret;

			if (opts.stackval && func && !is_special_call(insn) &&
			    !has_valid_stack_frame(&state)) {
				WARN_INSN(insn, "call without frame pointer save/setup");
				return 1;
			}

			break;

		case INSN_JUMP_CONDITIONAL:
		case INSN_JUMP_UNCONDITIONAL:
			if (is_sibling_call(insn)) {
				ret = validate_sibling_call(file, insn, &state);
				if (ret)
					return ret;

			} else if (insn->jump_dest) {
				ret = validate_branch(file, func,
						      insn->jump_dest, state);
				if (ret) {
					BT_INSN(insn, "(branch)");
					return ret;
				}
			}

			if (insn->type == INSN_JUMP_UNCONDITIONAL)
				return 0;

			break;

		case INSN_JUMP_DYNAMIC:
		case INSN_JUMP_DYNAMIC_CONDITIONAL:
			if (is_sibling_call(insn)) {
				ret = validate_sibling_call(file, insn, &state);
				if (ret)
					return ret;
			}

			if (insn->type == INSN_JUMP_DYNAMIC)
				return 0;

			break;

		case INSN_SYSCALL:
			if (func && (!next_insn || !next_insn->hint)) {
				WARN_INSN(insn, "unsupported instruction in callable function");
				return 1;
			}

			break;

		case INSN_SYSRET:
			if (func && (!next_insn || !next_insn->hint)) {
				WARN_INSN(insn, "unsupported instruction in callable function");
				return 1;
			}

			return 0;

		case INSN_STAC:
			if (!opts.uaccess)
				break;

			if (state.uaccess) {
				WARN_INSN(insn, "recursive UACCESS enable");
				return 1;
			}

			state.uaccess = true;
			break;

		case INSN_CLAC:
			if (!opts.uaccess)
				break;

			if (!state.uaccess && func) {
				WARN_INSN(insn, "redundant UACCESS disable");
				return 1;
			}

			if (func_uaccess_safe(func) && !state.uaccess_stack) {
				WARN_INSN(insn, "UACCESS-safe disables UACCESS");
				return 1;
			}

			state.uaccess = false;
			break;

		case INSN_STD:
			if (state.df) {
				WARN_INSN(insn, "recursive STD");
				return 1;
			}

			state.df = true;
			break;

		case INSN_CLD:
			if (!state.df && func) {
				WARN_INSN(insn, "redundant CLD");
				return 1;
			}

			state.df = false;
			break;

		default:
			break;
		}

		if (insn->dead_end)
			return 0;

		if (!next_insn) {
			if (state.cfi.cfa.base == CFI_UNDEFINED)
				return 0;
			if (file->ignore_unreachables)
				return 0;

			WARN("%s%sunexpected end of section %s",
			     func ? func->name : "", func ? "(): " : "",
			     sec->name);
			return 1;
		}

		prev_insn = insn;
		insn = next_insn;
	}

	return 0;
}

static int validate_unwind_hint(struct objtool_file *file,
				struct instruction *insn,
				struct insn_state *state)
{
	if (insn->hint && !insn->visited) {
		int ret = validate_branch(file, insn_func(insn), insn, *state);
		if (ret)
			BT_INSN(insn, "<=== (hint)");
		return ret;
	}

	return 0;
}

static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
{
	struct instruction *insn;
	struct insn_state state;
	int warnings = 0;

	if (!file->hints)
		return 0;

	init_insn_state(file, &state, sec);

	if (sec) {
		sec_for_each_insn(file, sec, insn)
			warnings += validate_unwind_hint(file, insn, &state);
	} else {
		for_each_insn(file, insn)
			warnings += validate_unwind_hint(file, insn, &state);
	}

	return warnings;
}

/*
 * Validate rethunk entry constraint: must untrain RET before the first RET.
 *
 * Follow every branch (intra-function) and ensure VALIDATE_UNRET_END comes
 * before an actual RET instruction.
 */
static int validate_unret(struct objtool_file *file, struct instruction *insn)
{
	struct instruction *next, *dest;
	int ret;

	for (;;) {
		next = next_insn_to_validate(file, insn);

		if (insn->visited & VISITED_UNRET)
			return 0;

		insn->visited |= VISITED_UNRET;

		if (insn->alts) {
			struct alternative *alt;
			for (alt = insn->alts; alt; alt = alt->next) {
				ret = validate_unret(file, alt->insn);
				if (ret) {
					BT_INSN(insn, "(alt)");
					return ret;
				}
			}
		}

		switch (insn->type) {

		case INSN_CALL_DYNAMIC:
		case INSN_JUMP_DYNAMIC:
		case INSN_JUMP_DYNAMIC_CONDITIONAL:
			WARN_INSN(insn, "early indirect call");
			return 1;

		case INSN_JUMP_UNCONDITIONAL:
		case INSN_JUMP_CONDITIONAL:
			if (!is_sibling_call(insn)) {
				if (!insn->jump_dest) {
					WARN_INSN(insn, "unresolved jump target after linking?!?");
					return 1;
				}
				ret = validate_unret(file, insn->jump_dest);
				if (ret) {
					BT_INSN(insn, "(branch%s)",
						insn->type == INSN_JUMP_CONDITIONAL ? "-cond" : "");
					return ret;
				}

				if (insn->type == INSN_JUMP_UNCONDITIONAL)
					return 0;

				break;
			}

			/* fallthrough */
		case INSN_CALL:
			dest = find_insn(file, insn_call_dest(insn)->sec,
					 insn_call_dest(insn)->offset);
			if (!dest) {
				WARN("Unresolved function after linking!?: %s",
				     insn_call_dest(insn)->name);
				return 1;
			}

			ret = validate_unret(file, dest);
			if (ret) {
				BT_INSN(insn, "(call)");
				return ret;
			}
			/*
			 * If a call returns without error, it must have seen UNTRAIN_RET.
			 * Therefore any non-error return is a success.
			 */
			return 0;

		case INSN_RETURN:
			WARN_INSN(insn, "RET before UNTRAIN");
			return 1;

		case INSN_SYSCALL:
			break;

		case INSN_SYSRET:
			return 0;

		case INSN_NOP:
			if (insn->retpoline_safe)
				return 0;
			break;

		default:
			break;
		}

		if (insn->dead_end)
			return 0;

		if (!next) {
			WARN_INSN(insn, "the end!");
			return 1;
		}
		insn = next;
	}

	return 0;
}

/*
 * Validate that all branches starting at VALIDATE_UNRET_BEGIN encounter
 * VALIDATE_UNRET_END before RET.
 */
static int validate_unrets(struct objtool_file *file)
{
	struct instruction *insn;
	int warnings = 0;

	for_each_insn(file, insn) {
		if (!insn->unret)
			continue;

		warnings += validate_unret(file, insn);
	}

	return warnings;
}

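/*
 * In a retpoline/rethunk build, flag any indirect jump/call or 'naked'
 * return which survives outside .init code without a retpoline_safe
 * annotation.
 */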
static int validate_retpoline(struct objtool_file *file)
{
	struct instruction *insn;
	int warnings = 0;

	for_each_insn(file, insn) {
		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC &&
		    insn->type != INSN_RETURN)
			continue;

		if (insn->retpoline_safe)
			continue;

		if (insn->sec->init)
			continue;

		if (insn->type == INSN_RETURN) {
			if (opts.rethunk) {
				WARN_INSN(insn, "'naked' return found in MITIGATION_RETHUNK build");
				warnings++;
			}
			continue;
		}

		WARN_INSN(insn, "indirect %s found in MITIGATION_RETPOLINE build",
			  insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
		warnings++;
	}

	return warnings;
}

static bool is_kasan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn_call_dest(insn)->name, "__asan_handle_no_return"));
}

static bool is_ubsan_insn(struct instruction *insn)
{
	return (insn->type == INSN_CALL &&
		!strcmp(insn_call_dest(insn)->name,
			"__ubsan_handle_builtin_unreachable"));
}

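/*
 * Decide whether an unreachable instruction is benign: padding NOPs and
 * traps, alternative replacement text, dead code from dropped weak symbols,
 * or compiler-generated KASAN/UBSAN unreachable handling.
 */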
static bool ignore_unreachable_insn(struct objtool_file *file, struct instruction *insn)
{
	struct symbol *func = insn_func(insn);
	struct instruction *prev_insn;
	int i;

	if (insn->type == INSN_NOP || insn->type == INSN_TRAP || (func && func->ignore))
		return true;

	/*
	 * Ignore alternative replacement instructions.  This can happen
	 * when a whitelisted function uses one of the ALTERNATIVE macros.
	 */
	if (!strcmp(insn->sec->name, ".altinstr_replacement") ||
	    !strcmp(insn->sec->name, ".altinstr_aux"))
		return true;

	/*
	 * Whole archive runs might encounter dead code from weak symbols.
	 * This is where the linker will have dropped the weak symbol in
	 * favour of a regular symbol, but leaves the code in place.
	 *
	 * In this case we'll find a piece of code (whole function) that is not
	 * covered by a !section symbol. Ignore them.
	 */
	if (opts.link && !func) {
		int size = find_symbol_hole_containing(insn->sec, insn->offset);
		unsigned long end = insn->offset + size;

		if (!size) /* not a hole */
			return false;

		if (size < 0) /* hole until the end */
			return true;

		sec_for_each_insn_continue(file, insn) {
			/*
			 * If we reach a visited instruction at or before the
			 * end of the hole, ignore the unreachable.
			 */
			if (insn->visited)
				return true;

			if (insn->offset >= end)
				break;

			/*
			 * If this hole jumps to a .cold function, mark it ignore too.
			 */
			if (insn->jump_dest && insn_func(insn->jump_dest) &&
			    strstr(insn_func(insn->jump_dest)->name, ".cold")) {
				insn_func(insn->jump_dest)->ignore = true;
			}
		}

		return false;
	}

	if (!func)
		return false;

	if (func->static_call_tramp)
		return true;

	/*
	 * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
	 * __builtin_unreachable().  The BUG() macro has an unreachable() after
	 * the UD2, which causes GCC's undefined trap logic to emit another UD2
	 * (or occasionally a JMP to UD2).
	 *
	 * It may also insert a UD2 after calling a __noreturn function.
	 */
	prev_insn = prev_insn_same_sec(file, insn);
	if (prev_insn && prev_insn->dead_end &&
	    (insn->type == INSN_BUG ||
	     (insn->type == INSN_JUMP_UNCONDITIONAL &&
	      insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
		return true;

	/*
	 * Check if this (or a subsequent) instruction is related to
	 * CONFIG_UBSAN or CONFIG_KASAN.
	 *
	 * End the search at 5 instructions to avoid going into the weeds.
	 */
	for (i = 0; i < 5; i++) {

		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
			return true;

		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
			if (insn->jump_dest &&
			    insn_func(insn->jump_dest) == func) {
				insn = insn->jump_dest;
				continue;
			}

			break;
		}

		if (insn->offset + insn->len >= func->offset + func->len)
			break;

		insn = next_insn_same_sec(file, insn);
	}

	return false;
}

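/*
 * With opts.prefix, functions are preceded by padding NOPs.  Create a
 * prefix symbol covering that padding and propagate the function's entry
 * CFI to it so the padding can be unwound like the function itself.
 */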
static int add_prefix_symbol(struct objtool_file *file, struct symbol *func)
{
	struct instruction *insn, *prev;
	struct cfi_state *cfi;

	insn = find_insn(file, func->sec, func->offset);
	if (!insn)
		return -1;

	for (prev = prev_insn_same_sec(file, insn);
	     prev;
	     prev = prev_insn_same_sec(file, prev)) {
		u64 offset;

		if (prev->type != INSN_NOP)
			return -1;

		offset = func->offset - prev->offset;

		if (offset > opts.prefix)
			return -1;

		if (offset < opts.prefix)
			continue;

		elf_create_prefix_symbol(file->elf, func, opts.prefix);
		break;
	}

	if (!prev)
		return -1;

	if (!insn->cfi) {
		/*
		 * This can happen if stack validation isn't enabled or the
		 * function is annotated with STACK_FRAME_NON_STANDARD.
		 */
		return 0;
	}

	/* Propagate insn->cfi to the prefix code */
	cfi = cfi_hash_find_or_add(insn->cfi);
	for (; prev != insn; prev = next_insn_same_sec(file, prev))
		prev->cfi = cfi;

	return 0;
}

static int add_prefix_symbols(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;

	for_each_sec(file, sec) {
		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
			continue;

		sec_for_each_sym(sec, func) {
			if (func->type != STT_FUNC)
				continue;

			add_prefix_symbol(file, func);
		}
	}

	return 0;
}

static int validate_symbol(struct objtool_file *file, struct section *sec,
			   struct symbol *sym, struct insn_state *state)
{
	struct instruction *insn;
	int ret;

	if (!sym->len) {
		WARN("%s() is missing an ELF size annotation", sym->name);
		return 1;
	}

	if (sym->pfunc != sym || sym->alias != sym)
		return 0;

	insn = find_insn(file, sec, sym->offset);
	if (!insn || insn->visited)
		return 0;

	if (opts.uaccess)
		state->uaccess = sym->uaccess_safe;

	ret = validate_branch(file, insn_func(insn), insn, *state);
	if (ret)
		BT_INSN(insn, "<=== (sym)");
	return ret;
}

static int validate_section(struct objtool_file *file, struct section *sec)
{
	struct insn_state state;
	struct symbol *func;
	int warnings = 0;

	sec_for_each_sym(sec, func) {
		if (func->type != STT_FUNC)
			continue;

		init_insn_state(file, &state, sec);
		set_func_state(&state.cfi);

		warnings += validate_symbol(file, sec, func, &state);
	}

	return warnings;
}

static int validate_noinstr_sections(struct objtool_file *file)
{
	struct section *sec;
	int warnings = 0;

	sec = find_section_by_name(file->elf, ".noinstr.text");
	if (sec) {
		warnings += validate_section(file, sec);
		warnings += validate_unwind_hints(file, sec);
	}

	sec = find_section_by_name(file->elf, ".entry.text");
	if (sec) {
		warnings += validate_section(file, sec);
		warnings += validate_unwind_hints(file, sec);
	}

	sec = find_section_by_name(file->elf, ".cpuidle.text");
	if (sec) {
		warnings += validate_section(file, sec);
		warnings += validate_unwind_hints(file, sec);
	}

	return warnings;
}

static int validate_functions(struct objtool_file *file)
{
	struct section *sec;
	int warnings = 0;

	for_each_sec(file, sec) {
		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
			continue;

		warnings += validate_section(file, sec);
	}

	return warnings;
}

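/*
 * An ENDBR still on the call_node list is a candidate for sealing; taking
 * it off the list marks it as a legitimately reachable ENDBR.
 */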
static void mark_endbr_used(struct instruction *insn)
4278
{
4279
if (!list_empty(&insn->call_node))
4280
list_del_init(&insn->call_node);
4281
}
4282
4283
static bool noendbr_range(struct objtool_file *file, struct instruction *insn)
4284
{
4285
struct symbol *sym = find_symbol_containing(insn->sec, insn->offset-1);
4286
struct instruction *first;
4287
4288
if (!sym)
4289
return false;
4290
4291
first = find_insn(file, sym->sec, sym->offset);
4292
if (!first)
4293
return false;
4294
4295
if (first->type != INSN_ENDBR && !first->noendbr)
4296
return false;
4297
4298
return insn->offset == sym->offset + sym->len;
4299
}
4300
4301
static int __validate_ibt_insn(struct objtool_file *file, struct instruction *insn,
			       struct instruction *dest)
{
	if (dest->type == INSN_ENDBR) {
		mark_endbr_used(dest);
		return 0;
	}

	if (insn_func(dest) && insn_func(insn) &&
	    insn_func(dest)->pfunc == insn_func(insn)->pfunc) {
		/*
		 * Anything from->to self is either _THIS_IP_ or
		 * IRET-to-self.
		 *
		 * There is no sane way to annotate _THIS_IP_ since the
		 * compiler treats the relocation as a constant and is
		 * happy to fold in offsets, skewing any annotation we
		 * do, leading to vast amounts of false-positives.
		 *
		 * There's also compiler generated _THIS_IP_ through
		 * KCOV and such which we have no hope of annotating.
		 *
		 * As such, blanket accept self-references without
		 * issue.
		 */
		return 0;
	}

	/*
	 * Accept anything ANNOTATE_NOENDBR.
	 */
	if (dest->noendbr)
		return 0;

	/*
	 * Accept if this is the instruction after a symbol
	 * that is (no)endbr -- typical code-range usage.
	 */
	if (noendbr_range(file, dest))
		return 0;

	WARN_INSN(insn, "relocation to !ENDBR: %s", offstr(dest->sec, dest->offset));
	return 1;
}

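/*
 * For reference, ANNOTATE_NOENDBR is the opt-out used by the accept paths
 * above; a sketch of typical asm usage (hypothetical symbol name):
 *
 *	SYM_CODE_START(example_entry)
 *		ANNOTATE_NOENDBR
 *		...
 *
 * The annotation causes objtool to set ->noendbr on the marked
 * instruction during decode, so references to it don't warrant an ENDBR.
 */
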
static int validate_ibt_insn(struct objtool_file *file, struct instruction *insn)
{
	struct instruction *dest;
	struct reloc *reloc;
	unsigned long off;
	int warnings = 0;

	/*
	 * Looking for function pointer load relocations.  Ignore
	 * direct/indirect branches:
	 */
	switch (insn->type) {

	case INSN_CALL:
	case INSN_CALL_DYNAMIC:
	case INSN_JUMP_CONDITIONAL:
	case INSN_JUMP_UNCONDITIONAL:
	case INSN_JUMP_DYNAMIC:
	case INSN_JUMP_DYNAMIC_CONDITIONAL:
	case INSN_RETURN:
	case INSN_NOP:
		return 0;

	case INSN_LEA_RIP:
		if (!insn_reloc(file, insn)) {
			/* local function pointer reference without reloc */

			off = arch_jump_destination(insn);

			dest = find_insn(file, insn->sec, off);
			if (!dest) {
				WARN_INSN(insn, "corrupt function pointer reference");
				return 1;
			}

			return __validate_ibt_insn(file, insn, dest);
		}
		break;

	default:
		break;
	}

	for (reloc = insn_reloc(file, insn);
	     reloc;
	     reloc = find_reloc_by_dest_range(file->elf, insn->sec,
					      reloc_offset(reloc) + 1,
					      (insn->offset + insn->len) - (reloc_offset(reloc) + 1))) {

		off = reloc->sym->offset;
		if (reloc_type(reloc) == R_X86_64_PC32 ||
		    reloc_type(reloc) == R_X86_64_PLT32)
			off += arch_dest_reloc_offset(reloc_addend(reloc));
		else
			off += reloc_addend(reloc);

		dest = find_insn(file, reloc->sym->sec, off);
		if (!dest)
			continue;

		warnings += __validate_ibt_insn(file, insn, dest);
	}

	return warnings;
}

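/*
 * Example of what validate_ibt_insn() is hunting for: code that takes the
 * address of a function, e.g. (sketch):
 *
 *	lea	bar(%rip), %rax		// R_X86_64_PC32 against bar
 *
 * If bar() doesn't start with ENDBR and isn't annotated noendbr, the
 * resulting pointer could never be a valid indirect branch target on an
 * IBT-enabled CPU, hence the "relocation to !ENDBR" warning.
 */
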
static int validate_ibt_data_reloc(struct objtool_file *file,
				   struct reloc *reloc)
{
	struct instruction *dest;

	dest = find_insn(file, reloc->sym->sec,
			 reloc->sym->offset + reloc_addend(reloc));
	if (!dest)
		return 0;

	if (dest->type == INSN_ENDBR) {
		mark_endbr_used(dest);
		return 0;
	}

	if (dest->noendbr)
		return 0;

	WARN_FUNC(reloc->sec->base, reloc_offset(reloc),
		  "data relocation to !ENDBR: %s", offstr(dest->sec, dest->offset));

	return 1;
}

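/*
 * Example of a data relocation checked by validate_ibt_data_reloc(): a
 * function pointer in an initialized structure (sketch, hypothetical
 * names):
 *
 *	static const struct file_operations foo_fops = {
 *		.read = foo_read,	// reloc in a data section -> foo_read
 *	};
 *
 * The target must start with ENDBR (which then stays unsealed) or be
 * explicitly marked noendbr.
 */
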
/*
 * Validate IBT rules and remove used ENDBR instructions from the seal list.
 * Unused ENDBR instructions will be annotated for sealing (i.e., replaced with
 * NOPs) later, in create_ibt_endbr_seal_sections().
 */
static int validate_ibt(struct objtool_file *file)
{
	struct section *sec;
	struct reloc *reloc;
	struct instruction *insn;
	int warnings = 0;

	for_each_insn(file, insn)
		warnings += validate_ibt_insn(file, insn);

	for_each_sec(file, sec) {

		/* Already done by validate_ibt_insn() */
		if (sec->sh.sh_flags & SHF_EXECINSTR)
			continue;

		if (!sec->rsec)
			continue;

		/*
		 * These sections can reference text addresses, but not with
		 * the intent to indirect branch to them.
		 */
		if ((!strncmp(sec->name, ".discard", 8) &&
		     strcmp(sec->name, ".discard.ibt_endbr_noseal")) ||
		    !strncmp(sec->name, ".debug", 6) ||
		    !strcmp(sec->name, ".altinstructions") ||
		    !strcmp(sec->name, ".ibt_endbr_seal") ||
		    !strcmp(sec->name, ".orc_unwind_ip") ||
		    !strcmp(sec->name, ".parainstructions") ||
		    !strcmp(sec->name, ".retpoline_sites") ||
		    !strcmp(sec->name, ".smp_locks") ||
		    !strcmp(sec->name, ".static_call_sites") ||
		    !strcmp(sec->name, "_error_injection_whitelist") ||
		    !strcmp(sec->name, "_kprobe_blacklist") ||
		    !strcmp(sec->name, "__bug_table") ||
		    !strcmp(sec->name, "__ex_table") ||
		    !strcmp(sec->name, "__jump_table") ||
		    !strcmp(sec->name, "__mcount_loc") ||
		    !strcmp(sec->name, ".kcfi_traps") ||
		    !strcmp(sec->name, ".llvm.call-graph-profile") ||
		    !strcmp(sec->name, ".llvm_bb_addr_map") ||
		    !strcmp(sec->name, "__tracepoints") ||
		    strstr(sec->name, "__patchable_function_entries"))
			continue;

		for_each_reloc(sec->rsec, reloc)
			warnings += validate_ibt_data_reloc(file, reloc);
	}

	return warnings;
}

static int validate_sls(struct objtool_file *file)
{
	struct instruction *insn, *next_insn;
	int warnings = 0;

	for_each_insn(file, insn) {
		next_insn = next_insn_same_sec(file, insn);

		if (insn->retpoline_safe)
			continue;

		switch (insn->type) {
		case INSN_RETURN:
			if (!next_insn || next_insn->type != INSN_TRAP) {
				WARN_INSN(insn, "missing int3 after ret");
				warnings++;
			}

			break;
		case INSN_JUMP_DYNAMIC:
			if (!next_insn || next_insn->type != INSN_TRAP) {
				WARN_INSN(insn, "missing int3 after indirect jump");
				warnings++;
			}
			break;
		default:
			break;
		}
	}

	return warnings;
}

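/*
 * Background for validate_sls(): with straight-line-speculation hardening
 * (e.g. the compiler's -mharden-sls options), every 'ret' and indirect
 * jump is followed by an int3 so that speculative execution past the
 * branch hits a trap:
 *
 *	ret
 *	int3			// INSN_TRAP
 *
 * The pass warns whenever that trailing trap instruction is missing.
 */
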
static int validate_reachable_instructions(struct objtool_file *file)
{
	struct instruction *insn, *prev_insn;
	struct symbol *call_dest;
	int warnings = 0;

	if (file->ignore_unreachables)
		return 0;

	for_each_insn(file, insn) {
		if (insn->visited || ignore_unreachable_insn(file, insn))
			continue;

		prev_insn = prev_insn_same_sec(file, insn);
		if (prev_insn && prev_insn->dead_end) {
			call_dest = insn_call_dest(prev_insn);
			if (call_dest) {
				WARN_INSN(insn, "%s() missing __noreturn in .c/.h or NORETURN() in noreturns.h",
					  call_dest->name);
				warnings++;
				continue;
			}
		}

		WARN_INSN(insn, "unreachable instruction");
		warnings++;
	}

	return warnings;
}

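/*
 * Illustration of the __noreturn case above: objtool knows the preceding
 * call is a dead end, but the compiler didn't, so it emitted code after
 * the call that can never run (sketch, hypothetical function):
 *
 *	call	panic_like_func		// never returns
 *	mov	...			// unreachable -> warning
 *
 * Adding __noreturn to the C declaration (or an entry in
 * tools/objtool/noreturns.h) resolves the mismatch.
 */
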
/* 'funcs' is a space-separated list of function names */
static void disas_funcs(const char *funcs)
{
	const char *objdump_str, *cross_compile;
	int size, ret;
	char *cmd;

	cross_compile = getenv("CROSS_COMPILE");
	if (!cross_compile)
		cross_compile = "";

	objdump_str = "%sobjdump -wdr %s | gawk -M -v _funcs='%s' '"
			"BEGIN { split(_funcs, funcs); }"
			"/^$/ { func_match = 0; }"
			"/<.*>:/ { "
				"f = gensub(/.*<(.*)>:/, \"\\\\1\", 1);"
				"for (i in funcs) {"
					"if (funcs[i] == f) {"
						"func_match = 1;"
						"base = strtonum(\"0x\" $1);"
						"break;"
					"}"
				"}"
			"}"
			"{"
				"if (func_match) {"
					"addr = strtonum(\"0x\" $1);"
					"printf(\"%%04x \", addr - base);"
					"print;"
				"}"
			"}' 1>&2";

	/* fake snprintf() to calculate the size */
	size = snprintf(NULL, 0, objdump_str, cross_compile, objname, funcs) + 1;
	if (size <= 0) {
		WARN("objdump string size calculation failed");
		return;
	}

	cmd = malloc(size);
	if (!cmd) {
		ERROR_GLIBC("malloc");
		return;
	}

	/* real snprintf() */
	snprintf(cmd, size, objdump_str, cross_compile, objname, funcs);
	ret = system(cmd);
	if (ret) {
		WARN("disassembly failed: %d", ret);
		return;
	}
}

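/*
 * The gawk post-processing above prefixes every objdump line of a matched
 * function with its offset from the function's start, producing output
 * roughly like (illustrative):
 *
 *	0000 ffffffff81234560 <foo>:
 *	0000 ffffffff81234560:	f3 0f 1e fa	endbr64
 *	0004 ffffffff81234564:	55		push	%rbp
 *
 * which makes objtool's offset-based warnings easy to line up with the
 * disassembly.
 */
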
static void disas_warned_funcs(struct objtool_file *file)
{
	struct symbol *sym;
	char *funcs = NULL, *tmp;

	for_each_sym(file, sym) {
		if (sym->warned) {
			if (!funcs) {
				funcs = malloc(strlen(sym->name) + 1);
				if (!funcs) {
					ERROR_GLIBC("malloc");
					return;
				}
				strcpy(funcs, sym->name);
			} else {
				tmp = malloc(strlen(funcs) + strlen(sym->name) + 2);
				if (!tmp) {
					ERROR_GLIBC("malloc");
					return;
				}
				sprintf(tmp, "%s %s", funcs, sym->name);
				free(funcs);
				funcs = tmp;
			}
		}
	}

	if (funcs)
		disas_funcs(funcs);
}

__weak bool arch_absolute_reloc(struct elf *elf, struct reloc *reloc)
{
	unsigned int type = reloc_type(reloc);
	size_t sz = elf_addr_size(elf);

	return (sz == 8) ? (type == R_ABS64) : (type == R_ABS32);
}

static int check_abs_references(struct objtool_file *file)
{
	struct section *sec;
	struct reloc *reloc;
	int ret = 0;

	for_each_sec(file, sec) {
		/* absolute references in non-loadable sections are fine */
		if (!(sec->sh.sh_flags & SHF_ALLOC))
			continue;

		/* section must have an associated .rela section */
		if (!sec->rsec)
			continue;

		/*
		 * Special case for compiler generated metadata that is not
		 * consumed until after boot.
		 */
		if (!strcmp(sec->name, "__patchable_function_entries"))
			continue;

		for_each_reloc(sec->rsec, reloc) {
			if (arch_absolute_reloc(file->elf, reloc)) {
				WARN("section %s has absolute relocation at offset 0x%lx",
				     sec->name, reloc_offset(reloc));
				ret++;
			}
		}
	}
	return ret;
}

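/*
 * Example of what check_abs_references() flags when opts.noabs is set: a
 * loadable section holding a full-width absolute address that would need
 * fixing up at load time (sketch, 64-bit, hypothetical names):
 *
 *	.data
 *	handler_ptr:	.quad	some_handler	// R_ABS64-class relocation
 *
 * PC-relative references (e.g. R_X86_64_PC32) are not flagged.
 */
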
struct insn_chunk {
	void *addr;
	struct insn_chunk *next;
};

/*
 * Reduce peak RSS usage by freeing insns memory before writing the ELF file,
 * which can trigger more allocations for .debug_* sections whose data hasn't
 * been read yet.
 */
static void free_insns(struct objtool_file *file)
{
	struct instruction *insn;
	struct insn_chunk *chunks = NULL, *chunk;

	for_each_insn(file, insn) {
		if (!insn->idx) {
			chunk = malloc(sizeof(*chunk));
			if (!chunk)
				continue;	/* leak the chunk rather than crash */
			chunk->addr = insn;
			chunk->next = chunks;
			chunks = chunk;
		}
	}

	for (chunk = chunks; chunk; chunk = chunk->next)
		free(chunk->addr);
}

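/*
 * Note on free_insns(): instructions are allocated in arrays ("chunks") of
 * up to INSN_CHUNK_MAX entries, so only entries with insn->idx == 0 are
 * chunk base addresses suitable for free(); freeing a base releases the
 * whole chunk.
 */

/*
 * check() is objtool's main entry point: decode all sections, run the
 * enabled validation passes (retpoline, stack/ORC/uaccess or noinstr,
 * unret, IBT, SLS, absolute references), then emit the requested
 * annotation sections.  Generation runs after validation because several
 * passes consume state built while validating, e.g. insn->visited and the
 * list of still-unused ENDBRs consumed by create_ibt_endbr_seal_sections().
 */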
int check(struct objtool_file *file)
{
	int ret = 0, warnings = 0;

	arch_initial_func_cfi_state(&initial_func_cfi);
	init_cfi_state(&init_cfi);
	init_cfi_state(&func_cfi);
	set_func_state(&func_cfi);
	init_cfi_state(&force_undefined_cfi);
	force_undefined_cfi.force_undefined = true;

	if (!cfi_hash_alloc(1UL << (file->elf->symbol_bits - 3))) {
		ret = -1;
		goto out;
	}

	cfi_hash_add(&init_cfi);
	cfi_hash_add(&func_cfi);

	ret = decode_sections(file);
	if (ret)
		goto out;

	if (!nr_insns)
		goto out;

	if (opts.retpoline)
		warnings += validate_retpoline(file);

	if (opts.stackval || opts.orc || opts.uaccess) {
		int w = 0;

		w += validate_functions(file);
		w += validate_unwind_hints(file, NULL);
		if (!w)
			w += validate_reachable_instructions(file);

		warnings += w;

	} else if (opts.noinstr) {
		warnings += validate_noinstr_sections(file);
	}

	if (opts.unret) {
		/*
		 * Must be after validate_branch() and friends, it plays
		 * further games with insn->visited.
		 */
		warnings += validate_unrets(file);
	}

	if (opts.ibt)
		warnings += validate_ibt(file);

	if (opts.sls)
		warnings += validate_sls(file);

	if (opts.static_call) {
		ret = create_static_call_sections(file);
		if (ret)
			goto out;
	}

	if (opts.retpoline) {
		ret = create_retpoline_sites_sections(file);
		if (ret)
			goto out;
	}

	if (opts.cfi) {
		ret = create_cfi_sections(file);
		if (ret)
			goto out;
	}

	if (opts.rethunk) {
		ret = create_return_sites_sections(file);
		if (ret)
			goto out;

		if (opts.hack_skylake) {
			ret = create_direct_call_sections(file);
			if (ret)
				goto out;
		}
	}

	if (opts.mcount) {
		ret = create_mcount_loc_sections(file);
		if (ret)
			goto out;
	}

	if (opts.prefix) {
		ret = add_prefix_symbols(file);
		if (ret)
			goto out;
	}

	if (opts.ibt) {
		ret = create_ibt_endbr_seal_sections(file);
		if (ret)
			goto out;
	}

	if (opts.noabs)
		warnings += check_abs_references(file);

	if (opts.orc && nr_insns) {
		ret = orc_create(file);
		if (ret)
			goto out;
	}

	free_insns(file);

	if (opts.stats) {
		printf("nr_insns_visited: %ld\n", nr_insns_visited);
		printf("nr_cfi: %ld\n", nr_cfi);
		printf("nr_cfi_reused: %ld\n", nr_cfi_reused);
		printf("nr_cfi_cache: %ld\n", nr_cfi_cache);
	}

out:
	if (!ret && !warnings)
		return 0;

	if (opts.werror && warnings)
		ret = 1;

	if (opts.verbose) {
		if (opts.werror && warnings)
			WARN("%d warning(s) upgraded to errors", warnings);
		print_args();
		disas_warned_funcs(file);
	}

	return ret;
}