Revert "objtool: Support addition to set CFA base"
[platform/kernel/linux-starfive.git] tools/objtool/check.c
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5
6 #include <string.h>
7 #include <stdlib.h>
8 #include <inttypes.h>
9 #include <sys/mman.h>
10
11 #include <arch/elf.h>
12 #include <objtool/builtin.h>
13 #include <objtool/cfi.h>
14 #include <objtool/arch.h>
15 #include <objtool/check.h>
16 #include <objtool/special.h>
17 #include <objtool/warn.h>
18 #include <objtool/endianness.h>
19
20 #include <linux/objtool.h>
21 #include <linux/hashtable.h>
22 #include <linux/kernel.h>
23 #include <linux/static_call_types.h>
24
25 struct alternative {
26         struct list_head list;
27         struct instruction *insn;
28         bool skip_orig;
29 };
30
31 static unsigned long nr_cfi, nr_cfi_reused, nr_cfi_cache;
32
33 static struct cfi_init_state initial_func_cfi;
34 static struct cfi_state init_cfi;
35 static struct cfi_state func_cfi;
36
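/*
 * Look up the already-decoded instruction at (sec, offset) in the global
 * instruction hash.
 */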
37 struct instruction *find_insn(struct objtool_file *file,
38                               struct section *sec, unsigned long offset)
39 {
40         struct instruction *insn;
41
42         hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) {
43                 if (insn->sec == sec && insn->offset == offset)
44                         return insn;
45         }
46
47         return NULL;
48 }
49
50 static struct instruction *next_insn_same_sec(struct objtool_file *file,
51                                               struct instruction *insn)
52 {
53         struct instruction *next = list_next_entry(insn, list);
54
55         if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
56                 return NULL;
57
58         return next;
59 }
60
61 static struct instruction *next_insn_same_func(struct objtool_file *file,
62                                                struct instruction *insn)
63 {
64         struct instruction *next = list_next_entry(insn, list);
65         struct symbol *func = insn->func;
66
67         if (!func)
68                 return NULL;
69
70         if (&next->list != &file->insn_list && next->func == func)
71                 return next;
72
73         /* Check if we're already in the subfunction: */
74         if (func == func->cfunc)
75                 return NULL;
76
77         /* Move to the subfunction: */
78         return find_insn(file, func->cfunc->sec, func->cfunc->offset);
79 }
80
81 static struct instruction *prev_insn_same_sym(struct objtool_file *file,
82                                                struct instruction *insn)
83 {
84         struct instruction *prev = list_prev_entry(insn, list);
85
86         if (&prev->list != &file->insn_list && prev->func == insn->func)
87                 return prev;
88
89         return NULL;
90 }
91
92 #define func_for_each_insn(file, func, insn)                            \
93         for (insn = find_insn(file, func->sec, func->offset);           \
94              insn;                                                      \
95              insn = next_insn_same_func(file, insn))
96
97 #define sym_for_each_insn(file, sym, insn)                              \
98         for (insn = find_insn(file, sym->sec, sym->offset);             \
99              insn && &insn->list != &file->insn_list &&                 \
100                 insn->sec == sym->sec &&                                \
101                 insn->offset < sym->offset + sym->len;                  \
102              insn = list_next_entry(insn, list))
103
104 #define sym_for_each_insn_continue_reverse(file, sym, insn)             \
105         for (insn = list_prev_entry(insn, list);                        \
106              &insn->list != &file->insn_list &&                         \
107                 insn->sec == sym->sec && insn->offset >= sym->offset;   \
108              insn = list_prev_entry(insn, list))
109
110 #define sec_for_each_insn_from(file, insn)                              \
111         for (; insn; insn = next_insn_same_sec(file, insn))
112
113 #define sec_for_each_insn_continue(file, insn)                          \
114         for (insn = next_insn_same_sec(file, insn); insn;               \
115              insn = next_insn_same_sec(file, insn))
116
117 static bool is_jump_table_jump(struct instruction *insn)
118 {
119         struct alt_group *alt_group = insn->alt_group;
120
121         if (insn->jump_table)
122                 return true;
123
124         /* Retpoline alternative for a jump table? */
125         return alt_group && alt_group->orig_group &&
126                alt_group->orig_group->first_insn->jump_table;
127 }
128
129 static bool is_sibling_call(struct instruction *insn)
130 {
131         /*
132          * Assume only ELF functions can make sibling calls.  This ensures
133          * sibling call detection consistency between vmlinux.o and individual
134          * objects.
135          */
136         if (!insn->func)
137                 return false;
138
139         /* An indirect jump is either a sibling call or a jump to a table. */
140         if (insn->type == INSN_JUMP_DYNAMIC)
141                 return !is_jump_table_jump(insn);
142
143         /* add_jump_destinations() sets insn->call_dest for sibling calls. */
144         return (is_static_jump(insn) && insn->call_dest);
145 }
146
147 /*
148  * This checks to see if the given function is a "noreturn" function.
149  *
150  * For global functions which are outside the scope of this object file, we
151  * have to keep a manual list of them.
152  *
153  * For local functions, we have to detect them manually by simply looking for
154  * the lack of a return instruction.
155  */
156 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
157                                 int recursion)
158 {
159         int i;
160         struct instruction *insn;
161         bool empty = true;
162
163         /*
164          * Unfortunately these have to be hard coded because the noreturn
165          * attribute isn't provided in ELF data. Keep 'em sorted.
166          */
167         static const char * const global_noreturns[] = {
168                 "__invalid_creds",
169                 "__module_put_and_kthread_exit",
170                 "__reiserfs_panic",
171                 "__stack_chk_fail",
172                 "__ubsan_handle_builtin_unreachable",
173                 "cpu_bringup_and_idle",
174                 "cpu_startup_entry",
175                 "do_exit",
176                 "do_group_exit",
177                 "do_task_dead",
178                 "ex_handler_msr_mce",
179                 "fortify_panic",
180                 "kthread_complete_and_exit",
181                 "kthread_exit",
182                 "kunit_try_catch_throw",
183                 "lbug_with_loc",
184                 "machine_real_restart",
185                 "make_task_dead",
186                 "panic",
187                 "rewind_stack_and_make_dead",
188                 "sev_es_terminate",
189                 "snp_abort",
190                 "stop_this_cpu",
191                 "usercopy_abort",
192                 "xen_start_kernel",
193         };
194
195         if (!func)
196                 return false;
197
198         if (func->bind == STB_WEAK)
199                 return false;
200
201         if (func->bind == STB_GLOBAL)
202                 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
203                         if (!strcmp(func->name, global_noreturns[i]))
204                                 return true;
205
206         if (!func->len)
207                 return false;
208
209         insn = find_insn(file, func->sec, func->offset);
210         if (!insn || !insn->func)
211                 return false;
212
213         func_for_each_insn(file, func, insn) {
214                 empty = false;
215
216                 if (insn->type == INSN_RETURN)
217                         return false;
218         }
219
220         if (empty)
221                 return false;
222
223         /*
224          * A function can have a sibling call instead of a return.  In that
225          * case, the function's dead-end status depends on whether the target
226          * of the sibling call returns.
227          */
228         func_for_each_insn(file, func, insn) {
229                 if (is_sibling_call(insn)) {
230                         struct instruction *dest = insn->jump_dest;
231
232                         if (!dest)
233                                 /* sibling call to another file */
234                                 return false;
235
236                         /* local sibling call */
237                         if (recursion == 5) {
238                                 /*
239                                  * Infinite recursion: two functions have
240                                  * sibling calls to each other.  This is a very
241                                  * rare case.  It means they aren't dead ends.
242                                  */
243                                 return false;
244                         }
245
246                         return __dead_end_function(file, dest->func, recursion+1);
247                 }
248         }
249
250         return true;
251 }
252
253 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
254 {
255         return __dead_end_function(file, func, 0);
256 }
257
258 static void init_cfi_state(struct cfi_state *cfi)
259 {
260         int i;
261
262         for (i = 0; i < CFI_NUM_REGS; i++) {
263                 cfi->regs[i].base = CFI_UNDEFINED;
264                 cfi->vals[i].base = CFI_UNDEFINED;
265         }
266         cfi->cfa.base = CFI_UNDEFINED;
267         cfi->drap_reg = CFI_UNDEFINED;
268         cfi->drap_offset = -1;
269 }
270
271 static void init_insn_state(struct objtool_file *file, struct insn_state *state,
272                             struct section *sec)
273 {
274         memset(state, 0, sizeof(*state));
275         init_cfi_state(&state->cfi);
276
277         /*
278  * We need the full vmlinux for noinstr validation; otherwise we cannot
279  * correctly determine insn->call_dest->sec (external symbols do not
280  * have a section).
281          */
282         if (opts.link && opts.noinstr && sec)
283                 state->noinstr = sec->noinstr;
284 }
285
286 static struct cfi_state *cfi_alloc(void)
287 {
288         struct cfi_state *cfi = calloc(sizeof(struct cfi_state), 1);
289         if (!cfi) {
290                 WARN("calloc failed");
291                 exit(1);
292         }
293         nr_cfi++;
294         return cfi;
295 }
296
297 static int cfi_bits;
298 static struct hlist_head *cfi_hash;
299
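/*
 * CFI states are interned: cfi_key()/cficmp() hash and compare the cfi_state
 * payload past the embedded 'hash' node, so identical states looked up via
 * cfi_hash_find_or_add() share a single allocation.
 */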
300 static inline bool cficmp(struct cfi_state *cfi1, struct cfi_state *cfi2)
301 {
302         return memcmp((void *)cfi1 + sizeof(cfi1->hash),
303                       (void *)cfi2 + sizeof(cfi2->hash),
304                       sizeof(struct cfi_state) - sizeof(struct hlist_node));
305 }
306
307 static inline u32 cfi_key(struct cfi_state *cfi)
308 {
309         return jhash((void *)cfi + sizeof(cfi->hash),
310                      sizeof(*cfi) - sizeof(cfi->hash), 0);
311 }
312
313 static struct cfi_state *cfi_hash_find_or_add(struct cfi_state *cfi)
314 {
315         struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)];
316         struct cfi_state *obj;
317
318         hlist_for_each_entry(obj, head, hash) {
319                 if (!cficmp(cfi, obj)) {
320                         nr_cfi_cache++;
321                         return obj;
322                 }
323         }
324
325         obj = cfi_alloc();
326         *obj = *cfi;
327         hlist_add_head(&obj->hash, head);
328
329         return obj;
330 }
331
332 static void cfi_hash_add(struct cfi_state *cfi)
333 {
334         struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)];
335
336         hlist_add_head(&cfi->hash, head);
337 }
338
339 static void *cfi_hash_alloc(unsigned long size)
340 {
341         cfi_bits = max(10, ilog2(size));
342         cfi_hash = mmap(NULL, sizeof(struct hlist_head) << cfi_bits,
343                         PROT_READ|PROT_WRITE,
344                         MAP_PRIVATE|MAP_ANON, -1, 0);
345         if (cfi_hash == (void *)-1L) {
346                 WARN("mmap fail cfi_hash");
347                 cfi_hash = NULL;
348         } else if (opts.stats) {
349                 printf("cfi_bits: %d\n", cfi_bits);
350         }
351
352         return cfi_hash;
353 }
354
355 static unsigned long nr_insns;
356 static unsigned long nr_insns_visited;
357
358 /*
359  * Call the arch-specific instruction decoder for all the instructions and add
360  * them to the global instruction list.
361  */
362 static int decode_instructions(struct objtool_file *file)
363 {
364         struct section *sec;
365         struct symbol *func;
366         unsigned long offset;
367         struct instruction *insn;
368         int ret;
369
370         for_each_sec(file, sec) {
371
372                 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
373                         continue;
374
375                 if (strcmp(sec->name, ".altinstr_replacement") &&
376                     strcmp(sec->name, ".altinstr_aux") &&
377                     strncmp(sec->name, ".discard.", 9))
378                         sec->text = true;
379
380                 if (!strcmp(sec->name, ".noinstr.text") ||
381                     !strcmp(sec->name, ".entry.text") ||
382                     !strncmp(sec->name, ".text.__x86.", 12))
383                         sec->noinstr = true;
384
385                 for (offset = 0; offset < sec->sh.sh_size; offset += insn->len) {
386                         insn = malloc(sizeof(*insn));
387                         if (!insn) {
388                                 WARN("malloc failed");
389                                 return -1;
390                         }
391                         memset(insn, 0, sizeof(*insn));
392                         INIT_LIST_HEAD(&insn->alts);
393                         INIT_LIST_HEAD(&insn->stack_ops);
394                         INIT_LIST_HEAD(&insn->call_node);
395
396                         insn->sec = sec;
397                         insn->offset = offset;
398
399                         ret = arch_decode_instruction(file, sec, offset,
400                                                       sec->sh.sh_size - offset,
401                                                       &insn->len, &insn->type,
402                                                       &insn->immediate,
403                                                       &insn->stack_ops);
404                         if (ret)
405                                 goto err;
406
407                         /*
408                          * By default, "ud2" is a dead end unless otherwise
409                          * annotated, because GCC 7 inserts it for certain
410                          * divide-by-zero cases.
411                          */
412                         if (insn->type == INSN_BUG)
413                                 insn->dead_end = true;
414
415                         hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset));
416                         list_add_tail(&insn->list, &file->insn_list);
417                         nr_insns++;
418                 }
419
420                 list_for_each_entry(func, &sec->symbol_list, list) {
421                         if (func->type != STT_FUNC || func->alias != func)
422                                 continue;
423
424                         if (!find_insn(file, sec, func->offset)) {
425                                 WARN("%s(): can't find starting instruction",
426                                      func->name);
427                                 return -1;
428                         }
429
430                         sym_for_each_insn(file, func, insn) {
431                                 insn->func = func;
432                                 if (insn->type == INSN_ENDBR && list_empty(&insn->call_node)) {
433                                         if (insn->offset == insn->func->offset) {
434                                                 list_add_tail(&insn->call_node, &file->endbr_list);
435                                                 file->nr_endbr++;
436                                         } else {
437                                                 file->nr_endbr_int++;
438                                         }
439                                 }
440                         }
441                 }
442         }
443
444         if (opts.stats)
445                 printf("nr_insns: %lu\n", nr_insns);
446
447         return 0;
448
449 err:
450         free(insn);
451         return ret;
452 }
453
454 /*
455  * Read the pv_ops[] .data table to find the static initialized values.
456  * Read the pv_ops[] .data table to find the statically initialized values.
457 static int add_pv_ops(struct objtool_file *file, const char *symname)
458 {
459         struct symbol *sym, *func;
460         unsigned long off, end;
461         struct reloc *rel;
462         int idx;
463
464         sym = find_symbol_by_name(file->elf, symname);
465         if (!sym)
466                 return 0;
467
468         off = sym->offset;
469         end = off + sym->len;
470         for (;;) {
471                 rel = find_reloc_by_dest_range(file->elf, sym->sec, off, end - off);
472                 if (!rel)
473                         break;
474
475                 func = rel->sym;
476                 if (func->type == STT_SECTION)
477                         func = find_symbol_by_offset(rel->sym->sec, rel->addend);
478
479                 idx = (rel->offset - sym->offset) / sizeof(unsigned long);
480
481                 objtool_pv_add(file, idx, func);
482
483                 off = rel->offset + 1;
484                 if (off > end)
485                         break;
486         }
487
488         return 0;
489 }
490
491 /*
492  * Allocate and initialize file->pv_ops[].
493  */
494 static int init_pv_ops(struct objtool_file *file)
495 {
496         static const char *pv_ops_tables[] = {
497                 "pv_ops",
498                 "xen_cpu_ops",
499                 "xen_irq_ops",
500                 "xen_mmu_ops",
501                 NULL,
502         };
503         const char *pv_ops;
504         struct symbol *sym;
505         int idx, nr;
506
507         if (!opts.noinstr)
508                 return 0;
509
510         file->pv_ops = NULL;
511
512         sym = find_symbol_by_name(file->elf, "pv_ops");
513         if (!sym)
514                 return 0;
515
516         nr = sym->len / sizeof(unsigned long);
517         file->pv_ops = calloc(sizeof(struct pv_state), nr);
518         if (!file->pv_ops)
519                 return -1;
520
521         for (idx = 0; idx < nr; idx++)
522                 INIT_LIST_HEAD(&file->pv_ops[idx].targets);
523
524         for (idx = 0; (pv_ops = pv_ops_tables[idx]); idx++)
525                 add_pv_ops(file, pv_ops);
526
527         return 0;
528 }
529
530 static struct instruction *find_last_insn(struct objtool_file *file,
531                                           struct section *sec)
532 {
533         struct instruction *insn = NULL;
534         unsigned int offset;
535         unsigned int end = (sec->sh.sh_size > 10) ? sec->sh.sh_size - 10 : 0;
536
537         for (offset = sec->sh.sh_size - 1; offset >= end && !insn; offset--)
538                 insn = find_insn(file, sec, offset);
539
540         return insn;
541 }
542
543 /*
544  * Mark "ud2" instructions and manually annotated dead ends.
545  */
546 static int add_dead_ends(struct objtool_file *file)
547 {
548         struct section *sec;
549         struct reloc *reloc;
550         struct instruction *insn;
551
552         /*
553          * Check for manually annotated dead ends.
554          */
555         sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
556         if (!sec)
557                 goto reachable;
558
559         list_for_each_entry(reloc, &sec->reloc_list, list) {
560                 if (reloc->sym->type != STT_SECTION) {
561                         WARN("unexpected relocation symbol type in %s", sec->name);
562                         return -1;
563                 }
564                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
565                 if (insn)
566                         insn = list_prev_entry(insn, list);
567                 else if (reloc->addend == reloc->sym->sec->sh.sh_size) {
568                         insn = find_last_insn(file, reloc->sym->sec);
569                         if (!insn) {
570                                 WARN("can't find unreachable insn at %s+0x%" PRIx64,
571                                      reloc->sym->sec->name, reloc->addend);
572                                 return -1;
573                         }
574                 } else {
575                         WARN("can't find unreachable insn at %s+0x%" PRIx64,
576                              reloc->sym->sec->name, reloc->addend);
577                         return -1;
578                 }
579
580                 insn->dead_end = true;
581         }
582
583 reachable:
584         /*
585          * These manually annotated reachable checks are needed for GCC 4.4,
586          * where the Linux unreachable() macro isn't supported.  In that case
587          * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
588          * not a dead end.
589          */
590         sec = find_section_by_name(file->elf, ".rela.discard.reachable");
591         if (!sec)
592                 return 0;
593
594         list_for_each_entry(reloc, &sec->reloc_list, list) {
595                 if (reloc->sym->type != STT_SECTION) {
596                         WARN("unexpected relocation symbol type in %s", sec->name);
597                         return -1;
598                 }
599                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
600                 if (insn)
601                         insn = list_prev_entry(insn, list);
602                 else if (reloc->addend == reloc->sym->sec->sh.sh_size) {
603                         insn = find_last_insn(file, reloc->sym->sec);
604                         if (!insn) {
605                                 WARN("can't find reachable insn at %s+0x%" PRIx64,
606                                      reloc->sym->sec->name, reloc->addend);
607                                 return -1;
608                         }
609                 } else {
610                         WARN("can't find reachable insn at %s+0x%" PRIx64,
611                              reloc->sym->sec->name, reloc->addend);
612                         return -1;
613                 }
614
615                 insn->dead_end = false;
616         }
617
618         return 0;
619 }
620
621 static int create_static_call_sections(struct objtool_file *file)
622 {
623         struct section *sec;
624         struct static_call_site *site;
625         struct instruction *insn;
626         struct symbol *key_sym;
627         char *key_name, *tmp;
628         int idx;
629
630         sec = find_section_by_name(file->elf, ".static_call_sites");
631         if (sec) {
632                 INIT_LIST_HEAD(&file->static_call_list);
633                 WARN("file already has .static_call_sites section, skipping");
634                 return 0;
635         }
636
637         if (list_empty(&file->static_call_list))
638                 return 0;
639
640         idx = 0;
641         list_for_each_entry(insn, &file->static_call_list, call_node)
642                 idx++;
643
644         sec = elf_create_section(file->elf, ".static_call_sites", SHF_WRITE,
645                                  sizeof(struct static_call_site), idx);
646         if (!sec)
647                 return -1;
648
649         idx = 0;
650         list_for_each_entry(insn, &file->static_call_list, call_node) {
651
652                 site = (struct static_call_site *)sec->data->d_buf + idx;
653                 memset(site, 0, sizeof(struct static_call_site));
654
655                 /* populate reloc for 'addr' */
656                 if (elf_add_reloc_to_insn(file->elf, sec,
657                                           idx * sizeof(struct static_call_site),
658                                           R_X86_64_PC32,
659                                           insn->sec, insn->offset))
660                         return -1;
661
662                 /* find key symbol */
663                 key_name = strdup(insn->call_dest->name);
664                 if (!key_name) {
665                         perror("strdup");
666                         return -1;
667                 }
668                 if (strncmp(key_name, STATIC_CALL_TRAMP_PREFIX_STR,
669                             STATIC_CALL_TRAMP_PREFIX_LEN)) {
670                         WARN("static_call: trampoline name malformed: %s", key_name);
671                         free(key_name);
672                         return -1;
673                 }
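                /*
                 * Derive the key symbol name from the trampoline name by
                 * overwriting the trampoline prefix with the key prefix in
                 * place (e.g. "__SCT__foo" becomes "__SCK__foo").
                 */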
674                 tmp = key_name + STATIC_CALL_TRAMP_PREFIX_LEN - STATIC_CALL_KEY_PREFIX_LEN;
675                 memcpy(tmp, STATIC_CALL_KEY_PREFIX_STR, STATIC_CALL_KEY_PREFIX_LEN);
676
677                 key_sym = find_symbol_by_name(file->elf, tmp);
678                 if (!key_sym) {
679                         if (!opts.module) {
680                                 WARN("static_call: can't find static_call_key symbol: %s", tmp);
681                                 free(key_name);
682                                 return -1;
683                         }
684
685                         /*
686                          * For modules, the key might not be exported, which
687                          * means the module can make static calls but isn't
688                          * allowed to change them.
689                          *
690                          * In that case we temporarily set the key to be the
691                          * trampoline address.  This is fixed up in
692                          * static_call_add_module().
693                          */
694                         key_sym = insn->call_dest;
695                 }
696                 free(key_name);
697
698                 /* populate reloc for 'key' */
699                 if (elf_add_reloc(file->elf, sec,
700                                   idx * sizeof(struct static_call_site) + 4,
701                                   R_X86_64_PC32, key_sym,
702                                   is_sibling_call(insn) * STATIC_CALL_SITE_TAIL))
703                         return -1;
704
705                 idx++;
706         }
707
708         return 0;
709 }
710
711 static int create_retpoline_sites_sections(struct objtool_file *file)
712 {
713         struct instruction *insn;
714         struct section *sec;
715         int idx;
716
717         sec = find_section_by_name(file->elf, ".retpoline_sites");
718         if (sec) {
719                 WARN("file already has .retpoline_sites, skipping");
720                 return 0;
721         }
722
723         idx = 0;
724         list_for_each_entry(insn, &file->retpoline_call_list, call_node)
725                 idx++;
726
727         if (!idx)
728                 return 0;
729
730         sec = elf_create_section(file->elf, ".retpoline_sites", 0,
731                                  sizeof(int), idx);
732         if (!sec) {
733                 WARN("elf_create_section: .retpoline_sites");
734                 return -1;
735         }
736
737         idx = 0;
738         list_for_each_entry(insn, &file->retpoline_call_list, call_node) {
739
740                 int *site = (int *)sec->data->d_buf + idx;
741                 *site = 0;
742
743                 if (elf_add_reloc_to_insn(file->elf, sec,
744                                           idx * sizeof(int),
745                                           R_X86_64_PC32,
746                                           insn->sec, insn->offset)) {
747                         WARN("elf_add_reloc_to_insn: .retpoline_sites");
748                         return -1;
749                 }
750
751                 idx++;
752         }
753
754         return 0;
755 }
756
757 static int create_return_sites_sections(struct objtool_file *file)
758 {
759         struct instruction *insn;
760         struct section *sec;
761         int idx;
762
763         sec = find_section_by_name(file->elf, ".return_sites");
764         if (sec) {
765                 WARN("file already has .return_sites, skipping");
766                 return 0;
767         }
768
769         idx = 0;
770         list_for_each_entry(insn, &file->return_thunk_list, call_node)
771                 idx++;
772
773         if (!idx)
774                 return 0;
775
776         sec = elf_create_section(file->elf, ".return_sites", 0,
777                                  sizeof(int), idx);
778         if (!sec) {
779                 WARN("elf_create_section: .return_sites");
780                 return -1;
781         }
782
783         idx = 0;
784         list_for_each_entry(insn, &file->return_thunk_list, call_node) {
785
786                 int *site = (int *)sec->data->d_buf + idx;
787                 *site = 0;
788
789                 if (elf_add_reloc_to_insn(file->elf, sec,
790                                           idx * sizeof(int),
791                                           R_X86_64_PC32,
792                                           insn->sec, insn->offset)) {
793                         WARN("elf_add_reloc_to_insn: .return_sites");
794                         return -1;
795                 }
796
797                 idx++;
798         }
799
800         return 0;
801 }
802
803 static int create_ibt_endbr_seal_sections(struct objtool_file *file)
804 {
805         struct instruction *insn;
806         struct section *sec;
807         int idx;
808
809         sec = find_section_by_name(file->elf, ".ibt_endbr_seal");
810         if (sec) {
811                 WARN("file already has .ibt_endbr_seal, skipping");
812                 return 0;
813         }
814
815         idx = 0;
816         list_for_each_entry(insn, &file->endbr_list, call_node)
817                 idx++;
818
819         if (opts.stats) {
820                 printf("ibt: ENDBR at function start: %d\n", file->nr_endbr);
821                 printf("ibt: ENDBR inside functions:  %d\n", file->nr_endbr_int);
822                 printf("ibt: superfluous ENDBR:       %d\n", idx);
823         }
824
825         if (!idx)
826                 return 0;
827
828         sec = elf_create_section(file->elf, ".ibt_endbr_seal", 0,
829                                  sizeof(int), idx);
830         if (!sec) {
831                 WARN("elf_create_section: .ibt_endbr_seal");
832                 return -1;
833         }
834
835         idx = 0;
836         list_for_each_entry(insn, &file->endbr_list, call_node) {
837
838                 int *site = (int *)sec->data->d_buf + idx;
839                 *site = 0;
840
841                 if (elf_add_reloc_to_insn(file->elf, sec,
842                                           idx * sizeof(int),
843                                           R_X86_64_PC32,
844                                           insn->sec, insn->offset)) {
845                         WARN("elf_add_reloc_to_insn: .ibt_endbr_seal");
846                         return -1;
847                 }
848
849                 idx++;
850         }
851
852         return 0;
853 }
854
855 static int create_mcount_loc_sections(struct objtool_file *file)
856 {
857         struct section *sec;
858         unsigned long *loc;
859         struct instruction *insn;
860         int idx;
861
862         sec = find_section_by_name(file->elf, "__mcount_loc");
863         if (sec) {
864                 INIT_LIST_HEAD(&file->mcount_loc_list);
865                 WARN("file already has __mcount_loc section, skipping");
866                 return 0;
867         }
868
869         if (list_empty(&file->mcount_loc_list))
870                 return 0;
871
872         idx = 0;
873         list_for_each_entry(insn, &file->mcount_loc_list, call_node)
874                 idx++;
875
876         sec = elf_create_section(file->elf, "__mcount_loc", 0, sizeof(unsigned long), idx);
877         if (!sec)
878                 return -1;
879
880         idx = 0;
881         list_for_each_entry(insn, &file->mcount_loc_list, call_node) {
882
883                 loc = (unsigned long *)sec->data->d_buf + idx;
884                 memset(loc, 0, sizeof(unsigned long));
885
886                 if (elf_add_reloc_to_insn(file->elf, sec,
887                                           idx * sizeof(unsigned long),
888                                           R_X86_64_64,
889                                           insn->sec, insn->offset))
890                         return -1;
891
892                 idx++;
893         }
894
895         return 0;
896 }
897
898 /*
899  * Warnings shouldn't be reported for ignored functions.
900  */
901 static void add_ignores(struct objtool_file *file)
902 {
903         struct instruction *insn;
904         struct section *sec;
905         struct symbol *func;
906         struct reloc *reloc;
907
908         sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
909         if (!sec)
910                 return;
911
912         list_for_each_entry(reloc, &sec->reloc_list, list) {
913                 switch (reloc->sym->type) {
914                 case STT_FUNC:
915                         func = reloc->sym;
916                         break;
917
918                 case STT_SECTION:
919                         func = find_func_by_offset(reloc->sym->sec, reloc->addend);
920                         if (!func)
921                                 continue;
922                         break;
923
924                 default:
925                         WARN("unexpected relocation symbol type in %s: %d", sec->name, reloc->sym->type);
926                         continue;
927                 }
928
929                 func_for_each_insn(file, func, insn)
930                         insn->ignore = true;
931         }
932 }
933
934 /*
935  * This is a whitelist of functions that are allowed to be called with AC set.
936  * The list is meant to be minimal and only contains compiler instrumentation
937  * ABI and a few functions used to implement *_{to,from}_user() functions.
938  *
939  * These functions must not directly change AC, but may PUSHF/POPF.
940  */
941 static const char *uaccess_safe_builtin[] = {
942         /* KASAN */
943         "kasan_report",
944         "kasan_check_range",
945         /* KASAN out-of-line */
946         "__asan_loadN_noabort",
947         "__asan_load1_noabort",
948         "__asan_load2_noabort",
949         "__asan_load4_noabort",
950         "__asan_load8_noabort",
951         "__asan_load16_noabort",
952         "__asan_storeN_noabort",
953         "__asan_store1_noabort",
954         "__asan_store2_noabort",
955         "__asan_store4_noabort",
956         "__asan_store8_noabort",
957         "__asan_store16_noabort",
958         "__kasan_check_read",
959         "__kasan_check_write",
960         /* KASAN in-line */
961         "__asan_report_load_n_noabort",
962         "__asan_report_load1_noabort",
963         "__asan_report_load2_noabort",
964         "__asan_report_load4_noabort",
965         "__asan_report_load8_noabort",
966         "__asan_report_load16_noabort",
967         "__asan_report_store_n_noabort",
968         "__asan_report_store1_noabort",
969         "__asan_report_store2_noabort",
970         "__asan_report_store4_noabort",
971         "__asan_report_store8_noabort",
972         "__asan_report_store16_noabort",
973         /* KCSAN */
974         "__kcsan_check_access",
975         "__kcsan_mb",
976         "__kcsan_wmb",
977         "__kcsan_rmb",
978         "__kcsan_release",
979         "kcsan_found_watchpoint",
980         "kcsan_setup_watchpoint",
981         "kcsan_check_scoped_accesses",
982         "kcsan_disable_current",
983         "kcsan_enable_current_nowarn",
984         /* KCSAN/TSAN */
985         "__tsan_func_entry",
986         "__tsan_func_exit",
987         "__tsan_read_range",
988         "__tsan_write_range",
989         "__tsan_read1",
990         "__tsan_read2",
991         "__tsan_read4",
992         "__tsan_read8",
993         "__tsan_read16",
994         "__tsan_write1",
995         "__tsan_write2",
996         "__tsan_write4",
997         "__tsan_write8",
998         "__tsan_write16",
999         "__tsan_read_write1",
1000         "__tsan_read_write2",
1001         "__tsan_read_write4",
1002         "__tsan_read_write8",
1003         "__tsan_read_write16",
1004         "__tsan_volatile_read1",
1005         "__tsan_volatile_read2",
1006         "__tsan_volatile_read4",
1007         "__tsan_volatile_read8",
1008         "__tsan_volatile_read16",
1009         "__tsan_volatile_write1",
1010         "__tsan_volatile_write2",
1011         "__tsan_volatile_write4",
1012         "__tsan_volatile_write8",
1013         "__tsan_volatile_write16",
1014         "__tsan_atomic8_load",
1015         "__tsan_atomic16_load",
1016         "__tsan_atomic32_load",
1017         "__tsan_atomic64_load",
1018         "__tsan_atomic8_store",
1019         "__tsan_atomic16_store",
1020         "__tsan_atomic32_store",
1021         "__tsan_atomic64_store",
1022         "__tsan_atomic8_exchange",
1023         "__tsan_atomic16_exchange",
1024         "__tsan_atomic32_exchange",
1025         "__tsan_atomic64_exchange",
1026         "__tsan_atomic8_fetch_add",
1027         "__tsan_atomic16_fetch_add",
1028         "__tsan_atomic32_fetch_add",
1029         "__tsan_atomic64_fetch_add",
1030         "__tsan_atomic8_fetch_sub",
1031         "__tsan_atomic16_fetch_sub",
1032         "__tsan_atomic32_fetch_sub",
1033         "__tsan_atomic64_fetch_sub",
1034         "__tsan_atomic8_fetch_and",
1035         "__tsan_atomic16_fetch_and",
1036         "__tsan_atomic32_fetch_and",
1037         "__tsan_atomic64_fetch_and",
1038         "__tsan_atomic8_fetch_or",
1039         "__tsan_atomic16_fetch_or",
1040         "__tsan_atomic32_fetch_or",
1041         "__tsan_atomic64_fetch_or",
1042         "__tsan_atomic8_fetch_xor",
1043         "__tsan_atomic16_fetch_xor",
1044         "__tsan_atomic32_fetch_xor",
1045         "__tsan_atomic64_fetch_xor",
1046         "__tsan_atomic8_fetch_nand",
1047         "__tsan_atomic16_fetch_nand",
1048         "__tsan_atomic32_fetch_nand",
1049         "__tsan_atomic64_fetch_nand",
1050         "__tsan_atomic8_compare_exchange_strong",
1051         "__tsan_atomic16_compare_exchange_strong",
1052         "__tsan_atomic32_compare_exchange_strong",
1053         "__tsan_atomic64_compare_exchange_strong",
1054         "__tsan_atomic8_compare_exchange_weak",
1055         "__tsan_atomic16_compare_exchange_weak",
1056         "__tsan_atomic32_compare_exchange_weak",
1057         "__tsan_atomic64_compare_exchange_weak",
1058         "__tsan_atomic8_compare_exchange_val",
1059         "__tsan_atomic16_compare_exchange_val",
1060         "__tsan_atomic32_compare_exchange_val",
1061         "__tsan_atomic64_compare_exchange_val",
1062         "__tsan_atomic_thread_fence",
1063         "__tsan_atomic_signal_fence",
1064         "__tsan_unaligned_read16",
1065         "__tsan_unaligned_write16",
1066         /* KCOV */
1067         "write_comp_data",
1068         "check_kcov_mode",
1069         "__sanitizer_cov_trace_pc",
1070         "__sanitizer_cov_trace_const_cmp1",
1071         "__sanitizer_cov_trace_const_cmp2",
1072         "__sanitizer_cov_trace_const_cmp4",
1073         "__sanitizer_cov_trace_const_cmp8",
1074         "__sanitizer_cov_trace_cmp1",
1075         "__sanitizer_cov_trace_cmp2",
1076         "__sanitizer_cov_trace_cmp4",
1077         "__sanitizer_cov_trace_cmp8",
1078         "__sanitizer_cov_trace_switch",
1079         /* KMSAN */
1080         "kmsan_copy_to_user",
1081         "kmsan_report",
1082         "kmsan_unpoison_entry_regs",
1083         "kmsan_unpoison_memory",
1084         "__msan_chain_origin",
1085         "__msan_get_context_state",
1086         "__msan_instrument_asm_store",
1087         "__msan_metadata_ptr_for_load_1",
1088         "__msan_metadata_ptr_for_load_2",
1089         "__msan_metadata_ptr_for_load_4",
1090         "__msan_metadata_ptr_for_load_8",
1091         "__msan_metadata_ptr_for_load_n",
1092         "__msan_metadata_ptr_for_store_1",
1093         "__msan_metadata_ptr_for_store_2",
1094         "__msan_metadata_ptr_for_store_4",
1095         "__msan_metadata_ptr_for_store_8",
1096         "__msan_metadata_ptr_for_store_n",
1097         "__msan_poison_alloca",
1098         "__msan_warning",
1099         /* UBSAN */
1100         "ubsan_type_mismatch_common",
1101         "__ubsan_handle_type_mismatch",
1102         "__ubsan_handle_type_mismatch_v1",
1103         "__ubsan_handle_shift_out_of_bounds",
1104         /* misc */
1105         "csum_partial_copy_generic",
1106         "copy_mc_fragile",
1107         "copy_mc_fragile_handle_tail",
1108         "copy_mc_enhanced_fast_string",
1109         "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
1110         "clear_user_erms",
1111         "clear_user_rep_good",
1112         "clear_user_original",
1113         NULL
1114 };
1115
1116 static void add_uaccess_safe(struct objtool_file *file)
1117 {
1118         struct symbol *func;
1119         const char **name;
1120
1121         if (!opts.uaccess)
1122                 return;
1123
1124         for (name = uaccess_safe_builtin; *name; name++) {
1125                 func = find_symbol_by_name(file->elf, *name);
1126                 if (!func)
1127                         continue;
1128
1129                 func->uaccess_safe = true;
1130         }
1131 }
1132
1133 /*
1134  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
1135  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
1136  * But it at least allows objtool to understand the control flow *around* the
1137  * retpoline.
1138  */
1139 static int add_ignore_alternatives(struct objtool_file *file)
1140 {
1141         struct section *sec;
1142         struct reloc *reloc;
1143         struct instruction *insn;
1144
1145         sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
1146         if (!sec)
1147                 return 0;
1148
1149         list_for_each_entry(reloc, &sec->reloc_list, list) {
1150                 if (reloc->sym->type != STT_SECTION) {
1151                         WARN("unexpected relocation symbol type in %s", sec->name);
1152                         return -1;
1153                 }
1154
1155                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1156                 if (!insn) {
1157                         WARN("bad .discard.ignore_alts entry");
1158                         return -1;
1159                 }
1160
1161                 insn->ignore_alts = true;
1162         }
1163
1164         return 0;
1165 }
1166
1167 __weak bool arch_is_retpoline(struct symbol *sym)
1168 {
1169         return false;
1170 }
1171
1172 __weak bool arch_is_rethunk(struct symbol *sym)
1173 {
1174         return false;
1175 }
1176
1177 #define NEGATIVE_RELOC  ((void *)-1L)
1178
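/*
 * insn_reloc() caches its lookup in insn->reloc; NEGATIVE_RELOC marks a
 * failed lookup so the ELF search isn't repeated for the same instruction.
 */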
1179 static struct reloc *insn_reloc(struct objtool_file *file, struct instruction *insn)
1180 {
1181         if (insn->reloc == NEGATIVE_RELOC)
1182                 return NULL;
1183
1184         if (!insn->reloc) {
1185                 if (!file)
1186                         return NULL;
1187
1188                 insn->reloc = find_reloc_by_dest_range(file->elf, insn->sec,
1189                                                        insn->offset, insn->len);
1190                 if (!insn->reloc) {
1191                         insn->reloc = NEGATIVE_RELOC;
1192                         return NULL;
1193                 }
1194         }
1195
1196         return insn->reloc;
1197 }
1198
1199 static void remove_insn_ops(struct instruction *insn)
1200 {
1201         struct stack_op *op, *tmp;
1202
1203         list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
1204                 list_del(&op->list);
1205                 free(op);
1206         }
1207 }
1208
1209 static void annotate_call_site(struct objtool_file *file,
1210                                struct instruction *insn, bool sibling)
1211 {
1212         struct reloc *reloc = insn_reloc(file, insn);
1213         struct symbol *sym = insn->call_dest;
1214
1215         if (!sym)
1216                 sym = reloc->sym;
1217
1218         /*
1219          * Alternative replacement code is just template code which is
1220          * sometimes copied to the original instruction. For now, don't
1221          * annotate it. (In the future we might consider annotating the
1222          * original instruction if/when it ever makes sense to do so.)
1223          */
1224         if (!strcmp(insn->sec->name, ".altinstr_replacement"))
1225                 return;
1226
1227         if (sym->static_call_tramp) {
1228                 list_add_tail(&insn->call_node, &file->static_call_list);
1229                 return;
1230         }
1231
1232         if (sym->retpoline_thunk) {
1233                 list_add_tail(&insn->call_node, &file->retpoline_call_list);
1234                 return;
1235         }
1236
1237         /*
1238          * Many compilers cannot disable KCOV or sanitizer calls with a function
1239          * attribute, so they need a little help: NOP out any such calls from
1240          * noinstr text.
1241          */
1242         if (opts.hack_noinstr && insn->sec->noinstr && sym->profiling_func) {
1243                 if (reloc) {
1244                         reloc->type = R_NONE;
1245                         elf_write_reloc(file->elf, reloc);
1246                 }
1247
1248                 elf_write_insn(file->elf, insn->sec,
1249                                insn->offset, insn->len,
1250                                sibling ? arch_ret_insn(insn->len)
1251                                        : arch_nop_insn(insn->len));
1252
1253                 insn->type = sibling ? INSN_RETURN : INSN_NOP;
1254
1255                 if (sibling) {
1256                         /*
1257                          * We've replaced the tail-call JMP insn with two new
1258                          * insns: RET; INT3, but we only have a single struct
1259                          * insn here. Mark it retpoline_safe to avoid the SLS
1260                          * warning instead of adding another insn.
1261                          */
1262                         insn->retpoline_safe = true;
1263                 }
1264
1265                 return;
1266         }
1267
1268         if (opts.mcount && sym->fentry) {
1269                 if (sibling)
1270                         WARN_FUNC("Tail call to __fentry__ !?!?", insn->sec, insn->offset);
1271
1272                 if (reloc) {
1273                         reloc->type = R_NONE;
1274                         elf_write_reloc(file->elf, reloc);
1275                 }
1276
1277                 elf_write_insn(file->elf, insn->sec,
1278                                insn->offset, insn->len,
1279                                arch_nop_insn(insn->len));
1280
1281                 insn->type = INSN_NOP;
1282
1283                 list_add_tail(&insn->call_node, &file->mcount_loc_list);
1284                 return;
1285         }
1286
1287         if (!sibling && dead_end_function(file, sym))
1288                 insn->dead_end = true;
1289 }
1290
1291 static void add_call_dest(struct objtool_file *file, struct instruction *insn,
1292                           struct symbol *dest, bool sibling)
1293 {
1294         insn->call_dest = dest;
1295         if (!dest)
1296                 return;
1297
1298         /*
1299          * Whatever stack impact regular CALLs have should be undone
1300          * by the RETURN of the called function.
1301          *
1302          * Annotated intra-function calls retain the stack_ops but
1303          * are converted to JUMP, see read_intra_function_calls().
1304          */
1305         remove_insn_ops(insn);
1306
1307         annotate_call_site(file, insn, sibling);
1308 }
1309
1310 static void add_retpoline_call(struct objtool_file *file, struct instruction *insn)
1311 {
1312         /*
1313          * Retpoline calls/jumps are really dynamic calls/jumps in disguise,
1314          * so convert them accordingly.
1315          */
1316         switch (insn->type) {
1317         case INSN_CALL:
1318                 insn->type = INSN_CALL_DYNAMIC;
1319                 break;
1320         case INSN_JUMP_UNCONDITIONAL:
1321                 insn->type = INSN_JUMP_DYNAMIC;
1322                 break;
1323         case INSN_JUMP_CONDITIONAL:
1324                 insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
1325                 break;
1326         default:
1327                 return;
1328         }
1329
1330         insn->retpoline_safe = true;
1331
1332         /*
1333          * Whatever stack impact regular CALLs have should be undone
1334          * by the RETURN of the called function.
1335          *
1336          * Annotated intra-function calls retain the stack_ops but
1337          * are converted to JUMP, see read_intra_function_calls().
1338          */
1339         remove_insn_ops(insn);
1340
1341         annotate_call_site(file, insn, false);
1342 }
1343
1344 static void add_return_call(struct objtool_file *file, struct instruction *insn, bool add)
1345 {
1346         /*
1347          * Return thunk tail calls are really just returns in disguise,
1348          * so convert them accordingly.
1349          */
1350         insn->type = INSN_RETURN;
1351         insn->retpoline_safe = true;
1352
1353         if (add)
1354                 list_add_tail(&insn->call_node, &file->return_thunk_list);
1355 }
1356
1357 static bool same_function(struct instruction *insn1, struct instruction *insn2)
1358 {
1359         return insn1->func->pfunc == insn2->func->pfunc;
1360 }
1361
1362 static bool is_first_func_insn(struct objtool_file *file, struct instruction *insn)
1363 {
1364         if (insn->offset == insn->func->offset)
1365                 return true;
1366
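        /*
         * With IBT enabled, the function usually starts with an ENDBR; the
         * instruction immediately following it still counts as the first one.
         */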
1367         if (opts.ibt) {
1368                 struct instruction *prev = prev_insn_same_sym(file, insn);
1369
1370                 if (prev && prev->type == INSN_ENDBR &&
1371                     insn->offset == insn->func->offset + prev->len)
1372                         return true;
1373         }
1374
1375         return false;
1376 }
1377
1378 /*
1379  * Find the destination instructions for all jumps.
1380  */
1381 static int add_jump_destinations(struct objtool_file *file)
1382 {
1383         struct instruction *insn, *jump_dest;
1384         struct reloc *reloc;
1385         struct section *dest_sec;
1386         unsigned long dest_off;
1387
1388         for_each_insn(file, insn) {
1389                 if (insn->jump_dest) {
1390                         /*
1391                          * handle_group_alt() may have previously set
1392                          * 'jump_dest' for some alternatives.
1393                          */
1394                         continue;
1395                 }
1396                 if (!is_static_jump(insn))
1397                         continue;
1398
1399                 reloc = insn_reloc(file, insn);
1400                 if (!reloc) {
1401                         dest_sec = insn->sec;
1402                         dest_off = arch_jump_destination(insn);
1403                 } else if (reloc->sym->type == STT_SECTION) {
1404                         dest_sec = reloc->sym->sec;
1405                         dest_off = arch_dest_reloc_offset(reloc->addend);
1406                 } else if (reloc->sym->retpoline_thunk) {
1407                         add_retpoline_call(file, insn);
1408                         continue;
1409                 } else if (reloc->sym->return_thunk) {
1410                         add_return_call(file, insn, true);
1411                         continue;
1412                 } else if (insn->func) {
1413                         /*
1414                          * External sibling call or internal sibling call with
1415                          * STT_FUNC reloc.
1416                          */
1417                         add_call_dest(file, insn, reloc->sym, true);
1418                         continue;
1419                 } else if (reloc->sym->sec->idx) {
1420                         dest_sec = reloc->sym->sec;
1421                         dest_off = reloc->sym->sym.st_value +
1422                                    arch_dest_reloc_offset(reloc->addend);
1423                 } else {
1424                         /* non-func asm code jumping to another file */
1425                         continue;
1426                 }
1427
1428                 jump_dest = find_insn(file, dest_sec, dest_off);
1429                 if (!jump_dest) {
1430                         struct symbol *sym = find_symbol_by_offset(dest_sec, dest_off);
1431
1432                         /*
1433                          * This is a special case for zen_untrain_ret().
1434                          * It jumps to __x86_return_thunk(), but objtool
1435                          * can't find the thunk's starting RET
1436                          * instruction, because the RET is also in the
1437                          * middle of another instruction.  Objtool only
1438                          * knows about the outer instruction.
1439                          */
1440                         if (sym && sym->return_thunk) {
1441                                 add_return_call(file, insn, false);
1442                                 continue;
1443                         }
1444
1445                         WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
1446                                   insn->sec, insn->offset, dest_sec->name,
1447                                   dest_off);
1448                         return -1;
1449                 }
1450
1451                 /*
1452                  * Cross-function jump.
1453                  */
1454                 if (insn->func && jump_dest->func &&
1455                     insn->func != jump_dest->func) {
1456
1457                         /*
1458                          * For GCC 8+, create parent/child links for any cold
1459                          * subfunctions.  This is _mostly_ redundant with a
1460                          * similar initialization in read_symbols().
1461                          *
1462                          * If a function has aliases, we want the *first* such
1463                          * function in the symbol table to be the subfunction's
1464                          * parent.  In that case we overwrite the
1465                          * initialization done in read_symbols().
1466                          *
1467                          * However this code can't completely replace the
1468                          * read_symbols() code because this doesn't detect the
1469                          * case where the parent function's only reference to a
1470                          * subfunction is through a jump table.
1471                          */
1472                         if (!strstr(insn->func->name, ".cold") &&
1473                             strstr(jump_dest->func->name, ".cold")) {
1474                                 insn->func->cfunc = jump_dest->func;
1475                                 jump_dest->func->pfunc = insn->func;
1476
1477                         } else if (!same_function(insn, jump_dest) &&
1478                                    is_first_func_insn(file, jump_dest)) {
1479                                 /*
1480                                  * Internal sibling call without reloc or with
1481                                  * STT_SECTION reloc.
1482                                  */
1483                                 add_call_dest(file, insn, jump_dest->func, true);
1484                                 continue;
1485                         }
1486                 }
1487
1488                 insn->jump_dest = jump_dest;
1489         }
1490
1491         return 0;
1492 }
1493
1494 static struct symbol *find_call_destination(struct section *sec, unsigned long offset)
1495 {
1496         struct symbol *call_dest;
1497
1498         call_dest = find_func_by_offset(sec, offset);
1499         if (!call_dest)
1500                 call_dest = find_symbol_by_offset(sec, offset);
1501
1502         return call_dest;
1503 }
1504
1505 /*
1506  * Find the destination instructions for all calls.
1507  */
1508 static int add_call_destinations(struct objtool_file *file)
1509 {
1510         struct instruction *insn;
1511         unsigned long dest_off;
1512         struct symbol *dest;
1513         struct reloc *reloc;
1514
1515         for_each_insn(file, insn) {
1516                 if (insn->type != INSN_CALL)
1517                         continue;
1518
1519                 reloc = insn_reloc(file, insn);
1520                 if (!reloc) {
1521                         dest_off = arch_jump_destination(insn);
1522                         dest = find_call_destination(insn->sec, dest_off);
1523
1524                         add_call_dest(file, insn, dest, false);
1525
1526                         if (insn->ignore)
1527                                 continue;
1528
1529                         if (!insn->call_dest) {
1530                                 WARN_FUNC("unannotated intra-function call", insn->sec, insn->offset);
1531                                 return -1;
1532                         }
1533
1534                         if (insn->func && insn->call_dest->type != STT_FUNC) {
1535                                 WARN_FUNC("unsupported call to non-function",
1536                                           insn->sec, insn->offset);
1537                                 return -1;
1538                         }
1539
1540                 } else if (reloc->sym->type == STT_SECTION) {
1541                         dest_off = arch_dest_reloc_offset(reloc->addend);
1542                         dest = find_call_destination(reloc->sym->sec, dest_off);
1543                         if (!dest) {
1544                                 WARN_FUNC("can't find call dest symbol at %s+0x%lx",
1545                                           insn->sec, insn->offset,
1546                                           reloc->sym->sec->name,
1547                                           dest_off);
1548                                 return -1;
1549                         }
1550
1551                         add_call_dest(file, insn, dest, false);
1552
1553                 } else if (reloc->sym->retpoline_thunk) {
1554                         add_retpoline_call(file, insn);
1555
1556                 } else
1557                         add_call_dest(file, insn, reloc->sym, false);
1558         }
1559
1560         return 0;
1561 }
1562
1563 /*
1564  * Alternatives (the .altinstructions section) require extra special care
1565  * over and above other special sections because they are patched in place.
1566  */
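/*
 * Rough illustration (not taken from this file): a group alternative
 * typically originates from an ALTERNATIVE() use such as
 *
 *   alternative("call default_func", "call new_func", X86_FEATURE_FOO);
 *
 * The original bytes stay in .text, the replacement bytes go into
 * .altinstr_replacement, and an .altinstructions entry records both byte
 * ranges.  handle_group_alt() pairs those ranges up as alt_groups sharing
 * one CFI array, so validation can check that both variants leave the
 * stack state consistent.
 */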
1567 static int handle_group_alt(struct objtool_file *file,
1568                             struct special_alt *special_alt,
1569                             struct instruction *orig_insn,
1570                             struct instruction **new_insn)
1571 {
1572         struct instruction *last_orig_insn, *last_new_insn = NULL, *insn, *nop = NULL;
1573         struct alt_group *orig_alt_group, *new_alt_group;
1574         unsigned long dest_off;
1575
1576
1577         orig_alt_group = malloc(sizeof(*orig_alt_group));
1578         if (!orig_alt_group) {
1579                 WARN("malloc failed");
1580                 return -1;
1581         }
1582         orig_alt_group->cfi = calloc(special_alt->orig_len,
1583                                      sizeof(struct cfi_state *));
1584         if (!orig_alt_group->cfi) {
1585                 WARN("calloc failed");
1586                 return -1;
1587         }
1588
1589         last_orig_insn = NULL;
1590         insn = orig_insn;
1591         sec_for_each_insn_from(file, insn) {
1592                 if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
1593                         break;
1594
1595                 insn->alt_group = orig_alt_group;
1596                 last_orig_insn = insn;
1597         }
1598         orig_alt_group->orig_group = NULL;
1599         orig_alt_group->first_insn = orig_insn;
1600         orig_alt_group->last_insn = last_orig_insn;
1601
1602
1603         new_alt_group = malloc(sizeof(*new_alt_group));
1604         if (!new_alt_group) {
1605                 WARN("malloc failed");
1606                 return -1;
1607         }
1608
1609         if (special_alt->new_len < special_alt->orig_len) {
1610                 /*
1611                  * Insert a fake nop at the end to make the replacement
1612                  * alt_group the same size as the original.  This is needed to
1613                  * allow propagate_alt_cfi() to do its magic.  When the last
1614                  * instruction affects the stack, the instruction after it (the
1615                  * nop) will propagate the new state to the shared CFI array.
1616                  */
1617                 nop = malloc(sizeof(*nop));
1618                 if (!nop) {
1619                         WARN("malloc failed");
1620                         return -1;
1621                 }
1622                 memset(nop, 0, sizeof(*nop));
1623                 INIT_LIST_HEAD(&nop->alts);
1624                 INIT_LIST_HEAD(&nop->stack_ops);
1625
1626                 nop->sec = special_alt->new_sec;
1627                 nop->offset = special_alt->new_off + special_alt->new_len;
1628                 nop->len = special_alt->orig_len - special_alt->new_len;
1629                 nop->type = INSN_NOP;
1630                 nop->func = orig_insn->func;
1631                 nop->alt_group = new_alt_group;
1632                 nop->ignore = orig_insn->ignore_alts;
1633         }
1634
1635         if (!special_alt->new_len) {
1636                 *new_insn = nop;
1637                 goto end;
1638         }
1639
1640         insn = *new_insn;
1641         sec_for_each_insn_from(file, insn) {
1642                 struct reloc *alt_reloc;
1643
1644                 if (insn->offset >= special_alt->new_off + special_alt->new_len)
1645                         break;
1646
1647                 last_new_insn = insn;
1648
1649                 insn->ignore = orig_insn->ignore_alts;
1650                 insn->func = orig_insn->func;
1651                 insn->alt_group = new_alt_group;
1652
1653                 /*
1654                  * Since alternative replacement code is copy/pasted by the
1655                  * kernel after applying relocations, generally such code can't
1656                  * have relative-address relocation references to outside the
1657                  * .altinstr_replacement section, unless the arch's
1658                  * alternatives code can adjust the relative offsets
1659                  * accordingly.
1660                  */
1661                 alt_reloc = insn_reloc(file, insn);
1662                 if (alt_reloc &&
1663                     !arch_support_alt_relocation(special_alt, insn, alt_reloc)) {
1664
1665                         WARN_FUNC("unsupported relocation in alternatives section",
1666                                   insn->sec, insn->offset);
1667                         return -1;
1668                 }
1669
1670                 if (!is_static_jump(insn))
1671                         continue;
1672
1673                 if (!insn->immediate)
1674                         continue;
1675
1676                 dest_off = arch_jump_destination(insn);
1677                 if (dest_off == special_alt->new_off + special_alt->new_len) {
1678                         insn->jump_dest = next_insn_same_sec(file, last_orig_insn);
1679                         if (!insn->jump_dest) {
1680                                 WARN_FUNC("can't find alternative jump destination",
1681                                           insn->sec, insn->offset);
1682                                 return -1;
1683                         }
1684                 }
1685         }
1686
1687         if (!last_new_insn) {
1688                 WARN_FUNC("can't find last new alternative instruction",
1689                           special_alt->new_sec, special_alt->new_off);
1690                 return -1;
1691         }
1692
1693         if (nop)
1694                 list_add(&nop->list, &last_new_insn->list);
1695 end:
1696         new_alt_group->orig_group = orig_alt_group;
1697         new_alt_group->first_insn = *new_insn;
1698         new_alt_group->last_insn = nop ? : last_new_insn;
1699         new_alt_group->cfi = orig_alt_group->cfi;
1700         return 0;
1701 }
1702
1703 /*
1704  * A jump table entry can either convert a nop to a jump or a jump to a nop.
1705  * If the original instruction is a jump, make the alt entry an effective nop
1706  * by just skipping the original instruction.
1707  */
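/*
 * Illustration (not from this file): these entries typically come from the
 * static branch / jump label machinery, e.g. static_branch_likely(&key),
 * where arch_static_branch() emits either a NOP or an unconditional JMP
 * plus a __jump_table entry describing the site.  The short/long counters
 * below feed the objtool --stats output.
 */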
1708 static int handle_jump_alt(struct objtool_file *file,
1709                            struct special_alt *special_alt,
1710                            struct instruction *orig_insn,
1711                            struct instruction **new_insn)
1712 {
1713         if (orig_insn->type != INSN_JUMP_UNCONDITIONAL &&
1714             orig_insn->type != INSN_NOP) {
1715
1716                 WARN_FUNC("unsupported instruction at jump label",
1717                           orig_insn->sec, orig_insn->offset);
1718                 return -1;
1719         }
1720
1721         if (opts.hack_jump_label && special_alt->key_addend & 2) {
1722                 struct reloc *reloc = insn_reloc(file, orig_insn);
1723
1724                 if (reloc) {
1725                         reloc->type = R_NONE;
1726                         elf_write_reloc(file->elf, reloc);
1727                 }
1728                 elf_write_insn(file->elf, orig_insn->sec,
1729                                orig_insn->offset, orig_insn->len,
1730                                arch_nop_insn(orig_insn->len));
1731                 orig_insn->type = INSN_NOP;
1732         }
1733
1734         if (orig_insn->type == INSN_NOP) {
1735                 if (orig_insn->len == 2)
1736                         file->jl_nop_short++;
1737                 else
1738                         file->jl_nop_long++;
1739
1740                 return 0;
1741         }
1742
1743         if (orig_insn->len == 2)
1744                 file->jl_short++;
1745         else
1746                 file->jl_long++;
1747
1748         *new_insn = list_next_entry(orig_insn, list);
1749         return 0;
1750 }
1751
1752 /*
1753  * Read all the special sections which have alternate instructions which can be
1754  * patched in or redirected to at runtime.  Each instruction having alternate
1755  * instruction(s) has them added to its insn->alts list, which will be
1756  * traversed in validate_branch().
1757  */
1758 static int add_special_section_alts(struct objtool_file *file)
1759 {
1760         struct list_head special_alts;
1761         struct instruction *orig_insn, *new_insn;
1762         struct special_alt *special_alt, *tmp;
1763         struct alternative *alt;
1764         int ret;
1765
1766         ret = special_get_alts(file->elf, &special_alts);
1767         if (ret)
1768                 return ret;
1769
1770         list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
1771
1772                 orig_insn = find_insn(file, special_alt->orig_sec,
1773                                       special_alt->orig_off);
1774                 if (!orig_insn) {
1775                         WARN_FUNC("special: can't find orig instruction",
1776                                   special_alt->orig_sec, special_alt->orig_off);
1777                         ret = -1;
1778                         goto out;
1779                 }
1780
1781                 new_insn = NULL;
1782                 if (!special_alt->group || special_alt->new_len) {
1783                         new_insn = find_insn(file, special_alt->new_sec,
1784                                              special_alt->new_off);
1785                         if (!new_insn) {
1786                                 WARN_FUNC("special: can't find new instruction",
1787                                           special_alt->new_sec,
1788                                           special_alt->new_off);
1789                                 ret = -1;
1790                                 goto out;
1791                         }
1792                 }
1793
1794                 if (special_alt->group) {
1795                         if (!special_alt->orig_len) {
1796                                 WARN_FUNC("empty alternative entry",
1797                                           orig_insn->sec, orig_insn->offset);
1798                                 continue;
1799                         }
1800
1801                         ret = handle_group_alt(file, special_alt, orig_insn,
1802                                                &new_insn);
1803                         if (ret)
1804                                 goto out;
1805                 } else if (special_alt->jump_or_nop) {
1806                         ret = handle_jump_alt(file, special_alt, orig_insn,
1807                                               &new_insn);
1808                         if (ret)
1809                                 goto out;
1810                 }
1811
1812                 alt = malloc(sizeof(*alt));
1813                 if (!alt) {
1814                         WARN("malloc failed");
1815                         ret = -1;
1816                         goto out;
1817                 }
1818
1819                 alt->insn = new_insn;
1820                 alt->skip_orig = special_alt->skip_orig;
1821                 orig_insn->ignore_alts |= special_alt->skip_alt;
1822                 list_add_tail(&alt->list, &orig_insn->alts);
1823
1824                 list_del(&special_alt->list);
1825                 free(special_alt);
1826         }
1827
1828         if (opts.stats) {
1829                 printf("jl\\\tNOP\tJMP\n");
1830                 printf("short:\t%ld\t%ld\n", file->jl_nop_short, file->jl_short);
1831                 printf("long:\t%ld\t%ld\n", file->jl_nop_long, file->jl_long);
1832         }
1833
1834 out:
1835         return ret;
1836 }
1837
1838 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
1839                             struct reloc *table)
1840 {
1841         struct reloc *reloc = table;
1842         struct instruction *dest_insn;
1843         struct alternative *alt;
1844         struct symbol *pfunc = insn->func->pfunc;
1845         unsigned int prev_offset = 0;
1846
1847         /*
1848          * Each @reloc is a switch table relocation which points to the target
1849          * instruction.
1850          */
1851         list_for_each_entry_from(reloc, &table->sec->reloc_list, list) {
1852
1853                 /* Check for the end of the table: */
1854                 if (reloc != table && reloc->jump_table_start)
1855                         break;
1856
1857                 /* Make sure the table entries are consecutive: */
1858                 if (prev_offset && reloc->offset != prev_offset + 8)
1859                         break;
1860
1861                 /* Detect function pointers from contiguous objects: */
1862                 if (reloc->sym->sec == pfunc->sec &&
1863                     reloc->addend == pfunc->offset)
1864                         break;
1865
1866                 dest_insn = find_insn(file, reloc->sym->sec, reloc->addend);
1867                 if (!dest_insn)
1868                         break;
1869
1870                 /* Make sure the destination is in the same function: */
1871                 if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
1872                         break;
1873
1874                 alt = malloc(sizeof(*alt));
1875                 if (!alt) {
1876                         WARN("malloc failed");
1877                         return -1;
1878                 }
1879
1880                 alt->insn = dest_insn;
1881                 list_add_tail(&alt->list, &insn->alts);
1882                 prev_offset = reloc->offset;
1883         }
1884
1885         if (!prev_offset) {
1886                 WARN_FUNC("can't find switch jump table",
1887                           insn->sec, insn->offset);
1888                 return -1;
1889         }
1890
1891         return 0;
1892 }
1893
1894 /*
1895  * find_jump_table() - Given a dynamic jump, find the switch jump table
1896  * associated with it.
1897  */
1898 static struct reloc *find_jump_table(struct objtool_file *file,
1899                                       struct symbol *func,
1900                                       struct instruction *insn)
1901 {
1902         struct reloc *table_reloc;
1903         struct instruction *dest_insn, *orig_insn = insn;
1904
1905         /*
1906          * Backward search using the @first_jump_src links; these help skip
1907          * much of the 'in between' code, which could otherwise confuse the
1908          * search.
1909          */
1910         for (;
1911              insn && insn->func && insn->func->pfunc == func;
1912              insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {
1913
1914                 if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1915                         break;
1916
1917                 /* allow small jumps within the range */
1918                 if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1919                     insn->jump_dest &&
1920                     (insn->jump_dest->offset <= insn->offset ||
1921                      insn->jump_dest->offset > orig_insn->offset))
1922                         break;
1923
1924                 table_reloc = arch_find_switch_table(file, insn);
1925                 if (!table_reloc)
1926                         continue;
1927                 dest_insn = find_insn(file, table_reloc->sym->sec, table_reloc->addend);
1928                 if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
1929                         continue;
1930
1931                 return table_reloc;
1932         }
1933
1934         return NULL;
1935 }
1936
1937 /*
1938  * First pass: Mark the head of each jump table so that in the next pass,
1939  * we know when a given jump table ends and the next one starts.
1940  */
1941 static void mark_func_jump_tables(struct objtool_file *file,
1942                                     struct symbol *func)
1943 {
1944         struct instruction *insn, *last = NULL;
1945         struct reloc *reloc;
1946
1947         func_for_each_insn(file, func, insn) {
1948                 if (!last)
1949                         last = insn;
1950
1951                 /*
1952                  * Store back-pointers for unconditional forward jumps such
1953                  * that find_jump_table() can back-track using those and
1954                  * avoid some potentially confusing code.
1955                  */
1956                 if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1957                     insn->offset > last->offset &&
1958                     insn->jump_dest->offset > insn->offset &&
1959                     !insn->jump_dest->first_jump_src) {
1960
1961                         insn->jump_dest->first_jump_src = insn;
1962                         last = insn->jump_dest;
1963                 }
1964
1965                 if (insn->type != INSN_JUMP_DYNAMIC)
1966                         continue;
1967
1968                 reloc = find_jump_table(file, func, insn);
1969                 if (reloc) {
1970                         reloc->jump_table_start = true;
1971                         insn->jump_table = reloc;
1972                 }
1973         }
1974 }
1975
1976 static int add_func_jump_tables(struct objtool_file *file,
1977                                   struct symbol *func)
1978 {
1979         struct instruction *insn;
1980         int ret;
1981
1982         func_for_each_insn(file, func, insn) {
1983                 if (!insn->jump_table)
1984                         continue;
1985
1986                 ret = add_jump_table(file, insn, insn->jump_table);
1987                 if (ret)
1988                         return ret;
1989         }
1990
1991         return 0;
1992 }
1993
1994 /*
1995  * For some switch statements, gcc generates a jump table in the .rodata
1996  * section which contains a list of addresses within the function to jump to.
1997  * This finds these jump tables and adds them to the insn->alts lists.
1998  */
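/*
 * Sketch of the pattern being handled (x86-64 assumed, illustrative only):
 * a C switch with a dense set of cases may be compiled into an indirect
 * jump through a table of code addresses, e.g.:
 *
 *   jmpq *table(,%rax,8)        # .rodata: .quad .L0, .L1, ...
 *
 * Each table slot is 8 bytes and carries a relocation to its target, which
 * is why add_jump_table() walks consecutive relocations 8 bytes apart.
 */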
1999 static int add_jump_table_alts(struct objtool_file *file)
2000 {
2001         struct section *sec;
2002         struct symbol *func;
2003         int ret;
2004
2005         if (!file->rodata)
2006                 return 0;
2007
2008         for_each_sec(file, sec) {
2009                 list_for_each_entry(func, &sec->symbol_list, list) {
2010                         if (func->type != STT_FUNC)
2011                                 continue;
2012
2013                         mark_func_jump_tables(file, func);
2014                         ret = add_func_jump_tables(file, func);
2015                         if (ret)
2016                                 return ret;
2017                 }
2018         }
2019
2020         return 0;
2021 }
2022
2023 static void set_func_state(struct cfi_state *state)
2024 {
2025         state->cfa = initial_func_cfi.cfa;
2026         memcpy(&state->regs, &initial_func_cfi.regs,
2027                CFI_NUM_REGS * sizeof(struct cfi_reg));
2028         state->stack_size = initial_func_cfi.cfa.offset;
2029 }
2030
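/*
 * Unwind hints are emitted by the UNWIND_HINT_*() annotations in asm code
 * (see include/linux/objtool.h).  Each .discard.unwind_hints entry is a
 * struct unwind_hint describing the stack-pointer base register, offset and
 * hint type at a given instruction; the loop below translates those into
 * insn->cfi states.
 */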
2031 static int read_unwind_hints(struct objtool_file *file)
2032 {
2033         struct cfi_state cfi = init_cfi;
2034         struct section *sec, *relocsec;
2035         struct unwind_hint *hint;
2036         struct instruction *insn;
2037         struct reloc *reloc;
2038         int i;
2039
2040         sec = find_section_by_name(file->elf, ".discard.unwind_hints");
2041         if (!sec)
2042                 return 0;
2043
2044         relocsec = sec->reloc;
2045         if (!relocsec) {
2046                 WARN("missing .rela.discard.unwind_hints section");
2047                 return -1;
2048         }
2049
2050         if (sec->sh.sh_size % sizeof(struct unwind_hint)) {
2051                 WARN("struct unwind_hint size mismatch");
2052                 return -1;
2053         }
2054
2055         file->hints = true;
2056
2057         for (i = 0; i < sec->sh.sh_size / sizeof(struct unwind_hint); i++) {
2058                 hint = (struct unwind_hint *)sec->data->d_buf + i;
2059
2060                 reloc = find_reloc_by_dest(file->elf, sec, i * sizeof(*hint));
2061                 if (!reloc) {
2062                         WARN("can't find reloc for unwind_hints[%d]", i);
2063                         return -1;
2064                 }
2065
2066                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2067                 if (!insn) {
2068                         WARN("can't find insn for unwind_hints[%d]", i);
2069                         return -1;
2070                 }
2071
2072                 insn->hint = true;
2073
2074                 if (hint->type == UNWIND_HINT_TYPE_SAVE) {
2075                         insn->hint = false;
2076                         insn->save = true;
2077                         continue;
2078                 }
2079
2080                 if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
2081                         insn->restore = true;
2082                         continue;
2083                 }
2084
2085                 if (hint->type == UNWIND_HINT_TYPE_REGS_PARTIAL) {
2086                         struct symbol *sym = find_symbol_by_offset(insn->sec, insn->offset);
2087
2088                         if (sym && sym->bind == STB_GLOBAL) {
2089                                 if (opts.ibt && insn->type != INSN_ENDBR && !insn->noendbr) {
2090                                         WARN_FUNC("UNWIND_HINT_IRET_REGS without ENDBR",
2091                                                   insn->sec, insn->offset);
2092                                 }
2093
2094                                 insn->entry = 1;
2095                         }
2096                 }
2097
2098                 if (hint->type == UNWIND_HINT_TYPE_ENTRY) {
2099                         hint->type = UNWIND_HINT_TYPE_CALL;
2100                         insn->entry = 1;
2101                 }
2102
2103                 if (hint->type == UNWIND_HINT_TYPE_FUNC) {
2104                         insn->cfi = &func_cfi;
2105                         continue;
2106                 }
2107
2108                 if (insn->cfi)
2109                         cfi = *(insn->cfi);
2110
2111                 if (arch_decode_hint_reg(hint->sp_reg, &cfi.cfa.base)) {
2112                         WARN_FUNC("unsupported unwind_hint sp base reg %d",
2113                                   insn->sec, insn->offset, hint->sp_reg);
2114                         return -1;
2115                 }
2116
2117                 cfi.cfa.offset = bswap_if_needed(hint->sp_offset);
2118                 cfi.type = hint->type;
2119                 cfi.end = hint->end;
2120
2121                 insn->cfi = cfi_hash_find_or_add(&cfi);
2122         }
2123
2124         return 0;
2125 }
2126
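/*
 * .discard.noendbr entries come from the ANNOTATE_NOENDBR annotation, which
 * marks code addresses where the lack of an ENDBR instruction is
 * intentional, so IBT validation doesn't complain about them.  Note the
 * relocation is resolved as sym + addend below, so symbol-based relocs work
 * as well as section-based ones.
 */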
2127 static int read_noendbr_hints(struct objtool_file *file)
2128 {
2129         struct section *sec;
2130         struct instruction *insn;
2131         struct reloc *reloc;
2132
2133         sec = find_section_by_name(file->elf, ".rela.discard.noendbr");
2134         if (!sec)
2135                 return 0;
2136
2137         list_for_each_entry(reloc, &sec->reloc_list, list) {
2138                 insn = find_insn(file, reloc->sym->sec, reloc->sym->offset + reloc->addend);
2139                 if (!insn) {
2140                         WARN("bad .discard.noendbr entry");
2141                         return -1;
2142                 }
2143
2144                 insn->noendbr = 1;
2145         }
2146
2147         return 0;
2148 }
2149
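/*
 * .discard.retpoline_safe entries come from the ANNOTATE_RETPOLINE_SAFE
 * annotation, marking indirect branches (and the RETs/NOPs they may be
 * patched into) that are known to be safe without a retpoline, e.g. because
 * they are rewritten at runtime by alternatives.
 */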
2150 static int read_retpoline_hints(struct objtool_file *file)
2151 {
2152         struct section *sec;
2153         struct instruction *insn;
2154         struct reloc *reloc;
2155
2156         sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
2157         if (!sec)
2158                 return 0;
2159
2160         list_for_each_entry(reloc, &sec->reloc_list, list) {
2161                 if (reloc->sym->type != STT_SECTION) {
2162                         WARN("unexpected relocation symbol type in %s", sec->name);
2163                         return -1;
2164                 }
2165
2166                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2167                 if (!insn) {
2168                         WARN("bad .discard.retpoline_safe entry");
2169                         return -1;
2170                 }
2171
2172                 if (insn->type != INSN_JUMP_DYNAMIC &&
2173                     insn->type != INSN_CALL_DYNAMIC &&
2174                     insn->type != INSN_RETURN &&
2175                     insn->type != INSN_NOP) {
2176                         WARN_FUNC("retpoline_safe hint not an indirect jump/call/ret/nop",
2177                                   insn->sec, insn->offset);
2178                         return -1;
2179                 }
2180
2181                 insn->retpoline_safe = true;
2182         }
2183
2184         return 0;
2185 }
2186
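/*
 * instrumentation_begin()/instrumentation_end() annotations emit entries
 * into .discard.instr_begin and .discard.instr_end.  insn->instr acts as a
 * nesting counter: the end entries below decrement it and the begin entries
 * increment it, so validation can tell whether an instruction inside a
 * noinstr section sits in an instrumentation-allowed region.
 */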
2187 static int read_instr_hints(struct objtool_file *file)
2188 {
2189         struct section *sec;
2190         struct instruction *insn;
2191         struct reloc *reloc;
2192
2193         sec = find_section_by_name(file->elf, ".rela.discard.instr_end");
2194         if (!sec)
2195                 return 0;
2196
2197         list_for_each_entry(reloc, &sec->reloc_list, list) {
2198                 if (reloc->sym->type != STT_SECTION) {
2199                         WARN("unexpected relocation symbol type in %s", sec->name);
2200                         return -1;
2201                 }
2202
2203                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2204                 if (!insn) {
2205                         WARN("bad .discard.instr_end entry");
2206                         return -1;
2207                 }
2208
2209                 insn->instr--;
2210         }
2211
2212         sec = find_section_by_name(file->elf, ".rela.discard.instr_begin");
2213         if (!sec)
2214                 return 0;
2215
2216         list_for_each_entry(reloc, &sec->reloc_list, list) {
2217                 if (reloc->sym->type != STT_SECTION) {
2218                         WARN("unexpected relocation symbol type in %s", sec->name);
2219                         return -1;
2220                 }
2221
2222                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2223                 if (!insn) {
2224                         WARN("bad .discard.instr_begin entry");
2225                         return -1;
2226                 }
2227
2228                 insn->instr++;
2229         }
2230
2231         return 0;
2232 }
2233
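/*
 * .discard.intra_function_calls entries come from the
 * ANNOTATE_INTRA_FUNCTION_CALL macro, used for asm code that CALLs a label
 * within the same function (typically just to push a return address) rather
 * than performing a real function call.
 */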
2234 static int read_intra_function_calls(struct objtool_file *file)
2235 {
2236         struct instruction *insn;
2237         struct section *sec;
2238         struct reloc *reloc;
2239
2240         sec = find_section_by_name(file->elf, ".rela.discard.intra_function_calls");
2241         if (!sec)
2242                 return 0;
2243
2244         list_for_each_entry(reloc, &sec->reloc_list, list) {
2245                 unsigned long dest_off;
2246
2247                 if (reloc->sym->type != STT_SECTION) {
2248                         WARN("unexpected relocation symbol type in %s",
2249                              sec->name);
2250                         return -1;
2251                 }
2252
2253                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2254                 if (!insn) {
2255                         WARN("bad .discard.intra_function_call entry");
2256                         return -1;
2257                 }
2258
2259                 if (insn->type != INSN_CALL) {
2260                         WARN_FUNC("intra_function_call not a direct call",
2261                                   insn->sec, insn->offset);
2262                         return -1;
2263                 }
2264
2265                 /*
2266                  * Treat intra-function CALLs as JMPs, but with a stack_op.
2267                  * See add_call_destinations(), which strips stack_ops from
2268                  * normal CALLs.
2269                  */
2270                 insn->type = INSN_JUMP_UNCONDITIONAL;
2271
2272                 dest_off = arch_jump_destination(insn);
2273                 insn->jump_dest = find_insn(file, insn->sec, dest_off);
2274                 if (!insn->jump_dest) {
2275                         WARN_FUNC("can't find call dest at %s+0x%lx",
2276                                   insn->sec, insn->offset,
2277                                   insn->sec->name, dest_off);
2278                         return -1;
2279                 }
2280         }
2281
2282         return 0;
2283 }
2284
2285 /*
2286  * Return true if name matches an instrumentation function, where calls to that
2287  * function from noinstr code can safely be removed, but compilers won't do so.
2288  */
2289 static bool is_profiling_func(const char *name)
2290 {
2291         /*
2292          * Many compilers cannot disable KCOV with a function attribute.
2293          */
2294         if (!strncmp(name, "__sanitizer_cov_", 16))
2295                 return true;
2296
2297         /*
2298          * Some compilers currently do not remove __tsan_func_entry/exit nor
2299          * __tsan_atomic_signal_fence (used for barrier instrumentation) even
2300          * with the __no_sanitize_thread attribute, so such calls must be removed.
2301          * Once the kernel's minimum Clang version is 14.0, this can be dropped.
2302          */
2303         if (!strncmp(name, "__tsan_func_", 12) ||
2304             !strcmp(name, "__tsan_atomic_signal_fence"))
2305                 return true;
2306
2307         return false;
2308 }
2309
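/*
 * Examples of the symbols classified below (illustrative): static call
 * trampolines start with "__SCT__" (STATIC_CALL_TRAMP_PREFIX_STR), x86
 * retpoline thunks look like "__x86_indirect_thunk_rax", and the return
 * thunk is "__x86_return_thunk".
 */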
2310 static int classify_symbols(struct objtool_file *file)
2311 {
2312         struct section *sec;
2313         struct symbol *func;
2314
2315         for_each_sec(file, sec) {
2316                 list_for_each_entry(func, &sec->symbol_list, list) {
2317                         if (func->bind != STB_GLOBAL)
2318                                 continue;
2319
2320                         if (!strncmp(func->name, STATIC_CALL_TRAMP_PREFIX_STR,
2321                                      strlen(STATIC_CALL_TRAMP_PREFIX_STR)))
2322                                 func->static_call_tramp = true;
2323
2324                         if (arch_is_retpoline(func))
2325                                 func->retpoline_thunk = true;
2326
2327                         if (arch_is_rethunk(func))
2328                                 func->return_thunk = true;
2329
2330                         if (!strcmp(func->name, "__fentry__"))
2331                                 func->fentry = true;
2332
2333                         if (is_profiling_func(func->name))
2334                                 func->profiling_func = true;
2335                 }
2336         }
2337
2338         return 0;
2339 }
2340
2341 static void mark_rodata(struct objtool_file *file)
2342 {
2343         struct section *sec;
2344         bool found = false;
2345
2346         /*
2347          * Search for the following rodata sections, each of which can
2348          * potentially contain jump tables:
2349          *
2350          * - .rodata: can contain GCC switch tables
2351          * - .rodata.<func>: same, if -fdata-sections is being used
2352          * - .rodata..c_jump_table: contains C annotated jump tables
2353          *
2354          * .rodata.str1.* sections are ignored; they don't contain jump tables.
2355          */
2356         for_each_sec(file, sec) {
2357                 if (!strncmp(sec->name, ".rodata", 7) &&
2358                     !strstr(sec->name, ".str1.")) {
2359                         sec->rodata = true;
2360                         found = true;
2361                 }
2362         }
2363
2364         file->rodata = found;
2365 }
2366
2367 static int decode_sections(struct objtool_file *file)
2368 {
2369         int ret;
2370
2371         mark_rodata(file);
2372
2373         ret = init_pv_ops(file);
2374         if (ret)
2375                 return ret;
2376
2377         ret = decode_instructions(file);
2378         if (ret)
2379                 return ret;
2380
2381         add_ignores(file);
2382         add_uaccess_safe(file);
2383
2384         ret = add_ignore_alternatives(file);
2385         if (ret)
2386                 return ret;
2387
2388         /*
2389          * Must be before read_unwind_hints() since that needs insn->noendbr.
2390          */
2391         ret = read_noendbr_hints(file);
2392         if (ret)
2393                 return ret;
2394
2395         /*
2396          * Must be before add_jump_destinations() and add_call_destinations().
2397          */
2398         ret = classify_symbols(file);
2399         if (ret)
2400                 return ret;
2401
2402         /*
2403          * Must be before add_jump_destinations(), which depends on 'func'
2404          * being set for alternatives, to enable proper sibling call detection.
2405          */
2406         ret = add_special_section_alts(file);
2407         if (ret)
2408                 return ret;
2409
2410         ret = add_jump_destinations(file);
2411         if (ret)
2412                 return ret;
2413
2414         /*
2415          * Must be before add_call_destinations(); it changes INSN_CALL to
2416          * INSN_JUMP_UNCONDITIONAL.
2417          */
2418         ret = read_intra_function_calls(file);
2419         if (ret)
2420                 return ret;
2421
2422         ret = add_call_destinations(file);
2423         if (ret)
2424                 return ret;
2425
2426         /*
2427          * Must be after add_call_destinations() such that it can override
2428          * dead_end_function() marks.
2429          */
2430         ret = add_dead_ends(file);
2431         if (ret)
2432                 return ret;
2433
2434         ret = add_jump_table_alts(file);
2435         if (ret)
2436                 return ret;
2437
2438         ret = read_unwind_hints(file);
2439         if (ret)
2440                 return ret;
2441
2442         ret = read_retpoline_hints(file);
2443         if (ret)
2444                 return ret;
2445
2446         ret = read_instr_hints(file);
2447         if (ret)
2448                 return ret;
2449
2450         return 0;
2451 }
2452
2453 static bool is_fentry_call(struct instruction *insn)
2454 {
2455         if (insn->type == INSN_CALL &&
2456             insn->call_dest &&
2457             insn->call_dest->fentry)
2458                 return true;
2459
2460         return false;
2461 }
2462
2463 static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
2464 {
2465         struct cfi_state *cfi = &state->cfi;
2466         int i;
2467
2468         if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
2469                 return true;
2470
2471         if (cfi->cfa.offset != initial_func_cfi.cfa.offset)
2472                 return true;
2473
2474         if (cfi->stack_size != initial_func_cfi.cfa.offset)
2475                 return true;
2476
2477         for (i = 0; i < CFI_NUM_REGS; i++) {
2478                 if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
2479                     cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
2480                         return true;
2481         }
2482
2483         return false;
2484 }
2485
2486 static bool check_reg_frame_pos(const struct cfi_reg *reg,
2487                                 int expected_offset)
2488 {
2489         return reg->base == CFI_CFA &&
2490                reg->offset == expected_offset;
2491 }
2492
2493 static bool has_valid_stack_frame(struct insn_state *state)
2494 {
2495         struct cfi_state *cfi = &state->cfi;
2496
2497         if (cfi->cfa.base == CFI_BP &&
2498             check_reg_frame_pos(&cfi->regs[CFI_BP], -cfi->cfa.offset) &&
2499             check_reg_frame_pos(&cfi->regs[CFI_RA], -cfi->cfa.offset + 8))
2500                 return true;
2501
2502         if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
2503                 return true;
2504
2505         return false;
2506 }
2507
2508 static int update_cfi_state_regs(struct instruction *insn,
2509                                   struct cfi_state *cfi,
2510                                   struct stack_op *op)
2511 {
2512         struct cfi_reg *cfa = &cfi->cfa;
2513
2514         if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT)
2515                 return 0;
2516
2517         /* push */
2518         if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
2519                 cfa->offset += 8;
2520
2521         /* pop */
2522         if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
2523                 cfa->offset -= 8;
2524
2525         /* add immediate to sp */
2526         if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
2527             op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
2528                 cfa->offset -= op->src.offset;
2529
2530         return 0;
2531 }
2532
2533 static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset)
2534 {
2535         if (arch_callee_saved_reg(reg) &&
2536             cfi->regs[reg].base == CFI_UNDEFINED) {
2537                 cfi->regs[reg].base = base;
2538                 cfi->regs[reg].offset = offset;
2539         }
2540 }
2541
2542 static void restore_reg(struct cfi_state *cfi, unsigned char reg)
2543 {
2544         cfi->regs[reg].base = initial_func_cfi.regs[reg].base;
2545         cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset;
2546 }
2547
2548 /*
2549  * A note about DRAP stack alignment:
2550  *
2551  * GCC has the concept of a DRAP register, which is used to help keep track of
2552  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
2553  * register.  The typical DRAP pattern is:
2554  *
2555  *   4c 8d 54 24 08             lea    0x8(%rsp),%r10
2556  *   48 83 e4 c0                and    $0xffffffffffffffc0,%rsp
2557  *   41 ff 72 f8                pushq  -0x8(%r10)
2558  *   55                         push   %rbp
2559  *   48 89 e5                   mov    %rsp,%rbp
2560  *                              (more pushes)
2561  *   41 52                      push   %r10
2562  *                              ...
2563  *   41 5a                      pop    %r10
2564  *                              (more pops)
2565  *   5d                         pop    %rbp
2566  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
2567  *   c3                         retq
2568  *
2569  * There are some variations in the epilogues, like:
2570  *
2571  *   5b                         pop    %rbx
2572  *   41 5a                      pop    %r10
2573  *   41 5c                      pop    %r12
2574  *   41 5d                      pop    %r13
2575  *   41 5e                      pop    %r14
2576  *   c9                         leaveq
2577  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
2578  *   c3                         retq
2579  *
2580  * and:
2581  *
2582  *   4c 8b 55 e8                mov    -0x18(%rbp),%r10
2583  *   48 8b 5d e0                mov    -0x20(%rbp),%rbx
2584  *   4c 8b 65 f0                mov    -0x10(%rbp),%r12
2585  *   4c 8b 6d f8                mov    -0x8(%rbp),%r13
2586  *   c9                         leaveq
2587  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
2588  *   c3                         retq
2589  *
2590  * Sometimes r13 is used as the DRAP register, in which case it's saved and
2591  * restored beforehand:
2592  *
2593  *   41 55                      push   %r13
2594  *   4c 8d 6c 24 10             lea    0x10(%rsp),%r13
2595  *   48 83 e4 f0                and    $0xfffffffffffffff0,%rsp
2596  *                              ...
2597  *   49 8d 65 f0                lea    -0x10(%r13),%rsp
2598  *   41 5d                      pop    %r13
2599  *   c3                         retq
2600  */
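/*
 * A worked (non-DRAP) example of what update_cfi_state() tracks, assuming
 * the usual x86-64 entry state of CFA = %rsp + 8:
 *
 *   push %rbp          # stack_size = 16, CFA = %rsp + 16, %rbp saved at CFA-16
 *   mov  %rsp, %rbp    # CFA base switches from %rsp to %rbp
 *   ...
 *   pop  %rbp          # %rbp restored, CFA base back to %rsp
 *   ret
 */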
2601 static int update_cfi_state(struct instruction *insn,
2602                             struct instruction *next_insn,
2603                             struct cfi_state *cfi, struct stack_op *op)
2604 {
2605         struct cfi_reg *cfa = &cfi->cfa;
2606         struct cfi_reg *regs = cfi->regs;
2607
2608         /* stack operations don't make sense with an undefined CFA */
2609         if (cfa->base == CFI_UNDEFINED) {
2610                 if (insn->func) {
2611                         WARN_FUNC("undefined stack state", insn->sec, insn->offset);
2612                         return -1;
2613                 }
2614                 return 0;
2615         }
2616
2617         if (cfi->type == UNWIND_HINT_TYPE_REGS ||
2618             cfi->type == UNWIND_HINT_TYPE_REGS_PARTIAL)
2619                 return update_cfi_state_regs(insn, cfi, op);
2620
2621         switch (op->dest.type) {
2622
2623         case OP_DEST_REG:
2624                 switch (op->src.type) {
2625
2626                 case OP_SRC_REG:
2627                         if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
2628                             cfa->base == CFI_SP &&
2629                             check_reg_frame_pos(&regs[CFI_BP], -cfa->offset)) {
2630
2631                                 /* mov %rsp, %rbp */
2632                                 cfa->base = op->dest.reg;
2633                                 cfi->bp_scratch = false;
2634                         }
2635
2636                         else if (op->src.reg == CFI_SP &&
2637                                  op->dest.reg == CFI_BP && cfi->drap) {
2638
2639                                 /* drap: mov %rsp, %rbp */
2640                                 regs[CFI_BP].base = CFI_BP;
2641                                 regs[CFI_BP].offset = -cfi->stack_size;
2642                                 cfi->bp_scratch = false;
2643                         }
2644
2645                         else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
2646
2647                                 /*
2648                                  * mov %rsp, %reg
2649                                  *
2650                                  * This is needed for the rare case where GCC
2651                                  * does:
2652                                  *
2653                                  *   mov    %rsp, %rax
2654                                  *   ...
2655                                  *   mov    %rax, %rsp
2656                                  */
2657                                 cfi->vals[op->dest.reg].base = CFI_CFA;
2658                                 cfi->vals[op->dest.reg].offset = -cfi->stack_size;
2659                         }
2660
2661                         else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
2662                                  (cfa->base == CFI_BP || cfa->base == cfi->drap_reg)) {
2663
2664                                 /*
2665                                  * mov %rbp, %rsp
2666                                  *
2667                                  * Restore the original stack pointer (Clang).
2668                                  */
2669                                 cfi->stack_size = -cfi->regs[CFI_BP].offset;
2670                         }
2671
2672                         else if (op->dest.reg == cfa->base) {
2673
2674                                 /* mov %reg, %rsp */
2675                                 if (cfa->base == CFI_SP &&
2676                                     cfi->vals[op->src.reg].base == CFI_CFA) {
2677
2678                                         /*
2679                                          * This is needed for the rare case
2680                                          * where GCC does something dumb like:
2681                                          *
2682                                          *   lea    0x8(%rsp), %rcx
2683                                          *   ...
2684                                          *   mov    %rcx, %rsp
2685                                          */
2686                                         cfa->offset = -cfi->vals[op->src.reg].offset;
2687                                         cfi->stack_size = cfa->offset;
2688
2689                                 } else if (cfa->base == CFI_SP &&
2690                                            cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
2691                                            cfi->vals[op->src.reg].offset == cfa->offset) {
2692
2693                                         /*
2694                                          * Stack swizzle:
2695                                          *
2696                                          * 1: mov %rsp, (%[tos])
2697                                          * 2: mov %[tos], %rsp
2698                                          *    ...
2699                                          * 3: pop %rsp
2700                                          *
2701                                          * Where:
2702                                          *
2703                                          * 1 - places a pointer to the previous
2704                                          *     stack at the Top-of-Stack of the
2705                                          *     new stack.
2706                                          *
2707                                          * 2 - switches to the new stack.
2708                                          *
2709                                          * 3 - pops the Top-of-Stack to restore
2710                                          *     the original stack.
2711                                          *
2712                                          * Note: we set base to SP_INDIRECT
2713                                          * here and preserve offset. Therefore
2714                                          * when the unwinder reaches ToS it
2715                                          * will dereference SP and then add the
2716                                          * offset to find the next frame, IOW:
2717                                          * (%rsp) + offset.
2718                                          */
2719                                         cfa->base = CFI_SP_INDIRECT;
2720
2721                                 } else {
2722                                         cfa->base = CFI_UNDEFINED;
2723                                         cfa->offset = 0;
2724                                 }
2725                         }
2726
2727                         else if (op->dest.reg == CFI_SP &&
2728                                  cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
2729                                  cfi->vals[op->src.reg].offset == cfa->offset) {
2730
2731                                 /*
2732                                  * The same stack swizzle case 2) as above. But
2733                                  * because we can't change cfa->base, case 3)
2734                                  * will become a regular POP. Pretend we're a
2735                                  * PUSH so things don't go unbalanced.
2736                                  */
2737                                 cfi->stack_size += 8;
2738                         }
2739
2740
2741                         break;
2742
2743                 case OP_SRC_ADD:
2744                         if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
2745
2746                                 /* add imm, %rsp */
2747                                 cfi->stack_size -= op->src.offset;
2748                                 if (cfa->base == CFI_SP)
2749                                         cfa->offset -= op->src.offset;
2750                                 break;
2751                         }
2752
2753                         if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
2754
2755                                 /* lea disp(%rbp), %rsp */
2756                                 cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
2757                                 break;
2758                         }
2759
2760                         if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
2761
2762                                 /* drap: lea disp(%rsp), %drap */
2763                                 cfi->drap_reg = op->dest.reg;
2764
2765                                 /*
2766                                  * lea disp(%rsp), %reg
2767                                  *
2768                                  * This is needed for the rare case where GCC
2769                                  * does something dumb like:
2770                                  *
2771                                  *   lea    0x8(%rsp), %rcx
2772                                  *   ...
2773                                  *   mov    %rcx, %rsp
2774                                  */
2775                                 cfi->vals[op->dest.reg].base = CFI_CFA;
2776                                 cfi->vals[op->dest.reg].offset =
2777                                         -cfi->stack_size + op->src.offset;
2778
2779                                 break;
2780                         }
2781
2782                         if (cfi->drap && op->dest.reg == CFI_SP &&
2783                             op->src.reg == cfi->drap_reg) {
2784
2785                                  /* drap: lea disp(%drap), %rsp */
2786                                 cfa->base = CFI_SP;
2787                                 cfa->offset = cfi->stack_size = -op->src.offset;
2788                                 cfi->drap_reg = CFI_UNDEFINED;
2789                                 cfi->drap = false;
2790                                 break;
2791                         }
2792
2793                         if (op->dest.reg == cfi->cfa.base && !(next_insn && next_insn->hint)) {
2794                                 WARN_FUNC("unsupported stack register modification",
2795                                           insn->sec, insn->offset);
2796                                 return -1;
2797                         }
2798
2799                         break;
2800
2801                 case OP_SRC_AND:
2802                         if (op->dest.reg != CFI_SP ||
2803                             (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
2804                             (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
2805                                 WARN_FUNC("unsupported stack pointer realignment",
2806                                           insn->sec, insn->offset);
2807                                 return -1;
2808                         }
2809
2810                         if (cfi->drap_reg != CFI_UNDEFINED) {
2811                                 /* drap: and imm, %rsp */
2812                                 cfa->base = cfi->drap_reg;
2813                                 cfa->offset = cfi->stack_size = 0;
2814                                 cfi->drap = true;
2815                         }
2816
2817                         /*
2818                          * Older versions of GCC (4.8ish) realign the stack
2819                          * without DRAP, with a frame pointer.
2820                          */
2821
2822                         break;
2823
2824                 case OP_SRC_POP:
2825                 case OP_SRC_POPF:
2826                         if (op->dest.reg == CFI_SP && cfa->base == CFI_SP_INDIRECT) {
2827
2828                                 /* pop %rsp; # restore from a stack swizzle */
2829                                 cfa->base = CFI_SP;
2830                                 break;
2831                         }
2832
2833                         if (!cfi->drap && op->dest.reg == cfa->base) {
2834
2835                                 /* pop %rbp */
2836                                 cfa->base = CFI_SP;
2837                         }
2838
2839                         if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
2840                             op->dest.reg == cfi->drap_reg &&
2841                             cfi->drap_offset == -cfi->stack_size) {
2842
2843                                 /* drap: pop %drap */
2844                                 cfa->base = cfi->drap_reg;
2845                                 cfa->offset = 0;
2846                                 cfi->drap_offset = -1;
2847
2848                         } else if (cfi->stack_size == -regs[op->dest.reg].offset) {
2849
2850                                 /* pop %reg */
2851                                 restore_reg(cfi, op->dest.reg);
2852                         }
2853
2854                         cfi->stack_size -= 8;
2855                         if (cfa->base == CFI_SP)
2856                                 cfa->offset -= 8;
2857
2858                         break;
2859
2860                 case OP_SRC_REG_INDIRECT:
2861                         if (!cfi->drap && op->dest.reg == cfa->base &&
2862                             op->dest.reg == CFI_BP) {
2863
2864                                 /* mov disp(%rsp), %rbp */
2865                                 cfa->base = CFI_SP;
2866                                 cfa->offset = cfi->stack_size;
2867                         }
2868
2869                         if (cfi->drap && op->src.reg == CFI_BP &&
2870                             op->src.offset == cfi->drap_offset) {
2871
2872                                 /* drap: mov disp(%rbp), %drap */
2873                                 cfa->base = cfi->drap_reg;
2874                                 cfa->offset = 0;
2875                                 cfi->drap_offset = -1;
2876                         }
2877
2878                         if (cfi->drap && op->src.reg == CFI_BP &&
2879                             op->src.offset == regs[op->dest.reg].offset) {
2880
2881                                 /* drap: mov disp(%rbp), %reg */
2882                                 restore_reg(cfi, op->dest.reg);
2883
2884                         } else if (op->src.reg == cfa->base &&
2885                             op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
2886
2887                                 /* mov disp(%rbp), %reg */
2888                                 /* mov disp(%rsp), %reg */
2889                                 restore_reg(cfi, op->dest.reg);
2890
2891                         } else if (op->src.reg == CFI_SP &&
2892                                    op->src.offset == regs[op->dest.reg].offset + cfi->stack_size) {
2893
2894                                 /* mov disp(%rsp), %reg */
2895                                 restore_reg(cfi, op->dest.reg);
2896                         }
2897
2898                         break;
2899
2900                 default:
2901                         WARN_FUNC("unknown stack-related instruction",
2902                                   insn->sec, insn->offset);
2903                         return -1;
2904                 }
2905
2906                 break;
2907
2908         case OP_DEST_PUSH:
2909         case OP_DEST_PUSHF:
2910                 cfi->stack_size += 8;
2911                 if (cfa->base == CFI_SP)
2912                         cfa->offset += 8;
2913
2914                 if (op->src.type != OP_SRC_REG)
2915                         break;
2916
2917                 if (cfi->drap) {
2918                         if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
2919
2920                                 /* drap: push %drap */
2921                                 cfa->base = CFI_BP_INDIRECT;
2922                                 cfa->offset = -cfi->stack_size;
2923
2924                                 /* save drap so we know when to restore it */
2925                                 cfi->drap_offset = -cfi->stack_size;
2926
2927                         } else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {
2928
2929                                 /* drap: push %rbp */
2930                                 cfi->stack_size = 0;
2931
2932                         } else {
2933
2934                                 /* drap: push %reg */
2935                                 save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
2936                         }
2937
2938                 } else {
2939
2940                         /* push %reg */
2941                         save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
2942                 }
2943
2944                 /* detect when asm code uses rbp as a scratch register */
2945                 if (opts.stackval && insn->func && op->src.reg == CFI_BP &&
2946                     cfa->base != CFI_BP)
2947                         cfi->bp_scratch = true;
2948                 break;
2949
2950         case OP_DEST_REG_INDIRECT:
2951
2952                 if (cfi->drap) {
2953                         if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
2954
2955                                 /* drap: mov %drap, disp(%rbp) */
2956                                 cfa->base = CFI_BP_INDIRECT;
2957                                 cfa->offset = op->dest.offset;
2958
2959                                 /* save drap offset so we know when to restore it */
2960                                 cfi->drap_offset = op->dest.offset;
2961                         } else {
2962
2963                                 /* drap: mov reg, disp(%rbp) */
2964                                 save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
2965                         }
2966
2967                 } else if (op->dest.reg == cfa->base) {
2968
2969                         /* mov reg, disp(%rbp) */
2970                         /* mov reg, disp(%rsp) */
2971                         save_reg(cfi, op->src.reg, CFI_CFA,
2972                                  op->dest.offset - cfi->cfa.offset);
2973
2974                 } else if (op->dest.reg == CFI_SP) {
2975
2976                         /* mov reg, disp(%rsp) */
2977                         save_reg(cfi, op->src.reg, CFI_CFA,
2978                                  op->dest.offset - cfi->stack_size);
2979
2980                 } else if (op->src.reg == CFI_SP && op->dest.offset == 0) {
2981
2982                                 /* mov %rsp, (%reg); # set up a stack swizzle. */
2983                         cfi->vals[op->dest.reg].base = CFI_SP_INDIRECT;
2984                         cfi->vals[op->dest.reg].offset = cfa->offset;
2985                 }
2986
2987                 break;
2988
2989         case OP_DEST_MEM:
2990                 if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
2991                         WARN_FUNC("unknown stack-related memory operation",
2992                                   insn->sec, insn->offset);
2993                         return -1;
2994                 }
2995
2996                 /* pop mem */
2997                 cfi->stack_size -= 8;
2998                 if (cfa->base == CFI_SP)
2999                         cfa->offset -= 8;
3000
3001                 break;
3002
3003         default:
3004                 WARN_FUNC("unknown stack-related instruction",
3005                           insn->sec, insn->offset);
3006                 return -1;
3007         }
3008
3009         return 0;
3010 }
3011
3012 /*
3013  * The stack layouts of alternative instructions can sometimes diverge when
3014  * they have stack modifications.  That's fine as long as the potential stack
3015  * layouts don't conflict at any given potential instruction boundary.
3016  *
3017  * Flatten the CFIs of the different alternative code streams (both original
3018  * and replacement) into a single shared CFI array which can be used to detect
3019  * conflicts and nicely feed a linear array of ORC entries to the unwinder.
3020  */
3021 static int propagate_alt_cfi(struct objtool_file *file, struct instruction *insn)
3022 {
3023         struct cfi_state **alt_cfi;
3024         int group_off;
3025
3026         if (!insn->alt_group)
3027                 return 0;
3028
3029         if (!insn->cfi) {
3030                 WARN("CFI missing");
3031                 return -1;
3032         }
3033
3034         alt_cfi = insn->alt_group->cfi;
3035         group_off = insn->offset - insn->alt_group->first_insn->offset;
3036
3037         if (!alt_cfi[group_off]) {
3038                 alt_cfi[group_off] = insn->cfi;
3039         } else {
3040                 if (cficmp(alt_cfi[group_off], insn->cfi)) {
3041                         WARN_FUNC("stack layout conflict in alternatives",
3042                                   insn->sec, insn->offset);
3043                         return -1;
3044                 }
3045         }
3046
3047         return 0;
3048 }
3049
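/*
 * Apply an instruction's stack operations to the CFI state.  For instructions
 * inside alternatives, also mirror PUSHF/POPF into a small bit-stack so that
 * the uaccess (STAC/CLAC) state is restored correctly when the flags are
 * popped back.
 */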
3050 static int handle_insn_ops(struct instruction *insn,
3051                            struct instruction *next_insn,
3052                            struct insn_state *state)
3053 {
3054         struct stack_op *op;
3055
3056         list_for_each_entry(op, &insn->stack_ops, list) {
3057
3058                 if (update_cfi_state(insn, next_insn, &state->cfi, op))
3059                         return 1;
3060
3061                 if (!insn->alt_group)
3062                         continue;
3063
3064                 if (op->dest.type == OP_DEST_PUSHF) {
3065                         if (!state->uaccess_stack) {
3066                                 state->uaccess_stack = 1;
3067                         } else if (state->uaccess_stack >> 31) {
3068                                 WARN_FUNC("PUSHF stack exhausted",
3069                                           insn->sec, insn->offset);
3070                                 return 1;
3071                         }
3072                         state->uaccess_stack <<= 1;
3073                         state->uaccess_stack  |= state->uaccess;
3074                 }
3075
3076                 if (op->src.type == OP_SRC_POPF) {
3077                         if (state->uaccess_stack) {
3078                                 state->uaccess = state->uaccess_stack & 1;
3079                                 state->uaccess_stack >>= 1;
3080                                 if (state->uaccess_stack == 1)
3081                                         state->uaccess_stack = 0;
3082                         }
3083                 }
3084         }
3085
3086         return 0;
3087 }
3088
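/*
 * Compare the CFI state recorded for this instruction on a previous visit
 * with the state reached along the current path, warning on the first
 * mismatch (CFA, callee-saved registers, unwind type or DRAP state).
 */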
3089 static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
3090 {
3091         struct cfi_state *cfi1 = insn->cfi;
3092         int i;
3093
3094         if (!cfi1) {
3095                 WARN("CFI missing");
3096                 return false;
3097         }
3098
3099         if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {
3100
3101                 WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
3102                           insn->sec, insn->offset,
3103                           cfi1->cfa.base, cfi1->cfa.offset,
3104                           cfi2->cfa.base, cfi2->cfa.offset);
3105
3106         } else if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
3107                 for (i = 0; i < CFI_NUM_REGS; i++) {
3108                         if (!memcmp(&cfi1->regs[i], &cfi2->regs[i],
3109                                     sizeof(struct cfi_reg)))
3110                                 continue;
3111
3112                         WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
3113                                   insn->sec, insn->offset,
3114                                   i, cfi1->regs[i].base, cfi1->regs[i].offset,
3115                                   i, cfi2->regs[i].base, cfi2->regs[i].offset);
3116                         break;
3117                 }
3118
3119         } else if (cfi1->type != cfi2->type) {
3120
3121                 WARN_FUNC("stack state mismatch: type1=%d type2=%d",
3122                           insn->sec, insn->offset, cfi1->type, cfi2->type);
3123
3124         } else if (cfi1->drap != cfi2->drap ||
3125                    (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
3126                    (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {
3127
3128                 WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
3129                           insn->sec, insn->offset,
3130                           cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
3131                           cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);
3132
3133         } else
3134                 return true;
3135
3136         return false;
3137 }
3138
3139 static inline bool func_uaccess_safe(struct symbol *func)
3140 {
3141         if (func)
3142                 return func->uaccess_safe;
3143
3144         return false;
3145 }
3146
3147 static inline const char *call_dest_name(struct instruction *insn)
3148 {
3149         static char pvname[19];
3150         struct reloc *rel;
3151         int idx;
3152
3153         if (insn->call_dest)
3154                 return insn->call_dest->name;
3155
3156         rel = insn_reloc(NULL, insn);
3157         if (rel && !strcmp(rel->sym->name, "pv_ops")) {
3158                 idx = (rel->addend / sizeof(void *));
3159                 snprintf(pvname, sizeof(pvname), "pv_ops[%d]", idx);
3160                 return pvname;
3161         }
3162
3163         return "{dynamic}";
3164 }
3165
3166 static bool pv_call_dest(struct objtool_file *file, struct instruction *insn)
3167 {
3168         struct symbol *target;
3169         struct reloc *rel;
3170         int idx;
3171
3172         rel = insn_reloc(file, insn);
3173         if (!rel || strcmp(rel->sym->name, "pv_ops"))
3174                 return false;
3175
3176         idx = (arch_dest_reloc_offset(rel->addend) / sizeof(void *));
3177
3178         if (file->pv_ops[idx].clean)
3179                 return true;
3180
3181         file->pv_ops[idx].clean = true;
3182
3183         list_for_each_entry(target, &file->pv_ops[idx].targets, pv_target) {
3184                 if (!target->sec->noinstr) {
3185                         WARN("pv_ops[%d]: %s", idx, target->name);
3186                         file->pv_ops[idx].clean = false;
3187                 }
3188         }
3189
3190         return file->pv_ops[idx].clean;
3191 }
3192
3193 static inline bool noinstr_call_dest(struct objtool_file *file,
3194                                      struct instruction *insn,
3195                                      struct symbol *func)
3196 {
3197         /*
3198          * We can't deal with indirect function calls at present;
3199          * assume they're instrumented.
3200          */
3201         if (!func) {
3202                 if (file->pv_ops)
3203                         return pv_call_dest(file, insn);
3204
3205                 return false;
3206         }
3207
3208         /*
3209          * If the symbol is from a noinstr section, we're good.
3210          */
3211         if (func->sec->noinstr)
3212                 return true;
3213
3214         /*
3215          * The __ubsan_handle_*() calls are like WARN(), they only happen when
3216          * something 'BAD' happened. At the risk of taking the machine down,
3217          * let them proceed to get the message out.
3218          */
3219         if (!strncmp(func->name, "__ubsan_handle_", 15))
3220                 return true;
3221
3222         return false;
3223 }
3224
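/*
 * A call is only allowed here if it doesn't leave .noinstr.text while
 * instrumentation is disallowed, isn't made with UACCESS (AC) enabled unless
 * the callee is marked uaccess_safe, and isn't made with DF set.
 */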
3225 static int validate_call(struct objtool_file *file,
3226                          struct instruction *insn,
3227                          struct insn_state *state)
3228 {
3229         if (state->noinstr && state->instr <= 0 &&
3230             !noinstr_call_dest(file, insn, insn->call_dest)) {
3231                 WARN_FUNC("call to %s() leaves .noinstr.text section",
3232                                 insn->sec, insn->offset, call_dest_name(insn));
3233                 return 1;
3234         }
3235
3236         if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
3237                 WARN_FUNC("call to %s() with UACCESS enabled",
3238                                 insn->sec, insn->offset, call_dest_name(insn));
3239                 return 1;
3240         }
3241
3242         if (state->df) {
3243                 WARN_FUNC("call to %s() with DF set",
3244                                 insn->sec, insn->offset, call_dest_name(insn));
3245                 return 1;
3246         }
3247
3248         return 0;
3249 }
3250
3251 static int validate_sibling_call(struct objtool_file *file,
3252                                  struct instruction *insn,
3253                                  struct insn_state *state)
3254 {
3255         if (has_modified_stack_frame(insn, state)) {
3256                 WARN_FUNC("sibling call from callable instruction with modified stack frame",
3257                                 insn->sec, insn->offset);
3258                 return 1;
3259         }
3260
3261         return validate_call(file, insn, state);
3262 }
3263
3264 static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
3265 {
3266         if (state->noinstr && state->instr > 0) {
3267                 WARN_FUNC("return with instrumentation enabled",
3268                           insn->sec, insn->offset);
3269                 return 1;
3270         }
3271
3272         if (state->uaccess && !func_uaccess_safe(func)) {
3273                 WARN_FUNC("return with UACCESS enabled",
3274                           insn->sec, insn->offset);
3275                 return 1;
3276         }
3277
3278         if (!state->uaccess && func_uaccess_safe(func)) {
3279                 WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
3280                           insn->sec, insn->offset);
3281                 return 1;
3282         }
3283
3284         if (state->df) {
3285                 WARN_FUNC("return with DF set",
3286                           insn->sec, insn->offset);
3287                 return 1;
3288         }
3289
3290         if (func && has_modified_stack_frame(insn, state)) {
3291                 WARN_FUNC("return with modified stack frame",
3292                           insn->sec, insn->offset);
3293                 return 1;
3294         }
3295
3296         if (state->cfi.bp_scratch) {
3297                 WARN_FUNC("BP used as a scratch register",
3298                           insn->sec, insn->offset);
3299                 return 1;
3300         }
3301
3302         return 0;
3303 }
3304
3305 static struct instruction *next_insn_to_validate(struct objtool_file *file,
3306                                                  struct instruction *insn)
3307 {
3308         struct alt_group *alt_group = insn->alt_group;
3309
3310         /*
3311          * Simulate the fact that alternatives are patched in-place.  When the
3312          * end of a replacement alt_group is reached, redirect objtool flow to
3313          * the end of the original alt_group.
3314          */
3315         if (alt_group && insn == alt_group->last_insn && alt_group->orig_group)
3316                 return next_insn_same_sec(file, alt_group->orig_group->last_insn);
3317
3318         return next_insn_same_sec(file, insn);
3319 }
3320
3321 /*
3322  * Follow the branch starting at the given instruction, and recursively follow
3323  * any other branches (jumps).  Meanwhile, track the frame pointer state at
3324  * each instruction and validate all the rules described in
3325  * tools/objtool/Documentation/objtool.txt.
3326  */
3327 static int validate_branch(struct objtool_file *file, struct symbol *func,
3328                            struct instruction *insn, struct insn_state state)
3329 {
3330         struct alternative *alt;
3331         struct instruction *next_insn, *prev_insn = NULL;
3332         struct section *sec;
3333         u8 visited;
3334         int ret;
3335
3336         sec = insn->sec;
3337
3338         while (1) {
3339                 next_insn = next_insn_to_validate(file, insn);
3340
3341                 if (func && insn->func && func != insn->func->pfunc) {
3342                         /* Ignore KCFI type preambles, which always fall through */
3343                         if (!strncmp(func->name, "__cfi_", 6))
3344                                 return 0;
3345
3346                         WARN("%s() falls through to next function %s()",
3347                              func->name, insn->func->name);
3348                         return 1;
3349                 }
3350
3351                 if (func && insn->ignore) {
3352                         WARN_FUNC("BUG: why am I validating an ignored function?",
3353                                   sec, insn->offset);
3354                         return 1;
3355                 }
3356
3357                 visited = VISITED_BRANCH << state.uaccess;
3358                 if (insn->visited & VISITED_BRANCH_MASK) {
3359                         if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
3360                                 return 1;
3361
3362                         if (insn->visited & visited)
3363                                 return 0;
3364                 } else {
3365                         nr_insns_visited++;
3366                 }
3367
3368                 if (state.noinstr)
3369                         state.instr += insn->instr;
3370
3371                 if (insn->hint) {
3372                         if (insn->restore) {
3373                                 struct instruction *save_insn, *i;
3374
3375                                 i = insn;
3376                                 save_insn = NULL;
3377
3378                                 sym_for_each_insn_continue_reverse(file, func, i) {
3379                                         if (i->save) {
3380                                                 save_insn = i;
3381                                                 break;
3382                                         }
3383                                 }
3384
3385                                 if (!save_insn) {
3386                                         WARN_FUNC("no corresponding CFI save for CFI restore",
3387                                                   sec, insn->offset);
3388                                         return 1;
3389                                 }
3390
3391                                 if (!save_insn->visited) {
3392                                         WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
3393                                                   sec, insn->offset);
3394                                         return 1;
3395                                 }
3396
3397                                 insn->cfi = save_insn->cfi;
3398                                 nr_cfi_reused++;
3399                         }
3400
3401                         state.cfi = *insn->cfi;
3402                 } else {
3403                         /* XXX track if we actually changed state.cfi */
3404
3405                         if (prev_insn && !cficmp(prev_insn->cfi, &state.cfi)) {
3406                                 insn->cfi = prev_insn->cfi;
3407                                 nr_cfi_reused++;
3408                         } else {
3409                                 insn->cfi = cfi_hash_find_or_add(&state.cfi);
3410                         }
3411                 }
3412
3413                 insn->visited |= visited;
3414
3415                 if (propagate_alt_cfi(file, insn))
3416                         return 1;
3417
3418                 if (!insn->ignore_alts && !list_empty(&insn->alts)) {
3419                         bool skip_orig = false;
3420
3421                         list_for_each_entry(alt, &insn->alts, list) {
3422                                 if (alt->skip_orig)
3423                                         skip_orig = true;
3424
3425                                 ret = validate_branch(file, func, alt->insn, state);
3426                                 if (ret) {
3427                                         if (opts.backtrace)
3428                                                 BT_FUNC("(alt)", insn);
3429                                         return ret;
3430                                 }
3431                         }
3432
3433                         if (skip_orig)
3434                                 return 0;
3435                 }
3436
3437                 if (handle_insn_ops(insn, next_insn, &state))
3438                         return 1;
3439
3440                 switch (insn->type) {
3441
3442                 case INSN_RETURN:
3443                         return validate_return(func, insn, &state);
3444
3445                 case INSN_CALL:
3446                 case INSN_CALL_DYNAMIC:
3447                         ret = validate_call(file, insn, &state);
3448                         if (ret)
3449                                 return ret;
3450
3451                         if (opts.stackval && func && !is_fentry_call(insn) &&
3452                             !has_valid_stack_frame(&state)) {
3453                                 WARN_FUNC("call without frame pointer save/setup",
3454                                           sec, insn->offset);
3455                                 return 1;
3456                         }
3457
3458                         if (insn->dead_end)
3459                                 return 0;
3460
3461                         break;
3462
3463                 case INSN_JUMP_CONDITIONAL:
3464                 case INSN_JUMP_UNCONDITIONAL:
3465                         if (is_sibling_call(insn)) {
3466                                 ret = validate_sibling_call(file, insn, &state);
3467                                 if (ret)
3468                                         return ret;
3469
3470                         } else if (insn->jump_dest) {
3471                                 ret = validate_branch(file, func,
3472                                                       insn->jump_dest, state);
3473                                 if (ret) {
3474                                         if (opts.backtrace)
3475                                                 BT_FUNC("(branch)", insn);
3476                                         return ret;
3477                                 }
3478                         }
3479
3480                         if (insn->type == INSN_JUMP_UNCONDITIONAL)
3481                                 return 0;
3482
3483                         break;
3484
3485                 case INSN_JUMP_DYNAMIC:
3486                 case INSN_JUMP_DYNAMIC_CONDITIONAL:
3487                         if (is_sibling_call(insn)) {
3488                                 ret = validate_sibling_call(file, insn, &state);
3489                                 if (ret)
3490                                         return ret;
3491                         }
3492
3493                         if (insn->type == INSN_JUMP_DYNAMIC)
3494                                 return 0;
3495
3496                         break;
3497
3498                 case INSN_CONTEXT_SWITCH:
3499                         if (func && (!next_insn || !next_insn->hint)) {
3500                                 WARN_FUNC("unsupported instruction in callable function",
3501                                           sec, insn->offset);
3502                                 return 1;
3503                         }
3504                         return 0;
3505
3506                 case INSN_STAC:
3507                         if (state.uaccess) {
3508                                 WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
3509                                 return 1;
3510                         }
3511
3512                         state.uaccess = true;
3513                         break;
3514
3515                 case INSN_CLAC:
3516                         if (!state.uaccess && func) {
3517                                 WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
3518                                 return 1;
3519                         }
3520
3521                         if (func_uaccess_safe(func) && !state.uaccess_stack) {
3522                                 WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
3523                                 return 1;
3524                         }
3525
3526                         state.uaccess = false;
3527                         break;
3528
3529                 case INSN_STD:
3530                         if (state.df) {
3531                                 WARN_FUNC("recursive STD", sec, insn->offset);
3532                                 return 1;
3533                         }
3534
3535                         state.df = true;
3536                         break;
3537
3538                 case INSN_CLD:
3539                         if (!state.df && func) {
3540                                 WARN_FUNC("redundant CLD", sec, insn->offset);
3541                                 return 1;
3542                         }
3543
3544                         state.df = false;
3545                         break;
3546
3547                 default:
3548                         break;
3549                 }
3550
3551                 if (insn->dead_end)
3552                         return 0;
3553
3554                 if (!next_insn) {
3555                         if (state.cfi.cfa.base == CFI_UNDEFINED)
3556                                 return 0;
3557                         WARN("%s: unexpected end of section", sec->name);
3558                         return 1;
3559                 }
3560
3561                 prev_insn = insn;
3562                 insn = next_insn;
3563         }
3564
3565         return 0;
3566 }
3567
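/*
 * Start a branch walk at every unwind hint that the regular per-function
 * validation didn't already reach (typically entry or assembly code without
 * an STT_FUNC symbol).
 */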
3568 static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
3569 {
3570         struct instruction *insn;
3571         struct insn_state state;
3572         int ret, warnings = 0;
3573
3574         if (!file->hints)
3575                 return 0;
3576
3577         init_insn_state(file, &state, sec);
3578
3579         if (sec) {
3580                 insn = find_insn(file, sec, 0);
3581                 if (!insn)
3582                         return 0;
3583         } else {
3584                 insn = list_first_entry(&file->insn_list, typeof(*insn), list);
3585         }
3586
3587         while (&insn->list != &file->insn_list && (!sec || insn->sec == sec)) {
3588                 if (insn->hint && !insn->visited && !insn->ignore) {
3589                         ret = validate_branch(file, insn->func, insn, state);
3590                         if (ret && opts.backtrace)
3591                                 BT_FUNC("<=== (hint)", insn);
3592                         warnings += ret;
3593                 }
3594
3595                 insn = list_next_entry(insn, list);
3596         }
3597
3598         return warnings;
3599 }
3600
3601 /*
3602  * Validate rethunk entry constraint: must untrain RET before the first RET.
3603  *
3604  * Follow every branch (intra-function) and ensure ANNOTATE_UNRET_END comes
3605  * before an actual RET instruction.
3606  */
3607 static int validate_entry(struct objtool_file *file, struct instruction *insn)
3608 {
3609         struct instruction *next, *dest;
3610         int ret, warnings = 0;
3611
3612         for (;;) {
3613                 next = next_insn_to_validate(file, insn);
3614
3615                 if (insn->visited & VISITED_ENTRY)
3616                         return 0;
3617
3618                 insn->visited |= VISITED_ENTRY;
3619
3620                 if (!insn->ignore_alts && !list_empty(&insn->alts)) {
3621                         struct alternative *alt;
3622                         bool skip_orig = false;
3623
3624                         list_for_each_entry(alt, &insn->alts, list) {
3625                                 if (alt->skip_orig)
3626                                         skip_orig = true;
3627
3628                                 ret = validate_entry(file, alt->insn);
3629                                 if (ret) {
3630                                         if (opts.backtrace)
3631                                                 BT_FUNC("(alt)", insn);
3632                                         return ret;
3633                                 }
3634                         }
3635
3636                         if (skip_orig)
3637                                 return 0;
3638                 }
3639
3640                 switch (insn->type) {
3641
3642                 case INSN_CALL_DYNAMIC:
3643                 case INSN_JUMP_DYNAMIC:
3644                 case INSN_JUMP_DYNAMIC_CONDITIONAL:
3645                         WARN_FUNC("early indirect call", insn->sec, insn->offset);
3646                         return 1;
3647
3648                 case INSN_JUMP_UNCONDITIONAL:
3649                 case INSN_JUMP_CONDITIONAL:
3650                         if (!is_sibling_call(insn)) {
3651                                 if (!insn->jump_dest) {
3652                                         WARN_FUNC("unresolved jump target after linking?!?",
3653                                                   insn->sec, insn->offset);
3654                                         return -1;
3655                                 }
3656                                 ret = validate_entry(file, insn->jump_dest);
3657                                 if (ret) {
3658                                         if (opts.backtrace) {
3659                                                 BT_FUNC("(branch%s)", insn,
3660                                                         insn->type == INSN_JUMP_CONDITIONAL ? "-cond" : "");
3661                                         }
3662                                         return ret;
3663                                 }
3664
3665                                 if (insn->type == INSN_JUMP_UNCONDITIONAL)
3666                                         return 0;
3667
3668                                 break;
3669                         }
3670
3671                         /* fallthrough */
3672                 case INSN_CALL:
3673                         dest = find_insn(file, insn->call_dest->sec,
3674                                          insn->call_dest->offset);
3675                         if (!dest) {
3676                                 WARN("Unresolved function after linking!?: %s",
3677                                      insn->call_dest->name);
3678                                 return -1;
3679                         }
3680
3681                         ret = validate_entry(file, dest);
3682                         if (ret) {
3683                                 if (opts.backtrace)
3684                                         BT_FUNC("(call)", insn);
3685                                 return ret;
3686                         }
3687                         /*
3688                          * If a call returns without error, it must have seen UNTRAIN_RET.
3689                          * Therefore any non-error return is a success.
3690                          */
3691                         return 0;
3692
3693                 case INSN_RETURN:
3694                         WARN_FUNC("RET before UNTRAIN", insn->sec, insn->offset);
3695                         return 1;
3696
3697                 case INSN_NOP:
3698                         if (insn->retpoline_safe)
3699                                 return 0;
3700                         break;
3701
3702                 default:
3703                         break;
3704                 }
3705
3706                 if (!next) {
3707                         WARN_FUNC("unexpected end of section", insn->sec, insn->offset);
3708                         return -1;
3709                 }
3710                 insn = next;
3711         }
3712
3713         return warnings;
3714 }
3715
3716 /*
3717  * Validate that all branches starting at 'insn->entry' encounter UNRET_END
3718  * before RET.
3719  */
3720 static int validate_unret(struct objtool_file *file)
3721 {
3722         struct instruction *insn;
3723         int ret, warnings = 0;
3724
3725         for_each_insn(file, insn) {
3726                 if (!insn->entry)
3727                         continue;
3728
3729                 ret = validate_entry(file, insn);
3730                 if (ret < 0) {
3731                         WARN_FUNC("Failed UNRET validation", insn->sec, insn->offset);
3732                         return ret;
3733                 }
3734                 warnings += ret;
3735         }
3736
3737         return warnings;
3738 }
3739
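/*
 * With retpolines (and, if enabled, rethunks), the compiler should have
 * converted all indirect branches and bare returns; anything left over must
 * have been explicitly annotated retpoline-safe.
 */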
3740 static int validate_retpoline(struct objtool_file *file)
3741 {
3742         struct instruction *insn;
3743         int warnings = 0;
3744
3745         for_each_insn(file, insn) {
3746                 if (insn->type != INSN_JUMP_DYNAMIC &&
3747                     insn->type != INSN_CALL_DYNAMIC &&
3748                     insn->type != INSN_RETURN)
3749                         continue;
3750
3751                 if (insn->retpoline_safe)
3752                         continue;
3753
3754                 /*
3755                  * .init.text code is run before userspace and thus doesn't
3756                  * strictly need retpolines, except for modules, which are
3757                  * loaded late and very much do need retpolines in their
3758                  * .init.text.
3759                  */
3760                 if (!strcmp(insn->sec->name, ".init.text") && !opts.module)
3761                         continue;
3762
3763                 if (insn->type == INSN_RETURN) {
3764                         if (opts.rethunk) {
3765                                 WARN_FUNC("'naked' return found in RETHUNK build",
3766                                           insn->sec, insn->offset);
3767                         } else
3768                                 continue;
3769                 } else {
3770                         WARN_FUNC("indirect %s found in RETPOLINE build",
3771                                   insn->sec, insn->offset,
3772                                   insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
3773                 }
3774
3775                 warnings++;
3776         }
3777
3778         return warnings;
3779 }
3780
3781 static bool is_kasan_insn(struct instruction *insn)
3782 {
3783         return (insn->type == INSN_CALL &&
3784                 !strcmp(insn->call_dest->name, "__asan_handle_no_return"));
3785 }
3786
3787 static bool is_ubsan_insn(struct instruction *insn)
3788 {
3789         return (insn->type == INSN_CALL &&
3790                 !strcmp(insn->call_dest->name,
3791                         "__ubsan_handle_builtin_unreachable"));
3792 }
3793
3794 static bool ignore_unreachable_insn(struct objtool_file *file, struct instruction *insn)
3795 {
3796         int i;
3797         struct instruction *prev_insn;
3798
3799         if (insn->ignore || insn->type == INSN_NOP || insn->type == INSN_TRAP)
3800                 return true;
3801
3802         /*
3803          * Ignore alternative replacement instructions.  This can happen
3804          * when a whitelisted function uses one of the ALTERNATIVE macros.
3805          */
3806         if (!strcmp(insn->sec->name, ".altinstr_replacement") ||
3807             !strcmp(insn->sec->name, ".altinstr_aux"))
3808                 return true;
3809
3810         /*
3811          * Whole archive runs might encounter dead code from weak symbols.
3812          * This is where the linker has dropped the weak symbol in
3813          * favour of a regular symbol, but left the code in place.
3814          *
3815          * In this case we'll find a piece of code (a whole function) that is
3816          * not covered by a non-section symbol.  Ignore it.
3817          */
3818         if (opts.link && !insn->func) {
3819                 int size = find_symbol_hole_containing(insn->sec, insn->offset);
3820                 unsigned long end = insn->offset + size;
3821
3822                 if (!size) /* not a hole */
3823                         return false;
3824
3825                 if (size < 0) /* hole until the end */
3826                         return true;
3827
3828                 sec_for_each_insn_continue(file, insn) {
3829                         /*
3830                          * If we reach a visited instruction at or before the
3831                          * end of the hole, ignore the unreachable.
3832                          */
3833                         if (insn->visited)
3834                                 return true;
3835
3836                         if (insn->offset >= end)
3837                                 break;
3838
3839                         /*
3840                          * If this hole jumps to a .cold function, ignore that function too.
3841                          */
3842                         if (insn->jump_dest && insn->jump_dest->func &&
3843                             strstr(insn->jump_dest->func->name, ".cold")) {
3844                                 struct instruction *dest = insn->jump_dest;
3845                                 func_for_each_insn(file, dest->func, dest)
3846                                         dest->ignore = true;
3847                         }
3848                 }
3849
3850                 return false;
3851         }
3852
3853         if (!insn->func)
3854                 return false;
3855
3856         if (insn->func->static_call_tramp)
3857                 return true;
3858
3859         /*
3860          * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
3861          * __builtin_unreachable().  The BUG() macro has an unreachable() after
3862          * the UD2, which causes GCC's undefined trap logic to emit another UD2
3863          * (or occasionally a JMP to UD2).
3864          *
3865          * It may also insert a UD2 after calling a __noreturn function.
3866          */
3867         prev_insn = list_prev_entry(insn, list);
3868         if ((prev_insn->dead_end || dead_end_function(file, prev_insn->call_dest)) &&
3869             (insn->type == INSN_BUG ||
3870              (insn->type == INSN_JUMP_UNCONDITIONAL &&
3871               insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
3872                 return true;
3873
3874         /*
3875          * Check if this (or a subsequent) instruction is related to
3876          * CONFIG_UBSAN or CONFIG_KASAN.
3877          *
3878          * End the search at 5 instructions to avoid going into the weeds.
3879          */
3880         for (i = 0; i < 5; i++) {
3881
3882                 if (is_kasan_insn(insn) || is_ubsan_insn(insn))
3883                         return true;
3884
3885                 if (insn->type == INSN_JUMP_UNCONDITIONAL) {
3886                         if (insn->jump_dest &&
3887                             insn->jump_dest->func == insn->func) {
3888                                 insn = insn->jump_dest;
3889                                 continue;
3890                         }
3891
3892                         break;
3893                 }
3894
3895                 if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
3896                         break;
3897
3898                 insn = list_next_entry(insn, list);
3899         }
3900
3901         return false;
3902 }
3903
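/*
 * Validate one function symbol: require an ELF size annotation, skip aliases
 * and .cold subfunctions (those are reached via their parent), and walk the
 * function starting at its first instruction.
 */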
3904 static int validate_symbol(struct objtool_file *file, struct section *sec,
3905                            struct symbol *sym, struct insn_state *state)
3906 {
3907         struct instruction *insn;
3908         int ret;
3909
3910         if (!sym->len) {
3911                 WARN("%s() is missing an ELF size annotation", sym->name);
3912                 return 1;
3913         }
3914
3915         if (sym->pfunc != sym || sym->alias != sym)
3916                 return 0;
3917
3918         insn = find_insn(file, sec, sym->offset);
3919         if (!insn || insn->ignore || insn->visited)
3920                 return 0;
3921
3922         state->uaccess = sym->uaccess_safe;
3923
3924         ret = validate_branch(file, insn->func, insn, *state);
3925         if (ret && opts.backtrace)
3926                 BT_FUNC("<=== (sym)", insn);
3927         return ret;
3928 }
3929
3930 static int validate_section(struct objtool_file *file, struct section *sec)
3931 {
3932         struct insn_state state;
3933         struct symbol *func;
3934         int warnings = 0;
3935
3936         list_for_each_entry(func, &sec->symbol_list, list) {
3937                 if (func->type != STT_FUNC)
3938                         continue;
3939
3940                 init_insn_state(file, &state, sec);
3941                 set_func_state(&state.cfi);
3942
3943                 warnings += validate_symbol(file, sec, func, &state);
3944         }
3945
3946         return warnings;
3947 }
3948
3949 static int validate_noinstr_sections(struct objtool_file *file)
3950 {
3951         struct section *sec;
3952         int warnings = 0;
3953
3954         sec = find_section_by_name(file->elf, ".noinstr.text");
3955         if (sec) {
3956                 warnings += validate_section(file, sec);
3957                 warnings += validate_unwind_hints(file, sec);
3958         }
3959
3960         sec = find_section_by_name(file->elf, ".entry.text");
3961         if (sec) {
3962                 warnings += validate_section(file, sec);
3963                 warnings += validate_unwind_hints(file, sec);
3964         }
3965
3966         return warnings;
3967 }
3968
3969 static int validate_functions(struct objtool_file *file)
3970 {
3971         struct section *sec;
3972         int warnings = 0;
3973
3974         for_each_sec(file, sec) {
3975                 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
3976                         continue;
3977
3978                 warnings += validate_section(file, sec);
3979         }
3980
3981         return warnings;
3982 }
3983
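/*
 * An ENDBR which is the target of at least one reference is "used": remove it
 * from the list of candidates that would otherwise be sealed (turned into a
 * NOP) later.
 */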
3984 static void mark_endbr_used(struct instruction *insn)
3985 {
3986         if (!list_empty(&insn->call_node))
3987                 list_del_init(&insn->call_node);
3988 }
3989
3990 static int validate_ibt_insn(struct objtool_file *file, struct instruction *insn)
3991 {
3992         struct instruction *dest;
3993         struct reloc *reloc;
3994         unsigned long off;
3995         int warnings = 0;
3996
3997         /*
3998          * Looking for function pointer load relocations.  Ignore
3999          * direct/indirect branches:
4000          */
4001         switch (insn->type) {
4002         case INSN_CALL:
4003         case INSN_CALL_DYNAMIC:
4004         case INSN_JUMP_CONDITIONAL:
4005         case INSN_JUMP_UNCONDITIONAL:
4006         case INSN_JUMP_DYNAMIC:
4007         case INSN_JUMP_DYNAMIC_CONDITIONAL:
4008         case INSN_RETURN:
4009         case INSN_NOP:
4010                 return 0;
4011         default:
4012                 break;
4013         }
4014
4015         for (reloc = insn_reloc(file, insn);
4016              reloc;
4017              reloc = find_reloc_by_dest_range(file->elf, insn->sec,
4018                                               reloc->offset + 1,
4019                                               (insn->offset + insn->len) - (reloc->offset + 1))) {
4020
4021                 /*
4022                  * static_call_update() references the trampoline, which
4023                  * doesn't have (or need) ENDBR.  Skip warning in that case.
4024                  */
4025                 if (reloc->sym->static_call_tramp)
4026                         continue;
4027
4028                 off = reloc->sym->offset;
4029                 if (reloc->type == R_X86_64_PC32 || reloc->type == R_X86_64_PLT32)
4030                         off += arch_dest_reloc_offset(reloc->addend);
4031                 else
4032                         off += reloc->addend;
4033
4034                 dest = find_insn(file, reloc->sym->sec, off);
4035                 if (!dest)
4036                         continue;
4037
4038                 if (dest->type == INSN_ENDBR) {
4039                         mark_endbr_used(dest);
4040                         continue;
4041                 }
4042
4043                 if (dest->func && dest->func == insn->func) {
4044                         /*
4045                          * Anything from->to self is either _THIS_IP_ or
4046                          * IRET-to-self.
4047                          *
4048                          * There is no sane way to annotate _THIS_IP_ since the
4049                          * compiler treats the relocation as a constant and is
4050                          * happy to fold in offsets, skewing any annotation we
4051                          * do, leading to vast amounts of false-positives.
4052                          *
4053                          * There's also compiler generated _THIS_IP_ through
4054                          * KCOV and such which we have no hope of annotating.
4055                          *
4056                          * As such, blanket accept self-references without
4057                          * issue.
4058                          */
4059                         continue;
4060                 }
4061
4062                 if (dest->noendbr)
4063                         continue;
4064
4065                 WARN_FUNC("relocation to !ENDBR: %s",
4066                           insn->sec, insn->offset,
4067                           offstr(dest->sec, dest->offset));
4068
4069                 warnings++;
4070         }
4071
4072         return warnings;
4073 }
4074
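/*
 * A data relocation that points at an instruction is treated as a potential
 * indirect-branch target and must therefore land on an ENDBR (or a location
 * explicitly annotated as not needing one).
 */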
4075 static int validate_ibt_data_reloc(struct objtool_file *file,
4076                                    struct reloc *reloc)
4077 {
4078         struct instruction *dest;
4079
4080         dest = find_insn(file, reloc->sym->sec,
4081                          reloc->sym->offset + reloc->addend);
4082         if (!dest)
4083                 return 0;
4084
4085         if (dest->type == INSN_ENDBR) {
4086                 mark_endbr_used(dest);
4087                 return 0;
4088         }
4089
4090         if (dest->noendbr)
4091                 return 0;
4092
4093         WARN_FUNC("data relocation to !ENDBR: %s",
4094                   reloc->sec->base, reloc->offset,
4095                   offstr(dest->sec, dest->offset));
4096
4097         return 1;
4098 }
4099
4100 /*
4101  * Validate IBT rules and remove used ENDBR instructions from the seal list.
4102  * Unused ENDBR instructions will be annotated for sealing (i.e., replaced with
4103  * NOPs) later, in create_ibt_endbr_seal_sections().
4104  */
4105 static int validate_ibt(struct objtool_file *file)
4106 {
4107         struct section *sec;
4108         struct reloc *reloc;
4109         struct instruction *insn;
4110         int warnings = 0;
4111
4112         for_each_insn(file, insn)
4113                 warnings += validate_ibt_insn(file, insn);
4114
4115         for_each_sec(file, sec) {
4116
4117                 /* Already done by validate_ibt_insn() */
4118                 if (sec->sh.sh_flags & SHF_EXECINSTR)
4119                         continue;
4120
4121                 if (!sec->reloc)
4122                         continue;
4123
4124                 /*
4125                  * These sections can reference text addresses, but not with
4126                  * the intent to indirect branch to them.
4127                  */
4128                 if ((!strncmp(sec->name, ".discard", 8) &&
4129                      strcmp(sec->name, ".discard.ibt_endbr_noseal"))    ||
4130                     !strncmp(sec->name, ".debug", 6)                    ||
4131                     !strcmp(sec->name, ".altinstructions")              ||
4132                     !strcmp(sec->name, ".ibt_endbr_seal")               ||
4133                     !strcmp(sec->name, ".orc_unwind_ip")                ||
4134                     !strcmp(sec->name, ".parainstructions")             ||
4135                     !strcmp(sec->name, ".retpoline_sites")              ||
4136                     !strcmp(sec->name, ".smp_locks")                    ||
4137                     !strcmp(sec->name, ".static_call_sites")            ||
4138                     !strcmp(sec->name, "_error_injection_whitelist")    ||
4139                     !strcmp(sec->name, "_kprobe_blacklist")             ||
4140                     !strcmp(sec->name, "__bug_table")                   ||
4141                     !strcmp(sec->name, "__ex_table")                    ||
4142                     !strcmp(sec->name, "__jump_table")                  ||
4143                     !strcmp(sec->name, "__mcount_loc")                  ||
4144                     !strcmp(sec->name, ".kcfi_traps")                   ||
4145                     strstr(sec->name, "__patchable_function_entries"))
4146                         continue;
4147
4148                 list_for_each_entry(reloc, &sec->reloc->reloc_list, list)
4149                         warnings += validate_ibt_data_reloc(file, reloc);
4150         }
4151
4152         return warnings;
4153 }
4154
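/*
 * Straight Line Speculation: every non-retpoline-safe RET and indirect jump
 * must be followed by an INT3 to stop speculation past it.
 */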
4155 static int validate_sls(struct objtool_file *file)
4156 {
4157         struct instruction *insn, *next_insn;
4158         int warnings = 0;
4159
4160         for_each_insn(file, insn) {
4161                 next_insn = next_insn_same_sec(file, insn);
4162
4163                 if (insn->retpoline_safe)
4164                         continue;
4165
4166                 switch (insn->type) {
4167                 case INSN_RETURN:
4168                         if (!next_insn || next_insn->type != INSN_TRAP) {
4169                                 WARN_FUNC("missing int3 after ret",
4170                                           insn->sec, insn->offset);
4171                                 warnings++;
4172                         }
4173
4174                         break;
4175                 case INSN_JUMP_DYNAMIC:
4176                         if (!next_insn || next_insn->type != INSN_TRAP) {
4177                                 WARN_FUNC("missing int3 after indirect jump",
4178                                           insn->sec, insn->offset);
4179                                 warnings++;
4180                         }
4181                         break;
4182                 default:
4183                         break;
4184                 }
4185         }
4186
4187         return warnings;
4188 }
4189
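/*
 * Anything the branch walks never visited, and which can't be explained away
 * by ignore_unreachable_insn(), is reported as unreachable code.
 */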
4190 static int validate_reachable_instructions(struct objtool_file *file)
4191 {
4192         struct instruction *insn;
4193
4194         if (file->ignore_unreachables)
4195                 return 0;
4196
4197         for_each_insn(file, insn) {
4198                 if (insn->visited || ignore_unreachable_insn(file, insn))
4199                         continue;
4200
4201                 WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
4202                 return 1;
4203         }
4204
4205         return 0;
4206 }
4207
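/*
 * Main pass: decode the object file, run the enabled validation passes, and
 * then emit the requested annotation sections (static calls, retpoline/return
 * sites, mcount locations, ENDBR seals, ORC).
 */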
4208 int check(struct objtool_file *file)
4209 {
4210         int ret, warnings = 0;
4211
4212         arch_initial_func_cfi_state(&initial_func_cfi);
4213         init_cfi_state(&init_cfi);
4214         init_cfi_state(&func_cfi);
4215         set_func_state(&func_cfi);
4216
4217         if (!cfi_hash_alloc(1UL << (file->elf->symbol_bits - 3)))
4218                 goto out;
4219
4220         cfi_hash_add(&init_cfi);
4221         cfi_hash_add(&func_cfi);
4222
4223         ret = decode_sections(file);
4224         if (ret < 0)
4225                 goto out;
4226
4227         warnings += ret;
4228
4229         if (list_empty(&file->insn_list))
4230                 goto out;
4231
4232         if (opts.retpoline) {
4233                 ret = validate_retpoline(file);
4234                 if (ret < 0)
4235                         return ret;
4236                 warnings += ret;
4237         }
4238
4239         if (opts.stackval || opts.orc || opts.uaccess) {
4240                 ret = validate_functions(file);
4241                 if (ret < 0)
4242                         goto out;
4243                 warnings += ret;
4244
4245                 ret = validate_unwind_hints(file, NULL);
4246                 if (ret < 0)
4247                         goto out;
4248                 warnings += ret;
4249
4250                 if (!warnings) {
4251                         ret = validate_reachable_instructions(file);
4252                         if (ret < 0)
4253                                 goto out;
4254                         warnings += ret;
4255                 }
4256
4257         } else if (opts.noinstr) {
4258                 ret = validate_noinstr_sections(file);
4259                 if (ret < 0)
4260                         goto out;
4261                 warnings += ret;
4262         }
4263
4264         if (opts.unret) {
4265                 /*
4266                  * Must be after validate_branch() and friends; it plays
4267                  * further games with insn->visited.
4268                  */
4269                 ret = validate_unret(file);
4270                 if (ret < 0)
4271                         return ret;
4272                 warnings += ret;
4273         }
4274
4275         if (opts.ibt) {
4276                 ret = validate_ibt(file);
4277                 if (ret < 0)
4278                         goto out;
4279                 warnings += ret;
4280         }
4281
4282         if (opts.sls) {
4283                 ret = validate_sls(file);
4284                 if (ret < 0)
4285                         goto out;
4286                 warnings += ret;
4287         }
4288
4289         if (opts.static_call) {
4290                 ret = create_static_call_sections(file);
4291                 if (ret < 0)
4292                         goto out;
4293                 warnings += ret;
4294         }
4295
4296         if (opts.retpoline) {
4297                 ret = create_retpoline_sites_sections(file);
4298                 if (ret < 0)
4299                         goto out;
4300                 warnings += ret;
4301         }
4302
4303         if (opts.rethunk) {
4304                 ret = create_return_sites_sections(file);
4305                 if (ret < 0)
4306                         goto out;
4307                 warnings += ret;
4308         }
4309
4310         if (opts.mcount) {
4311                 ret = create_mcount_loc_sections(file);
4312                 if (ret < 0)
4313                         goto out;
4314                 warnings += ret;
4315         }
4316
4317         if (opts.ibt) {
4318                 ret = create_ibt_endbr_seal_sections(file);
4319                 if (ret < 0)
4320                         goto out;
4321                 warnings += ret;
4322         }
4323
4324         if (opts.orc && !list_empty(&file->insn_list)) {
4325                 ret = orc_create(file);
4326                 if (ret < 0)
4327                         goto out;
4328                 warnings += ret;
4329         }
4330
4331
4332         if (opts.stats) {
4333                 printf("nr_insns_visited: %ld\n", nr_insns_visited);
4334                 printf("nr_cfi: %ld\n", nr_cfi);
4335                 printf("nr_cfi_reused: %ld\n", nr_cfi_reused);
4336                 printf("nr_cfi_cache: %ld\n", nr_cfi_cache);
4337         }
4338
4339 out:
4340         /*
4341          *  For now, don't fail the kernel build on fatal warnings.  These
4342          *  errors are still fairly common due to the growing matrix of
4343          *  supported toolchains and their recent pace of change.
4344          */
4345         return 0;
4346 }