tools/objtool/check.c
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5
6 #include <string.h>
7 #include <stdlib.h>
8 #include <inttypes.h>
9 #include <sys/mman.h>
10
11 #include <arch/elf.h>
12 #include <objtool/builtin.h>
13 #include <objtool/cfi.h>
14 #include <objtool/arch.h>
15 #include <objtool/check.h>
16 #include <objtool/special.h>
17 #include <objtool/warn.h>
18 #include <objtool/endianness.h>
19
20 #include <linux/objtool.h>
21 #include <linux/hashtable.h>
22 #include <linux/kernel.h>
23 #include <linux/static_call_types.h>
24
25 struct alternative {
26         struct list_head list;
27         struct instruction *insn;
28         bool skip_orig;
29 };
30
31 static unsigned long nr_cfi, nr_cfi_reused, nr_cfi_cache;
32
33 static struct cfi_init_state initial_func_cfi;
34 static struct cfi_state init_cfi;
35 static struct cfi_state func_cfi;
36
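/*
 * Find the decoded instruction at the given section + offset via the
 * per-file instruction hash.
 */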
37 struct instruction *find_insn(struct objtool_file *file,
38                               struct section *sec, unsigned long offset)
39 {
40         struct instruction *insn;
41
42         hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) {
43                 if (insn->sec == sec && insn->offset == offset)
44                         return insn;
45         }
46
47         return NULL;
48 }
49
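/* Return the next instruction in the list, or NULL at the end of the section. */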
50 static struct instruction *next_insn_same_sec(struct objtool_file *file,
51                                               struct instruction *insn)
52 {
53         struct instruction *next = list_next_entry(insn, list);
54
55         if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
56                 return NULL;
57
58         return next;
59 }
60
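/*
 * Return the next instruction belonging to the same function, following the
 * parent function into its .cold subfunction when the straight-line walk ends.
 */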
61 static struct instruction *next_insn_same_func(struct objtool_file *file,
62                                                struct instruction *insn)
63 {
64         struct instruction *next = list_next_entry(insn, list);
65         struct symbol *func = insn->func;
66
67         if (!func)
68                 return NULL;
69
70         if (&next->list != &file->insn_list && next->func == func)
71                 return next;
72
73         /* Check if we're already in the subfunction: */
74         if (func == func->cfunc)
75                 return NULL;
76
77         /* Move to the subfunction: */
78         return find_insn(file, func->cfunc->sec, func->cfunc->offset);
79 }
80
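/* Return the previous instruction if it belongs to the same function, else NULL. */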
81 static struct instruction *prev_insn_same_sym(struct objtool_file *file,
82                                                struct instruction *insn)
83 {
84         struct instruction *prev = list_prev_entry(insn, list);
85
86         if (&prev->list != &file->insn_list && prev->func == insn->func)
87                 return prev;
88
89         return NULL;
90 }
91
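/* Iterators over the decoded instructions of a function, symbol or section. */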
92 #define func_for_each_insn(file, func, insn)                            \
93         for (insn = find_insn(file, func->sec, func->offset);           \
94              insn;                                                      \
95              insn = next_insn_same_func(file, insn))
96
97 #define sym_for_each_insn(file, sym, insn)                              \
98         for (insn = find_insn(file, sym->sec, sym->offset);             \
99              insn && &insn->list != &file->insn_list &&                 \
100                 insn->sec == sym->sec &&                                \
101                 insn->offset < sym->offset + sym->len;                  \
102              insn = list_next_entry(insn, list))
103
104 #define sym_for_each_insn_continue_reverse(file, sym, insn)             \
105         for (insn = list_prev_entry(insn, list);                        \
106              &insn->list != &file->insn_list &&                         \
107                 insn->sec == sym->sec && insn->offset >= sym->offset;   \
108              insn = list_prev_entry(insn, list))
109
110 #define sec_for_each_insn_from(file, insn)                              \
111         for (; insn; insn = next_insn_same_sec(file, insn))
112
113 #define sec_for_each_insn_continue(file, insn)                          \
114         for (insn = next_insn_same_sec(file, insn); insn;               \
115              insn = next_insn_same_sec(file, insn))
116
117 static bool is_jump_table_jump(struct instruction *insn)
118 {
119         struct alt_group *alt_group = insn->alt_group;
120
121         if (insn->jump_table)
122                 return true;
123
124         /* Retpoline alternative for a jump table? */
125         return alt_group && alt_group->orig_group &&
126                alt_group->orig_group->first_insn->jump_table;
127 }
128
129 static bool is_sibling_call(struct instruction *insn)
130 {
131         /*
132          * Assume only ELF functions can make sibling calls.  This ensures
133          * sibling call detection consistency between vmlinux.o and individual
134          * objects.
135          */
136         if (!insn->func)
137                 return false;
138
139         /* An indirect jump is either a sibling call or a jump to a table. */
140         if (insn->type == INSN_JUMP_DYNAMIC)
141                 return !is_jump_table_jump(insn);
142
143         /* add_jump_destinations() sets insn->call_dest for sibling calls. */
144         return (is_static_jump(insn) && insn->call_dest);
145 }
146
147 /*
148  * This checks to see if the given function is a "noreturn" function.
149  *
150  * For global functions which are outside the scope of this object file, we
151  * have to keep a manual list of them.
152  *
153  * For local functions, we have to detect them manually by simply looking for
154  * the lack of a return instruction.
155  */
156 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
157                                 int recursion)
158 {
159         int i;
160         struct instruction *insn;
161         bool empty = true;
162
163         /*
164          * Unfortunately these have to be hard coded because the noreturn
165          * attribute isn't provided in ELF data. Keep 'em sorted.
166          */
167         static const char * const global_noreturns[] = {
168                 "__invalid_creds",
169                 "__module_put_and_kthread_exit",
170                 "__reiserfs_panic",
171                 "__stack_chk_fail",
172                 "__ubsan_handle_builtin_unreachable",
173                 "cpu_bringup_and_idle",
174                 "cpu_startup_entry",
175                 "do_exit",
176                 "do_group_exit",
177                 "do_task_dead",
178                 "ex_handler_msr_mce",
179                 "fortify_panic",
180                 "kthread_complete_and_exit",
181                 "kthread_exit",
182                 "kunit_try_catch_throw",
183                 "lbug_with_loc",
184                 "machine_real_restart",
185                 "make_task_dead",
186                 "panic",
187                 "rewind_stack_and_make_dead",
188                 "sev_es_terminate",
189                 "snp_abort",
190                 "stop_this_cpu",
191                 "usercopy_abort",
192                 "xen_start_kernel",
193         };
194
195         if (!func)
196                 return false;
197
198         if (func->bind == STB_WEAK)
199                 return false;
200
201         if (func->bind == STB_GLOBAL)
202                 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
203                         if (!strcmp(func->name, global_noreturns[i]))
204                                 return true;
205
206         if (!func->len)
207                 return false;
208
209         insn = find_insn(file, func->sec, func->offset);
210         if (!insn || !insn->func)
211                 return false;
212
213         func_for_each_insn(file, func, insn) {
214                 empty = false;
215
216                 if (insn->type == INSN_RETURN)
217                         return false;
218         }
219
220         if (empty)
221                 return false;
222
223         /*
224          * A function can have a sibling call instead of a return.  In that
225          * case, the function's dead-end status depends on whether the target
226          * of the sibling call returns.
227          */
228         func_for_each_insn(file, func, insn) {
229                 if (is_sibling_call(insn)) {
230                         struct instruction *dest = insn->jump_dest;
231
232                         if (!dest)
233                                 /* sibling call to another file */
234                                 return false;
235
236                         /* local sibling call */
237                         if (recursion == 5) {
238                                 /*
239                                  * Infinite recursion: two functions have
240                                  * sibling calls to each other.  This is a very
241                                  * rare case.  It means they aren't dead ends.
242                                  */
243                                 return false;
244                         }
245
246                         return __dead_end_function(file, dest->func, recursion+1);
247                 }
248         }
249
250         return true;
251 }
252
253 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
254 {
255         return __dead_end_function(file, func, 0);
256 }
257
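/* Reset every register and the CFA to "undefined" before CFI is computed. */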
258 static void init_cfi_state(struct cfi_state *cfi)
259 {
260         int i;
261
262         for (i = 0; i < CFI_NUM_REGS; i++) {
263                 cfi->regs[i].base = CFI_UNDEFINED;
264                 cfi->vals[i].base = CFI_UNDEFINED;
265         }
266         cfi->cfa.base = CFI_UNDEFINED;
267         cfi->drap_reg = CFI_UNDEFINED;
268         cfi->drap_offset = -1;
269 }
270
271 static void init_insn_state(struct objtool_file *file, struct insn_state *state,
272                             struct section *sec)
273 {
274         memset(state, 0, sizeof(*state));
275         init_cfi_state(&state->cfi);
276
277         /*
278          * We need the full vmlinux for noinstr validation, otherwise we
279          * cannot correctly determine insn->call_dest->sec (external symbols do
280          * not have a section).
281          */
282         if (opts.link && opts.noinstr && sec)
283                 state->noinstr = sec->noinstr;
284 }
285
286 static struct cfi_state *cfi_alloc(void)
287 {
288         struct cfi_state *cfi = calloc(sizeof(struct cfi_state), 1);
289         if (!cfi) {
290                 WARN("calloc failed");
291                 exit(1);
292         }
293         nr_cfi++;
294         return cfi;
295 }
296
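/*
 * CFI states are de-duplicated: identical states (compared on everything
 * after the embedded hlist_node) are stored once in a hash table and shared.
 */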
297 static int cfi_bits;
298 static struct hlist_head *cfi_hash;
299
300 static inline bool cficmp(struct cfi_state *cfi1, struct cfi_state *cfi2)
301 {
302         return memcmp((void *)cfi1 + sizeof(cfi1->hash),
303                       (void *)cfi2 + sizeof(cfi2->hash),
304                       sizeof(struct cfi_state) - sizeof(struct hlist_node));
305 }
306
307 static inline u32 cfi_key(struct cfi_state *cfi)
308 {
309         return jhash((void *)cfi + sizeof(cfi->hash),
310                      sizeof(*cfi) - sizeof(cfi->hash), 0);
311 }
312
313 static struct cfi_state *cfi_hash_find_or_add(struct cfi_state *cfi)
314 {
315         struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)];
316         struct cfi_state *obj;
317
318         hlist_for_each_entry(obj, head, hash) {
319                 if (!cficmp(cfi, obj)) {
320                         nr_cfi_cache++;
321                         return obj;
322                 }
323         }
324
325         obj = cfi_alloc();
326         *obj = *cfi;
327         hlist_add_head(&obj->hash, head);
328
329         return obj;
330 }
331
332 static void cfi_hash_add(struct cfi_state *cfi)
333 {
334         struct hlist_head *head = &cfi_hash[hash_min(cfi_key(cfi), cfi_bits)];
335
336         hlist_add_head(&cfi->hash, head);
337 }
338
339 static void *cfi_hash_alloc(unsigned long size)
340 {
341         cfi_bits = max(10, ilog2(size));
342         cfi_hash = mmap(NULL, sizeof(struct hlist_head) << cfi_bits,
343                         PROT_READ|PROT_WRITE,
344                         MAP_PRIVATE|MAP_ANON, -1, 0);
345         if (cfi_hash == (void *)-1L) {
346                 WARN("mmap fail cfi_hash");
347                 cfi_hash = NULL;
348         } else if (opts.stats) {
349                 printf("cfi_bits: %d\n", cfi_bits);
350         }
351
352         return cfi_hash;
353 }
354
355 static unsigned long nr_insns;
356 static unsigned long nr_insns_visited;
357
358 /*
359  * Call the arch-specific instruction decoder for all the instructions and add
360  * them to the global instruction list.
361  */
362 static int decode_instructions(struct objtool_file *file)
363 {
364         struct section *sec;
365         struct symbol *func;
366         unsigned long offset;
367         struct instruction *insn;
368         int ret;
369
370         for_each_sec(file, sec) {
371
372                 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
373                         continue;
374
375                 if (strcmp(sec->name, ".altinstr_replacement") &&
376                     strcmp(sec->name, ".altinstr_aux") &&
377                     strncmp(sec->name, ".discard.", 9))
378                         sec->text = true;
379
380                 if (!strcmp(sec->name, ".noinstr.text") ||
381                     !strcmp(sec->name, ".entry.text") ||
382                     !strncmp(sec->name, ".text.__x86.", 12))
383                         sec->noinstr = true;
384
385                 for (offset = 0; offset < sec->sh.sh_size; offset += insn->len) {
386                         insn = malloc(sizeof(*insn));
387                         if (!insn) {
388                                 WARN("malloc failed");
389                                 return -1;
390                         }
391                         memset(insn, 0, sizeof(*insn));
392                         INIT_LIST_HEAD(&insn->alts);
393                         INIT_LIST_HEAD(&insn->stack_ops);
394                         INIT_LIST_HEAD(&insn->call_node);
395
396                         insn->sec = sec;
397                         insn->offset = offset;
398
399                         ret = arch_decode_instruction(file, sec, offset,
400                                                       sec->sh.sh_size - offset,
401                                                       &insn->len, &insn->type,
402                                                       &insn->immediate,
403                                                       &insn->stack_ops);
404                         if (ret)
405                                 goto err;
406
407                         /*
408                          * By default, "ud2" is a dead end unless otherwise
409                          * annotated, because GCC 7 inserts it for certain
410                          * divide-by-zero cases.
411                          */
412                         if (insn->type == INSN_BUG)
413                                 insn->dead_end = true;
414
415                         hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset));
416                         list_add_tail(&insn->list, &file->insn_list);
417                         nr_insns++;
418                 }
419
420                 list_for_each_entry(func, &sec->symbol_list, list) {
421                         if (func->type != STT_FUNC || func->alias != func)
422                                 continue;
423
424                         if (!find_insn(file, sec, func->offset)) {
425                                 WARN("%s(): can't find starting instruction",
426                                      func->name);
427                                 return -1;
428                         }
429
430                         sym_for_each_insn(file, func, insn) {
431                                 insn->func = func;
432                                 if (insn->type == INSN_ENDBR && list_empty(&insn->call_node)) {
433                                         if (insn->offset == insn->func->offset) {
434                                                 list_add_tail(&insn->call_node, &file->endbr_list);
435                                                 file->nr_endbr++;
436                                         } else {
437                                                 file->nr_endbr_int++;
438                                         }
439                                 }
440                         }
441                 }
442         }
443
444         if (opts.stats)
445                 printf("nr_insns: %lu\n", nr_insns);
446
447         return 0;
448
449 err:
450         free(insn);
451         return ret;
452 }
453
454 /*
455  * Read the pv_ops[] .data table to find the static initialized values.
456  */
457 static int add_pv_ops(struct objtool_file *file, const char *symname)
458 {
459         struct symbol *sym, *func;
460         unsigned long off, end;
461         struct reloc *rel;
462         int idx;
463
464         sym = find_symbol_by_name(file->elf, symname);
465         if (!sym)
466                 return 0;
467
468         off = sym->offset;
469         end = off + sym->len;
470         for (;;) {
471                 rel = find_reloc_by_dest_range(file->elf, sym->sec, off, end - off);
472                 if (!rel)
473                         break;
474
475                 func = rel->sym;
476                 if (func->type == STT_SECTION)
477                         func = find_symbol_by_offset(rel->sym->sec, rel->addend);
478
479                 idx = (rel->offset - sym->offset) / sizeof(unsigned long);
480
481                 objtool_pv_add(file, idx, func);
482
483                 off = rel->offset + 1;
484                 if (off > end)
485                         break;
486         }
487
488         return 0;
489 }
490
491 /*
492  * Allocate and initialize file->pv_ops[].
493  */
494 static int init_pv_ops(struct objtool_file *file)
495 {
496         static const char *pv_ops_tables[] = {
497                 "pv_ops",
498                 "xen_cpu_ops",
499                 "xen_irq_ops",
500                 "xen_mmu_ops",
501                 NULL,
502         };
503         const char *pv_ops;
504         struct symbol *sym;
505         int idx, nr;
506
507         if (!opts.noinstr)
508                 return 0;
509
510         file->pv_ops = NULL;
511
512         sym = find_symbol_by_name(file->elf, "pv_ops");
513         if (!sym)
514                 return 0;
515
516         nr = sym->len / sizeof(unsigned long);
517         file->pv_ops = calloc(sizeof(struct pv_state), nr);
518         if (!file->pv_ops)
519                 return -1;
520
521         for (idx = 0; idx < nr; idx++)
522                 INIT_LIST_HEAD(&file->pv_ops[idx].targets);
523
524         for (idx = 0; (pv_ops = pv_ops_tables[idx]); idx++)
525                 add_pv_ops(file, pv_ops);
526
527         return 0;
528 }
529
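/* Find the last decoded instruction in a section by scanning its final bytes. */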
530 static struct instruction *find_last_insn(struct objtool_file *file,
531                                           struct section *sec)
532 {
533         struct instruction *insn = NULL;
534         unsigned int offset;
535         unsigned int end = (sec->sh.sh_size > 10) ? sec->sh.sh_size - 10 : 0;
536
537         for (offset = sec->sh.sh_size - 1; offset >= end && !insn; offset--)
538                 insn = find_insn(file, sec, offset);
539
540         return insn;
541 }
542
543 /*
544  * Mark "ud2" instructions and manually annotated dead ends.
545  */
546 static int add_dead_ends(struct objtool_file *file)
547 {
548         struct section *sec;
549         struct reloc *reloc;
550         struct instruction *insn;
551
552         /*
553          * Check for manually annotated dead ends.
554          */
555         sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
556         if (!sec)
557                 goto reachable;
558
559         list_for_each_entry(reloc, &sec->reloc_list, list) {
560                 if (reloc->sym->type != STT_SECTION) {
561                         WARN("unexpected relocation symbol type in %s", sec->name);
562                         return -1;
563                 }
564                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
565                 if (insn)
566                         insn = list_prev_entry(insn, list);
567                 else if (reloc->addend == reloc->sym->sec->sh.sh_size) {
568                         insn = find_last_insn(file, reloc->sym->sec);
569                         if (!insn) {
570                                 WARN("can't find unreachable insn at %s+0x%" PRIx64,
571                                      reloc->sym->sec->name, reloc->addend);
572                                 return -1;
573                         }
574                 } else {
575                         WARN("can't find unreachable insn at %s+0x%" PRIx64,
576                              reloc->sym->sec->name, reloc->addend);
577                         return -1;
578                 }
579
580                 insn->dead_end = true;
581         }
582
583 reachable:
584         /*
585          * These manually annotated reachable checks are needed for GCC 4.4,
586          * where the Linux unreachable() macro isn't supported.  In that case
587          * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
588          * not a dead end.
589          */
590         sec = find_section_by_name(file->elf, ".rela.discard.reachable");
591         if (!sec)
592                 return 0;
593
594         list_for_each_entry(reloc, &sec->reloc_list, list) {
595                 if (reloc->sym->type != STT_SECTION) {
596                         WARN("unexpected relocation symbol type in %s", sec->name);
597                         return -1;
598                 }
599                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
600                 if (insn)
601                         insn = list_prev_entry(insn, list);
602                 else if (reloc->addend == reloc->sym->sec->sh.sh_size) {
603                         insn = find_last_insn(file, reloc->sym->sec);
604                         if (!insn) {
605                                 WARN("can't find reachable insn at %s+0x%" PRIx64,
606                                      reloc->sym->sec->name, reloc->addend);
607                                 return -1;
608                         }
609                 } else {
610                         WARN("can't find reachable insn at %s+0x%" PRIx64,
611                              reloc->sym->sec->name, reloc->addend);
612                         return -1;
613                 }
614
615                 insn->dead_end = false;
616         }
617
618         return 0;
619 }
620
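/*
 * Generate the .static_call_sites section: one entry per static call, with
 * relocations for the call address and its static_call_key.
 */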
621 static int create_static_call_sections(struct objtool_file *file)
622 {
623         struct section *sec;
624         struct static_call_site *site;
625         struct instruction *insn;
626         struct symbol *key_sym;
627         char *key_name, *tmp;
628         int idx;
629
630         sec = find_section_by_name(file->elf, ".static_call_sites");
631         if (sec) {
632                 INIT_LIST_HEAD(&file->static_call_list);
633                 WARN("file already has .static_call_sites section, skipping");
634                 return 0;
635         }
636
637         if (list_empty(&file->static_call_list))
638                 return 0;
639
640         idx = 0;
641         list_for_each_entry(insn, &file->static_call_list, call_node)
642                 idx++;
643
644         sec = elf_create_section(file->elf, ".static_call_sites", SHF_WRITE,
645                                  sizeof(struct static_call_site), idx);
646         if (!sec)
647                 return -1;
648
649         idx = 0;
650         list_for_each_entry(insn, &file->static_call_list, call_node) {
651
652                 site = (struct static_call_site *)sec->data->d_buf + idx;
653                 memset(site, 0, sizeof(struct static_call_site));
654
655                 /* populate reloc for 'addr' */
656                 if (elf_add_reloc_to_insn(file->elf, sec,
657                                           idx * sizeof(struct static_call_site),
658                                           R_X86_64_PC32,
659                                           insn->sec, insn->offset))
660                         return -1;
661
662                 /* find key symbol */
663                 key_name = strdup(insn->call_dest->name);
664                 if (!key_name) {
665                         perror("strdup");
666                         return -1;
667                 }
668                 if (strncmp(key_name, STATIC_CALL_TRAMP_PREFIX_STR,
669                             STATIC_CALL_TRAMP_PREFIX_LEN)) {
670                         WARN("static_call: trampoline name malformed: %s", key_name);
671                         return -1;
672                 }
673                 tmp = key_name + STATIC_CALL_TRAMP_PREFIX_LEN - STATIC_CALL_KEY_PREFIX_LEN;
674                 memcpy(tmp, STATIC_CALL_KEY_PREFIX_STR, STATIC_CALL_KEY_PREFIX_LEN);
675
676                 key_sym = find_symbol_by_name(file->elf, tmp);
677                 if (!key_sym) {
678                         if (!opts.module) {
679                                 WARN("static_call: can't find static_call_key symbol: %s", tmp);
680                                 return -1;
681                         }
682
683                         /*
684                          * For modules, the key might not be exported, which
685                          * means the module can make static calls but isn't
686                          * allowed to change them.
687                          *
688                          * In that case we temporarily set the key to be the
689                          * trampoline address.  This is fixed up in
690                          * static_call_add_module().
691                          */
692                         key_sym = insn->call_dest;
693                 }
694                 free(key_name);
695
696                 /* populate reloc for 'key' */
697                 if (elf_add_reloc(file->elf, sec,
698                                   idx * sizeof(struct static_call_site) + 4,
699                                   R_X86_64_PC32, key_sym,
700                                   is_sibling_call(insn) * STATIC_CALL_SITE_TAIL))
701                         return -1;
702
703                 idx++;
704         }
705
706         return 0;
707 }
708
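/* Generate .retpoline_sites: the locations of all retpoline calls and jumps. */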
709 static int create_retpoline_sites_sections(struct objtool_file *file)
710 {
711         struct instruction *insn;
712         struct section *sec;
713         int idx;
714
715         sec = find_section_by_name(file->elf, ".retpoline_sites");
716         if (sec) {
717                 WARN("file already has .retpoline_sites, skipping");
718                 return 0;
719         }
720
721         idx = 0;
722         list_for_each_entry(insn, &file->retpoline_call_list, call_node)
723                 idx++;
724
725         if (!idx)
726                 return 0;
727
728         sec = elf_create_section(file->elf, ".retpoline_sites", 0,
729                                  sizeof(int), idx);
730         if (!sec) {
731                 WARN("elf_create_section: .retpoline_sites");
732                 return -1;
733         }
734
735         idx = 0;
736         list_for_each_entry(insn, &file->retpoline_call_list, call_node) {
737
738                 int *site = (int *)sec->data->d_buf + idx;
739                 *site = 0;
740
741                 if (elf_add_reloc_to_insn(file->elf, sec,
742                                           idx * sizeof(int),
743                                           R_X86_64_PC32,
744                                           insn->sec, insn->offset)) {
745                         WARN("elf_add_reloc_to_insn: .retpoline_sites");
746                         return -1;
747                 }
748
749                 idx++;
750         }
751
752         return 0;
753 }
754
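/* Generate .return_sites: the locations of all return thunk calls. */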
755 static int create_return_sites_sections(struct objtool_file *file)
756 {
757         struct instruction *insn;
758         struct section *sec;
759         int idx;
760
761         sec = find_section_by_name(file->elf, ".return_sites");
762         if (sec) {
763                 WARN("file already has .return_sites, skipping");
764                 return 0;
765         }
766
767         idx = 0;
768         list_for_each_entry(insn, &file->return_thunk_list, call_node)
769                 idx++;
770
771         if (!idx)
772                 return 0;
773
774         sec = elf_create_section(file->elf, ".return_sites", 0,
775                                  sizeof(int), idx);
776         if (!sec) {
777                 WARN("elf_create_section: .return_sites");
778                 return -1;
779         }
780
781         idx = 0;
782         list_for_each_entry(insn, &file->return_thunk_list, call_node) {
783
784                 int *site = (int *)sec->data->d_buf + idx;
785                 *site = 0;
786
787                 if (elf_add_reloc_to_insn(file->elf, sec,
788                                           idx * sizeof(int),
789                                           R_X86_64_PC32,
790                                           insn->sec, insn->offset)) {
791                         WARN("elf_add_reloc_to_insn: .return_sites");
792                         return -1;
793                 }
794
795                 idx++;
796         }
797
798         return 0;
799 }
800
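/* Generate .ibt_endbr_seal: the ENDBR instructions which can be sealed at boot. */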
801 static int create_ibt_endbr_seal_sections(struct objtool_file *file)
802 {
803         struct instruction *insn;
804         struct section *sec;
805         int idx;
806
807         sec = find_section_by_name(file->elf, ".ibt_endbr_seal");
808         if (sec) {
809                 WARN("file already has .ibt_endbr_seal, skipping");
810                 return 0;
811         }
812
813         idx = 0;
814         list_for_each_entry(insn, &file->endbr_list, call_node)
815                 idx++;
816
817         if (opts.stats) {
818                 printf("ibt: ENDBR at function start: %d\n", file->nr_endbr);
819                 printf("ibt: ENDBR inside functions:  %d\n", file->nr_endbr_int);
820                 printf("ibt: superfluous ENDBR:       %d\n", idx);
821         }
822
823         if (!idx)
824                 return 0;
825
826         sec = elf_create_section(file->elf, ".ibt_endbr_seal", 0,
827                                  sizeof(int), idx);
828         if (!sec) {
829                 WARN("elf_create_section: .ibt_endbr_seal");
830                 return -1;
831         }
832
833         idx = 0;
834         list_for_each_entry(insn, &file->endbr_list, call_node) {
835
836                 int *site = (int *)sec->data->d_buf + idx;
837                 *site = 0;
838
839                 if (elf_add_reloc_to_insn(file->elf, sec,
840                                           idx * sizeof(int),
841                                           R_X86_64_PC32,
842                                           insn->sec, insn->offset)) {
843                         WARN("elf_add_reloc_to_insn: .ibt_endbr_seal");
844                         return -1;
845                 }
846
847                 idx++;
848         }
849
850         return 0;
851 }
852
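/* Generate __mcount_loc: the addresses of all __fentry__ call sites, for ftrace. */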
853 static int create_mcount_loc_sections(struct objtool_file *file)
854 {
855         struct section *sec;
856         unsigned long *loc;
857         struct instruction *insn;
858         int idx;
859
860         sec = find_section_by_name(file->elf, "__mcount_loc");
861         if (sec) {
862                 INIT_LIST_HEAD(&file->mcount_loc_list);
863                 WARN("file already has __mcount_loc section, skipping");
864                 return 0;
865         }
866
867         if (list_empty(&file->mcount_loc_list))
868                 return 0;
869
870         idx = 0;
871         list_for_each_entry(insn, &file->mcount_loc_list, call_node)
872                 idx++;
873
874         sec = elf_create_section(file->elf, "__mcount_loc", 0, sizeof(unsigned long), idx);
875         if (!sec)
876                 return -1;
877
878         idx = 0;
879         list_for_each_entry(insn, &file->mcount_loc_list, call_node) {
880
881                 loc = (unsigned long *)sec->data->d_buf + idx;
882                 memset(loc, 0, sizeof(unsigned long));
883
884                 if (elf_add_reloc_to_insn(file->elf, sec,
885                                           idx * sizeof(unsigned long),
886                                           R_X86_64_64,
887                                           insn->sec, insn->offset))
888                         return -1;
889
890                 idx++;
891         }
892
893         return 0;
894 }
895
896 /*
897  * Warnings shouldn't be reported for ignored functions.
898  */
899 static void add_ignores(struct objtool_file *file)
900 {
901         struct instruction *insn;
902         struct section *sec;
903         struct symbol *func;
904         struct reloc *reloc;
905
906         sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
907         if (!sec)
908                 return;
909
910         list_for_each_entry(reloc, &sec->reloc_list, list) {
911                 switch (reloc->sym->type) {
912                 case STT_FUNC:
913                         func = reloc->sym;
914                         break;
915
916                 case STT_SECTION:
917                         func = find_func_by_offset(reloc->sym->sec, reloc->addend);
918                         if (!func)
919                                 continue;
920                         break;
921
922                 default:
923                         WARN("unexpected relocation symbol type in %s: %d", sec->name, reloc->sym->type);
924                         continue;
925                 }
926
927                 func_for_each_insn(file, func, insn)
928                         insn->ignore = true;
929         }
930 }
931
932 /*
933  * This is a whitelist of functions that are allowed to be called with AC set.
934  * The list is meant to be minimal and only contains compiler instrumentation
935  * ABI and a few functions used to implement *_{to,from}_user() functions.
936  *
937  * These functions must not directly change AC, but may PUSHF/POPF.
938  */
939 static const char *uaccess_safe_builtin[] = {
940         /* KASAN */
941         "kasan_report",
942         "kasan_check_range",
943         /* KASAN out-of-line */
944         "__asan_loadN_noabort",
945         "__asan_load1_noabort",
946         "__asan_load2_noabort",
947         "__asan_load4_noabort",
948         "__asan_load8_noabort",
949         "__asan_load16_noabort",
950         "__asan_storeN_noabort",
951         "__asan_store1_noabort",
952         "__asan_store2_noabort",
953         "__asan_store4_noabort",
954         "__asan_store8_noabort",
955         "__asan_store16_noabort",
956         "__kasan_check_read",
957         "__kasan_check_write",
958         /* KASAN in-line */
959         "__asan_report_load_n_noabort",
960         "__asan_report_load1_noabort",
961         "__asan_report_load2_noabort",
962         "__asan_report_load4_noabort",
963         "__asan_report_load8_noabort",
964         "__asan_report_load16_noabort",
965         "__asan_report_store_n_noabort",
966         "__asan_report_store1_noabort",
967         "__asan_report_store2_noabort",
968         "__asan_report_store4_noabort",
969         "__asan_report_store8_noabort",
970         "__asan_report_store16_noabort",
971         /* KCSAN */
972         "__kcsan_check_access",
973         "__kcsan_mb",
974         "__kcsan_wmb",
975         "__kcsan_rmb",
976         "__kcsan_release",
977         "kcsan_found_watchpoint",
978         "kcsan_setup_watchpoint",
979         "kcsan_check_scoped_accesses",
980         "kcsan_disable_current",
981         "kcsan_enable_current_nowarn",
982         /* KCSAN/TSAN */
983         "__tsan_func_entry",
984         "__tsan_func_exit",
985         "__tsan_read_range",
986         "__tsan_write_range",
987         "__tsan_read1",
988         "__tsan_read2",
989         "__tsan_read4",
990         "__tsan_read8",
991         "__tsan_read16",
992         "__tsan_write1",
993         "__tsan_write2",
994         "__tsan_write4",
995         "__tsan_write8",
996         "__tsan_write16",
997         "__tsan_read_write1",
998         "__tsan_read_write2",
999         "__tsan_read_write4",
1000         "__tsan_read_write8",
1001         "__tsan_read_write16",
1002         "__tsan_atomic8_load",
1003         "__tsan_atomic16_load",
1004         "__tsan_atomic32_load",
1005         "__tsan_atomic64_load",
1006         "__tsan_atomic8_store",
1007         "__tsan_atomic16_store",
1008         "__tsan_atomic32_store",
1009         "__tsan_atomic64_store",
1010         "__tsan_atomic8_exchange",
1011         "__tsan_atomic16_exchange",
1012         "__tsan_atomic32_exchange",
1013         "__tsan_atomic64_exchange",
1014         "__tsan_atomic8_fetch_add",
1015         "__tsan_atomic16_fetch_add",
1016         "__tsan_atomic32_fetch_add",
1017         "__tsan_atomic64_fetch_add",
1018         "__tsan_atomic8_fetch_sub",
1019         "__tsan_atomic16_fetch_sub",
1020         "__tsan_atomic32_fetch_sub",
1021         "__tsan_atomic64_fetch_sub",
1022         "__tsan_atomic8_fetch_and",
1023         "__tsan_atomic16_fetch_and",
1024         "__tsan_atomic32_fetch_and",
1025         "__tsan_atomic64_fetch_and",
1026         "__tsan_atomic8_fetch_or",
1027         "__tsan_atomic16_fetch_or",
1028         "__tsan_atomic32_fetch_or",
1029         "__tsan_atomic64_fetch_or",
1030         "__tsan_atomic8_fetch_xor",
1031         "__tsan_atomic16_fetch_xor",
1032         "__tsan_atomic32_fetch_xor",
1033         "__tsan_atomic64_fetch_xor",
1034         "__tsan_atomic8_fetch_nand",
1035         "__tsan_atomic16_fetch_nand",
1036         "__tsan_atomic32_fetch_nand",
1037         "__tsan_atomic64_fetch_nand",
1038         "__tsan_atomic8_compare_exchange_strong",
1039         "__tsan_atomic16_compare_exchange_strong",
1040         "__tsan_atomic32_compare_exchange_strong",
1041         "__tsan_atomic64_compare_exchange_strong",
1042         "__tsan_atomic8_compare_exchange_weak",
1043         "__tsan_atomic16_compare_exchange_weak",
1044         "__tsan_atomic32_compare_exchange_weak",
1045         "__tsan_atomic64_compare_exchange_weak",
1046         "__tsan_atomic8_compare_exchange_val",
1047         "__tsan_atomic16_compare_exchange_val",
1048         "__tsan_atomic32_compare_exchange_val",
1049         "__tsan_atomic64_compare_exchange_val",
1050         "__tsan_atomic_thread_fence",
1051         "__tsan_atomic_signal_fence",
1052         /* KCOV */
1053         "write_comp_data",
1054         "check_kcov_mode",
1055         "__sanitizer_cov_trace_pc",
1056         "__sanitizer_cov_trace_const_cmp1",
1057         "__sanitizer_cov_trace_const_cmp2",
1058         "__sanitizer_cov_trace_const_cmp4",
1059         "__sanitizer_cov_trace_const_cmp8",
1060         "__sanitizer_cov_trace_cmp1",
1061         "__sanitizer_cov_trace_cmp2",
1062         "__sanitizer_cov_trace_cmp4",
1063         "__sanitizer_cov_trace_cmp8",
1064         "__sanitizer_cov_trace_switch",
1065         /* UBSAN */
1066         "ubsan_type_mismatch_common",
1067         "__ubsan_handle_type_mismatch",
1068         "__ubsan_handle_type_mismatch_v1",
1069         "__ubsan_handle_shift_out_of_bounds",
1070         /* misc */
1071         "csum_partial_copy_generic",
1072         "copy_mc_fragile",
1073         "copy_mc_fragile_handle_tail",
1074         "copy_mc_enhanced_fast_string",
1075         "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
1076         "clear_user_erms",
1077         "clear_user_rep_good",
1078         "clear_user_original",
1079         NULL
1080 };
1081
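/* Mark the whitelisted symbols above as safe to call with AC set. */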
1082 static void add_uaccess_safe(struct objtool_file *file)
1083 {
1084         struct symbol *func;
1085         const char **name;
1086
1087         if (!opts.uaccess)
1088                 return;
1089
1090         for (name = uaccess_safe_builtin; *name; name++) {
1091                 func = find_symbol_by_name(file->elf, *name);
1092                 if (!func)
1093                         continue;
1094
1095                 func->uaccess_safe = true;
1096         }
1097 }
1098
1099 /*
1100  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
1101  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
1102  * But it at least allows objtool to understand the control flow *around* the
1103  * retpoline.
1104  */
1105 static int add_ignore_alternatives(struct objtool_file *file)
1106 {
1107         struct section *sec;
1108         struct reloc *reloc;
1109         struct instruction *insn;
1110
1111         sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
1112         if (!sec)
1113                 return 0;
1114
1115         list_for_each_entry(reloc, &sec->reloc_list, list) {
1116                 if (reloc->sym->type != STT_SECTION) {
1117                         WARN("unexpected relocation symbol type in %s", sec->name);
1118                         return -1;
1119                 }
1120
1121                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1122                 if (!insn) {
1123                         WARN("bad .discard.ignore_alts entry");
1124                         return -1;
1125                 }
1126
1127                 insn->ignore_alts = true;
1128         }
1129
1130         return 0;
1131 }
1132
1133 __weak bool arch_is_retpoline(struct symbol *sym)
1134 {
1135         return false;
1136 }
1137
1138 __weak bool arch_is_rethunk(struct symbol *sym)
1139 {
1140         return false;
1141 }
1142
1143 #define NEGATIVE_RELOC  ((void *)-1L)
1144
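/*
 * Return the relocation applied to an instruction, caching the result
 * (NEGATIVE_RELOC marks a cached "no relocation" lookup).
 */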
1145 static struct reloc *insn_reloc(struct objtool_file *file, struct instruction *insn)
1146 {
1147         if (insn->reloc == NEGATIVE_RELOC)
1148                 return NULL;
1149
1150         if (!insn->reloc) {
1151                 if (!file)
1152                         return NULL;
1153
1154                 insn->reloc = find_reloc_by_dest_range(file->elf, insn->sec,
1155                                                        insn->offset, insn->len);
1156                 if (!insn->reloc) {
1157                         insn->reloc = NEGATIVE_RELOC;
1158                         return NULL;
1159                 }
1160         }
1161
1162         return insn->reloc;
1163 }
1164
1165 static void remove_insn_ops(struct instruction *insn)
1166 {
1167         struct stack_op *op, *tmp;
1168
1169         list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
1170                 list_del(&op->list);
1171                 free(op);
1172         }
1173 }
1174
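/*
 * Classify a call site: queue static calls, retpoline calls and __fentry__
 * calls on their respective lists, NOP out instrumentation calls in noinstr
 * text, and mark calls to dead-end functions.
 */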
1175 static void annotate_call_site(struct objtool_file *file,
1176                                struct instruction *insn, bool sibling)
1177 {
1178         struct reloc *reloc = insn_reloc(file, insn);
1179         struct symbol *sym = insn->call_dest;
1180
1181         if (!sym)
1182                 sym = reloc->sym;
1183
1184         /*
1185          * Alternative replacement code is just template code which is
1186          * sometimes copied to the original instruction. For now, don't
1187          * annotate it. (In the future we might consider annotating the
1188          * original instruction if/when it ever makes sense to do so.)
1189          */
1190         if (!strcmp(insn->sec->name, ".altinstr_replacement"))
1191                 return;
1192
1193         if (sym->static_call_tramp) {
1194                 list_add_tail(&insn->call_node, &file->static_call_list);
1195                 return;
1196         }
1197
1198         if (sym->retpoline_thunk) {
1199                 list_add_tail(&insn->call_node, &file->retpoline_call_list);
1200                 return;
1201         }
1202
1203         /*
1204          * Many compilers cannot disable KCOV or sanitizer calls with a function
1205          * attribute, so they need a little help: NOP out any such calls from
1206          * noinstr text.
1207          */
1208         if (opts.hack_noinstr && insn->sec->noinstr && sym->profiling_func) {
1209                 if (reloc) {
1210                         reloc->type = R_NONE;
1211                         elf_write_reloc(file->elf, reloc);
1212                 }
1213
1214                 elf_write_insn(file->elf, insn->sec,
1215                                insn->offset, insn->len,
1216                                sibling ? arch_ret_insn(insn->len)
1217                                        : arch_nop_insn(insn->len));
1218
1219                 insn->type = sibling ? INSN_RETURN : INSN_NOP;
1220
1221                 if (sibling) {
1222                         /*
1223                          * We've replaced the tail-call JMP insn by two new
1224                          * insn: RET; INT3, except we only have a single struct
1225                          * insn here. Mark it retpoline_safe to avoid the SLS
1226                          * warning, instead of adding another insn.
1227                          */
1228                         insn->retpoline_safe = true;
1229                 }
1230
1231                 return;
1232         }
1233
1234         if (opts.mcount && sym->fentry) {
1235                 if (sibling)
1236                         WARN_FUNC("Tail call to __fentry__ !?!?", insn->sec, insn->offset);
1237
1238                 if (reloc) {
1239                         reloc->type = R_NONE;
1240                         elf_write_reloc(file->elf, reloc);
1241                 }
1242
1243                 elf_write_insn(file->elf, insn->sec,
1244                                insn->offset, insn->len,
1245                                arch_nop_insn(insn->len));
1246
1247                 insn->type = INSN_NOP;
1248
1249                 list_add_tail(&insn->call_node, &file->mcount_loc_list);
1250                 return;
1251         }
1252
1253         if (!sibling && dead_end_function(file, sym))
1254                 insn->dead_end = true;
1255 }
1256
1257 static void add_call_dest(struct objtool_file *file, struct instruction *insn,
1258                           struct symbol *dest, bool sibling)
1259 {
1260         insn->call_dest = dest;
1261         if (!dest)
1262                 return;
1263
1264         /*
1265          * Whatever stack impact regular CALLs have, should be undone
1266          * by the RETURN of the called function.
1267          *
1268          * Annotated intra-function calls retain the stack_ops but
1269          * are converted to JUMP, see read_intra_function_calls().
1270          */
1271         remove_insn_ops(insn);
1272
1273         annotate_call_site(file, insn, sibling);
1274 }
1275
1276 static void add_retpoline_call(struct objtool_file *file, struct instruction *insn)
1277 {
1278         /*
1279          * Retpoline calls/jumps are really dynamic calls/jumps in disguise,
1280          * so convert them accordingly.
1281          */
1282         switch (insn->type) {
1283         case INSN_CALL:
1284                 insn->type = INSN_CALL_DYNAMIC;
1285                 break;
1286         case INSN_JUMP_UNCONDITIONAL:
1287                 insn->type = INSN_JUMP_DYNAMIC;
1288                 break;
1289         case INSN_JUMP_CONDITIONAL:
1290                 insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
1291                 break;
1292         default:
1293                 return;
1294         }
1295
1296         insn->retpoline_safe = true;
1297
1298         /*
1299          * Whatever stack impact regular CALLs have, should be undone
1300          * by the RETURN of the called function.
1301          *
1302          * Annotated intra-function calls retain the stack_ops but
1303          * are converted to JUMP, see read_intra_function_calls().
1304          */
1305         remove_insn_ops(insn);
1306
1307         annotate_call_site(file, insn, false);
1308 }
1309
1310 static void add_return_call(struct objtool_file *file, struct instruction *insn, bool add)
1311 {
1312         /*
1313          * Return thunk tail calls are really just returns in disguise,
1314          * so convert them accordingly.
1315          */
1316         insn->type = INSN_RETURN;
1317         insn->retpoline_safe = true;
1318
1319         if (add)
1320                 list_add_tail(&insn->call_node, &file->return_thunk_list);
1321 }
1322
1323 static bool same_function(struct instruction *insn1, struct instruction *insn2)
1324 {
1325         return insn1->func->pfunc == insn2->func->pfunc;
1326 }
1327
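/* Is this the first instruction of its function (allowing for a leading ENDBR with IBT)? */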
1328 static bool is_first_func_insn(struct objtool_file *file, struct instruction *insn)
1329 {
1330         if (insn->offset == insn->func->offset)
1331                 return true;
1332
1333         if (opts.ibt) {
1334                 struct instruction *prev = prev_insn_same_sym(file, insn);
1335
1336                 if (prev && prev->type == INSN_ENDBR &&
1337                     insn->offset == insn->func->offset + prev->len)
1338                         return true;
1339         }
1340
1341         return false;
1342 }
1343
1344 /*
1345  * Find the destination instructions for all jumps.
1346  */
1347 static int add_jump_destinations(struct objtool_file *file)
1348 {
1349         struct instruction *insn, *jump_dest;
1350         struct reloc *reloc;
1351         struct section *dest_sec;
1352         unsigned long dest_off;
1353
1354         for_each_insn(file, insn) {
1355                 if (insn->jump_dest) {
1356                         /*
1357                          * handle_group_alt() may have previously set
1358                          * 'jump_dest' for some alternatives.
1359                          */
1360                         continue;
1361                 }
1362                 if (!is_static_jump(insn))
1363                         continue;
1364
1365                 reloc = insn_reloc(file, insn);
1366                 if (!reloc) {
1367                         dest_sec = insn->sec;
1368                         dest_off = arch_jump_destination(insn);
1369                 } else if (reloc->sym->type == STT_SECTION) {
1370                         dest_sec = reloc->sym->sec;
1371                         dest_off = arch_dest_reloc_offset(reloc->addend);
1372                 } else if (reloc->sym->retpoline_thunk) {
1373                         add_retpoline_call(file, insn);
1374                         continue;
1375                 } else if (reloc->sym->return_thunk) {
1376                         add_return_call(file, insn, true);
1377                         continue;
1378                 } else if (insn->func) {
1379                         /*
1380                          * External sibling call or internal sibling call with
1381                          * STT_FUNC reloc.
1382                          */
1383                         add_call_dest(file, insn, reloc->sym, true);
1384                         continue;
1385                 } else if (reloc->sym->sec->idx) {
1386                         dest_sec = reloc->sym->sec;
1387                         dest_off = reloc->sym->sym.st_value +
1388                                    arch_dest_reloc_offset(reloc->addend);
1389                 } else {
1390                         /* non-func asm code jumping to another file */
1391                         continue;
1392                 }
1393
1394                 jump_dest = find_insn(file, dest_sec, dest_off);
1395                 if (!jump_dest) {
1396                         struct symbol *sym = find_symbol_by_offset(dest_sec, dest_off);
1397
1398                         /*
1399                          * This is a special case for zen_untrain_ret().
1400                          * It jumps to __x86_return_thunk(), but objtool
1401                          * can't find the thunk's starting RET
1402                          * instruction, because the RET is also in the
1403                          * middle of another instruction.  Objtool only
1404                          * knows about the outer instruction.
1405                          */
1406                         if (sym && sym->return_thunk) {
1407                                 add_return_call(file, insn, false);
1408                                 continue;
1409                         }
1410
1411                         WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
1412                                   insn->sec, insn->offset, dest_sec->name,
1413                                   dest_off);
1414                         return -1;
1415                 }
1416
1417                 /*
1418                  * Cross-function jump.
1419                  */
1420                 if (insn->func && jump_dest->func &&
1421                     insn->func != jump_dest->func) {
1422
1423                         /*
1424                          * For GCC 8+, create parent/child links for any cold
1425                          * subfunctions.  This is _mostly_ redundant with a
1426                          * similar initialization in read_symbols().
1427                          *
1428                          * If a function has aliases, we want the *first* such
1429                          * function in the symbol table to be the subfunction's
1430                          * parent.  In that case we overwrite the
1431                          * initialization done in read_symbols().
1432                          *
1433                          * However this code can't completely replace the
1434                          * read_symbols() code because this doesn't detect the
1435                          * case where the parent function's only reference to a
1436                          * subfunction is through a jump table.
1437                          */
1438                         if (!strstr(insn->func->name, ".cold") &&
1439                             strstr(jump_dest->func->name, ".cold")) {
1440                                 insn->func->cfunc = jump_dest->func;
1441                                 jump_dest->func->pfunc = insn->func;
1442
1443                         } else if (!same_function(insn, jump_dest) &&
1444                                    is_first_func_insn(file, jump_dest)) {
1445                                 /*
1446                                  * Internal sibling call without reloc or with
1447                                  * STT_SECTION reloc.
1448                                  */
1449                                 add_call_dest(file, insn, jump_dest->func, true);
1450                                 continue;
1451                         }
1452                 }
1453
1454                 insn->jump_dest = jump_dest;
1455         }
1456
1457         return 0;
1458 }
1459
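/* Prefer an STT_FUNC symbol at the destination offset; fall back to any symbol there. */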
1460 static struct symbol *find_call_destination(struct section *sec, unsigned long offset)
1461 {
1462         struct symbol *call_dest;
1463
1464         call_dest = find_func_by_offset(sec, offset);
1465         if (!call_dest)
1466                 call_dest = find_symbol_by_offset(sec, offset);
1467
1468         return call_dest;
1469 }
1470
1471 /*
1472  * Find the destination instructions for all calls.
1473  */
1474 static int add_call_destinations(struct objtool_file *file)
1475 {
1476         struct instruction *insn;
1477         unsigned long dest_off;
1478         struct symbol *dest;
1479         struct reloc *reloc;
1480
1481         for_each_insn(file, insn) {
1482                 if (insn->type != INSN_CALL)
1483                         continue;
1484
1485                 reloc = insn_reloc(file, insn);
1486                 if (!reloc) {
1487                         dest_off = arch_jump_destination(insn);
1488                         dest = find_call_destination(insn->sec, dest_off);
1489
1490                         add_call_dest(file, insn, dest, false);
1491
1492                         if (insn->ignore)
1493                                 continue;
1494
1495                         if (!insn->call_dest) {
1496                                 WARN_FUNC("unannotated intra-function call", insn->sec, insn->offset);
1497                                 return -1;
1498                         }
1499
1500                         if (insn->func && insn->call_dest->type != STT_FUNC) {
1501                                 WARN_FUNC("unsupported call to non-function",
1502                                           insn->sec, insn->offset);
1503                                 return -1;
1504                         }
1505
1506                 } else if (reloc->sym->type == STT_SECTION) {
1507                         dest_off = arch_dest_reloc_offset(reloc->addend);
1508                         dest = find_call_destination(reloc->sym->sec, dest_off);
1509                         if (!dest) {
1510                                 WARN_FUNC("can't find call dest symbol at %s+0x%lx",
1511                                           insn->sec, insn->offset,
1512                                           reloc->sym->sec->name,
1513                                           dest_off);
1514                                 return -1;
1515                         }
1516
1517                         add_call_dest(file, insn, dest, false);
1518
1519                 } else if (reloc->sym->retpoline_thunk) {
1520                         add_retpoline_call(file, insn);
1521
1522                 } else
1523                         add_call_dest(file, insn, reloc->sym, false);
1524         }
1525
1526         return 0;
1527 }
1528
1529 /*
1530  * The .altinstructions section requires some extra special care over and above
1531  * other special sections because alternatives are patched in place.
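      *
      * Each original instruction range and its replacement become a pair of
      * alt_groups sharing one CFI array, so that propagate_alt_cfi() can
      * propagate the stack state at each offset across both code paths.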
1532  */
1533 static int handle_group_alt(struct objtool_file *file,
1534                             struct special_alt *special_alt,
1535                             struct instruction *orig_insn,
1536                             struct instruction **new_insn)
1537 {
1538         struct instruction *last_orig_insn, *last_new_insn = NULL, *insn, *nop = NULL;
1539         struct alt_group *orig_alt_group, *new_alt_group;
1540         unsigned long dest_off;
1541
1542
1543         orig_alt_group = malloc(sizeof(*orig_alt_group));
1544         if (!orig_alt_group) {
1545                 WARN("malloc failed");
1546                 return -1;
1547         }
1548         orig_alt_group->cfi = calloc(special_alt->orig_len,
1549                                      sizeof(struct cfi_state *));
1550         if (!orig_alt_group->cfi) {
1551                 WARN("calloc failed");
1552                 return -1;
1553         }
1554
1555         last_orig_insn = NULL;
1556         insn = orig_insn;
1557         sec_for_each_insn_from(file, insn) {
1558                 if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
1559                         break;
1560
1561                 insn->alt_group = orig_alt_group;
1562                 last_orig_insn = insn;
1563         }
1564         orig_alt_group->orig_group = NULL;
1565         orig_alt_group->first_insn = orig_insn;
1566         orig_alt_group->last_insn = last_orig_insn;
1567
1568
1569         new_alt_group = malloc(sizeof(*new_alt_group));
1570         if (!new_alt_group) {
1571                 WARN("malloc failed");
1572                 return -1;
1573         }
1574
1575         if (special_alt->new_len < special_alt->orig_len) {
1576                 /*
1577                  * Insert a fake nop at the end to make the replacement
1578                  * alt_group the same size as the original.  This is needed to
1579                  * allow propagate_alt_cfi() to do its magic.  When the last
1580                  * instruction affects the stack, the instruction after it (the
1581                  * nop) will propagate the new state to the shared CFI array.
1582                  */
1583                 nop = malloc(sizeof(*nop));
1584                 if (!nop) {
1585                         WARN("malloc failed");
1586                         return -1;
1587                 }
1588                 memset(nop, 0, sizeof(*nop));
1589                 INIT_LIST_HEAD(&nop->alts);
1590                 INIT_LIST_HEAD(&nop->stack_ops);
1591
1592                 nop->sec = special_alt->new_sec;
1593                 nop->offset = special_alt->new_off + special_alt->new_len;
1594                 nop->len = special_alt->orig_len - special_alt->new_len;
1595                 nop->type = INSN_NOP;
1596                 nop->func = orig_insn->func;
1597                 nop->alt_group = new_alt_group;
1598                 nop->ignore = orig_insn->ignore_alts;
1599         }
1600
1601         if (!special_alt->new_len) {
1602                 *new_insn = nop;
1603                 goto end;
1604         }
1605
1606         insn = *new_insn;
1607         sec_for_each_insn_from(file, insn) {
1608                 struct reloc *alt_reloc;
1609
1610                 if (insn->offset >= special_alt->new_off + special_alt->new_len)
1611                         break;
1612
1613                 last_new_insn = insn;
1614
1615                 insn->ignore = orig_insn->ignore_alts;
1616                 insn->func = orig_insn->func;
1617                 insn->alt_group = new_alt_group;
1618
1619                 /*
1620                  * Since alternative replacement code is copy/pasted by the
1621                  * kernel after applying relocations, generally such code can't
1622                  * have relative-address relocation references to outside the
1623                  * .altinstr_replacement section, unless the arch's
1624                  * alternatives code can adjust the relative offsets
1625                  * accordingly.
1626                  */
1627                 alt_reloc = insn_reloc(file, insn);
1628                 if (alt_reloc &&
1629                     !arch_support_alt_relocation(special_alt, insn, alt_reloc)) {
1630
1631                         WARN_FUNC("unsupported relocation in alternatives section",
1632                                   insn->sec, insn->offset);
1633                         return -1;
1634                 }
1635
1636                 if (!is_static_jump(insn))
1637                         continue;
1638
1639                 if (!insn->immediate)
1640                         continue;
1641
1642                 dest_off = arch_jump_destination(insn);
1643                 if (dest_off == special_alt->new_off + special_alt->new_len) {
1644                         insn->jump_dest = next_insn_same_sec(file, last_orig_insn);
1645                         if (!insn->jump_dest) {
1646                                 WARN_FUNC("can't find alternative jump destination",
1647                                           insn->sec, insn->offset);
1648                                 return -1;
1649                         }
1650                 }
1651         }
1652
1653         if (!last_new_insn) {
1654                 WARN_FUNC("can't find last new alternative instruction",
1655                           special_alt->new_sec, special_alt->new_off);
1656                 return -1;
1657         }
1658
1659         if (nop)
1660                 list_add(&nop->list, &last_new_insn->list);
1661 end:
1662         new_alt_group->orig_group = orig_alt_group;
1663         new_alt_group->first_insn = *new_insn;
1664         new_alt_group->last_insn = nop ? : last_new_insn;
1665         new_alt_group->cfi = orig_alt_group->cfi;
1666         return 0;
1667 }
1668
1669 /*
1670  * A jump table entry can either convert a nop to a jump or a jump to a nop.
1671  * If the original instruction is a jump, make the alt entry an effective nop
1672  * by just skipping the original instruction.
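      *
      * These entries come from the __jump_table section generated by the
      * static branch (jump label) macros; each site is either a NOP or an
      * unconditional JMP, typically 2 or 5 bytes on x86.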
1673  */
1674 static int handle_jump_alt(struct objtool_file *file,
1675                            struct special_alt *special_alt,
1676                            struct instruction *orig_insn,
1677                            struct instruction **new_insn)
1678 {
1679         if (orig_insn->type != INSN_JUMP_UNCONDITIONAL &&
1680             orig_insn->type != INSN_NOP) {
1681
1682                 WARN_FUNC("unsupported instruction at jump label",
1683                           orig_insn->sec, orig_insn->offset);
1684                 return -1;
1685         }
1686
1687         if (opts.hack_jump_label && special_alt->key_addend & 2) {
1688                 struct reloc *reloc = insn_reloc(file, orig_insn);
1689
1690                 if (reloc) {
1691                         reloc->type = R_NONE;
1692                         elf_write_reloc(file->elf, reloc);
1693                 }
1694                 elf_write_insn(file->elf, orig_insn->sec,
1695                                orig_insn->offset, orig_insn->len,
1696                                arch_nop_insn(orig_insn->len));
1697                 orig_insn->type = INSN_NOP;
1698         }
1699
1700         if (orig_insn->type == INSN_NOP) {
1701                 if (orig_insn->len == 2)
1702                         file->jl_nop_short++;
1703                 else
1704                         file->jl_nop_long++;
1705
1706                 return 0;
1707         }
1708
1709         if (orig_insn->len == 2)
1710                 file->jl_short++;
1711         else
1712                 file->jl_long++;
1713
1714         *new_insn = list_next_entry(orig_insn, list);
1715         return 0;
1716 }
1717
1718 /*
1719  * Read all the special sections whose alternate instructions can be
1720  * patched in or redirected to at runtime.  Each instruction having alternate
1721  * instruction(s) has them added to its insn->alts list, which will be
1722  * traversed in validate_branch().
1723  */
1724 static int add_special_section_alts(struct objtool_file *file)
1725 {
1726         struct list_head special_alts;
1727         struct instruction *orig_insn, *new_insn;
1728         struct special_alt *special_alt, *tmp;
1729         struct alternative *alt;
1730         int ret;
1731
1732         ret = special_get_alts(file->elf, &special_alts);
1733         if (ret)
1734                 return ret;
1735
1736         list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
1737
1738                 orig_insn = find_insn(file, special_alt->orig_sec,
1739                                       special_alt->orig_off);
1740                 if (!orig_insn) {
1741                         WARN_FUNC("special: can't find orig instruction",
1742                                   special_alt->orig_sec, special_alt->orig_off);
1743                         ret = -1;
1744                         goto out;
1745                 }
1746
1747                 new_insn = NULL;
1748                 if (!special_alt->group || special_alt->new_len) {
1749                         new_insn = find_insn(file, special_alt->new_sec,
1750                                              special_alt->new_off);
1751                         if (!new_insn) {
1752                                 WARN_FUNC("special: can't find new instruction",
1753                                           special_alt->new_sec,
1754                                           special_alt->new_off);
1755                                 ret = -1;
1756                                 goto out;
1757                         }
1758                 }
1759
1760                 if (special_alt->group) {
1761                         if (!special_alt->orig_len) {
1762                                 WARN_FUNC("empty alternative entry",
1763                                           orig_insn->sec, orig_insn->offset);
1764                                 continue;
1765                         }
1766
1767                         ret = handle_group_alt(file, special_alt, orig_insn,
1768                                                &new_insn);
1769                         if (ret)
1770                                 goto out;
1771                 } else if (special_alt->jump_or_nop) {
1772                         ret = handle_jump_alt(file, special_alt, orig_insn,
1773                                               &new_insn);
1774                         if (ret)
1775                                 goto out;
1776                 }
1777
1778                 alt = malloc(sizeof(*alt));
1779                 if (!alt) {
1780                         WARN("malloc failed");
1781                         ret = -1;
1782                         goto out;
1783                 }
1784
1785                 alt->insn = new_insn;
1786                 alt->skip_orig = special_alt->skip_orig;
1787                 orig_insn->ignore_alts |= special_alt->skip_alt;
1788                 list_add_tail(&alt->list, &orig_insn->alts);
1789
1790                 list_del(&special_alt->list);
1791                 free(special_alt);
1792         }
1793
1794         if (opts.stats) {
1795                 printf("jl\\\tNOP\tJMP\n");
1796                 printf("short:\t%ld\t%ld\n", file->jl_nop_short, file->jl_short);
1797                 printf("long:\t%ld\t%ld\n", file->jl_nop_long, file->jl_long);
1798         }
1799
1800 out:
1801         return ret;
1802 }
1803
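     /*
      * A GCC switch jump table in .rodata is typically an array of code
      * pointers with one relocation per entry, e.g.:
      *
      *   .rodata:
      *     .quad  .L3     # case 0
      *     .quad  .L5     # case 1
      *     .quad  .L3     # case 2
      *
      * The "+ 8" check below assumes such 8-byte entries; the first entry of
      * each table was marked ->jump_table_start by mark_func_jump_tables().
      */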
1804 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
1805                             struct reloc *table)
1806 {
1807         struct reloc *reloc = table;
1808         struct instruction *dest_insn;
1809         struct alternative *alt;
1810         struct symbol *pfunc = insn->func->pfunc;
1811         unsigned int prev_offset = 0;
1812
1813         /*
1814          * Each @reloc is a switch table relocation which points to the target
1815          * instruction.
1816          */
1817         list_for_each_entry_from(reloc, &table->sec->reloc_list, list) {
1818
1819                 /* Check for the end of the table: */
1820                 if (reloc != table && reloc->jump_table_start)
1821                         break;
1822
1823                 /* Make sure the table entries are consecutive: */
1824                 if (prev_offset && reloc->offset != prev_offset + 8)
1825                         break;
1826
1827                 /* Detect function pointers from contiguous objects: */
1828                 if (reloc->sym->sec == pfunc->sec &&
1829                     reloc->addend == pfunc->offset)
1830                         break;
1831
1832                 dest_insn = find_insn(file, reloc->sym->sec, reloc->addend);
1833                 if (!dest_insn)
1834                         break;
1835
1836                 /* Make sure the destination is in the same function: */
1837                 if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
1838                         break;
1839
1840                 alt = malloc(sizeof(*alt));
1841                 if (!alt) {
1842                         WARN("malloc failed");
1843                         return -1;
1844                 }
1845
1846                 alt->insn = dest_insn;
1847                 list_add_tail(&alt->list, &insn->alts);
1848                 prev_offset = reloc->offset;
1849         }
1850
1851         if (!prev_offset) {
1852                 WARN_FUNC("can't find switch jump table",
1853                           insn->sec, insn->offset);
1854                 return -1;
1855         }
1856
1857         return 0;
1858 }
1859
1860 /*
1861  * find_jump_table() - Given a dynamic jump, find the switch jump table
1862  * associated with it.
1863  */
1864 static struct reloc *find_jump_table(struct objtool_file *file,
1865                                       struct symbol *func,
1866                                       struct instruction *insn)
1867 {
1868         struct reloc *table_reloc;
1869         struct instruction *dest_insn, *orig_insn = insn;
1870
1871         /*
1872          * Backward search using the @first_jump_src links; these help skip
1873          * over much of the 'in between' code, which could otherwise confuse
1874          * the search.
1875          */
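             /*
              * The table reference is often on an earlier instruction than
              * the indirect jump itself, e.g. when the table address is
              * first loaded into a register:
              *
              *   mov    table(,%rax,8), %rax
              *   ...
              *   jmp    *%rax
              *
              * so keep walking backwards until arch_find_switch_table()
              * finds a relocation into a .rodata jump table.
              */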
1876         for (;
1877              insn && insn->func && insn->func->pfunc == func;
1878              insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {
1879
1880                 if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1881                         break;
1882
1883                 /* allow small jumps within the range */
1884                 if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1885                     insn->jump_dest &&
1886                     (insn->jump_dest->offset <= insn->offset ||
1887                      insn->jump_dest->offset > orig_insn->offset))
1888                         break;
1889
1890                 table_reloc = arch_find_switch_table(file, insn);
1891                 if (!table_reloc)
1892                         continue;
1893                 dest_insn = find_insn(file, table_reloc->sym->sec, table_reloc->addend);
1894                 if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
1895                         continue;
1896
1897                 return table_reloc;
1898         }
1899
1900         return NULL;
1901 }
1902
1903 /*
1904  * First pass: Mark the head of each jump table so that in the next pass,
1905  * we know when a given jump table ends and the next one starts.
1906  */
1907 static void mark_func_jump_tables(struct objtool_file *file,
1908                                     struct symbol *func)
1909 {
1910         struct instruction *insn, *last = NULL;
1911         struct reloc *reloc;
1912
1913         func_for_each_insn(file, func, insn) {
1914                 if (!last)
1915                         last = insn;
1916
1917                 /*
1918                  * Store back-pointers for unconditional forward jumps such
1919                  * that find_jump_table() can back-track using those and
1920                  * avoid some potentially confusing code.
1921                  */
1922                 if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1923                     insn->offset > last->offset &&
1924                     insn->jump_dest->offset > insn->offset &&
1925                     !insn->jump_dest->first_jump_src) {
1926
1927                         insn->jump_dest->first_jump_src = insn;
1928                         last = insn->jump_dest;
1929                 }
1930
1931                 if (insn->type != INSN_JUMP_DYNAMIC)
1932                         continue;
1933
1934                 reloc = find_jump_table(file, func, insn);
1935                 if (reloc) {
1936                         reloc->jump_table_start = true;
1937                         insn->jump_table = reloc;
1938                 }
1939         }
1940 }
1941
1942 static int add_func_jump_tables(struct objtool_file *file,
1943                                   struct symbol *func)
1944 {
1945         struct instruction *insn;
1946         int ret;
1947
1948         func_for_each_insn(file, func, insn) {
1949                 if (!insn->jump_table)
1950                         continue;
1951
1952                 ret = add_jump_table(file, insn, insn->jump_table);
1953                 if (ret)
1954                         return ret;
1955         }
1956
1957         return 0;
1958 }
1959
1960 /*
1961  * For some switch statements, gcc generates a jump table in the .rodata
1962  * section which contains a list of addresses within the function to jump to.
1963  * This finds these jump tables and adds them to the insn->alts lists.
1964  */
1965 static int add_jump_table_alts(struct objtool_file *file)
1966 {
1967         struct section *sec;
1968         struct symbol *func;
1969         int ret;
1970
1971         if (!file->rodata)
1972                 return 0;
1973
1974         for_each_sec(file, sec) {
1975                 list_for_each_entry(func, &sec->symbol_list, list) {
1976                         if (func->type != STT_FUNC)
1977                                 continue;
1978
1979                         mark_func_jump_tables(file, func);
1980                         ret = add_func_jump_tables(file, func);
1981                         if (ret)
1982                                 return ret;
1983                 }
1984         }
1985
1986         return 0;
1987 }
1988
1989 static void set_func_state(struct cfi_state *state)
1990 {
1991         state->cfa = initial_func_cfi.cfa;
1992         memcpy(&state->regs, &initial_func_cfi.regs,
1993                CFI_NUM_REGS * sizeof(struct cfi_reg));
1994         state->stack_size = initial_func_cfi.cfa.offset;
1995 }
1996
1997 static int read_unwind_hints(struct objtool_file *file)
1998 {
1999         struct cfi_state cfi = init_cfi;
2000         struct section *sec, *relocsec;
2001         struct unwind_hint *hint;
2002         struct instruction *insn;
2003         struct reloc *reloc;
2004         int i;
2005
2006         sec = find_section_by_name(file->elf, ".discard.unwind_hints");
2007         if (!sec)
2008                 return 0;
2009
2010         relocsec = sec->reloc;
2011         if (!relocsec) {
2012                 WARN("missing .rela.discard.unwind_hints section");
2013                 return -1;
2014         }
2015
2016         if (sec->sh.sh_size % sizeof(struct unwind_hint)) {
2017                 WARN("struct unwind_hint size mismatch");
2018                 return -1;
2019         }
2020
2021         file->hints = true;
2022
2023         for (i = 0; i < sec->sh.sh_size / sizeof(struct unwind_hint); i++) {
2024                 hint = (struct unwind_hint *)sec->data->d_buf + i;
2025
2026                 reloc = find_reloc_by_dest(file->elf, sec, i * sizeof(*hint));
2027                 if (!reloc) {
2028                         WARN("can't find reloc for unwind_hints[%d]", i);
2029                         return -1;
2030                 }
2031
2032                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2033                 if (!insn) {
2034                         WARN("can't find insn for unwind_hints[%d]", i);
2035                         return -1;
2036                 }
2037
2038                 insn->hint = true;
2039
2040                 if (hint->type == UNWIND_HINT_TYPE_SAVE) {
2041                         insn->hint = false;
2042                         insn->save = true;
2043                         continue;
2044                 }
2045
2046                 if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
2047                         insn->restore = true;
2048                         continue;
2049                 }
2050
2051                 if (hint->type == UNWIND_HINT_TYPE_REGS_PARTIAL) {
2052                         struct symbol *sym = find_symbol_by_offset(insn->sec, insn->offset);
2053
2054                         if (sym && sym->bind == STB_GLOBAL) {
2055                                 if (opts.ibt && insn->type != INSN_ENDBR && !insn->noendbr) {
2056                                         WARN_FUNC("UNWIND_HINT_IRET_REGS without ENDBR",
2057                                                   insn->sec, insn->offset);
2058                                 }
2059
2060                                 insn->entry = 1;
2061                         }
2062                 }
2063
2064                 if (hint->type == UNWIND_HINT_TYPE_ENTRY) {
2065                         hint->type = UNWIND_HINT_TYPE_CALL;
2066                         insn->entry = 1;
2067                 }
2068
2069                 if (hint->type == UNWIND_HINT_TYPE_FUNC) {
2070                         insn->cfi = &func_cfi;
2071                         continue;
2072                 }
2073
2074                 if (insn->cfi)
2075                         cfi = *(insn->cfi);
2076
2077                 if (arch_decode_hint_reg(hint->sp_reg, &cfi.cfa.base)) {
2078                         WARN_FUNC("unsupported unwind_hint sp base reg %d",
2079                                   insn->sec, insn->offset, hint->sp_reg);
2080                         return -1;
2081                 }
2082
2083                 cfi.cfa.offset = bswap_if_needed(hint->sp_offset);
2084                 cfi.type = hint->type;
2085                 cfi.end = hint->end;
2086
2087                 insn->cfi = cfi_hash_find_or_add(&cfi);
2088         }
2089
2090         return 0;
2091 }
2092
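     /*
      * ANNOTATE_NOENDBR marks addresses which are intentionally not indirect
      * branch targets and therefore deliberately lack an ENDBR instruction;
      * each .discard.noendbr relocation points at one such address.
      */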
2093 static int read_noendbr_hints(struct objtool_file *file)
2094 {
2095         struct section *sec;
2096         struct instruction *insn;
2097         struct reloc *reloc;
2098
2099         sec = find_section_by_name(file->elf, ".rela.discard.noendbr");
2100         if (!sec)
2101                 return 0;
2102
2103         list_for_each_entry(reloc, &sec->reloc_list, list) {
2104                 insn = find_insn(file, reloc->sym->sec, reloc->sym->offset + reloc->addend);
2105                 if (!insn) {
2106                         WARN("bad .discard.noendbr entry");
2107                         return -1;
2108                 }
2109
2110                 if (insn->type == INSN_ENDBR)
2111                         WARN_FUNC("ANNOTATE_NOENDBR on ENDBR", insn->sec, insn->offset);
2112
2113                 insn->noendbr = 1;
2114         }
2115
2116         return 0;
2117 }
2118
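     /*
      * ANNOTATE_RETPOLINE_SAFE marks indirect branches (and returns) which
      * are known to be safe and deliberately left as-is; each
      * .rela.discard.retpoline_safe entry points at one such instruction.
      */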
2119 static int read_retpoline_hints(struct objtool_file *file)
2120 {
2121         struct section *sec;
2122         struct instruction *insn;
2123         struct reloc *reloc;
2124
2125         sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
2126         if (!sec)
2127                 return 0;
2128
2129         list_for_each_entry(reloc, &sec->reloc_list, list) {
2130                 if (reloc->sym->type != STT_SECTION) {
2131                         WARN("unexpected relocation symbol type in %s", sec->name);
2132                         return -1;
2133                 }
2134
2135                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2136                 if (!insn) {
2137                         WARN("bad .discard.retpoline_safe entry");
2138                         return -1;
2139                 }
2140
2141                 if (insn->type != INSN_JUMP_DYNAMIC &&
2142                     insn->type != INSN_CALL_DYNAMIC &&
2143                     insn->type != INSN_RETURN &&
2144                     insn->type != INSN_NOP) {
2145                         WARN_FUNC("retpoline_safe hint not an indirect jump/call/ret/nop",
2146                                   insn->sec, insn->offset);
2147                         return -1;
2148                 }
2149
2150                 insn->retpoline_safe = true;
2151         }
2152
2153         return 0;
2154 }
2155
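     /*
      * instrumentation_begin()/instrumentation_end() emit markers into
      * .discard.instr_begin/.discard.instr_end; insn->instr acts as a
      * nesting counter so noinstr validation knows which instructions sit
      * inside an instrumentation-allowed region.
      */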
2156 static int read_instr_hints(struct objtool_file *file)
2157 {
2158         struct section *sec;
2159         struct instruction *insn;
2160         struct reloc *reloc;
2161
2162         sec = find_section_by_name(file->elf, ".rela.discard.instr_end");
2163         if (!sec)
2164                 return 0;
2165
2166         list_for_each_entry(reloc, &sec->reloc_list, list) {
2167                 if (reloc->sym->type != STT_SECTION) {
2168                         WARN("unexpected relocation symbol type in %s", sec->name);
2169                         return -1;
2170                 }
2171
2172                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2173                 if (!insn) {
2174                         WARN("bad .discard.instr_end entry");
2175                         return -1;
2176                 }
2177
2178                 insn->instr--;
2179         }
2180
2181         sec = find_section_by_name(file->elf, ".rela.discard.instr_begin");
2182         if (!sec)
2183                 return 0;
2184
2185         list_for_each_entry(reloc, &sec->reloc_list, list) {
2186                 if (reloc->sym->type != STT_SECTION) {
2187                         WARN("unexpected relocation symbol type in %s", sec->name);
2188                         return -1;
2189                 }
2190
2191                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2192                 if (!insn) {
2193                         WARN("bad .discard.instr_begin entry");
2194                         return -1;
2195                 }
2196
2197                 insn->instr++;
2198         }
2199
2200         return 0;
2201 }
2202
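     /*
      * ANNOTATE_INTRA_FUNCTION_CALL marks CALL instructions whose target is
      * inside the same function; each .discard.intra_function_calls entry is
      * a relocation pointing at one such call.
      */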
2203 static int read_intra_function_calls(struct objtool_file *file)
2204 {
2205         struct instruction *insn;
2206         struct section *sec;
2207         struct reloc *reloc;
2208
2209         sec = find_section_by_name(file->elf, ".rela.discard.intra_function_calls");
2210         if (!sec)
2211                 return 0;
2212
2213         list_for_each_entry(reloc, &sec->reloc_list, list) {
2214                 unsigned long dest_off;
2215
2216                 if (reloc->sym->type != STT_SECTION) {
2217                         WARN("unexpected relocation symbol type in %s",
2218                              sec->name);
2219                         return -1;
2220                 }
2221
2222                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
2223                 if (!insn) {
2224                         WARN("bad .discard.intra_function_call entry");
2225                         return -1;
2226                 }
2227
2228                 if (insn->type != INSN_CALL) {
2229                         WARN_FUNC("intra_function_call not a direct call",
2230                                   insn->sec, insn->offset);
2231                         return -1;
2232                 }
2233
2234                 /*
2235                  * Treat intra-function CALLs as JMPs, but with a stack_op.
2236                  * See add_call_destinations(), which strips stack_ops from
2237                  * normal CALLs.
2238                  */
2239                 insn->type = INSN_JUMP_UNCONDITIONAL;
2240
2241                 dest_off = insn->offset + insn->len + insn->immediate;
2242                 insn->jump_dest = find_insn(file, insn->sec, dest_off);
2243                 if (!insn->jump_dest) {
2244                         WARN_FUNC("can't find call dest at %s+0x%lx",
2245                                   insn->sec, insn->offset,
2246                                   insn->sec->name, dest_off);
2247                         return -1;
2248                 }
2249         }
2250
2251         return 0;
2252 }
2253
2254 /*
2255  * Return true if name matches an instrumentation function, where calls to that
2256  * function from noinstr code can safely be removed, but compilers won't do so.
2257  */
2258 static bool is_profiling_func(const char *name)
2259 {
2260         /*
2261          * Many compilers cannot disable KCOV with a function attribute.
2262          */
2263         if (!strncmp(name, "__sanitizer_cov_", 16))
2264                 return true;
2265
2266         /*
2267          * Some compilers currently do not remove __tsan_func_entry/exit nor
2268          * __tsan_atomic_signal_fence (used for barrier instrumentation) with
2269          * the __no_sanitize_thread attribute, so remove them here. Once the
2270          * kernel's minimum Clang version is 14.0, this can be removed.
2271          */
2272         if (!strncmp(name, "__tsan_func_", 12) ||
2273             !strcmp(name, "__tsan_atomic_signal_fence"))
2274                 return true;
2275
2276         return false;
2277 }
2278
2279 static int classify_symbols(struct objtool_file *file)
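     /*
      * Walk all global symbols once and set the per-symbol flags (static
      * call trampoline, retpoline/return thunk, __fentry__, profiling
      * helper) which the later passes key off of.
      */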
2280 {
2281         struct section *sec;
2282         struct symbol *func;
2283
2284         for_each_sec(file, sec) {
2285                 list_for_each_entry(func, &sec->symbol_list, list) {
2286                         if (func->bind != STB_GLOBAL)
2287                                 continue;
2288
2289                         if (!strncmp(func->name, STATIC_CALL_TRAMP_PREFIX_STR,
2290                                      strlen(STATIC_CALL_TRAMP_PREFIX_STR)))
2291                                 func->static_call_tramp = true;
2292
2293                         if (arch_is_retpoline(func))
2294                                 func->retpoline_thunk = true;
2295
2296                         if (arch_is_rethunk(func))
2297                                 func->return_thunk = true;
2298
2299                         if (!strcmp(func->name, "__fentry__"))
2300                                 func->fentry = true;
2301
2302                         if (is_profiling_func(func->name))
2303                                 func->profiling_func = true;
2304                 }
2305         }
2306
2307         return 0;
2308 }
2309
2310 static void mark_rodata(struct objtool_file *file)
2311 {
2312         struct section *sec;
2313         bool found = false;
2314
2315         /*
2316          * Search for the following rodata sections, each of which can
2317          * potentially contain jump tables:
2318          *
2319          * - .rodata: can contain GCC switch tables
2320          * - .rodata.<func>: same, if -fdata-sections is being used
2321          * - .rodata..c_jump_table: contains C annotated jump tables
2322          *
2323          * .rodata.str1.* sections are ignored; they don't contain jump tables.
2324          */
2325         for_each_sec(file, sec) {
2326                 if (!strncmp(sec->name, ".rodata", 7) &&
2327                     !strstr(sec->name, ".str1.")) {
2328                         sec->rodata = true;
2329                         found = true;
2330                 }
2331         }
2332
2333         file->rodata = found;
2334 }
2335
2336 static int decode_sections(struct objtool_file *file)
2337 {
2338         int ret;
2339
2340         mark_rodata(file);
2341
2342         ret = init_pv_ops(file);
2343         if (ret)
2344                 return ret;
2345
2346         ret = decode_instructions(file);
2347         if (ret)
2348                 return ret;
2349
2350         add_ignores(file);
2351         add_uaccess_safe(file);
2352
2353         ret = add_ignore_alternatives(file);
2354         if (ret)
2355                 return ret;
2356
2357         /*
2358          * Must be before read_unwind_hints() since that needs insn->noendbr.
2359          */
2360         ret = read_noendbr_hints(file);
2361         if (ret)
2362                 return ret;
2363
2364         /*
2365          * Must be before add_jump_destinations() and add_call_destinations().
2366          */
2367         ret = classify_symbols(file);
2368         if (ret)
2369                 return ret;
2370
2371         /*
2372          * Must be before add_jump_destinations(), which depends on 'func'
2373          * being set for alternatives, to enable proper sibling call detection.
2374          */
2375         ret = add_special_section_alts(file);
2376         if (ret)
2377                 return ret;
2378
2379         ret = add_jump_destinations(file);
2380         if (ret)
2381                 return ret;
2382
2383         /*
2384          * Must be before add_call_destinations(); it changes INSN_CALL to
2385          * INSN_JUMP_UNCONDITIONAL.
2386          */
2387         ret = read_intra_function_calls(file);
2388         if (ret)
2389                 return ret;
2390
2391         ret = add_call_destinations(file);
2392         if (ret)
2393                 return ret;
2394
2395         /*
2396          * Must be after add_call_destinations() such that it can override
2397          * dead_end_function() marks.
2398          */
2399         ret = add_dead_ends(file);
2400         if (ret)
2401                 return ret;
2402
2403         ret = add_jump_table_alts(file);
2404         if (ret)
2405                 return ret;
2406
2407         ret = read_unwind_hints(file);
2408         if (ret)
2409                 return ret;
2410
2411         ret = read_retpoline_hints(file);
2412         if (ret)
2413                 return ret;
2414
2415         ret = read_instr_hints(file);
2416         if (ret)
2417                 return ret;
2418
2419         return 0;
2420 }
2421
2422 static bool is_fentry_call(struct instruction *insn)
2423 {
2424         if (insn->type == INSN_CALL &&
2425             insn->call_dest &&
2426             insn->call_dest->fentry)
2427                 return true;
2428
2429         return false;
2430 }
2431
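     /*
      * Return true if the current CFI state no longer matches the state at
      * function entry, i.e. a return or sibling call here would run with a
      * modified stack frame.
      */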
2432 static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
2433 {
2434         struct cfi_state *cfi = &state->cfi;
2435         int i;
2436
2437         if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
2438                 return true;
2439
2440         if (cfi->cfa.offset != initial_func_cfi.cfa.offset)
2441                 return true;
2442
2443         if (cfi->stack_size != initial_func_cfi.cfa.offset)
2444                 return true;
2445
2446         for (i = 0; i < CFI_NUM_REGS; i++) {
2447                 if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
2448                     cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
2449                         return true;
2450         }
2451
2452         return false;
2453 }
2454
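     /*
      * Check whether a register is stored at the expected CFA-relative slot,
      * e.g. on x86-64 the return address lives at CFA-8 and a pushed %rbp at
      * CFA-16.
      */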
2455 static bool check_reg_frame_pos(const struct cfi_reg *reg,
2456                                 int expected_offset)
2457 {
2458         return reg->base == CFI_CFA &&
2459                reg->offset == expected_offset;
2460 }
2461
2462 static bool has_valid_stack_frame(struct insn_state *state)
2463 {
2464         struct cfi_state *cfi = &state->cfi;
2465
2466         if (cfi->cfa.base == CFI_BP &&
2467             check_reg_frame_pos(&cfi->regs[CFI_BP], -cfi->cfa.offset) &&
2468             check_reg_frame_pos(&cfi->regs[CFI_RA], -cfi->cfa.offset + 8))
2469                 return true;
2470
2471         if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
2472                 return true;
2473
2474         return false;
2475 }
2476
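     /*
      * For UNWIND_HINT regs-style regions (e.g. entry code built around a
      * pt_regs frame) only the CFA offset is tracked: pushes, pops and
      * immediate adds to the stack pointer simply move the CFA, while the
      * individual register locations are described by the hint itself.
      */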
2477 static int update_cfi_state_regs(struct instruction *insn,
2478                                   struct cfi_state *cfi,
2479                                   struct stack_op *op)
2480 {
2481         struct cfi_reg *cfa = &cfi->cfa;
2482
2483         if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT)
2484                 return 0;
2485
2486         /* push */
2487         if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
2488                 cfa->offset += 8;
2489
2490         /* pop */
2491         if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
2492                 cfa->offset -= 8;
2493
2494         /* add immediate to sp */
2495         if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
2496             op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
2497                 cfa->offset -= op->src.offset;
2498
2499         return 0;
2500 }
2501
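     /*
      * Record where a callee-saved register got stored (base + offset), but
      * only the first time: later saves of an already-tracked register are
      * ignored so the original save location wins.
      */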
2502 static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset)
2503 {
2504         if (arch_callee_saved_reg(reg) &&
2505             cfi->regs[reg].base == CFI_UNDEFINED) {
2506                 cfi->regs[reg].base = base;
2507                 cfi->regs[reg].offset = offset;
2508         }
2509 }
2510
2511 static void restore_reg(struct cfi_state *cfi, unsigned char reg)
2512 {
2513         cfi->regs[reg].base = initial_func_cfi.regs[reg].base;
2514         cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset;
2515 }
2516
2517 /*
2518  * A note about DRAP stack alignment:
2519  *
2520  * GCC has the concept of a DRAP register, which is used to help keep track of
2521  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
2522  * register.  The typical DRAP pattern is:
2523  *
2524  *   4c 8d 54 24 08             lea    0x8(%rsp),%r10
2525  *   48 83 e4 c0                and    $0xffffffffffffffc0,%rsp
2526  *   41 ff 72 f8                pushq  -0x8(%r10)
2527  *   55                         push   %rbp
2528  *   48 89 e5                   mov    %rsp,%rbp
2529  *                              (more pushes)
2530  *   41 52                      push   %r10
2531  *                              ...
2532  *   41 5a                      pop    %r10
2533  *                              (more pops)
2534  *   5d                         pop    %rbp
2535  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
2536  *   c3                         retq
2537  *
2538  * There are some variations in the epilogues, like:
2539  *
2540  *   5b                         pop    %rbx
2541  *   41 5a                      pop    %r10
2542  *   41 5c                      pop    %r12
2543  *   41 5d                      pop    %r13
2544  *   41 5e                      pop    %r14
2545  *   c9                         leaveq
2546  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
2547  *   c3                         retq
2548  *
2549  * and:
2550  *
2551  *   4c 8b 55 e8                mov    -0x18(%rbp),%r10
2552  *   48 8b 5d e0                mov    -0x20(%rbp),%rbx
2553  *   4c 8b 65 f0                mov    -0x10(%rbp),%r12
2554  *   4c 8b 6d f8                mov    -0x8(%rbp),%r13
2555  *   c9                         leaveq
2556  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
2557  *   c3                         retq
2558  *
2559  * Sometimes r13 is used as the DRAP register, in which case it's saved and
2560  * restored beforehand:
2561  *
2562  *   41 55                      push   %r13
2563  *   4c 8d 6c 24 10             lea    0x10(%rsp),%r13
2564  *   48 83 e4 f0                and    $0xfffffffffffffff0,%rsp
2565  *                              ...
2566  *   49 8d 65 f0                lea    -0x10(%r13),%rsp
2567  *   41 5d                      pop    %r13
2568  *   c3                         retq
2569  */
2570 static int update_cfi_state(struct instruction *insn,
2571                             struct instruction *next_insn,
2572                             struct cfi_state *cfi, struct stack_op *op)
2573 {
2574         struct cfi_reg *cfa = &cfi->cfa;
2575         struct cfi_reg *regs = cfi->regs;
2576
2577         /* stack operations don't make sense with an undefined CFA */
2578         if (cfa->base == CFI_UNDEFINED) {
2579                 if (insn->func) {
2580                         WARN_FUNC("undefined stack state", insn->sec, insn->offset);
2581                         return -1;
2582                 }
2583                 return 0;
2584         }
2585
2586         if (cfi->type == UNWIND_HINT_TYPE_REGS ||
2587             cfi->type == UNWIND_HINT_TYPE_REGS_PARTIAL)
2588                 return update_cfi_state_regs(insn, cfi, op);
2589
2590         switch (op->dest.type) {
2591
2592         case OP_DEST_REG:
2593                 switch (op->src.type) {
2594
2595                 case OP_SRC_REG:
2596                         if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
2597                             cfa->base == CFI_SP &&
2598                             check_reg_frame_pos(&regs[CFI_BP], -cfa->offset)) {
2599
2600                                 /* mov %rsp, %rbp */
2601                                 cfa->base = op->dest.reg;
2602                                 cfi->bp_scratch = false;
2603                         }
2604
2605                         else if (op->src.reg == CFI_SP &&
2606                                  op->dest.reg == CFI_BP && cfi->drap) {
2607
2608                                 /* drap: mov %rsp, %rbp */
2609                                 regs[CFI_BP].base = CFI_BP;
2610                                 regs[CFI_BP].offset = -cfi->stack_size;
2611                                 cfi->bp_scratch = false;
2612                         }
2613
2614                         else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
2615
2616                                 /*
2617                                  * mov %rsp, %reg
2618                                  *
2619                                  * This is needed for the rare case where GCC
2620                                  * does:
2621                                  *
2622                                  *   mov    %rsp, %rax
2623                                  *   ...
2624                                  *   mov    %rax, %rsp
2625                                  */
2626                                 cfi->vals[op->dest.reg].base = CFI_CFA;
2627                                 cfi->vals[op->dest.reg].offset = -cfi->stack_size;
2628                         }
2629
2630                         else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
2631                                  (cfa->base == CFI_BP || cfa->base == cfi->drap_reg)) {
2632
2633                                 /*
2634                                  * mov %rbp, %rsp
2635                                  *
2636                                  * Restore the original stack pointer (Clang).
2637                                  */
2638                                 cfi->stack_size = -cfi->regs[CFI_BP].offset;
2639                         }
2640
2641                         else if (op->dest.reg == cfa->base) {
2642
2643                                 /* mov %reg, %rsp */
2644                                 if (cfa->base == CFI_SP &&
2645                                     cfi->vals[op->src.reg].base == CFI_CFA) {
2646
2647                                         /*
2648                                          * This is needed for the rare case
2649                                          * where GCC does something dumb like:
2650                                          *
2651                                          *   lea    0x8(%rsp), %rcx
2652                                          *   ...
2653                                          *   mov    %rcx, %rsp
2654                                          */
2655                                         cfa->offset = -cfi->vals[op->src.reg].offset;
2656                                         cfi->stack_size = cfa->offset;
2657
2658                                 } else if (cfa->base == CFI_SP &&
2659                                            cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
2660                                            cfi->vals[op->src.reg].offset == cfa->offset) {
2661
2662                                         /*
2663                                          * Stack swizzle:
2664                                          *
2665                                          * 1: mov %rsp, (%[tos])
2666                                          * 2: mov %[tos], %rsp
2667                                          *    ...
2668                                          * 3: pop %rsp
2669                                          *
2670                                          * Where:
2671                                          *
2672                                          * 1 - places a pointer to the previous
2673                                          *     stack at the Top-of-Stack of the
2674                                          *     new stack.
2675                                          *
2676                                          * 2 - switches to the new stack.
2677                                          *
2678                                          * 3 - pops the Top-of-Stack to restore
2679                                          *     the original stack.
2680                                          *
2681                                          * Note: we set base to SP_INDIRECT
2682                                          * here and preserve offset. Therefore
2683                                          * when the unwinder reaches ToS it
2684                                          * will dereference SP and then add the
2685                                          * offset to find the next frame, IOW:
2686                                          * (%rsp) + offset.
2687                                          */
2688                                         cfa->base = CFI_SP_INDIRECT;
2689
2690                                 } else {
2691                                         cfa->base = CFI_UNDEFINED;
2692                                         cfa->offset = 0;
2693                                 }
2694                         }
2695
2696                         else if (op->dest.reg == CFI_SP &&
2697                                  cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
2698                                  cfi->vals[op->src.reg].offset == cfa->offset) {
2699
2700                                 /*
2701                                  * The same stack swizzle case 2) as above. But
2702                                  * because we can't change cfa->base, case 3)
2703                                  * will become a regular POP. Pretend we're a
2704                                  * PUSH so things don't go unbalanced.
2705                                  */
2706                                 cfi->stack_size += 8;
2707                         }
2708
2709
2710                         break;
2711
2712                 case OP_SRC_ADD:
2713                         if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
2714
2715                                 /* add imm, %rsp */
2716                                 cfi->stack_size -= op->src.offset;
2717                                 if (cfa->base == CFI_SP)
2718                                         cfa->offset -= op->src.offset;
2719                                 break;
2720                         }
2721
2722                         if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
2723
2724                                 /* lea disp(%rbp), %rsp */
2725                                 cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
2726                                 break;
2727                         }
2728
2729                         if (!cfi->drap && op->src.reg == CFI_SP &&
2730                             op->dest.reg == CFI_BP && cfa->base == CFI_SP &&
2731                             check_reg_frame_pos(&regs[CFI_BP], -cfa->offset + op->src.offset)) {
2732
2733                                 /* lea disp(%rsp), %rbp */
2734                                 cfa->base = CFI_BP;
2735                                 cfa->offset -= op->src.offset;
2736                                 cfi->bp_scratch = false;
2737                                 break;
2738                         }
2739
2740                         if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
2741
2742                                 /* drap: lea disp(%rsp), %drap */
2743                                 cfi->drap_reg = op->dest.reg;
2744
2745                                 /*
2746                                  * lea disp(%rsp), %reg
2747                                  *
2748                                  * This is needed for the rare case where GCC
2749                                  * does something dumb like:
2750                                  *
2751                                  *   lea    0x8(%rsp), %rcx
2752                                  *   ...
2753                                  *   mov    %rcx, %rsp
2754                                  */
2755                                 cfi->vals[op->dest.reg].base = CFI_CFA;
2756                                 cfi->vals[op->dest.reg].offset =
2757                                         -cfi->stack_size + op->src.offset;
2758
2759                                 break;
2760                         }
2761
2762                         if (cfi->drap && op->dest.reg == CFI_SP &&
2763                             op->src.reg == cfi->drap_reg) {
2764
2765                                  /* drap: lea disp(%drap), %rsp */
2766                                 cfa->base = CFI_SP;
2767                                 cfa->offset = cfi->stack_size = -op->src.offset;
2768                                 cfi->drap_reg = CFI_UNDEFINED;
2769                                 cfi->drap = false;
2770                                 break;
2771                         }
2772
2773                         if (op->dest.reg == cfi->cfa.base && !(next_insn && next_insn->hint)) {
2774                                 WARN_FUNC("unsupported stack register modification",
2775                                           insn->sec, insn->offset);
2776                                 return -1;
2777                         }
2778
2779                         break;
2780
2781                 case OP_SRC_AND:
2782                         if (op->dest.reg != CFI_SP ||
2783                             (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
2784                             (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
2785                                 WARN_FUNC("unsupported stack pointer realignment",
2786                                           insn->sec, insn->offset);
2787                                 return -1;
2788                         }
2789
2790                         if (cfi->drap_reg != CFI_UNDEFINED) {
2791                                 /* drap: and imm, %rsp */
2792                                 cfa->base = cfi->drap_reg;
2793                                 cfa->offset = cfi->stack_size = 0;
2794                                 cfi->drap = true;
2795                         }
2796
2797                         /*
2798                          * Older versions of GCC (4.8ish) realign the stack
2799                          * without DRAP, with a frame pointer.
2800                          */
2801
2802                         break;
2803
2804                 case OP_SRC_POP:
2805                 case OP_SRC_POPF:
2806                         if (op->dest.reg == CFI_SP && cfa->base == CFI_SP_INDIRECT) {
2807
2808                                 /* pop %rsp; # restore from a stack swizzle */
2809                                 cfa->base = CFI_SP;
2810                                 break;
2811                         }
2812
2813                         if (!cfi->drap && op->dest.reg == cfa->base) {
2814
2815                                 /* pop %rbp */
2816                                 cfa->base = CFI_SP;
2817                         }
2818
2819                         if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
2820                             op->dest.reg == cfi->drap_reg &&
2821                             cfi->drap_offset == -cfi->stack_size) {
2822
2823                                 /* drap: pop %drap */
2824                                 cfa->base = cfi->drap_reg;
2825                                 cfa->offset = 0;
2826                                 cfi->drap_offset = -1;
2827
2828                         } else if (cfi->stack_size == -regs[op->dest.reg].offset) {
2829
2830                                 /* pop %reg */
2831                                 restore_reg(cfi, op->dest.reg);
2832                         }
2833
2834                         cfi->stack_size -= 8;
2835                         if (cfa->base == CFI_SP)
2836                                 cfa->offset -= 8;
2837
2838                         break;
2839
2840                 case OP_SRC_REG_INDIRECT:
2841                         if (!cfi->drap && op->dest.reg == cfa->base &&
2842                             op->dest.reg == CFI_BP) {
2843
2844                                 /* mov disp(%rsp), %rbp */
2845                                 cfa->base = CFI_SP;
2846                                 cfa->offset = cfi->stack_size;
2847                         }
2848
2849                         if (cfi->drap && op->src.reg == CFI_BP &&
2850                             op->src.offset == cfi->drap_offset) {
2851
2852                                 /* drap: mov disp(%rbp), %drap */
2853                                 cfa->base = cfi->drap_reg;
2854                                 cfa->offset = 0;
2855                                 cfi->drap_offset = -1;
2856                         }
2857
2858                         if (cfi->drap && op->src.reg == CFI_BP &&
2859                             op->src.offset == regs[op->dest.reg].offset) {
2860
2861                                 /* drap: mov disp(%rbp), %reg */
2862                                 restore_reg(cfi, op->dest.reg);
2863
2864                         } else if (op->src.reg == cfa->base &&
2865                             op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
2866
2867                                 /* mov disp(%rbp), %reg */
2868                                 /* mov disp(%rsp), %reg */
2869                                 restore_reg(cfi, op->dest.reg);
2870
2871                         } else if (op->src.reg == CFI_SP &&
2872                                    op->src.offset == regs[op->dest.reg].offset + cfi->stack_size) {
2873
2874                                 /* mov disp(%rsp), %reg */
2875                                 restore_reg(cfi, op->dest.reg);
2876                         }
2877
2878                         break;
2879
2880                 default:
2881                         WARN_FUNC("unknown stack-related instruction",
2882                                   insn->sec, insn->offset);
2883                         return -1;
2884                 }
2885
2886                 break;
2887
2888         case OP_DEST_PUSH:
2889         case OP_DEST_PUSHF:
2890                 cfi->stack_size += 8;
2891                 if (cfa->base == CFI_SP)
2892                         cfa->offset += 8;
2893
2894                 if (op->src.type != OP_SRC_REG)
2895                         break;
2896
2897                 if (cfi->drap) {
2898                         if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
2899
2900                                 /* drap: push %drap */
2901                                 cfa->base = CFI_BP_INDIRECT;
2902                                 cfa->offset = -cfi->stack_size;
2903
2904                                 /* save drap so we know when to restore it */
2905                                 cfi->drap_offset = -cfi->stack_size;
2906
2907                         } else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {
2908
2909                                 /* drap: push %rbp */
2910                                 cfi->stack_size = 0;
2911
2912                         } else {
2913
2914                                 /* drap: push %reg */
2915                                 save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
2916                         }
2917
2918                 } else {
2919
2920                         /* push %reg */
2921                         save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
2922                 }
2923
2924                 /* detect when asm code uses rbp as a scratch register */
2925                 if (opts.stackval && insn->func && op->src.reg == CFI_BP &&
2926                     cfa->base != CFI_BP)
2927                         cfi->bp_scratch = true;
2928                 break;
2929
2930         case OP_DEST_REG_INDIRECT:
2931
2932                 if (cfi->drap) {
2933                         if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
2934
2935                                 /* drap: mov %drap, disp(%rbp) */
2936                                 cfa->base = CFI_BP_INDIRECT;
2937                                 cfa->offset = op->dest.offset;
2938
2939                                 /* save drap offset so we know when to restore it */
2940                                 cfi->drap_offset = op->dest.offset;
2941                         } else {
2942
2943                                 /* drap: mov reg, disp(%rbp) */
2944                                 save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
2945                         }
2946
2947                 } else if (op->dest.reg == cfa->base) {
2948
2949                         /* mov reg, disp(%rbp) */
2950                         /* mov reg, disp(%rsp) */
2951                         save_reg(cfi, op->src.reg, CFI_CFA,
2952                                  op->dest.offset - cfi->cfa.offset);
2953
2954                 } else if (op->dest.reg == CFI_SP) {
2955
2956                         /* mov reg, disp(%rsp) */
2957                         save_reg(cfi, op->src.reg, CFI_CFA,
2958                                  op->dest.offset - cfi->stack_size);
2959
2960                 } else if (op->src.reg == CFI_SP && op->dest.offset == 0) {
2961
2962                         /* mov %rsp, (%reg); # setup a stack swizzle. */
2963                         cfi->vals[op->dest.reg].base = CFI_SP_INDIRECT;
2964                         cfi->vals[op->dest.reg].offset = cfa->offset;
2965                 }
2966
2967                 break;
2968
2969         case OP_DEST_MEM:
2970                 if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
2971                         WARN_FUNC("unknown stack-related memory operation",
2972                                   insn->sec, insn->offset);
2973                         return -1;
2974                 }
2975
2976                 /* pop mem */
2977                 cfi->stack_size -= 8;
2978                 if (cfa->base == CFI_SP)
2979                         cfa->offset -= 8;
2980
2981                 break;
2982
2983         default:
2984                 WARN_FUNC("unknown stack-related instruction",
2985                           insn->sec, insn->offset);
2986                 return -1;
2987         }
2988
2989         return 0;
2990 }
2991
2992 /*
2993  * The stack layouts of alternative instructions can sometimes diverge when
2994  * they have stack modifications.  That's fine as long as the potential stack
2995  * layouts don't conflict at any given potential instruction boundary.
2996  *
2997  * Flatten the CFIs of the different alternative code streams (both original
2998  * and replacement) into a single shared CFI array which can be used to detect
2999  * conflicts and nicely feed a linear array of ORC entries to the unwinder.
3000  */
3001 static int propagate_alt_cfi(struct objtool_file *file, struct instruction *insn)
3002 {
3003         struct cfi_state **alt_cfi;
3004         int group_off;
3005
3006         if (!insn->alt_group)
3007                 return 0;
3008
3009         if (!insn->cfi) {
3010                 WARN("CFI missing");
3011                 return -1;
3012         }
3013
3014         alt_cfi = insn->alt_group->cfi;
3015         group_off = insn->offset - insn->alt_group->first_insn->offset;
3016
3017         if (!alt_cfi[group_off]) {
3018                 alt_cfi[group_off] = insn->cfi;
3019         } else {
3020                 if (cficmp(alt_cfi[group_off], insn->cfi)) {
3021                         WARN_FUNC("stack layout conflict in alternatives",
3022                                   insn->sec, insn->offset);
3023                         return -1;
3024                 }
3025         }
3026
3027         return 0;
3028 }
3029
3030 static int handle_insn_ops(struct instruction *insn,
3031                            struct instruction *next_insn,
3032                            struct insn_state *state)
3033 {
3034         struct stack_op *op;
3035
3036         list_for_each_entry(op, &insn->stack_ops, list) {
3037
3038                 if (update_cfi_state(insn, next_insn, &state->cfi, op))
3039                         return 1;
3040
3041                 if (!insn->alt_group)
3042                         continue;
3043
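                     /*
                      * Within alternatives, PUSHF/POPF pairs may save and
                      * restore the AC flag.  Track that with a small bit
                      * stack: each PUSHF shifts the current uaccess state in,
                      * each POPF shifts the saved state back out; a lone set
                      * bit marks the empty stack, and the top bit is reserved
                      * to catch overflow.
                      */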
3044                 if (op->dest.type == OP_DEST_PUSHF) {
3045                         if (!state->uaccess_stack) {
3046                                 state->uaccess_stack = 1;
3047                         } else if (state->uaccess_stack >> 31) {
3048                                 WARN_FUNC("PUSHF stack exhausted",
3049                                           insn->sec, insn->offset);
3050                                 return 1;
3051                         }
3052                         state->uaccess_stack <<= 1;
3053                         state->uaccess_stack  |= state->uaccess;
3054                 }
3055
3056                 if (op->src.type == OP_SRC_POPF) {
3057                         if (state->uaccess_stack) {
3058                                 state->uaccess = state->uaccess_stack & 1;
3059                                 state->uaccess_stack >>= 1;
3060                                 if (state->uaccess_stack == 1)
3061                                         state->uaccess_stack = 0;
3062                         }
3063                 }
3064         }
3065
3066         return 0;
3067 }
3068
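     /*
      * Compare the CFI state computed along the current path (cfi2) against the
      * state recorded when the instruction was first visited (insn->cfi).  Any
      * mismatch means the stack layout depends on which path was taken.
      */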
3069 static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
3070 {
3071         struct cfi_state *cfi1 = insn->cfi;
3072         int i;
3073
3074         if (!cfi1) {
3075                 WARN("CFI missing");
3076                 return false;
3077         }
3078
3079         if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {
3080
3081                 WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
3082                           insn->sec, insn->offset,
3083                           cfi1->cfa.base, cfi1->cfa.offset,
3084                           cfi2->cfa.base, cfi2->cfa.offset);
3085
3086         } else if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
3087                 for (i = 0; i < CFI_NUM_REGS; i++) {
3088                         if (!memcmp(&cfi1->regs[i], &cfi2->regs[i],
3089                                     sizeof(struct cfi_reg)))
3090                                 continue;
3091
3092                         WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
3093                                   insn->sec, insn->offset,
3094                                   i, cfi1->regs[i].base, cfi1->regs[i].offset,
3095                                   i, cfi2->regs[i].base, cfi2->regs[i].offset);
3096                         break;
3097                 }
3098
3099         } else if (cfi1->type != cfi2->type) {
3100
3101                 WARN_FUNC("stack state mismatch: type1=%d type2=%d",
3102                           insn->sec, insn->offset, cfi1->type, cfi2->type);
3103
3104         } else if (cfi1->drap != cfi2->drap ||
3105                    (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
3106                    (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {
3107
3108                 WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
3109                           insn->sec, insn->offset,
3110                           cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
3111                           cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);
3112
3113         } else
3114                 return true;
3115
3116         return false;
3117 }
3118
3119 static inline bool func_uaccess_safe(struct symbol *func)
3120 {
3121         if (func)
3122                 return func->uaccess_safe;
3123
3124         return false;
3125 }
3126
3127 static inline const char *call_dest_name(struct instruction *insn)
3128 {
3129         static char pvname[19];
3130         struct reloc *rel;
3131         int idx;
3132
3133         if (insn->call_dest)
3134                 return insn->call_dest->name;
3135
3136         rel = insn_reloc(NULL, insn);
3137         if (rel && !strcmp(rel->sym->name, "pv_ops")) {
3138                 idx = (rel->addend / sizeof(void *));
3139                 snprintf(pvname, sizeof(pvname), "pv_ops[%d]", idx);
3140                 return pvname;
3141         }
3142
3143         return "{dynamic}";
3144 }
3145
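     /*
      * A call through pv_ops[idx] is only considered noinstr-safe if every
      * target currently registered for that slot lives in a noinstr section.
      * A clean result is cached in file->pv_ops[idx].clean so clean slots are
      * only checked once.
      */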
3146 static bool pv_call_dest(struct objtool_file *file, struct instruction *insn)
3147 {
3148         struct symbol *target;
3149         struct reloc *rel;
3150         int idx;
3151
3152         rel = insn_reloc(file, insn);
3153         if (!rel || strcmp(rel->sym->name, "pv_ops"))
3154                 return false;
3155
3156         idx = (arch_dest_reloc_offset(rel->addend) / sizeof(void *));
3157
3158         if (file->pv_ops[idx].clean)
3159                 return true;
3160
3161         file->pv_ops[idx].clean = true;
3162
3163         list_for_each_entry(target, &file->pv_ops[idx].targets, pv_target) {
3164                 if (!target->sec->noinstr) {
3165                         WARN("pv_ops[%d]: %s", idx, target->name);
3166                         file->pv_ops[idx].clean = false;
3167                 }
3168         }
3169
3170         return file->pv_ops[idx].clean;
3171 }
3172
3173 static inline bool noinstr_call_dest(struct objtool_file *file,
3174                                      struct instruction *insn,
3175                                      struct symbol *func)
3176 {
3177         /*
3178          * We can't deal with indirect function calls at present;
3179          * assume they're instrumented.
3180          */
3181         if (!func) {
3182                 if (file->pv_ops)
3183                         return pv_call_dest(file, insn);
3184
3185                 return false;
3186         }
3187
3188         /*
3189          * If the symbol is from a noinstr section, we're good.
3190          */
3191         if (func->sec->noinstr)
3192                 return true;
3193
3194         /*
3195          * The __ubsan_handle_*() calls are like WARN(): they only happen when
3196          * something 'BAD' happened. At the risk of taking the machine down,
3197          * let them proceed to get the message out.
3198          */
3199         if (!strncmp(func->name, "__ubsan_handle_", 15))
3200                 return true;
3201
3202         return false;
3203 }
3204
3205 static int validate_call(struct objtool_file *file,
3206                          struct instruction *insn,
3207                          struct insn_state *state)
3208 {
3209         if (state->noinstr && state->instr <= 0 &&
3210             !noinstr_call_dest(file, insn, insn->call_dest)) {
3211                 WARN_FUNC("call to %s() leaves .noinstr.text section",
3212                                 insn->sec, insn->offset, call_dest_name(insn));
3213                 return 1;
3214         }
3215
3216         if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
3217                 WARN_FUNC("call to %s() with UACCESS enabled",
3218                                 insn->sec, insn->offset, call_dest_name(insn));
3219                 return 1;
3220         }
3221
3222         if (state->df) {
3223                 WARN_FUNC("call to %s() with DF set",
3224                                 insn->sec, insn->offset, call_dest_name(insn));
3225                 return 1;
3226         }
3227
3228         return 0;
3229 }
3230
3231 static int validate_sibling_call(struct objtool_file *file,
3232                                  struct instruction *insn,
3233                                  struct insn_state *state)
3234 {
3235         if (has_modified_stack_frame(insn, state)) {
3236                 WARN_FUNC("sibling call from callable instruction with modified stack frame",
3237                                 insn->sec, insn->offset);
3238                 return 1;
3239         }
3240
3241         return validate_call(file, insn, state);
3242 }
3243
3244 static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
3245 {
3246         if (state->noinstr && state->instr > 0) {
3247                 WARN_FUNC("return with instrumentation enabled",
3248                           insn->sec, insn->offset);
3249                 return 1;
3250         }
3251
3252         if (state->uaccess && !func_uaccess_safe(func)) {
3253                 WARN_FUNC("return with UACCESS enabled",
3254                           insn->sec, insn->offset);
3255                 return 1;
3256         }
3257
3258         if (!state->uaccess && func_uaccess_safe(func)) {
3259                 WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
3260                           insn->sec, insn->offset);
3261                 return 1;
3262         }
3263
3264         if (state->df) {
3265                 WARN_FUNC("return with DF set",
3266                           insn->sec, insn->offset);
3267                 return 1;
3268         }
3269
3270         if (func && has_modified_stack_frame(insn, state)) {
3271                 WARN_FUNC("return with modified stack frame",
3272                           insn->sec, insn->offset);
3273                 return 1;
3274         }
3275
3276         if (state->cfi.bp_scratch) {
3277                 WARN_FUNC("BP used as a scratch register",
3278                           insn->sec, insn->offset);
3279                 return 1;
3280         }
3281
3282         return 0;
3283 }
3284
3285 static struct instruction *next_insn_to_validate(struct objtool_file *file,
3286                                                  struct instruction *insn)
3287 {
3288         struct alt_group *alt_group = insn->alt_group;
3289
3290         /*
3291          * Simulate the fact that alternatives are patched in-place.  When the
3292          * end of a replacement alt_group is reached, redirect objtool flow to
3293          * the end of the original alt_group.
3294          */
3295         if (alt_group && insn == alt_group->last_insn && alt_group->orig_group)
3296                 return next_insn_same_sec(file, alt_group->orig_group->last_insn);
3297
3298         return next_insn_same_sec(file, insn);
3299 }
3300
3301 /*
3302  * Follow the branch starting at the given instruction, and recursively follow
3303  * any other branches (jumps).  Meanwhile, track the frame pointer state at
3304  * each instruction and validate all the rules described in
3305  * tools/objtool/Documentation/objtool.txt.
3306  */
3307 static int validate_branch(struct objtool_file *file, struct symbol *func,
3308                            struct instruction *insn, struct insn_state state)
3309 {
3310         struct alternative *alt;
3311         struct instruction *next_insn, *prev_insn = NULL;
3312         struct section *sec;
3313         u8 visited;
3314         int ret;
3315
3316         sec = insn->sec;
3317
3318         while (1) {
3319                 next_insn = next_insn_to_validate(file, insn);
3320
3321                 if (func && insn->func && func != insn->func->pfunc) {
3322                         /* Ignore KCFI type preambles, which always fall through */
3323                         if (!strncmp(func->name, "__cfi_", 6))
3324                                 return 0;
3325
3326                         WARN("%s() falls through to next function %s()",
3327                              func->name, insn->func->name);
3328                         return 1;
3329                 }
3330
3331                 if (func && insn->ignore) {
3332                         WARN_FUNC("BUG: why am I validating an ignored function?",
3333                                   sec, insn->offset);
3334                         return 1;
3335                 }
3336
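                     /*
                      * Each instruction has a separate "visited" bit per
                      * uaccess state.  If it was already reached with a
                      * different state, the CFI must still match (unless a
                      * hint overrides it); if it was reached with the same
                      * state, this branch is done.
                      */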
3337                 visited = VISITED_BRANCH << state.uaccess;
3338                 if (insn->visited & VISITED_BRANCH_MASK) {
3339                         if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
3340                                 return 1;
3341
3342                         if (insn->visited & visited)
3343                                 return 0;
3344                 } else {
3345                         nr_insns_visited++;
3346                 }
3347
3348                 if (state.noinstr)
3349                         state.instr += insn->instr;
3350
3351                 if (insn->hint) {
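                             /*
                              * A "restore" hint reuses the CFI state recorded
                              * at the most recent "save" hint earlier in the
                              * same symbol, which must therefore already have
                              * been visited.
                              */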
3352                         if (insn->restore) {
3353                                 struct instruction *save_insn, *i;
3354
3355                                 i = insn;
3356                                 save_insn = NULL;
3357
3358                                 sym_for_each_insn_continue_reverse(file, func, i) {
3359                                         if (i->save) {
3360                                                 save_insn = i;
3361                                                 break;
3362                                         }
3363                                 }
3364
3365                                 if (!save_insn) {
3366                                         WARN_FUNC("no corresponding CFI save for CFI restore",
3367                                                   sec, insn->offset);
3368                                         return 1;
3369                                 }
3370
3371                                 if (!save_insn->visited) {
3372                                         WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
3373                                                   sec, insn->offset);
3374                                         return 1;
3375                                 }
3376
3377                                 insn->cfi = save_insn->cfi;
3378                                 nr_cfi_reused++;
3379                         }
3380
3381                         state.cfi = *insn->cfi;
3382                 } else {
3383                         /* XXX track if we actually changed state.cfi */
3384
3385                         if (prev_insn && !cficmp(prev_insn->cfi, &state.cfi)) {
3386                                 insn->cfi = prev_insn->cfi;
3387                                 nr_cfi_reused++;
3388                         } else {
3389                                 insn->cfi = cfi_hash_find_or_add(&state.cfi);
3390                         }
3391                 }
3392
3393                 insn->visited |= visited;
3394
3395                 if (propagate_alt_cfi(file, insn))
3396                         return 1;
3397
3398                 if (!insn->ignore_alts && !list_empty(&insn->alts)) {
3399                         bool skip_orig = false;
3400
3401                         list_for_each_entry(alt, &insn->alts, list) {
3402                                 if (alt->skip_orig)
3403                                         skip_orig = true;
3404
3405                                 ret = validate_branch(file, func, alt->insn, state);
3406                                 if (ret) {
3407                                         if (opts.backtrace)
3408                                                 BT_FUNC("(alt)", insn);
3409                                         return ret;
3410                                 }
3411                         }
3412
3413                         if (skip_orig)
3414                                 return 0;
3415                 }
3416
3417                 if (handle_insn_ops(insn, next_insn, &state))
3418                         return 1;
3419
3420                 switch (insn->type) {
3421
3422                 case INSN_RETURN:
3423                         return validate_return(func, insn, &state);
3424
3425                 case INSN_CALL:
3426                 case INSN_CALL_DYNAMIC:
3427                         ret = validate_call(file, insn, &state);
3428                         if (ret)
3429                                 return ret;
3430
3431                         if (opts.stackval && func && !is_fentry_call(insn) &&
3432                             !has_valid_stack_frame(&state)) {
3433                                 WARN_FUNC("call without frame pointer save/setup",
3434                                           sec, insn->offset);
3435                                 return 1;
3436                         }
3437
3438                         if (insn->dead_end)
3439                                 return 0;
3440
3441                         break;
3442
3443                 case INSN_JUMP_CONDITIONAL:
3444                 case INSN_JUMP_UNCONDITIONAL:
3445                         if (is_sibling_call(insn)) {
3446                                 ret = validate_sibling_call(file, insn, &state);
3447                                 if (ret)
3448                                         return ret;
3449
3450                         } else if (insn->jump_dest) {
3451                                 ret = validate_branch(file, func,
3452                                                       insn->jump_dest, state);
3453                                 if (ret) {
3454                                         if (opts.backtrace)
3455                                                 BT_FUNC("(branch)", insn);
3456                                         return ret;
3457                                 }
3458                         }
3459
3460                         if (insn->type == INSN_JUMP_UNCONDITIONAL)
3461                                 return 0;
3462
3463                         break;
3464
3465                 case INSN_JUMP_DYNAMIC:
3466                 case INSN_JUMP_DYNAMIC_CONDITIONAL:
3467                         if (is_sibling_call(insn)) {
3468                                 ret = validate_sibling_call(file, insn, &state);
3469                                 if (ret)
3470                                         return ret;
3471                         }
3472
3473                         if (insn->type == INSN_JUMP_DYNAMIC)
3474                                 return 0;
3475
3476                         break;
3477
3478                 case INSN_CONTEXT_SWITCH:
3479                         if (func && (!next_insn || !next_insn->hint)) {
3480                                 WARN_FUNC("unsupported instruction in callable function",
3481                                           sec, insn->offset);
3482                                 return 1;
3483                         }
3484                         return 0;
3485
3486                 case INSN_STAC:
3487                         if (state.uaccess) {
3488                                 WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
3489                                 return 1;
3490                         }
3491
3492                         state.uaccess = true;
3493                         break;
3494
3495                 case INSN_CLAC:
3496                         if (!state.uaccess && func) {
3497                                 WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
3498                                 return 1;
3499                         }
3500
3501                         if (func_uaccess_safe(func) && !state.uaccess_stack) {
3502                                 WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
3503                                 return 1;
3504                         }
3505
3506                         state.uaccess = false;
3507                         break;
3508
3509                 case INSN_STD:
3510                         if (state.df) {
3511                                 WARN_FUNC("recursive STD", sec, insn->offset);
3512                                 return 1;
3513                         }
3514
3515                         state.df = true;
3516                         break;
3517
3518                 case INSN_CLD:
3519                         if (!state.df && func) {
3520                                 WARN_FUNC("redundant CLD", sec, insn->offset);
3521                                 return 1;
3522                         }
3523
3524                         state.df = false;
3525                         break;
3526
3527                 default:
3528                         break;
3529                 }
3530
3531                 if (insn->dead_end)
3532                         return 0;
3533
3534                 if (!next_insn) {
3535                         if (state.cfi.cfa.base == CFI_UNDEFINED)
3536                                 return 0;
3537                         WARN("%s: unexpected end of section", sec->name);
3538                         return 1;
3539                 }
3540
3541                 prev_insn = insn;
3542                 insn = next_insn;
3543         }
3544
3545         return 0;
3546 }
3547
3548 static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
3549 {
3550         struct instruction *insn;
3551         struct insn_state state;
3552         int ret, warnings = 0;
3553
3554         if (!file->hints)
3555                 return 0;
3556
3557         init_insn_state(file, &state, sec);
3558
3559         if (sec) {
3560                 insn = find_insn(file, sec, 0);
3561                 if (!insn)
3562                         return 0;
3563         } else {
3564                 insn = list_first_entry(&file->insn_list, typeof(*insn), list);
3565         }
3566
3567         while (&insn->list != &file->insn_list && (!sec || insn->sec == sec)) {
3568                 if (insn->hint && !insn->visited && !insn->ignore) {
3569                         ret = validate_branch(file, insn->func, insn, state);
3570                         if (ret && opts.backtrace)
3571                                 BT_FUNC("<=== (hint)", insn);
3572                         warnings += ret;
3573                 }
3574
3575                 insn = list_next_entry(insn, list);
3576         }
3577
3578         return warnings;
3579 }
3580
3581 /*
3582  * Validate rethunk entry constraint: must untrain RET before the first RET.
3583  *
3584  * Follow every branch (intra-function) and ensure ANNOTATE_UNRET_END comes
3585  * before an actual RET instruction.
3586  */
3587 static int validate_entry(struct objtool_file *file, struct instruction *insn)
3588 {
3589         struct instruction *next, *dest;
3590         int ret, warnings = 0;
3591
3592         for (;;) {
3593                 next = next_insn_to_validate(file, insn);
3594
3595                 if (insn->visited & VISITED_ENTRY)
3596                         return 0;
3597
3598                 insn->visited |= VISITED_ENTRY;
3599
3600                 if (!insn->ignore_alts && !list_empty(&insn->alts)) {
3601                         struct alternative *alt;
3602                         bool skip_orig = false;
3603
3604                         list_for_each_entry(alt, &insn->alts, list) {
3605                                 if (alt->skip_orig)
3606                                         skip_orig = true;
3607
3608                                 ret = validate_entry(file, alt->insn);
3609                                 if (ret) {
3610                                         if (opts.backtrace)
3611                                                 BT_FUNC("(alt)", insn);
3612                                         return ret;
3613                                 }
3614                         }
3615
3616                         if (skip_orig)
3617                                 return 0;
3618                 }
3619
3620                 switch (insn->type) {
3621
3622                 case INSN_CALL_DYNAMIC:
3623                 case INSN_JUMP_DYNAMIC:
3624                 case INSN_JUMP_DYNAMIC_CONDITIONAL:
3625                         WARN_FUNC("early indirect call", insn->sec, insn->offset);
3626                         return 1;
3627
3628                 case INSN_JUMP_UNCONDITIONAL:
3629                 case INSN_JUMP_CONDITIONAL:
3630                         if (!is_sibling_call(insn)) {
3631                                 if (!insn->jump_dest) {
3632                                         WARN_FUNC("unresolved jump target after linking?!?",
3633                                                   insn->sec, insn->offset);
3634                                         return -1;
3635                                 }
3636                                 ret = validate_entry(file, insn->jump_dest);
3637                                 if (ret) {
3638                                         if (opts.backtrace) {
3639                                                 BT_FUNC("(branch%s)", insn,
3640                                                         insn->type == INSN_JUMP_CONDITIONAL ? "-cond" : "");
3641                                         }
3642                                         return ret;
3643                                 }
3644
3645                                 if (insn->type == INSN_JUMP_UNCONDITIONAL)
3646                                         return 0;
3647
3648                                 break;
3649                         }
3650
3651                         /* fallthrough */
3652                 case INSN_CALL:
3653                         dest = find_insn(file, insn->call_dest->sec,
3654                                          insn->call_dest->offset);
3655                         if (!dest) {
3656                                 WARN("Unresolved function after linking!?: %s",
3657                                      insn->call_dest->name);
3658                                 return -1;
3659                         }
3660
3661                         ret = validate_entry(file, dest);
3662                         if (ret) {
3663                                 if (opts.backtrace)
3664                                         BT_FUNC("(call)", insn);
3665                                 return ret;
3666                         }
3667                         /*
3668                          * If a call returns without error, it must have seen UNTRAIN_RET.
3669                          * Therefore any non-error return is a success.
3670                          */
3671                         return 0;
3672
3673                 case INSN_RETURN:
3674                         WARN_FUNC("RET before UNTRAIN", insn->sec, insn->offset);
3675                         return 1;
3676
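                     /*
                      * A retpoline-safe NOP is taken to be the UNTRAIN_RET
                      * annotation point (ANNOTATE_UNRET_END); reaching one
                      * means this branch has been untrained.
                      */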
3677                 case INSN_NOP:
3678                         if (insn->retpoline_safe)
3679                                 return 0;
3680                         break;
3681
3682                 default:
3683                         break;
3684                 }
3685
3686                 if (!next) {
3687                         WARN_FUNC("teh end!", insn->sec, insn->offset);
3688                         return -1;
3689                 }
3690                 insn = next;
3691         }
3692
3693         return warnings;
3694 }
3695
3696 /*
3697  * Validate that all branches starting at 'insn->entry' encounter UNRET_END
3698  * before RET.
3699  */
3700 static int validate_unret(struct objtool_file *file)
3701 {
3702         struct instruction *insn;
3703         int ret, warnings = 0;
3704
3705         for_each_insn(file, insn) {
3706                 if (!insn->entry)
3707                         continue;
3708
3709                 ret = validate_entry(file, insn);
3710                 if (ret < 0) {
3711                         WARN_FUNC("Failed UNRET validation", insn->sec, insn->offset);
3712                         return ret;
3713                 }
3714                 warnings += ret;
3715         }
3716
3717         return warnings;
3718 }
3719
3720 static int validate_retpoline(struct objtool_file *file)
3721 {
3722         struct instruction *insn;
3723         int warnings = 0;
3724
3725         for_each_insn(file, insn) {
3726                 if (insn->type != INSN_JUMP_DYNAMIC &&
3727                     insn->type != INSN_CALL_DYNAMIC &&
3728                     insn->type != INSN_RETURN)
3729                         continue;
3730
3731                 if (insn->retpoline_safe)
3732                         continue;
3733
3734                 /*
3735                  * .init.text code is run before userspace and thus doesn't
3736                  * strictly need retpolines, except for modules which are
3737                  * loaded late; they very much do need retpolines in their
3738                  * .init.text
3739                  */
3740                 if (!strcmp(insn->sec->name, ".init.text") && !opts.module)
3741                         continue;
3742
3743                 if (insn->type == INSN_RETURN) {
3744                         if (opts.rethunk) {
3745                                 WARN_FUNC("'naked' return found in RETHUNK build",
3746                                           insn->sec, insn->offset);
3747                         } else
3748                                 continue;
3749                 } else {
3750                         WARN_FUNC("indirect %s found in RETPOLINE build",
3751                                   insn->sec, insn->offset,
3752                                   insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
3753                 }
3754
3755                 warnings++;
3756         }
3757
3758         return warnings;
3759 }
3760
3761 static bool is_kasan_insn(struct instruction *insn)
3762 {
3763         return (insn->type == INSN_CALL &&
3764                 !strcmp(insn->call_dest->name, "__asan_handle_no_return"));
3765 }
3766
3767 static bool is_ubsan_insn(struct instruction *insn)
3768 {
3769         return (insn->type == INSN_CALL &&
3770                 !strcmp(insn->call_dest->name,
3771                         "__ubsan_handle_builtin_unreachable"));
3772 }
3773
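     /*
      * Decide whether an instruction that validate_branch() never reached should
      * nevertheless be excused from the "unreachable instruction" warning.
      */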
3774 static bool ignore_unreachable_insn(struct objtool_file *file, struct instruction *insn)
3775 {
3776         int i;
3777         struct instruction *prev_insn;
3778
3779         if (insn->ignore || insn->type == INSN_NOP || insn->type == INSN_TRAP)
3780                 return true;
3781
3782         /*
3783          * Ignore alternative replacement instructions.  This can happen
3784          * when a whitelisted function uses one of the ALTERNATIVE macros.
3785          */
3786         if (!strcmp(insn->sec->name, ".altinstr_replacement") ||
3787             !strcmp(insn->sec->name, ".altinstr_aux"))
3788                 return true;
3789
3790         /*
3791          * Whole archive runs might encounter dead code from weak symbols.
3792          * This is where the linker will have dropped the weak symbol in
3793          * favour of a regular symbol, but left the code in place.
3794          *
3795          * In this case we'll find a piece of code (whole function) that is not
3796          * covered by a !section symbol. Ignore them.
3797          */
3798         if (opts.link && !insn->func) {
3799                 int size = find_symbol_hole_containing(insn->sec, insn->offset);
3800                 unsigned long end = insn->offset + size;
3801
3802                 if (!size) /* not a hole */
3803                         return false;
3804
3805                 if (size < 0) /* hole until the end */
3806                         return true;
3807
3808                 sec_for_each_insn_continue(file, insn) {
3809                         /*
3810                          * If we reach a visited instruction at or before the
3811                          * end of the hole, ignore the unreachable.
3812                          */
3813                         if (insn->visited)
3814                                 return true;
3815
3816                         if (insn->offset >= end)
3817                                 break;
3818
3819                         /*
3820                          * If this hole jumps to a .cold function, mark it as ignored too.
3821                          */
3822                         if (insn->jump_dest && insn->jump_dest->func &&
3823                             strstr(insn->jump_dest->func->name, ".cold")) {
3824                                 struct instruction *dest = insn->jump_dest;
3825                                 func_for_each_insn(file, dest->func, dest)
3826                                         dest->ignore = true;
3827                         }
3828                 }
3829
3830                 return false;
3831         }
3832
3833         if (!insn->func)
3834                 return false;
3835
3836         if (insn->func->static_call_tramp)
3837                 return true;
3838
3839         /*
3840          * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
3841          * __builtin_unreachable().  The BUG() macro has an unreachable() after
3842          * the UD2, which causes GCC's undefined trap logic to emit another UD2
3843          * (or occasionally a JMP to UD2).
3844          *
3845          * It may also insert a UD2 after calling a __noreturn function.
3846          */
3847         prev_insn = list_prev_entry(insn, list);
3848         if ((prev_insn->dead_end || dead_end_function(file, prev_insn->call_dest)) &&
3849             (insn->type == INSN_BUG ||
3850              (insn->type == INSN_JUMP_UNCONDITIONAL &&
3851               insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
3852                 return true;
3853
3854         /*
3855          * Check if this (or a subsequent) instruction is related to
3856          * CONFIG_UBSAN or CONFIG_KASAN.
3857          *
3858          * End the search at 5 instructions to avoid going into the weeds.
3859          */
3860         for (i = 0; i < 5; i++) {
3861
3862                 if (is_kasan_insn(insn) || is_ubsan_insn(insn))
3863                         return true;
3864
3865                 if (insn->type == INSN_JUMP_UNCONDITIONAL) {
3866                         if (insn->jump_dest &&
3867                             insn->jump_dest->func == insn->func) {
3868                                 insn = insn->jump_dest;
3869                                 continue;
3870                         }
3871
3872                         break;
3873                 }
3874
3875                 if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
3876                         break;
3877
3878                 insn = list_next_entry(insn, list);
3879         }
3880
3881         return false;
3882 }
3883
3884 static int validate_symbol(struct objtool_file *file, struct section *sec,
3885                            struct symbol *sym, struct insn_state *state)
3886 {
3887         struct instruction *insn;
3888         int ret;
3889
3890         if (!sym->len) {
3891                 WARN("%s() is missing an ELF size annotation", sym->name);
3892                 return 1;
3893         }
3894
3895         if (sym->pfunc != sym || sym->alias != sym)
3896                 return 0;
3897
3898         insn = find_insn(file, sec, sym->offset);
3899         if (!insn || insn->ignore || insn->visited)
3900                 return 0;
3901
3902         state->uaccess = sym->uaccess_safe;
3903
3904         ret = validate_branch(file, insn->func, insn, *state);
3905         if (ret && opts.backtrace)
3906                 BT_FUNC("<=== (sym)", insn);
3907         return ret;
3908 }
3909
3910 static int validate_section(struct objtool_file *file, struct section *sec)
3911 {
3912         struct insn_state state;
3913         struct symbol *func;
3914         int warnings = 0;
3915
3916         list_for_each_entry(func, &sec->symbol_list, list) {
3917                 if (func->type != STT_FUNC)
3918                         continue;
3919
3920                 init_insn_state(file, &state, sec);
3921                 set_func_state(&state.cfi);
3922
3923                 warnings += validate_symbol(file, sec, func, &state);
3924         }
3925
3926         return warnings;
3927 }
3928
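     /*
      * For noinstr validation, check .noinstr.text and .entry.text: code reached
      * from these sections must not call out into instrumentable code.
      */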
3929 static int validate_noinstr_sections(struct objtool_file *file)
3930 {
3931         struct section *sec;
3932         int warnings = 0;
3933
3934         sec = find_section_by_name(file->elf, ".noinstr.text");
3935         if (sec) {
3936                 warnings += validate_section(file, sec);
3937                 warnings += validate_unwind_hints(file, sec);
3938         }
3939
3940         sec = find_section_by_name(file->elf, ".entry.text");
3941         if (sec) {
3942                 warnings += validate_section(file, sec);
3943                 warnings += validate_unwind_hints(file, sec);
3944         }
3945
3946         return warnings;
3947 }
3948
3949 static int validate_functions(struct objtool_file *file)
3950 {
3951         struct section *sec;
3952         int warnings = 0;
3953
3954         for_each_sec(file, sec) {
3955                 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
3956                         continue;
3957
3958                 warnings += validate_section(file, sec);
3959         }
3960
3961         return warnings;
3962 }
3963
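     /*
      * ENDBR instructions start out on the seal list; a legitimate reference
      * found by the IBT validation below takes them off it so they keep their
      * ENDBR.
      */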
3964 static void mark_endbr_used(struct instruction *insn)
3965 {
3966         if (!list_empty(&insn->call_node))
3967                 list_del_init(&insn->call_node);
3968 }
3969
3970 static int validate_ibt_insn(struct objtool_file *file, struct instruction *insn)
3971 {
3972         struct instruction *dest;
3973         struct reloc *reloc;
3974         unsigned long off;
3975         int warnings = 0;
3976
3977         /*
3978          * Looking for function pointer load relocations.  Ignore
3979          * direct/indirect branches:
3980          */
3981         switch (insn->type) {
3982         case INSN_CALL:
3983         case INSN_CALL_DYNAMIC:
3984         case INSN_JUMP_CONDITIONAL:
3985         case INSN_JUMP_UNCONDITIONAL:
3986         case INSN_JUMP_DYNAMIC:
3987         case INSN_JUMP_DYNAMIC_CONDITIONAL:
3988         case INSN_RETURN:
3989         case INSN_NOP:
3990                 return 0;
3991         default:
3992                 break;
3993         }
3994
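             /*
              * Walk every relocation within this instruction; each one may take
              * the address of code, and any such target has to start with ENDBR
              * (or be explicitly marked as not needing one).
              */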
3995         for (reloc = insn_reloc(file, insn);
3996              reloc;
3997              reloc = find_reloc_by_dest_range(file->elf, insn->sec,
3998                                               reloc->offset + 1,
3999                                               (insn->offset + insn->len) - (reloc->offset + 1))) {
4000
4001                 /*
4002                  * static_call_update() references the trampoline, which
4003                  * doesn't have (or need) ENDBR.  Skip warning in that case.
4004                  */
4005                 if (reloc->sym->static_call_tramp)
4006                         continue;
4007
4008                 off = reloc->sym->offset;
4009                 if (reloc->type == R_X86_64_PC32 || reloc->type == R_X86_64_PLT32)
4010                         off += arch_dest_reloc_offset(reloc->addend);
4011                 else
4012                         off += reloc->addend;
4013
4014                 dest = find_insn(file, reloc->sym->sec, off);
4015                 if (!dest)
4016                         continue;
4017
4018                 if (dest->type == INSN_ENDBR) {
4019                         mark_endbr_used(dest);
4020                         continue;
4021                 }
4022
4023                 if (dest->func && dest->func == insn->func) {
4024                         /*
4025                          * Anything from->to self is either _THIS_IP_ or
4026                          * IRET-to-self.
4027                          *
4028                          * There is no sane way to annotate _THIS_IP_ since the
4029                          * compiler treats the relocation as a constant and is
4030                          * happy to fold in offsets, skewing any annotation we
4031                          * do, leading to vast amounts of false-positives.
4032                          *
4033                          * There's also compiler generated _THIS_IP_ through
4034                          * KCOV and such which we have no hope of annotating.
4035                          *
4036                          * As such, blanket accept self-references without
4037                          * issue.
4038                          */
4039                         continue;
4040                 }
4041
4042                 if (dest->noendbr)
4043                         continue;
4044
4045                 WARN_FUNC("relocation to !ENDBR: %s",
4046                           insn->sec, insn->offset,
4047                           offstr(dest->sec, dest->offset));
4048
4049                 warnings++;
4050         }
4051
4052         return warnings;
4053 }
4054
4055 static int validate_ibt_data_reloc(struct objtool_file *file,
4056                                    struct reloc *reloc)
4057 {
4058         struct instruction *dest;
4059
4060         dest = find_insn(file, reloc->sym->sec,
4061                          reloc->sym->offset + reloc->addend);
4062         if (!dest)
4063                 return 0;
4064
4065         if (dest->type == INSN_ENDBR) {
4066                 mark_endbr_used(dest);
4067                 return 0;
4068         }
4069
4070         if (dest->noendbr)
4071                 return 0;
4072
4073         WARN_FUNC("data relocation to !ENDBR: %s",
4074                   reloc->sec->base, reloc->offset,
4075                   offstr(dest->sec, dest->offset));
4076
4077         return 1;
4078 }
4079
4080 /*
4081  * Validate IBT rules and remove used ENDBR instructions from the seal list.
4082  * Unused ENDBR instructions will be annotated for sealing (i.e., replaced with
4083  * NOPs) later, in create_ibt_endbr_seal_sections().
4084  */
4085 static int validate_ibt(struct objtool_file *file)
4086 {
4087         struct section *sec;
4088         struct reloc *reloc;
4089         struct instruction *insn;
4090         int warnings = 0;
4091
4092         for_each_insn(file, insn)
4093                 warnings += validate_ibt_insn(file, insn);
4094
4095         for_each_sec(file, sec) {
4096
4097                 /* Already done by validate_ibt_insn() */
4098                 if (sec->sh.sh_flags & SHF_EXECINSTR)
4099                         continue;
4100
4101                 if (!sec->reloc)
4102                         continue;
4103
4104                 /*
4105                  * These sections can reference text addresses, but not with
4106                  * the intent to indirect branch to them.
4107                  */
4108                 if ((!strncmp(sec->name, ".discard", 8) &&
4109                      strcmp(sec->name, ".discard.ibt_endbr_noseal"))    ||
4110                     !strncmp(sec->name, ".debug", 6)                    ||
4111                     !strcmp(sec->name, ".altinstructions")              ||
4112                     !strcmp(sec->name, ".ibt_endbr_seal")               ||
4113                     !strcmp(sec->name, ".orc_unwind_ip")                ||
4114                     !strcmp(sec->name, ".parainstructions")             ||
4115                     !strcmp(sec->name, ".retpoline_sites")              ||
4116                     !strcmp(sec->name, ".smp_locks")                    ||
4117                     !strcmp(sec->name, ".static_call_sites")            ||
4118                     !strcmp(sec->name, "_error_injection_whitelist")    ||
4119                     !strcmp(sec->name, "_kprobe_blacklist")             ||
4120                     !strcmp(sec->name, "__bug_table")                   ||
4121                     !strcmp(sec->name, "__ex_table")                    ||
4122                     !strcmp(sec->name, "__jump_table")                  ||
4123                     !strcmp(sec->name, "__mcount_loc")                  ||
4124                     !strcmp(sec->name, ".kcfi_traps")                   ||
4125                     strstr(sec->name, "__patchable_function_entries"))
4126                         continue;
4127
4128                 list_for_each_entry(reloc, &sec->reloc->reloc_list, list)
4129                         warnings += validate_ibt_data_reloc(file, reloc);
4130         }
4131
4132         return warnings;
4133 }
4134
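     /*
      * SLS (Straight Line Speculation) mitigation: every RET and every indirect
      * jump must be immediately followed by an INT3 trap so the CPU cannot
      * speculate past it.
      */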
4135 static int validate_sls(struct objtool_file *file)
4136 {
4137         struct instruction *insn, *next_insn;
4138         int warnings = 0;
4139
4140         for_each_insn(file, insn) {
4141                 next_insn = next_insn_same_sec(file, insn);
4142
4143                 if (insn->retpoline_safe)
4144                         continue;
4145
4146                 switch (insn->type) {
4147                 case INSN_RETURN:
4148                         if (!next_insn || next_insn->type != INSN_TRAP) {
4149                                 WARN_FUNC("missing int3 after ret",
4150                                           insn->sec, insn->offset);
4151                                 warnings++;
4152                         }
4153
4154                         break;
4155                 case INSN_JUMP_DYNAMIC:
4156                         if (!next_insn || next_insn->type != INSN_TRAP) {
4157                                 WARN_FUNC("missing int3 after indirect jump",
4158                                           insn->sec, insn->offset);
4159                                 warnings++;
4160                         }
4161                         break;
4162                 default:
4163                         break;
4164                 }
4165         }
4166
4167         return warnings;
4168 }
4169
4170 static int validate_reachable_instructions(struct objtool_file *file)
4171 {
4172         struct instruction *insn;
4173
4174         if (file->ignore_unreachables)
4175                 return 0;
4176
4177         for_each_insn(file, insn) {
4178                 if (insn->visited || ignore_unreachable_insn(file, insn))
4179                         continue;
4180
4181                 WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
4182                 return 1;
4183         }
4184
4185         return 0;
4186 }
4187
4188 int check(struct objtool_file *file)
4189 {
4190         int ret, warnings = 0;
4191
4192         arch_initial_func_cfi_state(&initial_func_cfi);
4193         init_cfi_state(&init_cfi);
4194         init_cfi_state(&func_cfi);
4195         set_func_state(&func_cfi);
4196
4197         if (!cfi_hash_alloc(1UL << (file->elf->symbol_bits - 3)))
4198                 goto out;
4199
4200         cfi_hash_add(&init_cfi);
4201         cfi_hash_add(&func_cfi);
4202
4203         ret = decode_sections(file);
4204         if (ret < 0)
4205                 goto out;
4206
4207         warnings += ret;
4208
4209         if (list_empty(&file->insn_list))
4210                 goto out;
4211
4212         if (opts.retpoline) {
4213                 ret = validate_retpoline(file);
4214                 if (ret < 0)
4215                         return ret;
4216                 warnings += ret;
4217         }
4218
4219         if (opts.stackval || opts.orc || opts.uaccess) {
4220                 ret = validate_functions(file);
4221                 if (ret < 0)
4222                         goto out;
4223                 warnings += ret;
4224
4225                 ret = validate_unwind_hints(file, NULL);
4226                 if (ret < 0)
4227                         goto out;
4228                 warnings += ret;
4229
4230                 if (!warnings) {
4231                         ret = validate_reachable_instructions(file);
4232                         if (ret < 0)
4233                                 goto out;
4234                         warnings += ret;
4235                 }
4236
4237         } else if (opts.noinstr) {
4238                 ret = validate_noinstr_sections(file);
4239                 if (ret < 0)
4240                         goto out;
4241                 warnings += ret;
4242         }
4243
4244         if (opts.unret) {
4245                 /*
4246                  * Must be after validate_branch() and friends, it plays
4247                  * further games with insn->visited.
4248                  */
4249                 ret = validate_unret(file);
4250                 if (ret < 0)
4251                         return ret;
4252                 warnings += ret;
4253         }
4254
4255         if (opts.ibt) {
4256                 ret = validate_ibt(file);
4257                 if (ret < 0)
4258                         goto out;
4259                 warnings += ret;
4260         }
4261
4262         if (opts.sls) {
4263                 ret = validate_sls(file);
4264                 if (ret < 0)
4265                         goto out;
4266                 warnings += ret;
4267         }
4268
4269         if (opts.static_call) {
4270                 ret = create_static_call_sections(file);
4271                 if (ret < 0)
4272                         goto out;
4273                 warnings += ret;
4274         }
4275
4276         if (opts.retpoline) {
4277                 ret = create_retpoline_sites_sections(file);
4278                 if (ret < 0)
4279                         goto out;
4280                 warnings += ret;
4281         }
4282
4283         if (opts.rethunk) {
4284                 ret = create_return_sites_sections(file);
4285                 if (ret < 0)
4286                         goto out;
4287                 warnings += ret;
4288         }
4289
4290         if (opts.mcount) {
4291                 ret = create_mcount_loc_sections(file);
4292                 if (ret < 0)
4293                         goto out;
4294                 warnings += ret;
4295         }
4296
4297         if (opts.ibt) {
4298                 ret = create_ibt_endbr_seal_sections(file);
4299                 if (ret < 0)
4300                         goto out;
4301                 warnings += ret;
4302         }
4303
4304         if (opts.orc && !list_empty(&file->insn_list)) {
4305                 ret = orc_create(file);
4306                 if (ret < 0)
4307                         goto out;
4308                 warnings += ret;
4309         }
4310
4311
4312         if (opts.stats) {
4313                 printf("nr_insns_visited: %ld\n", nr_insns_visited);
4314                 printf("nr_cfi: %ld\n", nr_cfi);
4315                 printf("nr_cfi_reused: %ld\n", nr_cfi_reused);
4316                 printf("nr_cfi_cache: %ld\n", nr_cfi_cache);
4317         }
4318
4319 out:
4320         /*
4321          *  For now, don't fail the kernel build on fatal warnings.  These
4322          *  errors are still fairly common due to the growing matrix of
4323          *  supported toolchains and their recent pace of change.
4324          */
4325         return 0;
4326 }