/* SPDX-License-Identifier: GPL-2.0 */

#include <linux/stringify.h>
#include <linux/linkage.h>
#include <asm/dwarf2.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/unwind_hints.h>
#include <asm/percpu.h>
#include <asm/frame.h>
	.section .text.__x86.indirect_thunk

.macro POLINE reg
	ANNOTATE_INTRA_FUNCTION_CALL
	call	.Ldo_rop_\@
.Lspec_trap_\@:
	UNWIND_HINT_UNDEFINED
	pause
	lfence
	jmp	.Lspec_trap_\@
.Ldo_rop_\@:
	mov	%\reg, (%_ASM_SP)
	UNWIND_HINT_FUNC
.endm

.macro RETPOLINE reg
	POLINE \reg
	RET
.endm

.macro THUNK reg

	.align RETPOLINE_THUNK_SIZE
SYM_INNER_LABEL(__x86_indirect_thunk_\reg, SYM_L_GLOBAL)
	UNWIND_HINT_UNDEFINED
	ANNOTATE_NOENDBR

	ALTERNATIVE_2 __stringify(RETPOLINE \reg), \
		      __stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg; int3), X86_FEATURE_RETPOLINE_LFENCE, \
		      __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), ALT_NOT(X86_FEATURE_RETPOLINE)

.endm
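
/*
 * For a concrete picture (illustrative only, never assembled from this
 * comment): for \reg = rax, the default "RETPOLINE rax" body above emits
 * roughly
 *
 *	call	1f
 * 0:	pause
 *	lfence
 *	jmp	0b
 * 1:	mov	%rax, (%rsp)
 *	ret
 *
 * Speculation down the fall-through path spins harmlessly in the
 * PAUSE/LFENCE loop at 0:, while the architectural path overwrites the
 * return address on the stack and RETs to the original *%rax target.
 */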

/*
 * Despite being an assembler file we can't just use .irp here
 * because __KSYM_DEPS__ only uses the C preprocessor and would
 * only see one instance of "__x86_indirect_thunk_\reg" rather
 * than one per register with the correct names. So we do it
 * the simple and nasty way...
 *
 * Worse, you can only have a single EXPORT_SYMBOL per line,
 * and CPP can't insert newlines, so we have to repeat everything
 * at least twice.
 */

#define __EXPORT_THUNK(sym)	_ASM_NOKPROBE(sym); EXPORT_SYMBOL(sym)

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_START(__x86_indirect_thunk_array)

#define GEN(reg) THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_END(__x86_indirect_thunk_array)

#define GEN(reg) __EXPORT_THUNK(__x86_indirect_thunk_ ## reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN
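
/*
 * Illustrative expansion, assuming the usual <asm/GEN-for-each-reg.h>
 * register list (rax ... r15, minus rsp, on 64-bit): the two GEN
 * definitions above emit one THUNK per register in the array, e.g.
 * __x86_indirect_thunk_rax, plus its export:
 *
 *	_ASM_NOKPROBE(__x86_indirect_thunk_rax); EXPORT_SYMBOL(__x86_indirect_thunk_rax)
 *
 * Compilers building with -mindirect-branch=thunk-extern then replace an
 * indirect "call *%rax" with "call __x86_indirect_thunk_rax".
 */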

#ifdef CONFIG_CALL_DEPTH_TRACKING
.macro CALL_THUNK reg
	.align RETPOLINE_THUNK_SIZE

SYM_INNER_LABEL(__x86_indirect_call_thunk_\reg, SYM_L_GLOBAL)
	UNWIND_HINT_UNDEFINED
	ANNOTATE_NOENDBR

	CALL_DEPTH_ACCOUNT
	POLINE \reg
	ANNOTATE_UNRET_SAFE
	ret
	int3
.endm

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_START(__x86_indirect_call_thunk_array)

#define GEN(reg) CALL_THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_END(__x86_indirect_call_thunk_array)

#define GEN(reg) __EXPORT_THUNK(__x86_indirect_call_thunk_ ## reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN
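
/*
 * Example usage (illustrative): with call depth tracking enabled, an
 * indirect "call *%rbx" site is routed through
 * __x86_indirect_call_thunk_rbx, so the per-CPU call depth is accounted
 * before the POLINE sequence runs.
 */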

.macro JUMP_THUNK reg
	.align RETPOLINE_THUNK_SIZE

SYM_INNER_LABEL(__x86_indirect_jump_thunk_\reg, SYM_L_GLOBAL)
	UNWIND_HINT_UNDEFINED
	ANNOTATE_NOENDBR
	POLINE \reg
	ANNOTATE_UNRET_SAFE
	ret
	int3
.endm

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_START(__x86_indirect_jump_thunk_array)

#define GEN(reg) JUMP_THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_END(__x86_indirect_jump_thunk_array)

#define GEN(reg) __EXPORT_THUNK(__x86_indirect_jump_thunk_ ## reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN
#endif /* CONFIG_CALL_DEPTH_TRACKING */
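
/*
 * The jump thunks cover tail calls: a "jmp *%rbx" goes through
 * __x86_indirect_jump_thunk_rbx instead, which runs the same POLINE
 * sequence but skips the depth accounting, since a JMP pushes no return
 * address onto the stack.
 */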

/*
 * This function name is magical and is used by -mfunction-return=thunk-extern
 * for the compiler to generate JMPs to it.
 */
#ifdef CONFIG_RETHUNK

	.section .text.__x86.return_thunk

/*
 * Safety details here pertain to the AMD Zen{1,2} microarchitecture:
 * 1) The RET at __x86_return_thunk must be on a 64 byte boundary, for
 *    alignment within the BTB.
 * 2) The instruction at zen_untrain_ret must contain, and not end with,
 *    the 0xc3 byte of the RET.
 * 3) STIBP must be enabled, or SMT disabled, to prevent the sibling thread
 *    from re-poisoning the BTB prediction.
 */
	.align 64
	.skip 64 - (__x86_return_thunk - zen_untrain_ret), 0xcc
SYM_START(zen_untrain_ret, SYM_L_GLOBAL, SYM_A_NONE)
	ANNOTATE_NOENDBR
	/*
	 * As executed from zen_untrain_ret, this is:
	 *
	 *	TEST	$0xcc, %bl
	 *	LFENCE
	 *	JMP	__x86_return_thunk
	 *
	 * Executing the TEST instruction has a side effect of evicting any BTB
	 * prediction (potentially attacker controlled) attached to the RET, as
	 * __x86_return_thunk + 1 isn't an instruction boundary at the moment.
	 */
	.byte	0xf6

	/*
	 * As executed from __x86_return_thunk, this is a plain RET.
	 *
	 * As part of the TEST above, RET is the ModRM byte, and INT3 the imm8.
	 *
	 * We subsequently jump backwards and architecturally execute the RET.
	 * This creates a correct BTB prediction (type=ret), but in the
	 * meantime we suffer Straight Line Speculation (because the type was
	 * no branch) which is halted by the INT3.
	 *
	 * With SMT enabled and STIBP active, a sibling thread cannot poison
	 * RET's prediction to a type of its choice, but can evict the
	 * prediction due to competitive sharing. If the prediction is
	 * evicted, __x86_return_thunk will suffer Straight Line Speculation
	 * which will be contained safely by the INT3.
	 */
SYM_INNER_LABEL(__x86_return_thunk, SYM_L_GLOBAL)
	ret
	int3
SYM_CODE_END(__x86_return_thunk)
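
	/*
	 * Byte-level view of the overlap above (illustrative):
	 *
	 *	zen_untrain_ret:	f6 c3 cc	test	$0xcc, %bl
	 *	__x86_return_thunk:	   c3		ret
	 *				      cc	int3
	 *
	 * The single 0xc3 byte is the TEST's ModRM when decoded from
	 * zen_untrain_ret, and a RET when decoded from __x86_return_thunk.
	 */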

	/*
	 * Ensure the TEST decoding / BTB invalidation is complete.
	 */
	lfence

	/*
	 * Jump back and execute the RET in the middle of the TEST instruction.
	 * INT3 is for SLS protection.
	 */
	jmp __x86_return_thunk
	int3
SYM_FUNC_END(zen_untrain_ret)
__EXPORT_THUNK(zen_untrain_ret)

EXPORT_SYMBOL(__x86_return_thunk)

#endif /* CONFIG_RETHUNK */

#ifdef CONFIG_CALL_DEPTH_TRACKING

	.align 64
SYM_FUNC_START(__x86_return_skl)
	ANNOTATE_NOENDBR
	/*
	 * Keep the hotpath in a 16-byte I-fetch for the non-debug
	 * case.
	 */
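	/*
	 * Counter math, for illustration (assuming the 0x8000000000000000
	 * reset value from <asm/nospec-branch.h>): every tracked CALL does
	 * "sarq $5" on the per-CPU counter, replicating the sign bit, and
	 * every RET does the "shlq $5" below.  One CALL turns the reset
	 * value into 0xfc00000000000000; the matching RET shifts it back.
	 * The counter only reaches zero when RETs outrun the tracked
	 * CALLs, i.e. when the RSB may underflow, which takes the
	 * stuffing path at 1: below.
	 */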
	CALL_THUNKS_DEBUG_INC_RETS
	shlq	$5, PER_CPU_VAR(pcpu_hot + X86_call_depth)
	jz	1f
	ANNOTATE_UNRET_SAFE
	ret
	int3
1:
	CALL_THUNKS_DEBUG_INC_STUFFS
	.rept	16
	ANNOTATE_INTRA_FUNCTION_CALL
	call	2f
	int3
2:
	.endr
	add	$(8*16), %rsp

	CREDIT_CALL_DEPTH
	ANNOTATE_UNRET_SAFE
	ret
	int3
SYM_FUNC_END(__x86_return_skl)

#endif /* CONFIG_CALL_DEPTH_TRACKING */