/* SPDX-License-Identifier: GPL-2.0 */

#include <linux/export.h>
#include <linux/stringify.h>
#include <linux/linkage.h>
#include <asm/dwarf2.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h>
#include <asm/asm-offsets.h>
#include <asm/nospec-branch.h>
#include <asm/unwind_hints.h>
#include <asm/percpu.h>
#include <asm/frame.h>
#include <asm/nops.h>

	.section .text..__x86.indirect_thunk

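/*
 * POLINE is the common first half of a retpoline: an intra-function
 * CALL pushes a return address (pointing at the INT3) and the target
 * register is then written over that stack slot, so a later RET
 * architecturally transfers to the indirect branch target while the
 * RSB-predicted return lands on the INT3 and speculation is contained.
 */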
|---|
.macro POLINE reg
	ANNOTATE_INTRA_FUNCTION_CALL
	call	.Ldo_rop_\@
	int3
.Ldo_rop_\@:
	mov	%\reg, (%_ASM_SP)
	UNWIND_HINT_FUNC
.endm

.macro RETPOLINE reg
	POLINE \reg
	RET
.endm

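/*
 * Emit one __x86_indirect_thunk_<reg> entry. The body is runtime-patched
 * via alternatives: the full retpoline by default, LFENCE + indirect JMP
 * when X86_FEATURE_RETPOLINE_LFENCE is set, or a plain indirect JMP when
 * retpolines are not enabled at all.
 */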
|---|
.macro THUNK reg

	.align RETPOLINE_THUNK_SIZE
SYM_INNER_LABEL(__x86_indirect_thunk_\reg, SYM_L_GLOBAL)
	UNWIND_HINT_UNDEFINED
	ANNOTATE_NOENDBR

	ALTERNATIVE_2 __stringify(RETPOLINE \reg), \
		      __stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg; int3), X86_FEATURE_RETPOLINE_LFENCE, \
		      __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), ALT_NOT(X86_FEATURE_RETPOLINE)
SYM_PIC_ALIAS(__x86_indirect_thunk_\reg)

.endm

/*
 * Despite being an assembler file we can't just use .irp here
 * because __KSYM_DEPS__ only uses the C preprocessor and would
 * only see one instance of "__x86_indirect_thunk_\reg" rather
 * than one per register with the correct names. So we do it
 * the simple and nasty way...
 *
 * Worse, you can only have a single EXPORT_SYMBOL per line,
 * and CPP can't insert newlines, so we have to repeat everything
 * at least twice.
 */

#define __EXPORT_THUNK(sym)	_ASM_NOKPROBE(sym); EXPORT_SYMBOL(sym)

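/*
 * Lay out one thunk per register, each RETPOLINE_THUNK_SIZE bytes apart,
 * so patching code can locate __x86_indirect_thunk_<reg> by indexing
 * into __x86_indirect_thunk_array.
 */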
|---|
	.align RETPOLINE_THUNK_SIZE
SYM_CODE_START(__x86_indirect_thunk_array)

#define GEN(reg) THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_END(__x86_indirect_thunk_array)

#define GEN(reg) __EXPORT_THUNK(__x86_indirect_thunk_ ## reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN

#ifdef CONFIG_MITIGATION_CALL_DEPTH_TRACKING

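/*
 * Call-depth-tracking variant of the indirect thunk: account the call
 * depth first (CALL_DEPTH_ACCOUNT), then do the POLINE stack swap and
 * return to the indirect branch target.
 */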
|---|
.macro CALL_THUNK reg
	.align RETPOLINE_THUNK_SIZE

SYM_INNER_LABEL(__x86_indirect_call_thunk_\reg, SYM_L_GLOBAL)
	UNWIND_HINT_UNDEFINED
	ANNOTATE_NOENDBR

	CALL_DEPTH_ACCOUNT
	POLINE \reg
	ANNOTATE_UNRET_SAFE
	ret
	int3
.endm

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_START(__x86_indirect_call_thunk_array)

#define GEN(reg) CALL_THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_END(__x86_indirect_call_thunk_array)

#define GEN(reg) __EXPORT_THUNK(__x86_indirect_call_thunk_ ## reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN

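/*
 * Same as CALL_THUNK but without the depth accounting; used for
 * indirect JMP sites, which do not change the call depth.
 */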
|---|
.macro JUMP_THUNK reg
	.align RETPOLINE_THUNK_SIZE

SYM_INNER_LABEL(__x86_indirect_jump_thunk_\reg, SYM_L_GLOBAL)
	UNWIND_HINT_UNDEFINED
	ANNOTATE_NOENDBR
	POLINE \reg
	ANNOTATE_UNRET_SAFE
	ret
	int3
.endm

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_START(__x86_indirect_jump_thunk_array)

#define GEN(reg) JUMP_THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN

	.align RETPOLINE_THUNK_SIZE
SYM_CODE_END(__x86_indirect_jump_thunk_array)

#define GEN(reg) __EXPORT_THUNK(__x86_indirect_jump_thunk_ ## reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN

#endif /* CONFIG_MITIGATION_CALL_DEPTH_TRACKING */

#ifdef CONFIG_MITIGATION_ITS

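/*
 * ITS (Indirect Target Selection) thunks: the alignment and padding in
 * and around the macro place the actual indirect JMP in the upper half
 * of a cacheline, the address range from which the mitigation requires
 * indirect branch targets to execute.
 */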
|---|
.macro ITS_THUNK reg

/*
 * If CFI paranoid is used then the ITS thunk starts with opcodes (1: udb; jne 1b)
 * that complete the fineibt_paranoid caller sequence.
 */
1:	ASM_UDB
SYM_INNER_LABEL(__x86_indirect_paranoid_thunk_\reg, SYM_L_GLOBAL)
	UNWIND_HINT_UNDEFINED
	ANNOTATE_NOENDBR
	jne	1b
SYM_INNER_LABEL(__x86_indirect_its_thunk_\reg, SYM_L_GLOBAL)
	UNWIND_HINT_UNDEFINED
	ANNOTATE_NOENDBR
	ANNOTATE_RETPOLINE_SAFE
	jmp	*%\reg
	int3
	.align 32, 0xcc		/* fill to the end of the line */
	.skip  32 - (__x86_indirect_its_thunk_\reg - 1b), 0xcc /* skip to the next upper half */
.endm

/* ITS mitigation requires thunks be aligned to upper half of cacheline */
	.align 64, 0xcc
	.skip 29, 0xcc

#define GEN(reg) ITS_THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN

	.align 64, 0xcc
SYM_FUNC_ALIAS(__x86_indirect_its_thunk_array, __x86_indirect_its_thunk_rax)
SYM_CODE_END(__x86_indirect_its_thunk_array)

|---|
#endif /* CONFIG_MITIGATION_ITS */

#ifdef CONFIG_MITIGATION_RETHUNK

/*
 * Be careful here: that label cannot really be removed because in
 * some configurations and toolchains, the JMP __x86_return_thunk the
 * compiler issues is either a short one or the compiler doesn't use
 * relocations for same-section JMPs and that breaks the returns
 * detection logic in apply_returns() and in objtool.
 */
	.section .text..__x86.return_thunk

#ifdef CONFIG_MITIGATION_SRSO

|---|
/*
 * srso_alias_untrain_ret() and srso_alias_safe_ret() are placed at
 * special addresses:
 *
 * - srso_alias_untrain_ret() is 2M aligned
 * - srso_alias_safe_ret() is also in the same 2M page but bits 2, 8, 14
 * and 20 in its virtual address are set (while those bits in the
 * srso_alias_untrain_ret() function are cleared).
 *
 * This guarantees that those two addresses will alias in the branch
 * target buffer of Zen3/4 generations, leading to any potential
 * poisoned entries at that BTB slot to get evicted.
 *
 * As a result, srso_alias_safe_ret() becomes a safe return.
 */
	.pushsection .text..__x86.rethunk_untrain
SYM_CODE_START_NOALIGN(srso_alias_untrain_ret)
	UNWIND_HINT_FUNC
	ANNOTATE_NOENDBR
	ASM_NOP2
	lfence
	jmp srso_alias_return_thunk
SYM_FUNC_END(srso_alias_untrain_ret)
__EXPORT_THUNK(srso_alias_untrain_ret)
	.popsection

	.pushsection .text..__x86.rethunk_safe
SYM_CODE_START_NOALIGN(srso_alias_safe_ret)
	lea 8(%_ASM_SP), %_ASM_SP
	UNWIND_HINT_FUNC
	ANNOTATE_UNRET_SAFE
	ret
	int3
SYM_FUNC_END(srso_alias_safe_ret)

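/*
 * The aliased return thunk: when it is selected, patched RETs jump
 * here. The CALL to srso_alias_safe_ret() pushes a return address which
 * that function immediately discards (LEA 8(%rsp)) before RETurning to
 * the original caller; the UD2 is never reached architecturally and
 * only stops straight-line speculation past the CALL.
 */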
|---|
SYM_CODE_START_NOALIGN(srso_alias_return_thunk)
	UNWIND_HINT_FUNC
	ANNOTATE_NOENDBR
	call srso_alias_safe_ret
	ud2
SYM_CODE_END(srso_alias_return_thunk)
	.popsection

/*
 * SRSO untraining sequence for Zen1/2, similar to retbleed_untrain_ret()
 * above. On kernel entry, srso_untrain_ret() is executed which is a
 *
 * movabs $0xccccc30824648d48,%rax
 *
 * and when the return thunk executes the inner label srso_safe_ret()
 * later, it is a stack manipulation and a RET which is mispredicted and
 * thus a "safe" one to use.
 */
	.align 64
	.skip 64 - (srso_safe_ret - srso_untrain_ret), 0xcc
SYM_CODE_START_LOCAL_NOALIGN(srso_untrain_ret)
	ANNOTATE_NOENDBR
	.byte 0x48, 0xb8

/*
 * This forces the function return instruction to speculate into a trap
 * (UD2 in srso_return_thunk() below).  This RET will then mispredict
 * and execution will continue at the return site read from the top of
 * the stack.
 */
SYM_INNER_LABEL(srso_safe_ret, SYM_L_GLOBAL)
	lea 8(%_ASM_SP), %_ASM_SP
	ret
	int3
	int3
	/* end of movabs */
	lfence
	call srso_safe_ret
	ud2
SYM_CODE_END(srso_safe_ret)
SYM_FUNC_END(srso_untrain_ret)

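/*
 * Zen1/2 SRSO return thunk: as with the alias variant above, the CALL
 * into srso_safe_ret() pushes a return address which srso_safe_ret()
 * pops again before executing the (mispredicted, hence safe) RET back
 * to the original call site; the UD2 only catches speculation.
 */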
|---|
SYM_CODE_START(srso_return_thunk)
	UNWIND_HINT_FUNC
	ANNOTATE_NOENDBR
	call srso_safe_ret
	ud2
SYM_CODE_END(srso_return_thunk)

#define JMP_SRSO_UNTRAIN_RET "jmp srso_untrain_ret"
#else /* !CONFIG_MITIGATION_SRSO */
/* Dummy for the alternative in CALL_UNTRAIN_RET. */
SYM_CODE_START(srso_alias_untrain_ret)
	ANNOTATE_UNRET_SAFE
	ANNOTATE_NOENDBR
	ret
	int3
SYM_FUNC_END(srso_alias_untrain_ret)
__EXPORT_THUNK(srso_alias_untrain_ret)
#define JMP_SRSO_UNTRAIN_RET "ud2"
#endif /* CONFIG_MITIGATION_SRSO */

|---|
#ifdef CONFIG_MITIGATION_UNRET_ENTRY

/*
 * Some generic notes on the untraining sequences:
 *
 * They are interchangeable when it comes to flushing potentially wrong
 * RET predictions from the BTB.
 *
 * The SRSO Zen1/2 (MOVABS) untraining sequence is longer than the
 * Retbleed sequence because the return sequence done there
 * (srso_safe_ret()) is longer and the return sequence must fully nest
 * (end before) the untraining sequence. Therefore, the untraining
 * sequence must fully overlap the return sequence.
 *
 * Regarding alignment - the instructions which need to be untrained,
 * must all start at a cacheline boundary for Zen1/2 generations. That
 * is, instruction sequences starting at srso_safe_ret() and
 * the respective instruction sequences at retbleed_return_thunk()
 * must start at a cacheline boundary.
 */

|---|
/*
 * Safety details here pertain to the AMD Zen{1,2} microarchitecture:
 * 1) The RET at retbleed_return_thunk must be on a 64 byte boundary, for
 *    alignment within the BTB.
 * 2) The instruction at retbleed_untrain_ret must contain, and not
 *    end with, the 0xc3 byte of the RET.
 * 3) STIBP must be enabled, or SMT disabled, to prevent the sibling thread
 *    from re-poisoning the BTB prediction.
 */
|---|
	.align 64
	.skip 64 - (retbleed_return_thunk - retbleed_untrain_ret), 0xcc
SYM_CODE_START_LOCAL_NOALIGN(retbleed_untrain_ret)
	ANNOTATE_NOENDBR
	/*
	 * As executed from retbleed_untrain_ret, this is:
	 *
	 *   TEST $0xcc, %bl
	 *   LFENCE
	 *   JMP retbleed_return_thunk
	 *
	 * Executing the TEST instruction has a side effect of evicting any BTB
	 * prediction (potentially attacker controlled) attached to the RET, as
	 * retbleed_return_thunk + 1 isn't an instruction boundary at the moment.
	 */
	.byte	0xf6

	/*
	 * As executed from retbleed_return_thunk, this is a plain RET.
	 *
	 * As part of the TEST above, RET is the ModRM byte, and INT3 the imm8.
	 *
	 * We subsequently jump backwards and architecturally execute the RET.
	 * This creates a correct BTB prediction (type=ret), but in the
	 * meantime we suffer Straight Line Speculation (because the type was
	 * no branch) which is halted by the INT3.
	 *
	 * With SMT enabled and STIBP active, a sibling thread cannot poison
	 * RET's prediction to a type of its choice, but can evict the
	 * prediction due to competitive sharing. If the prediction is
	 * evicted, retbleed_return_thunk will suffer Straight Line Speculation
	 * which will be contained safely by the INT3.
	 */
SYM_INNER_LABEL(retbleed_return_thunk, SYM_L_GLOBAL)
	ret
	int3
SYM_CODE_END(retbleed_return_thunk)

	/*
	 * Ensure the TEST decoding / BTB invalidation is complete.
	 */
	lfence

	/*
	 * Jump back and execute the RET in the middle of the TEST instruction.
	 * INT3 is for SLS protection.
	 */
	jmp retbleed_return_thunk
	int3
SYM_FUNC_END(retbleed_untrain_ret)

#define JMP_RETBLEED_UNTRAIN_RET "jmp retbleed_untrain_ret"
#else /* !CONFIG_MITIGATION_UNRET_ENTRY */
#define JMP_RETBLEED_UNTRAIN_RET "ud2"
#endif /* CONFIG_MITIGATION_UNRET_ENTRY */

#if defined(CONFIG_MITIGATION_UNRET_ENTRY) || defined(CONFIG_MITIGATION_SRSO)

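/*
 * Common entry hook for untraining: the alternative selects the SRSO
 * sequence when X86_FEATURE_SRSO is set and the Retbleed sequence
 * otherwise (either may be patched to UD2 when its config is off).
 */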
|---|
SYM_FUNC_START(entry_untrain_ret)
	ANNOTATE_NOENDBR
	ALTERNATIVE JMP_RETBLEED_UNTRAIN_RET, JMP_SRSO_UNTRAIN_RET, X86_FEATURE_SRSO
SYM_FUNC_END(entry_untrain_ret)
__EXPORT_THUNK(entry_untrain_ret)

#endif /* CONFIG_MITIGATION_UNRET_ENTRY || CONFIG_MITIGATION_SRSO */

#ifdef CONFIG_MITIGATION_CALL_DEPTH_TRACKING

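/*
 * Return thunk for call depth tracking. The per-CPU __x86_call_depth
 * counter is shifted left by 5 on every return; while it stays non-zero
 * the hot path is a bare RET. Once it reaches zero the RSB is assumed
 * empty, so 16 dummy intra-function calls stuff it, the stack pointer
 * is rewound past the 16 pushed return addresses and CREDIT_CALL_DEPTH
 * re-arms the counter before the real RET.
 */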
|---|
	.align 64
SYM_FUNC_START(call_depth_return_thunk)
	ANNOTATE_NOENDBR
	/*
	 * Keep the hotpath in a 16byte I-fetch for the non-debug
	 * case.
	 */
	CALL_THUNKS_DEBUG_INC_RETS
	shlq	$5, PER_CPU_VAR(__x86_call_depth)
	jz	1f
	ANNOTATE_UNRET_SAFE
	ret
	int3
1:
	CALL_THUNKS_DEBUG_INC_STUFFS
	.rept	16
	ANNOTATE_INTRA_FUNCTION_CALL
	call	2f
	int3
2:
	.endr
	add	$(8*16), %rsp

	CREDIT_CALL_DEPTH

	ANNOTATE_UNRET_SAFE
	ret
	int3
SYM_FUNC_END(call_depth_return_thunk)

#endif /* CONFIG_MITIGATION_CALL_DEPTH_TRACKING */

#ifdef CONFIG_MITIGATION_ITS

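/*
 * ITS-safe return thunk: the 64-byte align plus 32-byte skip place the
 * RET in the upper half of its cacheline, matching the requirement
 * noted for the indirect thunks above.
 */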
|---|
	.align 64, 0xcc
	.skip 32, 0xcc
SYM_CODE_START(its_return_thunk)
	UNWIND_HINT_FUNC
	ANNOTATE_NOENDBR
	ANNOTATE_UNRET_SAFE
	ret
	int3
SYM_CODE_END(its_return_thunk)
EXPORT_SYMBOL(its_return_thunk)

#endif /* CONFIG_MITIGATION_ITS */

|---|
/*
 * This function name is magical and is used by -mfunction-return=thunk-extern
 * for the compiler to generate JMPs to it.
 *
 * This code is only used during kernel boot or module init.  All
 * 'JMP __x86_return_thunk' sites are changed to something else by
 * apply_returns().
 *
 * The ALTERNATIVE below adds a really loud warning to catch the case
 * where the insufficient default return thunk ends up getting used for
 * whatever reason like miscompilation or failure of
 * objtool/alternatives/etc to patch all the return sites.
 */
SYM_CODE_START(__x86_return_thunk)
	UNWIND_HINT_FUNC
	ANNOTATE_NOENDBR
#if defined(CONFIG_MITIGATION_UNRET_ENTRY) || \
    defined(CONFIG_MITIGATION_SRSO) || \
    defined(CONFIG_MITIGATION_CALL_DEPTH_TRACKING)
	ALTERNATIVE __stringify(ANNOTATE_UNRET_SAFE; ret), \
		    "jmp warn_thunk_thunk", X86_FEATURE_ALWAYS
#else
	ANNOTATE_UNRET_SAFE
	ret
#endif
	int3
SYM_CODE_END(__x86_return_thunk)
SYM_PIC_ALIAS(__x86_return_thunk)
EXPORT_SYMBOL(__x86_return_thunk)

#endif /* CONFIG_MITIGATION_RETHUNK */