Diffstat (limited to 'debian/patches/bugfix/x86/retbleed/0008-x86-static_call-Use-alternative-RET-encoding.patch')
-rw-r--r--  debian/patches/bugfix/x86/retbleed/0008-x86-static_call-Use-alternative-RET-encoding.patch  199
1 file changed, 199 insertions, 0 deletions
diff --git a/debian/patches/bugfix/x86/retbleed/0008-x86-static_call-Use-alternative-RET-encoding.patch b/debian/patches/bugfix/x86/retbleed/0008-x86-static_call-Use-alternative-RET-encoding.patch
new file mode 100644
index 000000000..c782206fd
--- /dev/null
+++ b/debian/patches/bugfix/x86/retbleed/0008-x86-static_call-Use-alternative-RET-encoding.patch
@@ -0,0 +1,199 @@
+From: Peter Zijlstra <peterz@infradead.org>
+Date: Tue, 14 Jun 2022 23:15:39 +0200
+Subject: x86,static_call: Use alternative RET encoding
+Origin: https://git.kernel.org/linus/ee88d363d15617ff50ac24fab0ffec11113b2aeb
+
+In addition to teaching static_call about the new way to spell 'RET',
+there is an added complication in that static_call() is allowed to
+rewrite text before it is known which particular spelling is required.
+
+In order to deal with this; have a static_call specific fixup in the
+apply_return() 'alternative' patching routine that will rewrite the
+static_call trampoline to match the definite sequence.
+
+This in turn creates the problem of uniquely identifying static call
+trampolines. Currently trampolines are 8 bytes, the first 5 being the
+jmp.d32/ret sequence and the final 3 a byte sequence that spells out
+'SCT'.
+
+This sequence is used in __static_call_validate() to ensure it is
+patching a trampoline and not a random other jmp.d32. That is,
+false-positives shouldn't be plenty, but aren't a big concern.
+
+OTOH the new __static_call_fixup() must not have false-positives, and
+'SCT' decodes to the somewhat weird but semi plausible sequence:
+
+  push %rbx
+  rex.XB push %r12
+
+Additionally, there are SLS concerns with immediate jumps. Combined it
+seems like a good moment to change the signature to a single 3 byte
+trap instruction that is unique to this usage and will not ever get
+generated by accident.
+
+As such, change the signature to: '0x0f, 0xb9, 0xcc', which decodes
+to:
+
+  ud1 %esp, %ecx
+
+Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
+Signed-off-by: Borislav Petkov <bp@suse.de>
+Reviewed-by: Josh Poimboeuf <jpoimboe@kernel.org>
+Signed-off-by: Borislav Petkov <bp@suse.de>
+---
+ arch/x86/include/asm/static_call.h | 19 +++++++++++++-
+ arch/x86/kernel/alternative.c      | 12 ++++++---
+ arch/x86/kernel/static_call.c      | 40 ++++++++++++++++++++++++++++--
+ 3 files changed, 64 insertions(+), 7 deletions(-)
+
+diff --git a/arch/x86/include/asm/static_call.h b/arch/x86/include/asm/static_call.h
+index 2d8dacd02643..70cc9ccb8029 100644
+--- a/arch/x86/include/asm/static_call.h
++++ b/arch/x86/include/asm/static_call.h
+@@ -21,6 +21,16 @@
+  * relative displacement across sections.
+  */
+ 
++/*
++ * The trampoline is 8 bytes and of the general form:
++ *
++ *   jmp.d32 \func
++ *   ud1 %esp, %ecx
++ *
++ * That trailing #UD provides both a speculation stop and serves as a unique
++ * 3 byte signature identifying static call trampolines. Also see tramp_ud[]
++ * and __static_call_fixup().
++ */
+ #define __ARCH_DEFINE_STATIC_CALL_TRAMP(name, insns)			\
+ 	asm(".pushsection .static_call.text, \"ax\" \n"		\
+ 	    ".align 4						\n"	\
+@@ -28,7 +38,7 @@
+ 	    STATIC_CALL_TRAMP_STR(name) ":			\n"	\
+ 	    ANNOTATE_NOENDBR						\
+ 	    insns " \n"							\
+-	    ".byte 0x53, 0x43, 0x54				\n"	\
++	    ".byte 0x0f, 0xb9, 0xcc				\n"	\
+ 	    ".type " STATIC_CALL_TRAMP_STR(name) ", @function	\n"	\
+ 	    ".size " STATIC_CALL_TRAMP_STR(name) ", . - " STATIC_CALL_TRAMP_STR(name) " \n" \
+ 	    ".popsection \n")
+@@ -36,8 +46,13 @@
+ #define ARCH_DEFINE_STATIC_CALL_TRAMP(name, func)			\
+ 	__ARCH_DEFINE_STATIC_CALL_TRAMP(name, ".byte 0xe9; .long " #func " - (. + 4)")
+ 
++#ifdef CONFIG_RETPOLINE
++#define ARCH_DEFINE_STATIC_CALL_NULL_TRAMP(name)			\
++	__ARCH_DEFINE_STATIC_CALL_TRAMP(name, "jmp __x86_return_thunk")
++#else
+ #define ARCH_DEFINE_STATIC_CALL_NULL_TRAMP(name)			\
+ 	__ARCH_DEFINE_STATIC_CALL_TRAMP(name, "ret; int3; nop; nop; nop")
++#endif
+ 
+ #define ARCH_DEFINE_STATIC_CALL_RET0_TRAMP(name)			\
+ 	ARCH_DEFINE_STATIC_CALL_TRAMP(name, __static_call_return0)
+@@ -48,4 +63,6 @@
+ 	    ".long " STATIC_CALL_KEY_STR(name) " - .		\n"	\
+ 	    ".popsection \n")
+ 
++extern bool __static_call_fixup(void *tramp, u8 op, void *dest);
++
+ #endif /* _ASM_STATIC_CALL_H */
+diff --git a/arch/x86/kernel/alternative.c b/arch/x86/kernel/alternative.c
+index 76b745921509..cf447ee18b3c 100644
+--- a/arch/x86/kernel/alternative.c
++++ b/arch/x86/kernel/alternative.c
+@@ -539,18 +539,22 @@ void __init_or_module noinline apply_returns(s32 *start, s32 *end)
+ 	s32 *s;
+ 
+ 	for (s = start; s < end; s++) {
+-		void *addr = (void *)s + *s;
++		void *dest = NULL, *addr = (void *)s + *s;
+ 		struct insn insn;
+ 		int len, ret;
+ 		u8 bytes[16];
+-		u8 op1;
++		u8 op;
+ 
+ 		ret = insn_decode_kernel(&insn, addr);
+ 		if (WARN_ON_ONCE(ret < 0))
+ 			continue;
+ 
+-		op1 = insn.opcode.bytes[0];
+-		if (WARN_ON_ONCE(op1 != JMP32_INSN_OPCODE))
++		op = insn.opcode.bytes[0];
++		if (op == JMP32_INSN_OPCODE)
++			dest = addr + insn.length + insn.immediate.value;
++
++		if (__static_call_fixup(addr, op, dest) ||
++		    WARN_ON_ONCE(dest != &__x86_return_thunk))
+ 			continue;
+ 
+ 		DPRINTK("return thunk at: %pS (%px) len: %d to: %pS",
+diff --git a/arch/x86/kernel/static_call.c b/arch/x86/kernel/static_call.c
+index aa72cefdd5be..fe21fe778185 100644
+--- a/arch/x86/kernel/static_call.c
++++ b/arch/x86/kernel/static_call.c
+@@ -11,6 +11,13 @@ enum insn_type {
+ 	RET = 3,  /* tramp / site cond-tail-call */
+ };
+ 
++/*
++ * ud1 %esp, %ecx - a 3 byte #UD that is unique to trampolines, chosen such
++ * that there is no false-positive trampoline identification while also being a
++ * speculation stop.
++ */
++static const u8 tramp_ud[] = { 0x0f, 0xb9, 0xcc };
++
+ /*
+  * cs cs cs xorl %eax, %eax - a single 5 byte instruction that clears %[er]ax
+  */
+@@ -43,7 +50,10 @@ static void __ref __static_call_transform(void *insn, enum insn_type type, void
+ 		break;
+ 
+ 	case RET:
+-		code = &retinsn;
++		if (cpu_feature_enabled(X86_FEATURE_RETHUNK))
++			code = text_gen_insn(JMP32_INSN_OPCODE, insn, &__x86_return_thunk);
++		else
++			code = &retinsn;
+ 		break;
+ 	}
+ 
+@@ -60,7 +70,7 @@ static void __static_call_validate(void *insn, bool tail, bool tramp)
+ {
+ 	u8 opcode = *(u8 *)insn;
+ 
+-	if (tramp && memcmp(insn+5, "SCT", 3)) {
++	if (tramp && memcmp(insn+5, tramp_ud, 3)) {
+ 		pr_err("trampoline signature fail");
+ 		BUG();
+ 	}
+@@ -115,3 +125,29 @@ void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
+ 	mutex_unlock(&text_mutex);
+ }
+ EXPORT_SYMBOL_GPL(arch_static_call_transform);
++
++#ifdef CONFIG_RETPOLINE
++/*
++ * This is called by apply_returns() to fix up static call trampolines,
++ * specifically ARCH_DEFINE_STATIC_CALL_NULL_TRAMP which is recorded as
++ * having a return trampoline.
++ *
++ * The problem is that static_call() is available before determining
++ * X86_FEATURE_RETHUNK and, by implication, running alternatives.
++ *
++ * This means that __static_call_transform() above can have overwritten the
++ * return trampoline and we now need to fix things up to be consistent.
++ */
++bool __static_call_fixup(void *tramp, u8 op, void *dest)
++{
++	if (memcmp(tramp+5, tramp_ud, 3)) {
++		/* Not a trampoline site, not our problem. */
++		return false;
++	}
++
++	if (op == RET_INSN_OPCODE || dest == &__x86_return_thunk)
++		__static_call_transform(tramp, RET, NULL);
++
++	return true;
++}
++#endif
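
For reference, the trampoline identification that both __static_call_validate() and the new __static_call_fixup() rely on reduces to comparing the 3 bytes after the 5-byte jmp.d32/ret against the ud1 signature. Below is a minimal standalone C sketch of that check (userspace, not kernel code; the buffers and the helper name is_static_call_tramp() are made up for illustration):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* ud1 %esp, %ecx - the 3 byte signature introduced by this patch */
static const uint8_t tramp_ud[3] = { 0x0f, 0xb9, 0xcc };

/*
 * A static call trampoline is 8 bytes: a 5 byte jmp.d32 (or ret; int3; nops)
 * followed by the signature.  Identification is a plain memcmp() on the
 * trailing 3 bytes, mirroring __static_call_validate()/__static_call_fixup().
 */
static int is_static_call_tramp(const uint8_t insn[8])
{
	return memcmp(insn + 5, tramp_ud, sizeof(tramp_ud)) == 0;
}

int main(void)
{
	/* jmp.d32 with a zero displacement, followed by the ud1 signature */
	const uint8_t tramp[8] = { 0xe9, 0x00, 0x00, 0x00, 0x00, 0x0f, 0xb9, 0xcc };
	/* a random jmp.d32 followed by NOPs must not be treated as a trampoline */
	const uint8_t other[8] = { 0xe9, 0x00, 0x00, 0x00, 0x00, 0x90, 0x90, 0x90 };

	printf("tramp: %d\n", is_static_call_tramp(tramp));	/* prints 1 */
	printf("other: %d\n", is_static_call_tramp(other));	/* prints 0 */
	return 0;
}

With the old 'SCT' signature the same check could in principle match an unlucky jmp.d32 followed by push %rbx; rex.XB push %r12, which is why the patch switches to a trapping ud1 that compilers never generate.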