/* SPDX-License-Identifier: GPL-2.0 */

#include <linux/stringify.h>
#include <linux/linkage.h>
#include <asm/dwarf2.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/unwind_hints.h>
#include <asm/frame.h>

    /* All indirect-call thunks are grouped into their own text section. */
    .section .text.__x86.indirect_thunk
0014 
/*
 * RETPOLINE \reg: a speculation-safe replacement for "jmp *%\reg".
 *
 * The CALL pushes a return address and sends any speculative execution
 * of the later RET into the pause/lfence capture loop at .Lspec_trap_\@.
 * Architecturally we fall through to .Ldo_rop_\@, overwrite the on-stack
 * return address with the real branch target held in \reg, and RET to it.
 */
.macro RETPOLINE reg
    ANNOTATE_INTRA_FUNCTION_CALL
    call    .Ldo_rop_\@
.Lspec_trap_\@:
    UNWIND_HINT_EMPTY
    pause                       /* speculation trap: spin until squashed */
    lfence
    jmp .Lspec_trap_\@
.Ldo_rop_\@:
    mov     %\reg, (%_ASM_SP)   /* replace return address with \reg */
    UNWIND_HINT_FUNC
    RET                         /* "returns" to the target in \reg */
.endm
0028 
/*
 * THUNK \reg: emit the global __x86_indirect_thunk_\reg entry, patched at
 * boot via ALTERNATIVE_2 into one of three forms:
 *   - default:                     full RETPOLINE sequence (above);
 *   - X86_FEATURE_RETPOLINE_LFENCE: lfence + direct "jmp *%\reg" (+ int3
 *     to stop straight-line speculation);
 *   - retpolines disabled (ALT_NOT(X86_FEATURE_RETPOLINE)): plain
 *     "jmp *%\reg".
 * ANNOTATE_NOENDBR: these entries are reached by direct JMP/CALL only.
 */
.macro THUNK reg

    .align RETPOLINE_THUNK_SIZE
SYM_INNER_LABEL(__x86_indirect_thunk_\reg, SYM_L_GLOBAL)
    UNWIND_HINT_EMPTY
    ANNOTATE_NOENDBR

    ALTERNATIVE_2 __stringify(RETPOLINE \reg), \
              __stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg; int3), X86_FEATURE_RETPOLINE_LFENCE, \
              __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), ALT_NOT(X86_FEATURE_RETPOLINE)

.endm
0041 
0042 /*
0043  * Despite being an assembler file we can't just use .irp here
0044  * because __KSYM_DEPS__ only uses the C preprocessor and would
0045  * only see one instance of "__x86_indirect_thunk_\reg" rather
0046  * than one per register with the correct names. So we do it
0047  * the simple and nasty way...
0048  *
0049  * Worse, you can only have a single EXPORT_SYMBOL per line,
0050  * and CPP can't insert newlines, so we have to repeat everything
0051  * at least twice.
0052  */
0053 
/* Mark a thunk non-kprobable and export it to modules. */
#define __EXPORT_THUNK(sym) _ASM_NOKPROBE(sym); EXPORT_SYMBOL(sym)
#define EXPORT_THUNK(reg)   __EXPORT_THUNK(__x86_indirect_thunk_ ## reg)

    .align RETPOLINE_THUNK_SIZE
SYM_CODE_START(__x86_indirect_thunk_array)

/* Emit one aligned thunk per GP register, via GEN-for-each-reg.h. */
#define GEN(reg) THUNK reg
#include <asm/GEN-for-each-reg.h>
#undef GEN

    .align RETPOLINE_THUNK_SIZE
SYM_CODE_END(__x86_indirect_thunk_array)

/* Second expansion pass: export every generated thunk symbol. */
#define GEN(reg) EXPORT_THUNK(reg)
#include <asm/GEN-for-each-reg.h>
#undef GEN
0070 
/*
 * This function name is magical and is used by -mfunction-return=thunk-extern
 * for the compiler to generate JMPs to it.
 */
#ifdef CONFIG_RETHUNK

    .section .text.__x86.return_thunk

/*
 * Safety details here pertain to the AMD Zen{1,2} microarchitecture:
 * 1) The RET at __x86_return_thunk must be on a 64 byte boundary, for
 *    alignment within the BTB.
 * 2) The instruction at zen_untrain_ret must contain, and not
 *    end with, the 0xc3 byte of the RET.
 * 3) STIBP must be enabled, or SMT disabled, to prevent the sibling thread
 *    from re-poisoning the BTB prediction.
 *
 * Layout note: ".align 64" + ".skip 63" places zen_untrain_ret at offset
 * 63 of a 64-byte-aligned region, so the single 0xf6 opcode byte below is
 * the last byte before the boundary and __x86_return_thunk (the RET)
 * lands exactly on the next 64-byte boundary — satisfying 1) and 2).
 */
    .align 64
    .skip 63, 0xcc
SYM_FUNC_START_NOALIGN(zen_untrain_ret);

    /*
     * As executed from zen_untrain_ret, this is:
     *
     *   TEST $0xcc, %bl
     *   LFENCE
     *   JMP __x86_return_thunk
     *
     * Executing the TEST instruction has a side effect of evicting any BTB
     * prediction (potentially attacker controlled) attached to the RET, as
     * __x86_return_thunk + 1 isn't an instruction boundary at the moment.
     */
    .byte   0xf6            /* opcode byte of TEST $imm8, r/m8 */

    /*
     * As executed from __x86_return_thunk, this is a plain RET.
     *
     * As part of the TEST above, RET is the ModRM byte, and INT3 the imm8.
     *
     * We subsequently jump backwards and architecturally execute the RET.
     * This creates a correct BTB prediction (type=ret), but in the
     * meantime we suffer Straight Line Speculation (because the type was
     * no branch) which is halted by the INT3.
     *
     * With SMT enabled and STIBP active, a sibling thread cannot poison
     * RET's prediction to a type of its choice, but can evict the
     * prediction due to competitive sharing. If the prediction is
     * evicted, __x86_return_thunk will suffer Straight Line Speculation
     * which will be contained safely by the INT3.
     */
SYM_INNER_LABEL(__x86_return_thunk, SYM_L_GLOBAL)
    ret
    int3
SYM_CODE_END(__x86_return_thunk)

    /*
     * Ensure the TEST decoding / BTB invalidation is complete.
     */
    lfence

    /*
     * Jump back and execute the RET in the middle of the TEST instruction.
     * INT3 is for SLS protection.
     */
    jmp __x86_return_thunk
    int3
SYM_FUNC_END(zen_untrain_ret)
__EXPORT_THUNK(zen_untrain_ret)

EXPORT_SYMBOL(__x86_return_thunk)

#endif /* CONFIG_RETHUNK */