/* SPDX-License-Identifier: GPL-2.0 */

#ifndef _ASM_X86_NOSPEC_BRANCH_H_
#define _ASM_X86_NOSPEC_BRANCH_H_

#include <linux/static_key.h>
#include <linux/objtool.h>
#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/cpufeatures.h>
#include <asm/msr-index.h>
#include <asm/unwind_hints.h>
#include <asm/percpu.h>

#define RETPOLINE_THUNK_SIZE	32

/*
 * Fill the CPU return stack buffer (RSB).
 *
 * Each entry in the RSB, if used for a speculative 'ret', contains an
 * infinite 'pause; lfence; jmp' loop to capture speculative execution.
 *
 * This is required in various cases for retpoline and IBRS-based
 * mitigations for the Spectre variant 2 vulnerability. Sometimes it is
 * used to eliminate potentially bogus entries from the RSB, and sometimes
 * purely to ensure that it doesn't get empty, which on some CPUs would
 * allow predictions from other (unwanted!) sources to be used.
 *
 * Defined as CPP macros so they can be used from both .S files and
 * inline assembly.
 */

#define RSB_CLEAR_LOOPS		32	/* To forcibly overwrite all entries */

/*
 * Common helper for __FILL_RETURN_BUFFER and __FILL_ONE_RETURN.
 */
#define __FILL_RETURN_SLOT			\
	ANNOTATE_INTRA_FUNCTION_CALL;		\
	call	772f;				\
	int3;					\
772:

/*
 * Stuff the entire RSB.
 *
 * Two calls per loop iteration, each with its own speculation trap
 * should its return address end up getting used.
 */
#ifdef CONFIG_X86_64
#define __FILL_RETURN_BUFFER(reg, nr)			\
	mov	$(nr/2), reg;				\
771:							\
	__FILL_RETURN_SLOT				\
	__FILL_RETURN_SLOT				\
	add	$(BITS_PER_LONG/8) * 2, %_ASM_SP;	\
	dec	reg;					\
	jnz	771b;					\
	/* barrier for jnz misprediction */		\
	lfence;
#else
/*
 * i386 doesn't unconditionally have LFENCE, as such it can't
 * do a loop.
 */
#define __FILL_RETURN_BUFFER(reg, nr)			\
	.rept nr;					\
	__FILL_RETURN_SLOT;				\
	.endr;						\
	add	$(BITS_PER_LONG/8) * nr, %_ASM_SP;
#endif

/*
 * Stuff a single RSB slot.
 *
 * To mitigate Post-Barrier RSB speculation, one CALL instruction must be
 * forced to retire before letting a RET instruction execute.
 *
 * On PBRSB-vulnerable CPUs, it is not safe for a RET to be executed
 * before this point.
 */
#define __FILL_ONE_RETURN				\
	__FILL_RETURN_SLOT				\
	add	$(BITS_PER_LONG/8), %_ASM_SP;		\
	lfence;
#ifdef __ASSEMBLY__

/*
 * This should be used immediately before an indirect jump/call. It tells
 * objtool the subsequent indirect jump/call is vouched safe for retpoline
 * builds.
 */
.macro ANNOTATE_RETPOLINE_SAFE
.Lannotate_\@:
	.pushsection .discard.retpoline_safe
	_ASM_PTR .Lannotate_\@
	.popsection
.endm

/*
 * (ab)use RETPOLINE_SAFE on RET to annotate away 'bare' RET instructions
 * for the purposes of objtool's RETBleed (unret) validation.
 */
#define ANNOTATE_UNRET_SAFE ANNOTATE_RETPOLINE_SAFE

/*
 * Abuse ANNOTATE_RETPOLINE_SAFE on a NOP to indicate UNRET_END; this
 * should eventually become its own annotation.
 */
.macro ANNOTATE_UNRET_END
#ifdef CONFIG_DEBUG_ENTRY
	ANNOTATE_RETPOLINE_SAFE
	nop
#endif
.endm

/*
 * Emits a conditional CS prefix that is compatible with
 * -mindirect-branch-cs-prefix (the prefix is only emitted for
 * branch targets in registers r8-r15).
 */
.macro __CS_PREFIX reg:req
	.irp rs,r8,r9,r10,r11,r12,r13,r14,r15
	.ifc \reg,\rs
	.byte 0x2e
	.endif
	.endr
.endm

/*
 * JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
 * indirect jmp/call which may be susceptible to the Spectre variant 2
 * attack.
 */
.macro JMP_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
	__CS_PREFIX \reg
	jmp	__x86_indirect_thunk_\reg
#else
	jmp	*%\reg
	int3
#endif
.endm

.macro CALL_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
	__CS_PREFIX \reg
	call	__x86_indirect_thunk_\reg
#else
	call	*%\reg
#endif
.endm

/*
 * A simpler FILL_RETURN_BUFFER macro. Don't make people use the CPP
 * monstrosity above, manually.
 */
.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2=ALT_NOT(X86_FEATURE_ALWAYS)
	ALTERNATIVE_2 "jmp .Lskip_rsb_\@", \
		__stringify(__FILL_RETURN_BUFFER(\reg,\nr)), \ftr, \
		__stringify(__FILL_ONE_RETURN), \ftr2

.Lskip_rsb_\@:
.endm
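
/*
 * Illustrative only: a typical invocation from entry or context-switch
 * assembly looks roughly like the line below. The scratch register and
 * feature bit shown are an assumption for the example, not mandated by
 * this header.
 *
 *	FILL_RETURN_BUFFER %_ASM_AX, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_CTXSW
 */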

#ifdef CONFIG_CPU_UNRET_ENTRY
#define CALL_ZEN_UNTRAIN_RET	"call zen_untrain_ret"
#else
#define CALL_ZEN_UNTRAIN_RET	""
#endif

/*
 * Mitigate RETBleed for AMD/Hygon Zen uarch. Requires KERNEL CR3 because the
 * return thunk isn't mapped into the userspace tables (then again, AMD
 * typically has NO_MELTDOWN).
 *
 * While zen_untrain_ret() doesn't clobber anything but requires stack,
 * entry_ibpb() will clobber AX, CX, DX.
 *
 * As such, this must be placed after every *SWITCH_TO_KERNEL_CR3 at
 * the point where we have a stack but before any RET instruction.
 */
.macro UNTRAIN_RET
#if defined(CONFIG_CPU_UNRET_ENTRY) || defined(CONFIG_CPU_IBPB_ENTRY)
	ANNOTATE_UNRET_END
	ALTERNATIVE_2 "",						\
		      CALL_ZEN_UNTRAIN_RET, X86_FEATURE_UNRET,		\
		      "call entry_ibpb", X86_FEATURE_ENTRY_IBPB
#endif
.endm

#else /* __ASSEMBLY__ */

#define ANNOTATE_RETPOLINE_SAFE					\
	"999:\n\t"						\
	".pushsection .discard.retpoline_safe\n\t"		\
	_ASM_PTR " 999b\n\t"					\
	".popsection\n\t"

typedef u8 retpoline_thunk_t[RETPOLINE_THUNK_SIZE];
extern retpoline_thunk_t __x86_indirect_thunk_array[];

extern void __x86_return_thunk(void);
extern void zen_untrain_ret(void);
extern void entry_ibpb(void);

#ifdef CONFIG_RETPOLINE

#define GEN(reg) \
	extern retpoline_thunk_t __x86_indirect_thunk_ ## reg;
#include <asm/GEN-for-each-reg.h>
#undef GEN

#ifdef CONFIG_X86_64

/*
 * Inline asm uses the %V modifier which is only in newer GCC
 * which is ensured when CONFIG_RETPOLINE is defined.
 */
# define CALL_NOSPEC						\
	ALTERNATIVE_2(						\
	ANNOTATE_RETPOLINE_SAFE					\
	"call *%[thunk_target]\n",				\
	"call __x86_indirect_thunk_%V[thunk_target]\n",		\
	X86_FEATURE_RETPOLINE,					\
	"lfence;\n"						\
	ANNOTATE_RETPOLINE_SAFE					\
	"call *%[thunk_target]\n",				\
	X86_FEATURE_RETPOLINE_LFENCE)

# define THUNK_TARGET(addr) [thunk_target] "r" (addr)

#else /* CONFIG_X86_64 */

/*
 * For i386 we use the original ret-equivalent retpoline, because
 * otherwise we'll run out of registers. We don't care about CET
 * here, anyway.
 */
# define CALL_NOSPEC						\
	ALTERNATIVE_2(						\
	ANNOTATE_RETPOLINE_SAFE					\
	"call *%[thunk_target]\n",				\
	"	jmp    904f;\n"					\
	"	.align 16\n"					\
	"901:	call   903f;\n"					\
	"902:	pause;\n"					\
	"	lfence;\n"					\
	"	jmp    902b;\n"					\
	"	.align 16\n"					\
	"903:	lea    4(%%esp), %%esp;\n"			\
	"	pushl  %[thunk_target];\n"			\
	"	ret;\n"						\
	"	.align 16\n"					\
	"904:	call   901b;\n",				\
	X86_FEATURE_RETPOLINE,					\
	"lfence;\n"						\
	ANNOTATE_RETPOLINE_SAFE					\
	"call *%[thunk_target]\n",				\
	X86_FEATURE_RETPOLINE_LFENCE)

# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#endif /* CONFIG_X86_64 */
#else /* CONFIG_RETPOLINE */
# define CALL_NOSPEC "call *%[thunk_target]\n"
# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#endif /* CONFIG_RETPOLINE */
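
/*
 * Illustrative only: CALL_NOSPEC/THUNK_TARGET are asm-template and
 * constraint fragments for calling through a function pointer from inline
 * asm. The variable names below ('fn', 'ret') are an assumption for the
 * example, not something defined in this header.
 *
 *	asm volatile(CALL_NOSPEC
 *		     : "=a" (ret)
 *		     : THUNK_TARGET(fn)
 *		     : "memory");
 */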

/* The Spectre v2 mitigations */
enum spectre_v2_mitigation {
	SPECTRE_V2_NONE,
	SPECTRE_V2_RETPOLINE,
	SPECTRE_V2_LFENCE,
	SPECTRE_V2_EIBRS,
	SPECTRE_V2_EIBRS_RETPOLINE,
	SPECTRE_V2_EIBRS_LFENCE,
	SPECTRE_V2_IBRS,
};

/* The Spectre v2 user-space mitigations (indirect branch speculation control) */
enum spectre_v2_user_mitigation {
	SPECTRE_V2_USER_NONE,
	SPECTRE_V2_USER_STRICT,
	SPECTRE_V2_USER_STRICT_PREFERRED,
	SPECTRE_V2_USER_PRCTL,
	SPECTRE_V2_USER_SECCOMP,
};

/* The Speculative Store Bypass disable variants */
enum ssb_mitigation {
	SPEC_STORE_BYPASS_NONE,
	SPEC_STORE_BYPASS_DISABLE,
	SPEC_STORE_BYPASS_PRCTL,
	SPEC_STORE_BYPASS_SECCOMP,
};

extern char __indirect_thunk_start[];
extern char __indirect_thunk_end[];

static __always_inline
void alternative_msr_write(unsigned int msr, u64 val, unsigned int feature)
{
	asm volatile(ALTERNATIVE("", "wrmsr", %c[feature])
		: : "c" (msr),
		    "a" ((u32)val),
		    "d" ((u32)(val >> 32)),
		    [feature] "i" (feature)
		: "memory");
}

static inline void indirect_branch_prediction_barrier(void)
{
	u64 val = PRED_CMD_IBPB;

	alternative_msr_write(MSR_IA32_PRED_CMD, val, X86_FEATURE_USE_IBPB);
}

/* The Intel SPEC CTRL MSR base value cache */
extern u64 x86_spec_ctrl_base;
DECLARE_PER_CPU(u64, x86_spec_ctrl_current);
extern void write_spec_ctrl_current(u64 val, bool force);
extern u64 spec_ctrl_current(void);

/*
 * With retpoline, we must use IBRS to restrict branch prediction
 * before calling into firmware.
 *
 * (Implemented as CPP macros due to header hell.)
 */
#define firmware_restrict_branch_speculation_start()			\
do {									\
	preempt_disable();						\
	alternative_msr_write(MSR_IA32_SPEC_CTRL,			\
			      spec_ctrl_current() | SPEC_CTRL_IBRS,	\
			      X86_FEATURE_USE_IBRS_FW);			\
	alternative_msr_write(MSR_IA32_PRED_CMD, PRED_CMD_IBPB,		\
			      X86_FEATURE_USE_IBPB_FW);			\
} while (0)

#define firmware_restrict_branch_speculation_end()			\
do {									\
	alternative_msr_write(MSR_IA32_SPEC_CTRL,			\
			      spec_ctrl_current(),			\
			      X86_FEATURE_USE_IBRS_FW);			\
	preempt_enable();						\
} while (0)
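
/*
 * Illustrative only: the intended usage pattern (an assumption based on
 * the names and the preempt_disable()/preempt_enable() pairing above,
 * not something defined in this header) is to bracket firmware calls:
 *
 *	firmware_restrict_branch_speculation_start();
 *	... call into EFI/BIOS firmware ...
 *	firmware_restrict_branch_speculation_end();
 */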

DECLARE_STATIC_KEY_FALSE(switch_to_cond_stibp);
DECLARE_STATIC_KEY_FALSE(switch_mm_cond_ibpb);
DECLARE_STATIC_KEY_FALSE(switch_mm_always_ibpb);

DECLARE_STATIC_KEY_FALSE(mds_user_clear);
DECLARE_STATIC_KEY_FALSE(mds_idle_clear);

DECLARE_STATIC_KEY_FALSE(switch_mm_cond_l1d_flush);

DECLARE_STATIC_KEY_FALSE(mmio_stale_data_clear);

#include <asm/segment.h>

/**
 * mds_clear_cpu_buffers - Mitigation for MDS and TAA vulnerability
 *
 * This uses the otherwise unused and obsolete VERW instruction in
 * combination with microcode which triggers a CPU buffer flush when the
 * instruction is executed.
 */
static __always_inline void mds_clear_cpu_buffers(void)
{
	static const u16 ds = __KERNEL_DS;

	/*
	 * Has to be the memory-operand variant because only that
	 * guarantees the CPU buffer flush functionality according to
	 * documentation. The register-operand variant does not.
	 * Works with any segment selector, but a valid writable
	 * data segment is the fastest variant.
	 *
	 * "cc" clobber is required because VERW modifies ZF.
	 */
	asm volatile("verw %[ds]" : : [ds] "m" (ds) : "cc");
}

/**
 * mds_user_clear_cpu_buffers - Mitigation for MDS and TAA vulnerability
 *
 * Clear CPU buffers if the corresponding static key is enabled
 */
static __always_inline void mds_user_clear_cpu_buffers(void)
{
	if (static_branch_likely(&mds_user_clear))
		mds_clear_cpu_buffers();
}

/**
 * mds_idle_clear_cpu_buffers - Mitigation for MDS vulnerability
 *
 * Clear CPU buffers if the corresponding static key is enabled
 */
static inline void mds_idle_clear_cpu_buffers(void)
{
	if (static_branch_likely(&mds_idle_clear))
		mds_clear_cpu_buffers();
}

#endif /* __ASSEMBLY__ */

#endif /* _ASM_X86_NOSPEC_BRANCH_H_ */