/* SPDX-License-Identifier: GPL-2.0 */
#include <linux/jump_label.h>
#include <asm/unwind_hints.h>
#include <asm/cpufeatures.h>
#include <asm/page_types.h>
#include <asm/percpu.h>
#include <asm/asm-offsets.h>
#include <asm/processor-flags.h>
#include <asm/ptrace-abi.h>
#include <asm/msr.h>
#include <asm/nospec-branch.h>
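
/*
 * x86-64 function call convention used by this entry code (summary):
 *
 *  - Arguments are passed in %rdi, %rsi, %rdx, %rcx, %r8, %r9; these and
 *    %r10/%r11 are caller-saved (callee-clobbered).
 *  - %rbx, %rbp and %r12-%r15 are callee-saved.
 *  - Return values come back in %rax (plus %rdx for wide returns).
 *  - %rsp is invariant across normal calls; %rflags is clobbered.
 *
 * The macros below save and restore this register state as a struct
 * pt_regs frame on the kernel stack.
 */
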
#ifdef CONFIG_X86_64
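
/*
 * PUSH_REGS builds a struct pt_regs-style register frame on the stack for
 * the 64-bit entry code.  With save_ret=1 the return address that is already
 * on the stack is preserved and re-pushed on top of the completed frame,
 * while %rdi and %rsi still land in their pt_regs slots.
 */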
.macro PUSH_REGS rdx=%rdx rcx=%rcx rax=%rax save_ret=0
	.if \save_ret
	pushq	%rsi		/* pt_regs->si */
	movq	8(%rsp), %rsi	/* temporarily store the return address in %rsi */
	movq	%rdi, 8(%rsp)	/* pt_regs->di (overwriting original return address) */
	.else
	pushq	%rdi		/* pt_regs->di */
	pushq	%rsi		/* pt_regs->si */
	.endif
	pushq	\rdx		/* pt_regs->dx */
	pushq	\rcx		/* pt_regs->cx */
	pushq	\rax		/* pt_regs->ax */
	pushq	%r8		/* pt_regs->r8 */
	pushq	%r9		/* pt_regs->r9 */
	pushq	%r10		/* pt_regs->r10 */
	pushq	%r11		/* pt_regs->r11 */
	pushq	%rbx		/* pt_regs->rbx */
	pushq	%rbp		/* pt_regs->rbp */
	pushq	%r12		/* pt_regs->r12 */
	pushq	%r13		/* pt_regs->r13 */
	pushq	%r14		/* pt_regs->r14 */
	pushq	%r15		/* pt_regs->r15 */
	UNWIND_HINT_REGS

	.if \save_ret
	pushq	%rsi		/* return address on top of stack */
	.endif
.endm

.macro CLEAR_REGS
	/*
	 * Sanitize registers of values that a speculation attack might
	 * otherwise want to exploit.  The lower registers are likely
	 * clobbered well before they could be put to use in a speculative
	 * execution gadget.
	 */
	xorl	%esi,  %esi	/* nospec si  */
	xorl	%edx,  %edx	/* nospec dx  */
	xorl	%ecx,  %ecx	/* nospec cx  */
	xorl	%r8d,  %r8d	/* nospec r8  */
	xorl	%r9d,  %r9d	/* nospec r9  */
	xorl	%r10d, %r10d	/* nospec r10 */
	xorl	%r11d, %r11d	/* nospec r11 */
	xorl	%ebx,  %ebx	/* nospec rbx */
	xorl	%ebp,  %ebp	/* nospec rbp */
	xorl	%r12d, %r12d	/* nospec r12 */
	xorl	%r13d, %r13d	/* nospec r13 */
	xorl	%r14d, %r14d	/* nospec r14 */
	xorl	%r15d, %r15d	/* nospec r15 */

.endm

.macro PUSH_AND_CLEAR_REGS rdx=%rdx rcx=%rcx rax=%rax save_ret=0
	PUSH_REGS rdx=\rdx, rcx=\rcx, rax=\rax, save_ret=\save_ret
	CLEAR_REGS
.endm

.macro POP_REGS pop_rdi=1
	popq	%r15
	popq	%r14
	popq	%r13
	popq	%r12
	popq	%rbp
	popq	%rbx
	popq	%r11
	popq	%r10
	popq	%r9
	popq	%r8
	popq	%rax
	popq	%rcx
	popq	%rdx
	popq	%rsi
	.if \pop_rdi
	popq	%rdi
	.endif
.endm

#ifdef CONFIG_PAGE_TABLE_ISOLATION
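
/*
 * With PAGE_TABLE_ISOLATION the kernel and user page tables live in the two
 * 4k halves of an 8k PGD allocation.  Bit PAGE_SHIFT (12) of the CR3 value
 * selects which half is in use, and a separate CR3 bit selects the user PCID
 * when PCID is enabled.  The masks below flip between the two views.
 */
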
#define PTI_USER_PGTABLE_BIT		PAGE_SHIFT
#define PTI_USER_PGTABLE_MASK		(1 << PTI_USER_PGTABLE_BIT)
#define PTI_USER_PCID_BIT		X86_CR3_PTI_PCID_USER_BIT
#define PTI_USER_PCID_MASK		(1 << PTI_USER_PCID_BIT)
#define PTI_USER_PGTABLE_AND_PCID_MASK	(PTI_USER_PCID_MASK | PTI_USER_PGTABLE_MASK)

.macro SET_NOFLUSH_BIT reg:req
	bts	$X86_CR3_PCID_NOFLUSH_BIT, \reg
.endm

.macro ADJUST_KERNEL_CR3 reg:req
	ALTERNATIVE "", "SET_NOFLUSH_BIT \reg", X86_FEATURE_PCID
	/* Clear the user PCID and user page-table bits: point CR3 at the kernel page tables */
	andq	$(~PTI_USER_PGTABLE_AND_PCID_MASK), \reg
.endm

.macro SWITCH_TO_KERNEL_CR3 scratch_reg:req
	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
	mov	%cr3, \scratch_reg
	ADJUST_KERNEL_CR3 \scratch_reg
	mov	\scratch_reg, %cr3
.Lend_\@:
.endm

#define THIS_CPU_user_pcid_flush_mask \
	PER_CPU_VAR(cpu_tlbstate) + TLB_STATE_user_pcid_flush_mask

.macro SWITCH_TO_USER_CR3_NOSTACK scratch_reg:req scratch_reg2:req
	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI
	mov	%cr3, \scratch_reg

	ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID

	/*
	 * Test if the user ASID we are about to resume has a TLB flush
	 * pending: its bit in this CPU's user_pcid_flush_mask.
	 */
	movq	\scratch_reg, \scratch_reg2
	andq	$(0x7FF), \scratch_reg		/* isolate the ASID (low 11 bits) */
	bt	\scratch_reg, THIS_CPU_user_pcid_flush_mask
	jnc	.Lnoflush_\@

	/* Flush needed: clear the pending bit and write CR3 without NOFLUSH */
	btr	\scratch_reg, THIS_CPU_user_pcid_flush_mask
	movq	\scratch_reg2, \scratch_reg
	jmp	.Lwrcr3_pcid_\@

.Lnoflush_\@:
	movq	\scratch_reg2, \scratch_reg
	SET_NOFLUSH_BIT \scratch_reg

.Lwrcr3_pcid_\@:
	/* Flip the ASID to the user version */
	orq	$(PTI_USER_PCID_MASK), \scratch_reg

.Lwrcr3_\@:
	/* Flip the PGD to the user version */
	orq	$(PTI_USER_PGTABLE_MASK), \scratch_reg
	mov	\scratch_reg, %cr3
.Lend_\@:
.endm

.macro SWITCH_TO_USER_CR3_STACK scratch_reg:req
	pushq	%rax
	SWITCH_TO_USER_CR3_NOSTACK scratch_reg=\scratch_reg scratch_reg2=%rax
	popq	%rax
.endm

.macro SAVE_AND_SWITCH_TO_KERNEL_CR3 scratch_reg:req save_reg:req
	ALTERNATIVE "jmp .Ldone_\@", "", X86_FEATURE_PTI
	movq	%cr3, \scratch_reg
	movq	\scratch_reg, \save_reg
	/*
	 * Test the user page-table bit.  If it is clear, CR3 already points
	 * at the kernel page tables and there is nothing to do.
	 */
	bt	$PTI_USER_PGTABLE_BIT, \scratch_reg
	jnc	.Ldone_\@

	ADJUST_KERNEL_CR3 \scratch_reg
	movq	\scratch_reg, %cr3

.Ldone_\@:
.endm

.macro RESTORE_CR3 scratch_reg:req save_reg:req
	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_PTI

	ALTERNATIVE "jmp .Lwrcr3_\@", "", X86_FEATURE_PCID

	/*
	 * If the saved CR3 points at the kernel page tables, there is no
	 * user ASID to flush: restore it with NOFLUSH set, since kernel
	 * mappings are flushed explicitly elsewhere.
	 */
	bt	$PTI_USER_PGTABLE_BIT, \save_reg
	jnc	.Lnoflush_\@

	/*
	 * Check if there is a pending TLB flush for the user ASID we are
	 * about to restore.
	 */
	movq	\save_reg, \scratch_reg
	andq	$(0x7FF), \scratch_reg		/* isolate the ASID (low 11 bits) */
	bt	\scratch_reg, THIS_CPU_user_pcid_flush_mask
	jnc	.Lnoflush_\@

	btr	\scratch_reg, THIS_CPU_user_pcid_flush_mask
	jmp	.Lwrcr3_\@

.Lnoflush_\@:
	SET_NOFLUSH_BIT \save_reg

.Lwrcr3_\@:
	/*
	 * The CR3 write could be avoided when not changing its value, but
	 * that would require a CR3 read and an extra scratch register.
	 */
	movq	\save_reg, %cr3
.Lend_\@:
.endm

#else /* !CONFIG_PAGE_TABLE_ISOLATION */

/* Without PAGE_TABLE_ISOLATION the CR3 switching macros are no-ops. */
.macro SWITCH_TO_KERNEL_CR3 scratch_reg:req
.endm
.macro SWITCH_TO_USER_CR3_NOSTACK scratch_reg:req scratch_reg2:req
.endm
.macro SWITCH_TO_USER_CR3_STACK scratch_reg:req
.endm
.macro SAVE_AND_SWITCH_TO_KERNEL_CR3 scratch_reg:req save_reg:req
.endm
.macro RESTORE_CR3 scratch_reg:req save_reg:req
.endm

#endif /* CONFIG_PAGE_TABLE_ISOLATION */
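
/*
 * IBRS (Indirect Branch Restricted Speculation) handling for the Spectre v2
 * mitigation.  IBRS_ENTER sets SPEC_CTRL_IBRS by writing MSR_IA32_SPEC_CTRL
 * on kernel entry; it clobbers %rax, %rcx and %rdx.  The optional \save_reg
 * argument preserves the previous MSR value so the paranoid exit paths can
 * restore it with IBRS_EXIT.
 */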
.macro IBRS_ENTER save_reg
#ifdef CONFIG_CPU_IBRS_ENTRY
	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
	movl	$MSR_IA32_SPEC_CTRL, %ecx

	.ifnb \save_reg
	rdmsr
	shl	$32, %rdx
	or	%rdx, %rax		/* combine EDX:EAX into a 64-bit value */
	mov	%rax, \save_reg
	test	$SPEC_CTRL_IBRS, %eax
	jz	.Ldo_wrmsr_\@
	lfence				/* IBRS already set: skip the MSR write */
	jmp	.Lend_\@
.Ldo_wrmsr_\@:
	.endif

	movq	PER_CPU_VAR(x86_spec_ctrl_current), %rdx
	movl	%edx, %eax
	shr	$32, %rdx
	wrmsr
.Lend_\@:
#endif
.endm
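
/*
 * IBRS_EXIT is the counterpart to IBRS_ENTER: it restores the saved
 * MSR_IA32_SPEC_CTRL value when \save_reg is given, otherwise it writes the
 * current value with SPEC_CTRL_IBRS cleared.  Clobbers %rax, %rcx and %rdx.
 */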
.macro IBRS_EXIT save_reg
#ifdef CONFIG_CPU_IBRS_ENTRY
	ALTERNATIVE "jmp .Lend_\@", "", X86_FEATURE_KERNEL_IBRS
	movl	$MSR_IA32_SPEC_CTRL, %ecx

	.ifnb \save_reg
	mov	\save_reg, %rdx			/* restore the saved MSR value */
	.else
	movq	PER_CPU_VAR(x86_spec_ctrl_current), %rdx
	andl	$(~SPEC_CTRL_IBRS), %edx	/* clear IBRS */
	.endif

	movl	%edx, %eax
	shr	$32, %rdx
	wrmsr
.Lend_\@:
#endif
.endm
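
/*
 * LFENCE speculation barriers around the conditional SWAPGS on kernel entry:
 * FENCE_SWAPGS_USER_ENTRY is used on the path that executes SWAPGS (entry
 * from user space) and FENCE_SWAPGS_KERNEL_ENTRY on the path that skips it
 * (entry from kernel space), so the CPU cannot speculate past the wrong side
 * of that decision.  The fences are enabled by the
 * X86_FEATURE_FENCE_SWAPGS_USER / X86_FEATURE_FENCE_SWAPGS_KERNEL flags.
 */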
.macro FENCE_SWAPGS_USER_ENTRY
	ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_USER
.endm
.macro FENCE_SWAPGS_KERNEL_ENTRY
	ALTERNATIVE "", "lfence", X86_FEATURE_FENCE_SWAPGS_KERNEL
.endm

.macro STACKLEAK_ERASE_NOCLOBBER
#ifdef CONFIG_GCC_PLUGIN_STACKLEAK
	PUSH_AND_CLEAR_REGS
	call	stackleak_erase
	POP_REGS
#endif
.endm

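/*
 * Save the current GSBASE value in \save_reg and switch GSBASE to this
 * CPU's per-CPU base.  Uses the FSGSBASE instructions (RDGSBASE/WRGSBASE).
 */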
.macro SAVE_AND_SET_GSBASE scratch_reg:req save_reg:req
	rdgsbase \save_reg
	GET_PERCPU_BASE \scratch_reg
	wrgsbase \scratch_reg
.endm

#else /* !CONFIG_X86_64 */
# undef		UNWIND_HINT_IRET_REGS
# define	UNWIND_HINT_IRET_REGS
#endif /* CONFIG_X86_64 */

.macro STACKLEAK_ERASE
#ifdef CONFIG_GCC_PLUGIN_STACKLEAK
	call	stackleak_erase
#endif
.endm

#ifdef CONFIG_SMP
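
/*
 * The CPU/node number is encoded in the limit field of the __CPUNODE_SEG
 * GDT descriptor; LSL reads that limit into \reg.
 */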
.macro LOAD_CPU_AND_NODE_SEG_LIMIT reg:req
	movq	$__CPUNODE_SEG, \reg
	lsl	\reg, \reg
.endm
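
/*
 * Fetch this CPU's per-CPU base address into \reg without going through
 * %gs (these macros run while GSBASE is still being set up): extract the
 * CPU number from the CPUNODE segment limit and use it to index
 * __per_cpu_offset[].
 */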
.macro GET_PERCPU_BASE reg:req
	LOAD_CPU_AND_NODE_SEG_LIMIT \reg
	andq	$VDSO_CPUNODE_MASK, \reg
	movq	__per_cpu_offset(, \reg, 8), \reg
.endm

#else /* !CONFIG_SMP */

.macro GET_PERCPU_BASE reg:req
	movq	pcpu_unit_offsets(%rip), \reg
.endm

#endif /* CONFIG_SMP */