/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>
#include <asm/processor-flags.h>
#include <linux/irqflags.h>
#include <linux/jump_label.h>

/*
 * The compiler should not reorder volatile asm statements with respect to each
 * other: they should execute in program order. However GCC 4.9.x and 5.x have
 * a bug (which was fixed in 8.1, 7.3 and 6.5) where they might reorder
 * volatile asm. The write functions are not affected since they have memory
 * clobbers preventing reordering. To prevent reads from being reordered with
 * respect to writes, use a dummy memory operand.
 */

#define __FORCE_ORDER "m"(*(unsigned int *)0x1000UL)

void native_write_cr0(unsigned long val);

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val) : __FORCE_ORDER);
	return val;
}
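
/*
 * Illustrative sketch (not part of this header): a caller could test a
 * CR0 bit using the flag definitions from <asm/processor-flags.h>, e.g.
 * to check whether supervisor write protection is enabled:
 *
 *	if (native_read_cr0() & X86_CR0_WP)
 *		pr_debug("CR0.WP is set\n");
 */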

static __always_inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val) : __FORCE_ORDER);
	return val;
}

static __always_inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val) : "memory");
}

static inline unsigned long __native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val) : __FORCE_ORDER);
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val) : "memory");
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * CR4 does not exist on all 32-bit CPUs (e.g. the 486), and
	 * reading it faults there.  The exception table fixup makes the
	 * fault benign, and the "0" input operand makes a non-existent
	 * CR4 read back as 0.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val) : "0" (0), __FORCE_ORDER);
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val) : __FORCE_ORDER);
#endif
	return val;
}

void native_write_cr4(unsigned long val);

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 rdpkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction.  Places PKRU contents in to EAX,
	 * clears EDX and requires that ecx=0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void wrpkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction.  Loads contents in EAX to PKRU,
	 * requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}
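
/*
 * Illustrative sketch (not part of this header): PKRU holds an
 * access-disable and a write-disable bit per protection key, at bits
 * 2*key and 2*key + 1 respectively.  Revoking all data access for,
 * say, pkey 1 could look like:
 *
 *	wrpkru(rdpkru() | (1u << (2 * 1)));
 */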

#else
static inline u32 rdpkru(void)
{
	return 0;
}

static inline void wrpkru(u32 pkru)
{
}
#endif

static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern asmlinkage void asm_load_gs_index(unsigned int selector);

static inline void native_load_gs_index(unsigned int selector)
{
	unsigned long flags;

	local_irq_save(flags);
	asm_load_gs_index(selector);
	local_irq_restore(flags);
}

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

#ifdef CONFIG_PARAVIRT_XXL
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static __always_inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static __always_inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

/*
 * Careful!  CR3 contains more than just an address.  You probably want
 * read_cr3_pa() instead.
 */
static inline unsigned long __read_cr3(void)
{
	return __native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

static inline void load_gs_index(unsigned int selector)
{
#ifdef CONFIG_X86_64
	native_load_gs_index(selector);
#else
	loadsegment(gs, selector);
#endif
}

#endif /* CONFIG_PARAVIRT_XXL */

static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte 0x3e; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

static inline void clwb(volatile void *__p)
{
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte 0x3e; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])",
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30",	/* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}
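
/*
 * Illustrative sketch (not part of this header): writing back a whole
 * buffer one line at a time, assuming a 64-byte cache line (real code
 * should consult boot_cpu_data.x86_clflush_size, or simply use
 * clflush_cache_range()):
 *
 *	for (p = buf; p < buf + size; p += 64)
 *		clwb(p);
 */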

#define nop() asm volatile ("nop")

static inline void serialize(void)
{
	/* Instruction opcode for SERIALIZE; supported in binutils >= 2.35. */
	asm volatile(".byte 0xf, 0x1, 0xe8" ::: "memory");
}

/* The dst parameter must be 64-bytes aligned */
static inline void movdir64b(void __iomem *dst, const void *src)
{
	const struct { char _[64]; } *__src = src;
	struct { char _[64]; } __iomem *__dst = dst;

	/*
	 * MOVDIR64B %(rdx), rax.
	 *
	 * Both __src and __dst must be memory constraints in order to tell the
	 * compiler that no other memory accesses should be reordered around
	 * this one.
	 *
	 * Also, both must be supplied as lvalues because this tells
	 * the compiler what the object is (its size) the instruction accesses.
	 * I.e., not the pointers but what they point to, thus the deref'ing '*'.
	 */
	asm volatile(".byte 0x66, 0x0f, 0x38, 0xf8, 0x02"
		     : "+m" (*__dst)
		     :  "m" (*__src), "a" (__dst), "d" (__src));
}
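
/*
 * Illustrative sketch (not part of this header): a hypothetical driver
 * posting a 64-byte descriptor to an ioremap()ed device portal; the
 * struct, helper, and portal names below are made up for illustration:
 *
 *	struct hw_desc desc __aligned(64);
 *
 *	fill_desc(&desc);
 *	movdir64b(wq_portal, &desc);
 */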

/**
 * enqcmds - Enqueue a command in supervisor (CPL0) mode
 * @dst: destination, in MMIO space (must be 512-bit aligned)
 * @src: 512 bits memory operand
 *
 * The ENQCMDS instruction allows software to write a 512-bit command to
 * a 512-bit-aligned special MMIO region that supports the instruction.
 *
 * The ENQCMDS instruction returns command submission status in
 * EFLAGS.ZF: ZF clear indicates the command was accepted, ZF set
 * indicates it was not (e.g. the queue was full).
 *
 * Returns 0 on success, -EAGAIN if the device did not accept the command.
 */
static inline int enqcmds(void __iomem *dst, const void *src)
{
	const struct { char _[64]; } *__src = src;
	struct { char _[64]; } __iomem *__dst = dst;
	bool zf;

	/*
	 * ENQCMDS %(rdx), rax
	 *
	 * See movdir64b()'s comment on operand specification.
	 */
	asm volatile(".byte 0xf3, 0x0f, 0x38, 0xf8, 0x02, 0x66, 0x90"
		     CC_SET(z)
		     : CC_OUT(z) (zf), "+m" (*__dst)
		     : "m" (*__src), "a" (__dst), "d" (__src));

	/* Submission failure is indicated via EFLAGS.ZF=1 */
	if (zf)
		return -EAGAIN;

	return 0;
}
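
/*
 * Illustrative sketch (not part of this header): since enqcmds() can
 * fail transiently when the work queue is full, a caller might retry a
 * bounded number of times:
 *
 *	int submit_desc(void __iomem *portal, const void *desc)
 *	{
 *		int retries = 10;	// arbitrary bound for illustration
 *
 *		while (retries--) {
 *			if (!enqcmds(portal, desc))
 *				return 0;
 *			cpu_relax();
 *		}
 *		return -EAGAIN;
 *	}
 */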

static inline void tile_release(void)
{
	/*
	 * Instruction opcode for TILERELEASE; supported in binutils
	 * version >= 2.36.
	 */
	asm volatile(".byte 0xc4, 0xe2, 0x78, 0x49, 0xc0");
}

#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */