// SPDX-License-Identifier: GPL-2.0
/*
 * User address space access functions.
 * The non inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/export.h>
#include <linux/uaccess.h>
#include <asm/asm.h>

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif

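/*
 * A bulk "rep movsl" copy only wins when source and destination are
 * similarly aligned.  For copies of 64 bytes or more, require that the
 * two addresses agree in the bits selected by the boot-time
 * movsl_mask.mask; if they differ, report the copy as not OK so the
 * caller falls back to the unrolled variant.
 */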
static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
		return 0;
#endif
	return 1;
}
#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))

/*
 * Zero Userspace
 */

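/*
 * Clear @size bytes at the user pointer @addr: "rep stosl" stores
 * size/4 zero dwords, then "rep stosb" stores the remaining
 * size & 3 bytes.  On a fault, the exception table entries recompute
 * the number of bytes left to clear so the caller can return it.
 */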
#define __do_clear_user(addr, size)				\
do {								\
	int __d0;						\
	might_fault();						\
	__asm__ __volatile__(					\
		ASM_STAC "\n"					\
		"0: rep; stosl\n"				\
		" movl %2,%0\n"					\
		"1: rep; stosb\n"				\
		"2: " ASM_CLAC "\n"				\
		_ASM_EXTABLE_TYPE_REG(0b, 2b, EX_TYPE_UCOPY_LEN4, %2) \
		_ASM_EXTABLE_UA(1b, 2b)				\
		: "=&c"(size), "=&D" (__d0)			\
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \
} while (0)

/**
 * clear_user - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Return: number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
	might_fault();
	if (access_ok(to, n))
		__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(clear_user);
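/*
 * Typical use, as an illustrative sketch only (the buffer and length
 * names below are made up, not taken from this file):
 *
 *	if (clear_user(ubuf + copied, len - copied))
 *		return -EFAULT;
 *
 * i.e. a nonzero return means part of the range could not be cleared.
 */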

/**
 * __clear_user - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Return: number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
	__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(__clear_user);

#ifdef CONFIG_X86_INTEL_USERCOPY
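/*
 * Unrolled copy to user space in 64-byte blocks.  The leading loads of
 * 32(%4) and 64(%4) touch the source ahead of the copy (a software
 * prefetch) so the eight load/store pairs that follow hit warm
 * cachelines; the sub-64-byte remainder is then moved with
 * rep movsl/movsb.  Returns the number of bytes NOT copied.
 */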
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		" .align 2,0x90\n"
		"1: movl 32(%4), %%eax\n"
		" cmpl $67, %0\n"
		" jbe 3f\n"
		"2: movl 64(%4), %%eax\n"
		" .align 2,0x90\n"
		"3: movl 0(%4), %%eax\n"
		"4: movl 4(%4), %%edx\n"
		"5: movl %%eax, 0(%3)\n"
		"6: movl %%edx, 4(%3)\n"
		"7: movl 8(%4), %%eax\n"
		"8: movl 12(%4),%%edx\n"
		"9: movl %%eax, 8(%3)\n"
		"10: movl %%edx, 12(%3)\n"
		"11: movl 16(%4), %%eax\n"
		"12: movl 20(%4), %%edx\n"
		"13: movl %%eax, 16(%3)\n"
		"14: movl %%edx, 20(%3)\n"
		"15: movl 24(%4), %%eax\n"
		"16: movl 28(%4), %%edx\n"
		"17: movl %%eax, 24(%3)\n"
		"18: movl %%edx, 28(%3)\n"
		"19: movl 32(%4), %%eax\n"
		"20: movl 36(%4), %%edx\n"
		"21: movl %%eax, 32(%3)\n"
		"22: movl %%edx, 36(%3)\n"
		"23: movl 40(%4), %%eax\n"
		"24: movl 44(%4), %%edx\n"
		"25: movl %%eax, 40(%3)\n"
		"26: movl %%edx, 44(%3)\n"
		"27: movl 48(%4), %%eax\n"
		"28: movl 52(%4), %%edx\n"
		"29: movl %%eax, 48(%3)\n"
		"30: movl %%edx, 52(%3)\n"
		"31: movl 56(%4), %%eax\n"
		"32: movl 60(%4), %%edx\n"
		"33: movl %%eax, 56(%3)\n"
		"34: movl %%edx, 60(%3)\n"
		" addl $-64, %0\n"
		" addl $64, %4\n"
		" addl $64, %3\n"
		" cmpl $63, %0\n"
		" ja 1b\n"
		"35: movl %0, %%eax\n"
		" shrl $2, %0\n"
		" andl $3, %%eax\n"
		" cld\n"
		"99: rep; movsl\n"
		"36: movl %%eax, %0\n"
		"37: rep; movsb\n"
		"100:\n"
		_ASM_EXTABLE_UA(1b, 100b)
		_ASM_EXTABLE_UA(2b, 100b)
		_ASM_EXTABLE_UA(3b, 100b)
		_ASM_EXTABLE_UA(4b, 100b)
		_ASM_EXTABLE_UA(5b, 100b)
		_ASM_EXTABLE_UA(6b, 100b)
		_ASM_EXTABLE_UA(7b, 100b)
		_ASM_EXTABLE_UA(8b, 100b)
		_ASM_EXTABLE_UA(9b, 100b)
		_ASM_EXTABLE_UA(10b, 100b)
		_ASM_EXTABLE_UA(11b, 100b)
		_ASM_EXTABLE_UA(12b, 100b)
		_ASM_EXTABLE_UA(13b, 100b)
		_ASM_EXTABLE_UA(14b, 100b)
		_ASM_EXTABLE_UA(15b, 100b)
		_ASM_EXTABLE_UA(16b, 100b)
		_ASM_EXTABLE_UA(17b, 100b)
		_ASM_EXTABLE_UA(18b, 100b)
		_ASM_EXTABLE_UA(19b, 100b)
		_ASM_EXTABLE_UA(20b, 100b)
		_ASM_EXTABLE_UA(21b, 100b)
		_ASM_EXTABLE_UA(22b, 100b)
		_ASM_EXTABLE_UA(23b, 100b)
		_ASM_EXTABLE_UA(24b, 100b)
		_ASM_EXTABLE_UA(25b, 100b)
		_ASM_EXTABLE_UA(26b, 100b)
		_ASM_EXTABLE_UA(27b, 100b)
		_ASM_EXTABLE_UA(28b, 100b)
		_ASM_EXTABLE_UA(29b, 100b)
		_ASM_EXTABLE_UA(30b, 100b)
		_ASM_EXTABLE_UA(31b, 100b)
		_ASM_EXTABLE_UA(32b, 100b)
		_ASM_EXTABLE_UA(33b, 100b)
		_ASM_EXTABLE_UA(34b, 100b)
		_ASM_EXTABLE_UA(35b, 100b)
		_ASM_EXTABLE_UA(36b, 100b)
		_ASM_EXTABLE_UA(37b, 100b)
		_ASM_EXTABLE_TYPE_REG(99b, 100b, EX_TYPE_UCOPY_LEN4, %%eax)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}

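/*
 * Same 64-byte unrolled loop, but reading from user space and storing
 * with movnti, whose non-temporal hint keeps the destination from
 * polluting the cache.  movnti stores are weakly ordered, so the
 * sfence after the loop makes them globally visible before the
 * function returns.  Requires SSE2 for movnti.
 */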
static unsigned long __copy_user_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;

	__asm__ __volatile__(
		" .align 2,0x90\n"
		"0: movl 32(%4), %%eax\n"
		" cmpl $67, %0\n"
		" jbe 2f\n"
		"1: movl 64(%4), %%eax\n"
		" .align 2,0x90\n"
		"2: movl 0(%4), %%eax\n"
		"21: movl 4(%4), %%edx\n"
		" movnti %%eax, 0(%3)\n"
		" movnti %%edx, 4(%3)\n"
		"3: movl 8(%4), %%eax\n"
		"31: movl 12(%4),%%edx\n"
		" movnti %%eax, 8(%3)\n"
		" movnti %%edx, 12(%3)\n"
		"4: movl 16(%4), %%eax\n"
		"41: movl 20(%4), %%edx\n"
		" movnti %%eax, 16(%3)\n"
		" movnti %%edx, 20(%3)\n"
		"10: movl 24(%4), %%eax\n"
		"51: movl 28(%4), %%edx\n"
		" movnti %%eax, 24(%3)\n"
		" movnti %%edx, 28(%3)\n"
		"11: movl 32(%4), %%eax\n"
		"61: movl 36(%4), %%edx\n"
		" movnti %%eax, 32(%3)\n"
		" movnti %%edx, 36(%3)\n"
		"12: movl 40(%4), %%eax\n"
		"71: movl 44(%4), %%edx\n"
		" movnti %%eax, 40(%3)\n"
		" movnti %%edx, 44(%3)\n"
		"13: movl 48(%4), %%eax\n"
		"81: movl 52(%4), %%edx\n"
		" movnti %%eax, 48(%3)\n"
		" movnti %%edx, 52(%3)\n"
		"14: movl 56(%4), %%eax\n"
		"91: movl 60(%4), %%edx\n"
		" movnti %%eax, 56(%3)\n"
		" movnti %%edx, 60(%3)\n"
		" addl $-64, %0\n"
		" addl $64, %4\n"
		" addl $64, %3\n"
		" cmpl $63, %0\n"
		" ja 0b\n"
		" sfence \n"
		"5: movl %0, %%eax\n"
		" shrl $2, %0\n"
		" andl $3, %%eax\n"
		" cld\n"
		"6: rep; movsl\n"
		" movl %%eax,%0\n"
		"7: rep; movsb\n"
		"8:\n"
		_ASM_EXTABLE_UA(0b, 8b)
		_ASM_EXTABLE_UA(1b, 8b)
		_ASM_EXTABLE_UA(2b, 8b)
		_ASM_EXTABLE_UA(21b, 8b)
		_ASM_EXTABLE_UA(3b, 8b)
		_ASM_EXTABLE_UA(31b, 8b)
		_ASM_EXTABLE_UA(4b, 8b)
		_ASM_EXTABLE_UA(41b, 8b)
		_ASM_EXTABLE_UA(10b, 8b)
		_ASM_EXTABLE_UA(51b, 8b)
		_ASM_EXTABLE_UA(11b, 8b)
		_ASM_EXTABLE_UA(61b, 8b)
		_ASM_EXTABLE_UA(12b, 8b)
		_ASM_EXTABLE_UA(71b, 8b)
		_ASM_EXTABLE_UA(13b, 8b)
		_ASM_EXTABLE_UA(81b, 8b)
		_ASM_EXTABLE_UA(14b, 8b)
		_ASM_EXTABLE_UA(91b, 8b)
		_ASM_EXTABLE_TYPE_REG(6b, 8b, EX_TYPE_UCOPY_LEN4, %%eax)
		_ASM_EXTABLE_UA(7b, 8b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}

#else

/*
 * Leave these declared but undefined.  There should not be any
 * references to them.
 */
unsigned long __copy_user_intel(void __user *to, const void *from,
					unsigned long size);
#endif

/* Generic arbitrary sized copy.  */
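/*
 * Copies of up to 7 bytes go straight to the byte copy at label 1.
 * Larger copies first move single bytes until the destination is
 * 8-byte aligned, then move the bulk with rep movsl and the final
 * size & 3 bytes with rep movsb.
 */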
#define __copy_user(to, from, size)				\
do {								\
	int __d0, __d1, __d2;					\
	__asm__ __volatile__(					\
		" cmp $7,%0\n"					\
		" jbe 1f\n"					\
		" movl %1,%0\n"					\
		" negl %0\n"					\
		" andl $7,%0\n"					\
		" subl %0,%3\n"					\
		"4: rep; movsb\n"				\
		" movl %3,%0\n"					\
		" shrl $2,%0\n"					\
		" andl $3,%3\n"					\
		" .align 2,0x90\n"				\
		"0: rep; movsl\n"				\
		" movl %3,%0\n"					\
		"1: rep; movsb\n"				\
		"2:\n"						\
		_ASM_EXTABLE_TYPE_REG(4b, 2b, EX_TYPE_UCOPY_LEN1, %3) \
		_ASM_EXTABLE_TYPE_REG(0b, 2b, EX_TYPE_UCOPY_LEN4, %3) \
		_ASM_EXTABLE_UA(1b, 2b)				\
		: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \
		: "3"(size), "0"(size), "1"(to), "2"(from)	\
		: "memory");					\
} while (0)

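/*
 * Backend for the raw copy_{to,from}_user() paths: use plain
 * "rep movs" via __copy_user() when the alignment check says it is
 * profitable, otherwise fall back to the unrolled
 * __copy_user_intel().  The nospec begin/end pair brackets the user
 * access with STAC/CLAC and a speculation barrier.
 */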
unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
{
	__uaccess_begin_nospec();
	if (movsl_is_ok(to, from, n))
		__copy_user(to, from, n);
	else
		n = __copy_user_intel(to, from, n);
	__uaccess_end();
	return n;
}
EXPORT_SYMBOL(__copy_user_ll);

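/*
 * Cache-avoiding copy from user space: the non-temporal variant only
 * pays off for copies larger than 64 bytes, and needs SSE2 for
 * movnti; anything else takes the generic __copy_user() path.
 */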
unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
					unsigned long n)
{
	__uaccess_begin_nospec();
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
		n = __copy_user_intel_nocache(to, from, n);
	else
		__copy_user(to, from, n);
#else
	__copy_user(to, from, n);
#endif
	__uaccess_end();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);