/* NOTE(review): web-browser navigation chrome ("Back to home page", "OSCL-LXR")
 * removed — it was paste residue from a code-browser page, not file content. */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * __get_user functions.
 *
 * (C) Copyright 1998 Linus Torvalds
 * (C) Copyright 2005 Andi Kleen
 * (C) Copyright 2008 Glauber Costa
 *
 * These functions have a non-standard call interface
 * to make them more efficient, especially as they
 * return an error value in addition to the "real"
 * return value.
 */

/*
 * __get_user_X
 *
 * Inputs:  %[r|e]ax contains the address.
 *
 * Outputs: %[r|e]ax is error code (0 or -EFAULT)
 *          %[r|e]dx contains zero-extended value
 *          %ecx contains the high half for 32-bit __get_user_8
 *
 * These functions should not modify any other registers,
 * as they get called from within inline assembly.
 */

0029 #include <linux/linkage.h>
0030 #include <asm/page_types.h>
0031 #include <asm/errno.h>
0032 #include <asm/asm-offsets.h>
0033 #include <asm/thread_info.h>
0034 #include <asm/asm.h>
0035 #include <asm/smap.h>
0036 #include <asm/export.h>
0037 
0038 #define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC
0039 
0040 #ifdef CONFIG_X86_5LEVEL
0041 #define LOAD_TASK_SIZE_MINUS_N(n) \
0042     ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
0043             __stringify(mov $((1 << 56) - 4096 - (n)),%rdx), X86_FEATURE_LA57
0044 #else
0045 #define LOAD_TASK_SIZE_MINUS_N(n) \
0046     mov $(TASK_SIZE_MAX - (n)),%_ASM_DX
0047 #endif
0048 
0049     .text
0050 SYM_FUNC_START(__get_user_1)
0051     LOAD_TASK_SIZE_MINUS_N(0)
0052     cmp %_ASM_DX,%_ASM_AX
0053     jae bad_get_user
0054     sbb %_ASM_DX, %_ASM_DX      /* array_index_mask_nospec() */
0055     and %_ASM_DX, %_ASM_AX
0056     ASM_STAC
0057 1:  movzbl (%_ASM_AX),%edx
0058     xor %eax,%eax
0059     ASM_CLAC
0060     RET
0061 SYM_FUNC_END(__get_user_1)
0062 EXPORT_SYMBOL(__get_user_1)
0063 
0064 SYM_FUNC_START(__get_user_2)
0065     LOAD_TASK_SIZE_MINUS_N(1)
0066     cmp %_ASM_DX,%_ASM_AX
0067     jae bad_get_user
0068     sbb %_ASM_DX, %_ASM_DX      /* array_index_mask_nospec() */
0069     and %_ASM_DX, %_ASM_AX
0070     ASM_STAC
0071 2:  movzwl (%_ASM_AX),%edx
0072     xor %eax,%eax
0073     ASM_CLAC
0074     RET
0075 SYM_FUNC_END(__get_user_2)
0076 EXPORT_SYMBOL(__get_user_2)
0077 
0078 SYM_FUNC_START(__get_user_4)
0079     LOAD_TASK_SIZE_MINUS_N(3)
0080     cmp %_ASM_DX,%_ASM_AX
0081     jae bad_get_user
0082     sbb %_ASM_DX, %_ASM_DX      /* array_index_mask_nospec() */
0083     and %_ASM_DX, %_ASM_AX
0084     ASM_STAC
0085 3:  movl (%_ASM_AX),%edx
0086     xor %eax,%eax
0087     ASM_CLAC
0088     RET
0089 SYM_FUNC_END(__get_user_4)
0090 EXPORT_SYMBOL(__get_user_4)
0091 
0092 SYM_FUNC_START(__get_user_8)
0093 #ifdef CONFIG_X86_64
0094     LOAD_TASK_SIZE_MINUS_N(7)
0095     cmp %_ASM_DX,%_ASM_AX
0096     jae bad_get_user
0097     sbb %_ASM_DX, %_ASM_DX      /* array_index_mask_nospec() */
0098     and %_ASM_DX, %_ASM_AX
0099     ASM_STAC
0100 4:  movq (%_ASM_AX),%rdx
0101     xor %eax,%eax
0102     ASM_CLAC
0103     RET
0104 #else
0105     LOAD_TASK_SIZE_MINUS_N(7)
0106     cmp %_ASM_DX,%_ASM_AX
0107     jae bad_get_user_8
0108     sbb %_ASM_DX, %_ASM_DX      /* array_index_mask_nospec() */
0109     and %_ASM_DX, %_ASM_AX
0110     ASM_STAC
0111 4:  movl (%_ASM_AX),%edx
0112 5:  movl 4(%_ASM_AX),%ecx
0113     xor %eax,%eax
0114     ASM_CLAC
0115     RET
0116 #endif
0117 SYM_FUNC_END(__get_user_8)
0118 EXPORT_SYMBOL(__get_user_8)
0119 
0120 /* .. and the same for __get_user, just without the range checks */
0121 SYM_FUNC_START(__get_user_nocheck_1)
0122     ASM_STAC
0123     ASM_BARRIER_NOSPEC
0124 6:  movzbl (%_ASM_AX),%edx
0125     xor %eax,%eax
0126     ASM_CLAC
0127     RET
0128 SYM_FUNC_END(__get_user_nocheck_1)
0129 EXPORT_SYMBOL(__get_user_nocheck_1)
0130 
0131 SYM_FUNC_START(__get_user_nocheck_2)
0132     ASM_STAC
0133     ASM_BARRIER_NOSPEC
0134 7:  movzwl (%_ASM_AX),%edx
0135     xor %eax,%eax
0136     ASM_CLAC
0137     RET
0138 SYM_FUNC_END(__get_user_nocheck_2)
0139 EXPORT_SYMBOL(__get_user_nocheck_2)
0140 
0141 SYM_FUNC_START(__get_user_nocheck_4)
0142     ASM_STAC
0143     ASM_BARRIER_NOSPEC
0144 8:  movl (%_ASM_AX),%edx
0145     xor %eax,%eax
0146     ASM_CLAC
0147     RET
0148 SYM_FUNC_END(__get_user_nocheck_4)
0149 EXPORT_SYMBOL(__get_user_nocheck_4)
0150 
0151 SYM_FUNC_START(__get_user_nocheck_8)
0152     ASM_STAC
0153     ASM_BARRIER_NOSPEC
0154 #ifdef CONFIG_X86_64
0155 9:  movq (%_ASM_AX),%rdx
0156 #else
0157 9:  movl (%_ASM_AX),%edx
0158 10: movl 4(%_ASM_AX),%ecx
0159 #endif
0160     xor %eax,%eax
0161     ASM_CLAC
0162     RET
0163 SYM_FUNC_END(__get_user_nocheck_8)
0164 EXPORT_SYMBOL(__get_user_nocheck_8)
0165 
0166 
0167 SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
0168     ASM_CLAC
0169 bad_get_user:
0170     xor %edx,%edx
0171     mov $(-EFAULT),%_ASM_AX
0172     RET
0173 SYM_CODE_END(.Lbad_get_user_clac)
0174 
0175 #ifdef CONFIG_X86_32
0176 SYM_CODE_START_LOCAL(.Lbad_get_user_8_clac)
0177     ASM_CLAC
0178 bad_get_user_8:
0179     xor %edx,%edx
0180     xor %ecx,%ecx
0181     mov $(-EFAULT),%_ASM_AX
0182     RET
0183 SYM_CODE_END(.Lbad_get_user_8_clac)
0184 #endif
0185 
0186 /* get_user */
0187     _ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
0188     _ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
0189     _ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
0190 #ifdef CONFIG_X86_64
0191     _ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
0192 #else
0193     _ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
0194     _ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
0195 #endif
0196 
0197 /* __get_user */
0198     _ASM_EXTABLE_UA(6b, .Lbad_get_user_clac)
0199     _ASM_EXTABLE_UA(7b, .Lbad_get_user_clac)
0200     _ASM_EXTABLE_UA(8b, .Lbad_get_user_clac)
0201 #ifdef CONFIG_X86_64
0202     _ASM_EXTABLE_UA(9b, .Lbad_get_user_clac)
0203 #else
0204     _ASM_EXTABLE_UA(9b, .Lbad_get_user_8_clac)
0205     _ASM_EXTABLE_UA(10b, .Lbad_get_user_8_clac)
0206 #endif