1 | /* SPDX-License-Identifier: GPL-2.0 */ |
2 | /* |
3 | * __get_user functions. |
4 | * |
5 | * (C) Copyright 1998 Linus Torvalds |
6 | * (C) Copyright 2005 Andi Kleen |
7 | * (C) Copyright 2008 Glauber Costa |
8 | * |
9 | * These functions have a non-standard call interface |
10 | * to make them more efficient, especially as they |
11 | * return an error value in addition to the "real" |
12 | * return value. |
13 | */ |
14 | |
15 | /* |
16 | * __get_user_X |
17 | * |
18 | * Inputs: %[r|e]ax contains the address. |
19 | * |
20 | * Outputs: %[r|e]ax is error code (0 or -EFAULT) |
21 | * %[r|e]dx contains zero-extended value |
22 | * %ecx contains the high half for 32-bit __get_user_8 |
23 | * |
24 | * |
25 | * These functions should not modify any other registers, |
26 | * as they get called from within inline assembly. |
27 | */ |
28 | |
29 | #include <linux/export.h> |
30 | #include <linux/linkage.h> |
31 | #include <asm/page_types.h> |
32 | #include <asm/errno.h> |
33 | #include <asm/asm-offsets.h> |
34 | #include <asm/thread_info.h> |
35 | #include <asm/asm.h> |
36 | #include <asm/smap.h> |
37 | |
/*
 * Speculation barrier for the "nocheck" paths: patched by the
 * ALTERNATIVE mechanism to an LFENCE when X86_FEATURE_LFENCE_RDTSC is
 * set (i.e. LFENCE acts as a speculation barrier there), and to
 * nothing otherwise.
 */
#define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC

/*
 * check_range \size: sanitize the user pointer in %[r|e]ax before it
 * is dereferenced.  Clobbers %[r|e]dx (and %eflags).
 *
 * 64-bit: branchless.  Broadcast the sign bit of the address across
 * %rdx (sar $63) and OR it into %rax: a user-half address (bit 63
 * clear) is left unchanged, while a kernel-half address becomes
 * all-ones, which faults when dereferenced.  No conditional branch
 * means no mispredicted-branch speculation window.
 *
 * 32-bit: explicit limit check against the address of the access's
 * last byte (hence TASK_SIZE_MAX - \size + 1), branching to the shared
 * fault exit on failure, followed by the sbb/and masking sequence that
 * clamps the pointer to 0 if the cmp is misspeculated past.
 */
.macro check_range size:req
.if IS_ENABLED(CONFIG_X86_64)
mov %rax, %rdx
sar $63, %rdx
or %rdx, %rax
.else
cmp $TASK_SIZE_MAX-\size+1, %eax
jae .Lbad_get_user
sbb %edx, %edx /* array_index_mask_nospec() */
and %edx, %eax
.endif
.endm

.text
/*
 * __get_user_1 - fetch one byte from user space, with range check.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 or -EFAULT, %edx = zero-extended byte
 * A fault at label 1: is routed to __get_user_handle_exception by the
 * _ASM_EXTABLE entry at the end of this file.
 */
SYM_FUNC_START(__get_user_1)
check_range size=1
ASM_STAC /* open SMAP window for the user access */
1: movzbl (%_ASM_AX),%edx
xor %eax,%eax /* success */
ASM_CLAC
RET
SYM_FUNC_END(__get_user_1)
EXPORT_SYMBOL(__get_user_1)
63 | |
/*
 * __get_user_2 - fetch a 16-bit word from user space, with range check.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 or -EFAULT, %edx = zero-extended word
 * A fault at label 2: is routed to __get_user_handle_exception by the
 * _ASM_EXTABLE entry at the end of this file.
 */
SYM_FUNC_START(__get_user_2)
check_range size=2
ASM_STAC /* open SMAP window for the user access */
2: movzwl (%_ASM_AX),%edx
xor %eax,%eax /* success */
ASM_CLAC
RET
SYM_FUNC_END(__get_user_2)
EXPORT_SYMBOL(__get_user_2)
73 | |
/*
 * __get_user_4 - fetch a 32-bit dword from user space, with range check.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 or -EFAULT, %edx = value (movl zero-extends on 64-bit)
 * A fault at label 3: is routed to __get_user_handle_exception by the
 * _ASM_EXTABLE entry at the end of this file.
 */
SYM_FUNC_START(__get_user_4)
check_range size=4
ASM_STAC /* open SMAP window for the user access */
3: movl (%_ASM_AX),%edx
xor %eax,%eax /* success */
ASM_CLAC
RET
SYM_FUNC_END(__get_user_4)
EXPORT_SYMBOL(__get_user_4)
83 | |
/*
 * __get_user_8 - fetch a 64-bit qword from user space, with range check.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 or -EFAULT
 *      64-bit: %rdx = value
 *      32-bit: %edx = low half, %ecx = high half
 * Faults at labels 4:/5: are routed to the appropriate handler by the
 * _ASM_EXTABLE entries at the end of this file.
 */
SYM_FUNC_START(__get_user_8)
#ifndef CONFIG_X86_64
xor %ecx,%ecx /* zero high half up front: a failed check_range jumps
               * to .Lbad_get_user, which clears only %edx, so %ecx
               * would otherwise leak stale register contents to the
               * caller on -EFAULT.  (The fault path is fine: the
               * 32-bit extable entries go to
               * __get_user_8_handle_exception, which clears %ecx.) */
#endif
check_range size=8
ASM_STAC /* open SMAP window for the user access */
#ifdef CONFIG_X86_64
4: movq (%_ASM_AX),%rdx
#else
4: movl (%_ASM_AX),%edx
5: movl 4(%_ASM_AX),%ecx
#endif
xor %eax,%eax /* success */
ASM_CLAC
RET
SYM_FUNC_END(__get_user_8)
EXPORT_SYMBOL(__get_user_8)
98 | |
99 | /* .. and the same for __get_user, just without the range checks */ |
/*
 * __get_user_nocheck_1 - fetch one byte, no range check (see the
 * comment above: callers are expected to have validated the address).
 * ASM_BARRIER_NOSPEC stands in for check_range's speculation masking.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 or -EFAULT, %edx = zero-extended byte
 * A fault at label 6: is routed to __get_user_handle_exception.
 */
SYM_FUNC_START(__get_user_nocheck_1)
ASM_STAC /* open SMAP window for the user access */
ASM_BARRIER_NOSPEC
6: movzbl (%_ASM_AX),%edx
xor %eax,%eax /* success */
ASM_CLAC
RET
SYM_FUNC_END(__get_user_nocheck_1)
EXPORT_SYMBOL(__get_user_nocheck_1)
109 | |
/*
 * __get_user_nocheck_2 - fetch a 16-bit word, no range check (callers
 * validate the address; ASM_BARRIER_NOSPEC is the speculation barrier).
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 or -EFAULT, %edx = zero-extended word
 * A fault at label 7: is routed to __get_user_handle_exception.
 */
SYM_FUNC_START(__get_user_nocheck_2)
ASM_STAC /* open SMAP window for the user access */
ASM_BARRIER_NOSPEC
7: movzwl (%_ASM_AX),%edx
xor %eax,%eax /* success */
ASM_CLAC
RET
SYM_FUNC_END(__get_user_nocheck_2)
EXPORT_SYMBOL(__get_user_nocheck_2)
119 | |
/*
 * __get_user_nocheck_4 - fetch a 32-bit dword, no range check (callers
 * validate the address; ASM_BARRIER_NOSPEC is the speculation barrier).
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 or -EFAULT, %edx = value (movl zero-extends on 64-bit)
 * A fault at label 8: is routed to __get_user_handle_exception.
 */
SYM_FUNC_START(__get_user_nocheck_4)
ASM_STAC /* open SMAP window for the user access */
ASM_BARRIER_NOSPEC
8: movl (%_ASM_AX),%edx
xor %eax,%eax /* success */
ASM_CLAC
RET
SYM_FUNC_END(__get_user_nocheck_4)
EXPORT_SYMBOL(__get_user_nocheck_4)
129 | |
/*
 * __get_user_nocheck_8 - fetch a 64-bit qword, no range check (callers
 * validate the address; ASM_BARRIER_NOSPEC is the speculation barrier).
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 or -EFAULT
 *      64-bit: %rdx = value
 *      32-bit: %edx = low half, %ecx = high half
 * Faults at labels 9:/10: are routed to the appropriate handler;
 * on 32-bit, __get_user_8_handle_exception also clears %ecx.
 */
SYM_FUNC_START(__get_user_nocheck_8)
ASM_STAC /* open SMAP window for the user access */
ASM_BARRIER_NOSPEC
#ifdef CONFIG_X86_64
9: movq (%_ASM_AX),%rdx
#else
9: movl (%_ASM_AX),%edx
10: movl 4(%_ASM_AX),%ecx
#endif
xor %eax,%eax /* success */
ASM_CLAC
RET
SYM_FUNC_END(__get_user_nocheck_8)
EXPORT_SYMBOL(__get_user_nocheck_8)
144 | |
145 | |
/*
 * Common failure exit: returns -EFAULT with the value register zeroed.
 * Reached two ways:
 *  - via the exception tables below, after a faulting access (STAC was
 *    already executed, so the entry point runs ASM_CLAC first);
 *  - directly at .Lbad_get_user from check_range's 32-bit limit check,
 *    which happens before ASM_STAC and therefore skips the CLAC.
 */
SYM_CODE_START_LOCAL(__get_user_handle_exception)
ASM_CLAC
.Lbad_get_user:
xor %edx,%edx /* don't leak a partial/stale value */
mov $(-EFAULT),%_ASM_AX
RET
SYM_CODE_END(__get_user_handle_exception)
153 | |
#ifdef CONFIG_X86_32
/*
 * 32-bit __get_user_8 failure exit: like __get_user_handle_exception,
 * but also clears %ecx, which carries the high half of the 64-bit
 * result on 32-bit.  Reached via the 32-bit extable entries for
 * labels 4:/5: and 9:/10:.
 * NOTE(review): nothing in this file branches to bad_get_user_8 —
 * the label appears to be kept for historical/external reference;
 * confirm before relying on it.
 */
SYM_CODE_START_LOCAL(__get_user_8_handle_exception)
ASM_CLAC /* fault path: close the SMAP window opened by ASM_STAC */
bad_get_user_8:
xor %edx,%edx /* clear low half */
xor %ecx,%ecx /* clear high half */
mov $(-EFAULT),%_ASM_AX
RET
SYM_CODE_END(__get_user_8_handle_exception)
#endif
164 | |
/*
 * Exception-table entries: map a fault at each user-access instruction
 * (numeric labels 1: through 10: above) to its recovery handler.  On
 * 32-bit, the 8-byte accesses use the handler that also clears %ecx.
 */
/* get_user */
_ASM_EXTABLE(1b, __get_user_handle_exception)
_ASM_EXTABLE(2b, __get_user_handle_exception)
_ASM_EXTABLE(3b, __get_user_handle_exception)
#ifdef CONFIG_X86_64
_ASM_EXTABLE(4b, __get_user_handle_exception)
#else
_ASM_EXTABLE(4b, __get_user_8_handle_exception)
_ASM_EXTABLE(5b, __get_user_8_handle_exception)
#endif

/* __get_user */
_ASM_EXTABLE(6b, __get_user_handle_exception)
_ASM_EXTABLE(7b, __get_user_handle_exception)
_ASM_EXTABLE(8b, __get_user_handle_exception)
#ifdef CONFIG_X86_64
_ASM_EXTABLE(9b, __get_user_handle_exception)
#else
_ASM_EXTABLE(9b, __get_user_8_handle_exception)
_ASM_EXTABLE(10b, __get_user_8_handle_exception)
#endif
186 | |