/* Excerpts from arch/x86/lib/usercopy_32.c (x86-32 user-copy helpers) */

// SPDX-License-Identifier: GPL-2.0
/*
 * The non-inlined parts of asm-i386/uaccess.h are here.
 */
#include <linux/export.h>
#include <linux/uaccess.h>
#include <asm/asm.h>
/* movsl is fine unless a large copy's two addresses disagree under movsl_mask */
static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
	if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
		return 0;
	return 1;
}

#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
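The predicate only rejects rep; movsl for copies of 64 bytes or more whose two addresses disagree under the CPU-tuned movsl_mask. A minimal user-space sketch of the same test, assuming a mask of 7 (the kernel derives the real value per CPU model):

#include <stdio.h>

/* Hypothetical user-space model of __movsl_is_ok(); the real
 * movsl_mask.mask is tuned per CPU, 7 here is an assumed value. */
static const unsigned long demo_mask = 7;

static int demo_movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
	/* Small copies always qualify; large ones need matching low bits. */
	if (n >= 64 && ((a1 ^ a2) & demo_mask))
		return 0;
	return 1;
}

int main(void)
{
	printf("%d\n", demo_movsl_is_ok(0x1000, 0x2000, 256)); /* 1: aligned pair */
	printf("%d\n", demo_movsl_is_ok(0x1000, 0x2003, 256)); /* 0: misaligned */
	printf("%d\n", demo_movsl_is_ok(0x1000, 0x2003, 32));  /* 1: small copy */
	return 0;
}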
#define __do_clear_user(addr, size)					\
do {									\
	int __d0;							\
	might_fault();							\
	__asm__ __volatile__(						\
		ASM_STAC "\n"						\
		"0:	rep; stosl\n"					\
		"	movl %2,%0\n"					\
		"1:	rep; stosb\n"					\
		"2: " ASM_CLAC "\n"					\
		".section .fixup,\"ax\"\n"				\
		"3:	lea 0(%2,%0,4),%0\n"				\
		"	jmp 2b\n"					\
		".previous\n"						\
		_ASM_EXTABLE_UA(0b, 3b)					\
		_ASM_EXTABLE_UA(1b, 2b)					\
		: "=&c"(size), "=&D" (__d0)				\
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));	\
} while (0)
/**
 * clear_user - Zero a block of memory in user space.
 * @to: Destination address, in user space.
 * @n:  Number of bytes to zero.
 *
 * Return: number of bytes that could not be cleared; zero on success.
 */
unsigned long clear_user(void __user *to, unsigned long n)
{
	might_fault();
	if (access_ok(to, n))
		__do_clear_user(to, n);
	return n;
}
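A non-zero return means part of the range faulted. A hypothetical caller (not from this file) that treats any shortfall as -EFAULT:

#include <linux/fs.h>
#include <linux/uaccess.h>

/* Hypothetical read() handler that zero-fills the user buffer. */
static ssize_t zeros_read(struct file *file, char __user *buf,
			  size_t count, loff_t *ppos)
{
	if (clear_user(buf, count))
		return -EFAULT;	/* some bytes could not be written */
	*ppos += count;
	return count;
}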
/**
 * __clear_user - Zero a block of memory in user space, with less checking.
 * @to: Destination address, in user space (already access_ok()-checked).
 * @n:  Number of bytes to zero.
 */
unsigned long __clear_user(void __user *to, unsigned long n)
{
	__do_clear_user(to, n);
	return n;
}
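Since __clear_user() skips the access_ok() range check, the caller must validate the range itself. A hypothetical sketch of that pairing (not from this file):

#include <linux/uaccess.h>

/* Hypothetical helper: one access_ok() check up front, then the cheaper
 * __clear_user() for two sub-ranges of the already-validated buffer. */
static int zero_head_and_tail(void __user *p, unsigned long len)
{
	if (len < 32 || !access_ok(p, len))
		return -EFAULT;
	if (__clear_user(p, 16))		/* first 16 bytes */
		return -EFAULT;
	if (__clear_user(p + len - 16, 16))	/* last 16 bytes */
		return -EFAULT;
	return 0;
}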
101 " .align 2,0x90\n" in __copy_user_intel()
102 "1: movl 32(%4), %%eax\n" in __copy_user_intel()
103 " cmpl $67, %0\n" in __copy_user_intel()
104 " jbe 3f\n" in __copy_user_intel()
105 "2: movl 64(%4), %%eax\n" in __copy_user_intel()
106 " .align 2,0x90\n" in __copy_user_intel()
107 "3: movl 0(%4), %%eax\n" in __copy_user_intel()
108 "4: movl 4(%4), %%edx\n" in __copy_user_intel()
109 "5: movl %%eax, 0(%3)\n" in __copy_user_intel()
110 "6: movl %%edx, 4(%3)\n" in __copy_user_intel()
111 "7: movl 8(%4), %%eax\n" in __copy_user_intel()
112 "8: movl 12(%4),%%edx\n" in __copy_user_intel()
113 "9: movl %%eax, 8(%3)\n" in __copy_user_intel()
114 "10: movl %%edx, 12(%3)\n" in __copy_user_intel()
115 "11: movl 16(%4), %%eax\n" in __copy_user_intel()
116 "12: movl 20(%4), %%edx\n" in __copy_user_intel()
117 "13: movl %%eax, 16(%3)\n" in __copy_user_intel()
118 "14: movl %%edx, 20(%3)\n" in __copy_user_intel()
119 "15: movl 24(%4), %%eax\n" in __copy_user_intel()
120 "16: movl 28(%4), %%edx\n" in __copy_user_intel()
121 "17: movl %%eax, 24(%3)\n" in __copy_user_intel()
122 "18: movl %%edx, 28(%3)\n" in __copy_user_intel()
123 "19: movl 32(%4), %%eax\n" in __copy_user_intel()
124 "20: movl 36(%4), %%edx\n" in __copy_user_intel()
125 "21: movl %%eax, 32(%3)\n" in __copy_user_intel()
126 "22: movl %%edx, 36(%3)\n" in __copy_user_intel()
127 "23: movl 40(%4), %%eax\n" in __copy_user_intel()
128 "24: movl 44(%4), %%edx\n" in __copy_user_intel()
129 "25: movl %%eax, 40(%3)\n" in __copy_user_intel()
130 "26: movl %%edx, 44(%3)\n" in __copy_user_intel()
131 "27: movl 48(%4), %%eax\n" in __copy_user_intel()
132 "28: movl 52(%4), %%edx\n" in __copy_user_intel()
133 "29: movl %%eax, 48(%3)\n" in __copy_user_intel()
134 "30: movl %%edx, 52(%3)\n" in __copy_user_intel()
135 "31: movl 56(%4), %%eax\n" in __copy_user_intel()
136 "32: movl 60(%4), %%edx\n" in __copy_user_intel()
137 "33: movl %%eax, 56(%3)\n" in __copy_user_intel()
138 "34: movl %%edx, 60(%3)\n" in __copy_user_intel()
139 " addl $-64, %0\n" in __copy_user_intel()
140 " addl $64, %4\n" in __copy_user_intel()
141 " addl $64, %3\n" in __copy_user_intel()
142 " cmpl $63, %0\n" in __copy_user_intel()
143 " ja 1b\n" in __copy_user_intel()
144 "35: movl %0, %%eax\n" in __copy_user_intel()
145 " shrl $2, %0\n" in __copy_user_intel()
146 " andl $3, %%eax\n" in __copy_user_intel()
147 " cld\n" in __copy_user_intel()
148 "99: rep; movsl\n" in __copy_user_intel()
149 "36: movl %%eax, %0\n" in __copy_user_intel()
150 "37: rep; movsb\n" in __copy_user_intel()
151 "100:\n" in __copy_user_intel()
152 ".section .fixup,\"ax\"\n" in __copy_user_intel()
153 "101: lea 0(%%eax,%0,4),%0\n" in __copy_user_intel()
154 " jmp 100b\n" in __copy_user_intel()
155 ".previous\n" in __copy_user_intel()
158 _ASM_EXTABLE_UA(3b, 100b) in __copy_user_intel()
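Structurally the asm is an unrolled copy of one 64-byte block per iteration (the loads at labels 1 and 2, 32 and 64 bytes ahead, act as software prefetch), followed by rep movsl/movsb for the tail. A plain C model of that control flow, minus the prefetch and fault handling (hypothetical user-space code):

#include <stdint.h>
#include <string.h>

/* Hypothetical user-space model of __copy_user_intel's shape: move one
 * 64-byte block (sixteen 32-bit words) per iteration while more than 63
 * bytes remain, then mop up the tail. */
static size_t copy_unrolled(void *to, const void *from, size_t size)
{
	uint32_t *d = to;
	const uint32_t *s = from;

	while (size > 63) {
		for (int i = 0; i < 16; i++)	/* 16 * 4 = 64 bytes */
			d[i] = s[i];
		d += 16;
		s += 16;
		size -= 64;
	}
	memcpy(d, s, size);	/* stands in for rep movsl + rep movsb */
	return 0;		/* like the kernel: bytes left uncopied */
}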
206 " .align 2,0x90\n" in __copy_user_intel_nocache()
207 "0: movl 32(%4), %%eax\n" in __copy_user_intel_nocache()
208 " cmpl $67, %0\n" in __copy_user_intel_nocache()
209 " jbe 2f\n" in __copy_user_intel_nocache()
210 "1: movl 64(%4), %%eax\n" in __copy_user_intel_nocache()
211 " .align 2,0x90\n" in __copy_user_intel_nocache()
212 "2: movl 0(%4), %%eax\n" in __copy_user_intel_nocache()
213 "21: movl 4(%4), %%edx\n" in __copy_user_intel_nocache()
214 " movnti %%eax, 0(%3)\n" in __copy_user_intel_nocache()
215 " movnti %%edx, 4(%3)\n" in __copy_user_intel_nocache()
216 "3: movl 8(%4), %%eax\n" in __copy_user_intel_nocache()
217 "31: movl 12(%4),%%edx\n" in __copy_user_intel_nocache()
218 " movnti %%eax, 8(%3)\n" in __copy_user_intel_nocache()
219 " movnti %%edx, 12(%3)\n" in __copy_user_intel_nocache()
220 "4: movl 16(%4), %%eax\n" in __copy_user_intel_nocache()
221 "41: movl 20(%4), %%edx\n" in __copy_user_intel_nocache()
222 " movnti %%eax, 16(%3)\n" in __copy_user_intel_nocache()
223 " movnti %%edx, 20(%3)\n" in __copy_user_intel_nocache()
224 "10: movl 24(%4), %%eax\n" in __copy_user_intel_nocache()
225 "51: movl 28(%4), %%edx\n" in __copy_user_intel_nocache()
226 " movnti %%eax, 24(%3)\n" in __copy_user_intel_nocache()
227 " movnti %%edx, 28(%3)\n" in __copy_user_intel_nocache()
228 "11: movl 32(%4), %%eax\n" in __copy_user_intel_nocache()
229 "61: movl 36(%4), %%edx\n" in __copy_user_intel_nocache()
230 " movnti %%eax, 32(%3)\n" in __copy_user_intel_nocache()
231 " movnti %%edx, 36(%3)\n" in __copy_user_intel_nocache()
232 "12: movl 40(%4), %%eax\n" in __copy_user_intel_nocache()
233 "71: movl 44(%4), %%edx\n" in __copy_user_intel_nocache()
234 " movnti %%eax, 40(%3)\n" in __copy_user_intel_nocache()
235 " movnti %%edx, 44(%3)\n" in __copy_user_intel_nocache()
236 "13: movl 48(%4), %%eax\n" in __copy_user_intel_nocache()
237 "81: movl 52(%4), %%edx\n" in __copy_user_intel_nocache()
238 " movnti %%eax, 48(%3)\n" in __copy_user_intel_nocache()
239 " movnti %%edx, 52(%3)\n" in __copy_user_intel_nocache()
240 "14: movl 56(%4), %%eax\n" in __copy_user_intel_nocache()
241 "91: movl 60(%4), %%edx\n" in __copy_user_intel_nocache()
242 " movnti %%eax, 56(%3)\n" in __copy_user_intel_nocache()
243 " movnti %%edx, 60(%3)\n" in __copy_user_intel_nocache()
244 " addl $-64, %0\n" in __copy_user_intel_nocache()
245 " addl $64, %4\n" in __copy_user_intel_nocache()
246 " addl $64, %3\n" in __copy_user_intel_nocache()
247 " cmpl $63, %0\n" in __copy_user_intel_nocache()
248 " ja 0b\n" in __copy_user_intel_nocache()
249 " sfence \n" in __copy_user_intel_nocache()
250 "5: movl %0, %%eax\n" in __copy_user_intel_nocache()
251 " shrl $2, %0\n" in __copy_user_intel_nocache()
252 " andl $3, %%eax\n" in __copy_user_intel_nocache()
253 " cld\n" in __copy_user_intel_nocache()
254 "6: rep; movsl\n" in __copy_user_intel_nocache()
255 " movl %%eax,%0\n" in __copy_user_intel_nocache()
256 "7: rep; movsb\n" in __copy_user_intel_nocache()
257 "8:\n" in __copy_user_intel_nocache()
258 ".section .fixup,\"ax\"\n" in __copy_user_intel_nocache()
259 "9: lea 0(%%eax,%0,4),%0\n" in __copy_user_intel_nocache()
260 "16: jmp 8b\n" in __copy_user_intel_nocache()
261 ".previous\n" in __copy_user_intel_nocache()
266 _ASM_EXTABLE_UA(3b, 16b) in __copy_user_intel_nocache()
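The movnti stores bypass the cache, which is why the loop ends with sfence: the non-temporal stores must be ordered before anything that follows relies on the data. A user-space sketch of the same pattern using SSE2 intrinsics (an illustration, not the kernel's code):

#include <emmintrin.h>	/* _mm_stream_si32(), _mm_sfence() */
#include <string.h>

/* Hypothetical user-space copy in the spirit of __copy_user_intel_nocache:
 * non-temporal 32-bit stores that bypass the cache, fenced before the
 * (cached) byte tail is written. */
static void copy_nocache(void *to, const void *from, size_t size)
{
	int *d = to;
	const int *s = from;

	while (size >= 4) {
		_mm_stream_si32(d++, *s++);	/* movnti */
		size -= 4;
	}
	_mm_sfence();	/* order the NT stores, as the asm's sfence does */
	memcpy(d, s, size);
}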
303 " cmp $7,%0\n" \
304 " jbe 1f\n" \
305 " movl %1,%0\n" \
306 " negl %0\n" \
307 " andl $7,%0\n" \
308 " subl %0,%3\n" \
309 "4: rep; movsb\n" \
310 " movl %3,%0\n" \
311 " shrl $2,%0\n" \
312 " andl $3,%3\n" \
313 " .align 2,0x90\n" \
314 "0: rep; movsl\n" \
315 " movl %3,%0\n" \
316 "1: rep; movsb\n" \
317 "2:\n" \
318 ".section .fixup,\"ax\"\n" \
319 "5: addl %3,%0\n" \
320 " jmp 2b\n" \
321 "3: lea 0(%3,%0,4),%0\n" \
322 " jmp 2b\n" \
323 ".previous\n" \
325 _ASM_EXTABLE_UA(0b, 3b) \
328 : "3"(size), "0"(size), "1"(to), "2"(from) \
unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
{
	__uaccess_begin_nospec();
	if (movsl_is_ok(to, from, n))
		__copy_user(to, from, n);
	else
		n = __copy_user_intel(to, from, n);
	__uaccess_end();
	return n;
}
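All of these routines share one contract: the return value is the number of bytes left uncopied, so zero means complete success. A user-space model of that convention (demo_copy and its injected "fault" point are hypothetical):

#include <stdio.h>
#include <string.h>

/* Hypothetical copy with the same contract as __copy_user_ll: it returns
 * the number of bytes NOT copied; fault_at forces a short copy so the
 * partial-failure case is visible. */
static unsigned long demo_copy(void *to, const void *from, unsigned long n,
			       unsigned long fault_at)
{
	unsigned long done = n < fault_at ? n : fault_at;

	memcpy(to, from, done);
	return n - done;	/* 0 on full success */
}

int main(void)
{
	char src[8] = "abcdefg", dst[8] = { 0 };
	unsigned long left = demo_copy(dst, src, 8, 5);

	printf("left=%lu dst=%.5s\n", left, dst); /* left=3 dst=abcde */
	return 0;
}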
unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
						 unsigned long n)
{
	__uaccess_begin_nospec();
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
		n = __copy_user_intel_nocache(to, from, n);
	else
		__copy_user(to, from, n);
#else
	__copy_user(to, from, n);
#endif
	__uaccess_end();
	return n;
}
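The non-temporal path is gated on n > 64 and SSE2, since movnti is an SSE2 instruction and small copies do not amortize the fence. A sketch of the same dispatch shape in plain C (all names hypothetical; both branches fall back to memcpy here just to stay runnable):

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical stand-ins; in the kernel these are static_cpu_has() and
 * the real copy routines. */
static bool cpu_has_sse2(void) { return true; }

static size_t copy_plain(void *to, const void *from, size_t n)
{
	memcpy(to, from, n);
	return 0;	/* bytes left uncopied */
}

/* Same dispatch shape as __copy_from_user_ll_nocache_nozero. */
static size_t copy_nocache_nozero(void *to, const void *from, size_t n)
{
	if (n > 64 && cpu_has_sse2())
		return copy_plain(to, from, n);	/* would be the movnti path */
	return copy_plain(to, from, n);		/* plain __copy_user path */
}

int main(void)
{
	char a[128] = { 1 }, b[128];

	printf("left=%zu\n", copy_nocache_nozero(b, a, sizeof(a)));
	return 0;
}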