/* SPDX-License-Identifier: GPL-2.0 */
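/*
 * __put_user_asm() wraps a single MOVES store to user space in the
 * kernel's exception-table machinery: labels 1: and 2: mark the user
 * access, the .fixup section holds the recovery code (load the error
 * value into the result operand and resume after the access), and the
 * __ex_table entries map each faulting address to that fixup, so a bad
 * user pointer produces an error return instead of an oops.
 */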
asm volatile ("\n" \
	"1: "MOVES"."#bwl" %2,%1\n" \
	"2:\n" \
	" .section .fixup,\"ax\"\n" \
	" .even\n" \
	"10: moveq.l %3,%0\n" \
	" jra 2b\n" \
	" .previous\n" \
	"\n" \
	" .section __ex_table,\"a\"\n" \
	" .align 4\n" \
	" .long 1b,10b\n" \
	" .long 2b,10b\n" \
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
		__put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT); \
		__put_user_asm(__pu_err, __pu_val, ptr, w, r, -EFAULT); \
		__put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT); \
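/*
 * The size letter (b/w/l) selects a byte, word or long transfer, the
 * following argument ("d" or "r") the register constraint used for the
 * value, and -EFAULT is the error code the fixup loads on a fault.
 */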
asm volatile ("\n" \
	"1: "MOVES".l %2,(%1)+\n" \
	"2: "MOVES".l %R2,(%1)\n" \
	"3:\n" \
	" .section .fixup,\"ax\"\n" \
	" .even\n" \
	"10: movel %3,%0\n" \
	" jra 3b\n" \
	" .previous\n" \
	"\n" \
	" .section __ex_table,\"a\"\n" \
	" .align 4\n" \
	" .long 1b,10b\n" \
	" .long 2b,10b\n" \
	" .long 3b,10b\n" \
	: "r" (__pu_val), "i" (-EFAULT) \
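/*
 * 64-bit __put_user: the value sits in a register pair, %2 naming the
 * first register and %R2 the second, and is written with two MOVES.l
 * stores through the auto-incremented user pointer; all three labels
 * share one fixup that just reports -EFAULT.
 */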
asm volatile ("\n" \
	"1: "MOVES"."#bwl" %2,%1\n" \
	"2:\n" \
	" .section .fixup,\"ax\"\n" \
	" .even\n" \
	"10: move.l %3,%0\n" \
	" sub.l %1,%1\n" \
	" jra 2b\n" \
	" .previous\n" \
	"\n" \
	" .section __ex_table,\"a\"\n" \
	" .align 4\n" \
	" .long 1b,10b\n" \
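/*
 * __get_user_asm(): on a faulting load the fixup records the error code
 * and clears the destination (sub.l %1,%1), so the caller never sees
 * uninitialized data.
 */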
		__get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT); \
		__get_user_asm(__gu_err, x, ptr, u16, w, r, -EFAULT); \
		__get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT); \
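/*
 * The extra type argument (u8/u16/u32) gives the temporary that receives
 * the loaded value its proper width before it is assigned back to the
 * caller's variable.
 */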
asm volatile ("\n" \
	"1: "MOVES".l (%2)+,%1\n" \
	"2: "MOVES".l (%2),%R1\n" \
	"3:\n" \
	" .section .fixup,\"ax\"\n" \
	" .even\n" \
	"10: move.l %3,%0\n" \
	" sub.l %1,%1\n" \
	" sub.l %R1,%R1\n" \
	" jra 3b\n" \
	" .previous\n" \
	"\n" \
	" .section __ex_table,\"a\"\n" \
	" .align 4\n" \
	" .long 1b,10b\n" \
	" .long 2b,10b\n" \
	: "i" (-EFAULT) \
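/*
 * 64-bit __get_user: two MOVES.l loads fill the destination register
 * pair (%1 and %R1); only labels 1: and 2: can fault, and the fixup
 * clears both halves before reporting -EFAULT.
 */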
unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
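/*
 * Out-of-line copy routines, used whenever the length is not a
 * compile-time constant or would need more than three inline moves.
 */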
asm volatile ("\n" \
	"1: "MOVES"."#s1" (%2)+,%3\n" \
	" move."#s1" %3,(%1)+\n" \
	" .ifnc \""#s2"\",\"\"\n" \
	"2: "MOVES"."#s2" (%2)+,%3\n" \
	" move."#s2" %3,(%1)+\n" \
	" .ifnc \""#s3"\",\"\"\n" \
	"3: "MOVES"."#s3" (%2)+,%3\n" \
	" move."#s3" %3,(%1)+\n" \
	" .endif\n" \
	" .endif\n" \
	"4:\n" \
	" .section __ex_table,\"a\"\n" \
	" .align 4\n" \
	" .long 1b,10f\n" \
	" .ifnc \""#s2"\",\"\"\n" \
	" .long 2b,20f\n" \
	" .ifnc \""#s3"\",\"\"\n" \
	" .long 3b,30f\n" \
	" .endif\n" \
	" .endif\n" \
	" .previous\n" \
	"\n" \
	" .section .fixup,\"ax\"\n" \
	" .even\n" \
	"10: addq.l #"#n1",%0\n" \
	" .ifnc \""#s2"\",\"\"\n" \
	"20: addq.l #"#n2",%0\n" \
	" .ifnc \""#s3"\",\"\"\n" \
	"30: addq.l #"#n3",%0\n" \
	" .endif\n" \
	" .endif\n" \
	" jra 4b\n" \
	" .previous\n" \
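/*
 * Constant-size copy-in: up to three MOVES loads with sizes s1/s2/s3,
 * each expanded only when its size string is non-empty (.ifnc).  Only
 * the user-space loads get exception-table entries; a fault on move 1,
 * 2 or 3 enters the fixup at 10:, 20: or 30:, and the addq.l chain
 * falls through so the byte counts of that move and all later ones are
 * added to the "bytes not copied" result.
 */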
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
	switch (n) {
	case 3:
		/* we limit the inlined version to 3 moves */
		return __generic_copy_from_user(to, from, n);
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3) \
asm volatile ("\n" \
	" move."#s1" (%2)+,%3\n" \
	"11: "MOVES"."#s1" %3,(%1)+\n" \
	"12: move."#s2" (%2)+,%3\n" \
	"21: "MOVES"."#s2" %3,(%1)+\n" \
	"22:\n" \
	" .ifnc \""#s3"\",\"\"\n" \
	" move."#s3" (%2)+,%3\n" \
	"31: "MOVES"."#s3" %3,(%1)+\n" \
	"32:\n" \
	" .endif\n" \
	"4:\n" \
	"\n" \
	" .section __ex_table,\"a\"\n" \
	" .align 4\n" \
	" .long 11b,5f\n" \
	" .long 12b,5f\n" \
	" .long 21b,5f\n" \
	" .long 22b,5f\n" \
	" .ifnc \""#s3"\",\"\"\n" \
	" .long 31b,5f\n" \
	" .long 32b,5f\n" \
	" .endif\n" \
	" .previous\n" \
	"\n" \
	" .section .fixup,\"ax\"\n" \
	" .even\n" \
	"5: moveq.l #"#n",%0\n" \
	" jra 4b\n" \
	" .previous\n" \
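/*
 * Constant-size copy-out: only the MOVES stores touch user space, and
 * each store plus the instruction after it has an exception-table entry
 * pointing at the single fixup at 5:, which reports the whole length n
 * as not copied.
 */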
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
	switch (n) {
	case 3:
		__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
		/* limit the inlined version to 3 moves */
		return __generic_copy_to_user(to, from, n);
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
	if (__builtin_constant_p(n))
		return __constant_copy_from_user(to, from, n);
	return __generic_copy_from_user(to, from, n);
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
	if (__builtin_constant_p(n))
		return __constant_copy_to_user(to, from, n);
	return __generic_copy_to_user(to, from, n);
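/*
 * raw_copy_{from,to}_user() inline copies whose constant size fits in
 * at most three moves and fall back to the generic out-of-line routines
 * for everything else.
 */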
extern __must_check long strnlen_user(const char __user *str, long n);
unsigned long __clear_user(void __user *to, unsigned long n);
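/*
 * strnlen_user() returns the length of a NUL-terminated user string
 * including the terminator; __clear_user() zeroes n bytes of user
 * memory and returns the number of bytes it could not clear.
 */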
#include <asm-generic/uaccess.h>