288ef7d11a7a3686ba07ce3e6e3fce84fe5dabf2
[platform/kernel/linux-rpi.git] / arch / m68k / include / asm / uaccess.h
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef __M68K_UACCESS_H
3 #define __M68K_UACCESS_H
4
5 #ifdef CONFIG_MMU
6
7 /*
8  * User space memory access functions
9  */
10 #include <linux/compiler.h>
11 #include <linux/types.h>
12 #include <asm/segment.h>
13 #include <asm/extable.h>
14
/*
 * access_ok() - user-pointer range check.
 *
 * We let the MMU do all checking: a bad user pointer simply faults and is
 * handled through the exception tables built by the accessors below, so
 * this always reports success and ignores both arguments.
 *
 * @addr: start of the user-space region (unused)
 * @size: length of the region in bytes (unused)
 *
 * Always returns 1 ("access allowed") in this CONFIG_MMU configuration.
 */
static inline int access_ok(const void __user *addr,
                            unsigned long size)
{
        /*
         * XXX: for !CONFIG_CPU_HAS_ADDRESS_SPACES this really needs to check
         * for TASK_SIZE!
         */
        return 1;
}
25
/*
 * Not all variants of the 68k family support the notion of address spaces.
 * The traditional 680x0 parts do, and they use the sfc/dfc registers and
 * the "moves" instruction to access user space from kernel space. Other
 * family members like ColdFire don't support this, and only have a single
 * address space, and use the usual "move" instruction for user space access.
 *
 * Outside of this difference the user space access functions are the same.
 * So let's keep the code simple and just define in what we need to use.
 */
/* MOVES expands to the mnemonic string pasted into the asm templates below. */
#ifdef CONFIG_CPU_HAS_ADDRESS_SPACES
#define MOVES   "moves"
#else
#define MOVES   "move"
#endif
41
/*
 * __put_user_asm() - store one value of size @bwl to user space.
 *
 * @res: int status variable; left untouched on success, set to @err by the
 *       exception fixup if the store faults
 * @x:   value to store
 * @ptr: user-space destination pointer
 * @bwl: operand-size suffix (b, w or l) pasted into the instruction
 * @reg: inline-asm constraint used for @x ("d" for byte stores, "r" otherwise)
 * @err: immediate error code (e.g. -EFAULT) loaded into @res on fault
 *
 * Both the store at label 1: and the continuation point 2: get __ex_table
 * entries pointing at the fixup at 10:, which sets @res and resumes at 2:.
 */
#define __put_user_asm(res, x, ptr, bwl, reg, err)      \
asm volatile ("\n"                                      \
        "1:     "MOVES"."#bwl"  %2,%1\n"                \
        "2:\n"                                          \
        "       .section .fixup,\"ax\"\n"               \
        "       .even\n"                                \
        "10:    moveq.l %3,%0\n"                        \
        "       jra 2b\n"                               \
        "       .previous\n"                            \
        "\n"                                            \
        "       .section __ex_table,\"a\"\n"            \
        "       .align  4\n"                            \
        "       .long   1b,10b\n"                       \
        "       .long   2b,10b\n"                       \
        "       .previous"                              \
        : "+d" (res), "=m" (*(ptr))                     \
        : #reg (x), "i" (err))
59
/*
 * __put_user_asm8() - store a 64-bit value to user space as two longword
 * moves.
 *
 * @res: int status variable; set to -EFAULT by the fixup on fault
 * @x:   64-bit value; %2 is the first register of the pair holding it and
 *       %R2 names the second register (GCC operand modifier)
 * @ptr: user-space destination; copied into a local because the address
 *       register is post-incremented by the first move
 *
 * A fault at any of labels 1:, 2: or 3: is redirected to the fixup at 10:,
 * which sets @res and resumes after the stores at 3:.
 */
#define __put_user_asm8(res, x, ptr)                            \
do {                                                            \
        const void *__pu_ptr = (const void __force *)(ptr);     \
                                                                \
        asm volatile ("\n"                                      \
                "1:     "MOVES".l %2,(%1)+\n"                   \
                "2:     "MOVES".l %R2,(%1)\n"                   \
                "3:\n"                                          \
                "       .section .fixup,\"ax\"\n"               \
                "       .even\n"                                \
                "10:    movel %3,%0\n"                          \
                "       jra 3b\n"                               \
                "       .previous\n"                            \
                "\n"                                            \
                "       .section __ex_table,\"a\"\n"            \
                "       .align 4\n"                             \
                "       .long 1b,10b\n"                         \
                "       .long 2b,10b\n"                         \
                "       .long 3b,10b\n"                         \
                "       .previous"                              \
                : "+d" (res), "+a" (__pu_ptr)                   \
                : "r" (x), "i" (-EFAULT)                        \
                : "memory");                                    \
} while (0)
84
/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 */

/*
 * __put_user() - write @x to user space at @ptr, no access_ok() check.
 *
 * Expands to an expression that evaluates to 0 on success or -EFAULT if
 * the store faulted.  Sizes 1, 2 and 4 use a single move (bytes need the
 * "d" constraint, larger sizes use "r"); size 8 uses the two-move helper;
 * any other size is a build-time error.
 */
#define __put_user(x, ptr)                                              \
({                                                                      \
        typeof(*(ptr)) __pu_val = (x);                                  \
        int __pu_err = 0;                                               \
        __chk_user_ptr(ptr);                                            \
        switch (sizeof (*(ptr))) {                                      \
        case 1:                                                         \
                __put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT); \
                break;                                                  \
        case 2:                                                         \
                __put_user_asm(__pu_err, __pu_val, ptr, w, r, -EFAULT); \
                break;                                                  \
        case 4:                                                         \
                __put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT); \
                break;                                                  \
        case 8:                                                         \
                __put_user_asm8(__pu_err, __pu_val, ptr);               \
                break;                                                  \
        default:                                                        \
                BUILD_BUG();                                            \
        }                                                               \
        __pu_err;                                                       \
})
/* With all checking left to the MMU, put_user() is just __put_user(). */
#define put_user(x, ptr)        __put_user(x, ptr)
114
115
/*
 * __get_user_asm() - load one value of size @bwl from user space.
 *
 * @res:  int status variable; set to @err by the fixup on fault
 * @x:    lvalue receiving the result
 * @ptr:  user-space source pointer
 * @type: unsigned kernel type for the temporary (u8/u16/u32)
 * @bwl:  operand-size suffix (b, w or l)
 * @reg:  constraint for the temporary ("d" for byte loads, "r" otherwise)
 * @err:  error code loaded into @res on fault
 *
 * On a fault the fixup at 10: sets @res and zeroes the temporary
 * (sub.l %1,%1), so @x reads as 0 after a failed access.  The result is
 * widened through unsigned long before being cast back to *@ptr's type.
 */
#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({             \
        type __gu_val;                                                  \
        asm volatile ("\n"                                              \
                "1:     "MOVES"."#bwl"  %2,%1\n"                        \
                "2:\n"                                                  \
                "       .section .fixup,\"ax\"\n"                       \
                "       .even\n"                                        \
                "10:    move.l  %3,%0\n"                                \
                "       sub.l   %1,%1\n"                                \
                "       jra     2b\n"                                   \
                "       .previous\n"                                    \
                "\n"                                                    \
                "       .section __ex_table,\"a\"\n"                    \
                "       .align  4\n"                                    \
                "       .long   1b,10b\n"                               \
                "       .previous"                                      \
                : "+d" (res), "=&" #reg (__gu_val)                      \
                : "m" (*(ptr)), "i" (err));                             \
        (x) = (__force typeof(*(ptr)))(__force unsigned long)__gu_val;  \
})
136
/*
 * __get_user_asm8() - load a 64-bit value from user space as two longword
 * moves.
 *
 * @res: int status variable; set to -EFAULT by the fixup on fault
 * @x:   lvalue receiving the result
 * @ptr: user-space source; copied into a local because the address
 *       register is post-incremented by the first move
 *
 * The result lands in a u64/destination-type union so the store to @x
 * goes through a proper type rather than a pointer cast.  %1 is the first
 * register of the pair and %R1 the second.  On fault, the fixup at 10:
 * sets @res and zeroes both halves, so @x reads as 0.
 */
#define __get_user_asm8(res, x, ptr)                                    \
do {                                                                    \
        const void *__gu_ptr = (const void __force *)(ptr);             \
        union {                                                         \
                u64 l;                                                  \
                __typeof__(*(ptr)) t;                                   \
        } __gu_val;                                                     \
                                                                        \
        asm volatile ("\n"                                              \
                "1:     "MOVES".l       (%2)+,%1\n"                     \
                "2:     "MOVES".l       (%2),%R1\n"                     \
                "3:\n"                                                  \
                "       .section .fixup,\"ax\"\n"                       \
                "       .even\n"                                        \
                "10:    move.l  %3,%0\n"                                \
                "       sub.l   %1,%1\n"                                \
                "       sub.l   %R1,%R1\n"                              \
                "       jra     3b\n"                                   \
                "       .previous\n"                                    \
                "\n"                                                    \
                "       .section __ex_table,\"a\"\n"                    \
                "       .align  4\n"                                    \
                "       .long   1b,10b\n"                               \
                "       .long   2b,10b\n"                               \
                "       .previous"                                      \
                : "+d" (res), "=&r" (__gu_val.l),                       \
                  "+a" (__gu_ptr)                                       \
                : "i" (-EFAULT)                                         \
                : "memory");                                            \
        (x) = __gu_val.t;                                               \
} while (0)
168
/*
 * __get_user() - read *@ptr from user space into @x, no access_ok() check.
 *
 * Expands to an expression that evaluates to 0 on success or -EFAULT if
 * the load faulted (in which case @x is zeroed by the asm fixup).  Sizes
 * 1, 2 and 4 use a single move, size 8 uses the two-move helper; any
 * other size is a build-time error.
 */
#define __get_user(x, ptr)                                              \
({                                                                      \
        int __gu_err = 0;                                               \
        __chk_user_ptr(ptr);                                            \
        switch (sizeof(*(ptr))) {                                       \
        case 1:                                                         \
                __get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);    \
                break;                                                  \
        case 2:                                                         \
                __get_user_asm(__gu_err, x, ptr, u16, w, r, -EFAULT);   \
                break;                                                  \
        case 4:                                                         \
                __get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT);   \
                break;                                                  \
        case 8:                                                         \
                __get_user_asm8(__gu_err, x, ptr);                      \
                break;                                                  \
        default:                                                        \
                BUILD_BUG();                                            \
        }                                                               \
        __gu_err;                                                       \
})
/* With all checking left to the MMU, get_user() is just __get_user(). */
#define get_user(x, ptr) __get_user(x, ptr)
192
/* Out-of-line copy routines; both return the number of bytes NOT copied. */
unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);

/* Map a per-move byte count (0/1/2/4) to the matching operand-size suffix. */
#define __suffix0
#define __suffix1 b
#define __suffix2 w
#define __suffix4 l

/*
 * Emit up to three user-space loads of @n1/@n2/@n3 bytes (suffixes
 * @s1/@s2/@s3), each followed by a plain kernel-space store.  @s2/@s3 may
 * be empty, in which case the corresponding move is assembled out via
 * .ifnc.  The fixup entries at 10:/20:/30: deliberately fall through into
 * each other, so a fault at an earlier move adds the sizes of that move
 * and all following ones to @res — i.e. @res accumulates the number of
 * bytes not copied.
 */
#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
        asm volatile ("\n"                                              \
                "1:     "MOVES"."#s1"   (%2)+,%3\n"                     \
                "       move."#s1"      %3,(%1)+\n"                     \
                "       .ifnc   \""#s2"\",\"\"\n"                       \
                "2:     "MOVES"."#s2"   (%2)+,%3\n"                     \
                "       move."#s2"      %3,(%1)+\n"                     \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "3:     "MOVES"."#s3"   (%2)+,%3\n"                     \
                "       move."#s3"      %3,(%1)+\n"                     \
                "       .endif\n"                                       \
                "       .endif\n"                                       \
                "4:\n"                                                  \
                "       .section __ex_table,\"a\"\n"                    \
                "       .align  4\n"                                    \
                "       .long   1b,10f\n"                               \
                "       .ifnc   \""#s2"\",\"\"\n"                       \
                "       .long   2b,20f\n"                               \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "       .long   3b,30f\n"                               \
                "       .endif\n"                                       \
                "       .endif\n"                                       \
                "       .previous\n"                                    \
                "\n"                                                    \
                "       .section .fixup,\"ax\"\n"                       \
                "       .even\n"                                        \
                "10:    addq.l #"#n1",%0\n"                             \
                "       .ifnc   \""#s2"\",\"\"\n"                       \
                "20:    addq.l #"#n2",%0\n"                             \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "30:    addq.l #"#n3",%0\n"                             \
                "       .endif\n"                                       \
                "       .endif\n"                                       \
                "       jra     4b\n"                                   \
                "       .previous\n"                                    \
                : "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)      \
                : : "memory")

/* Extra expansion level so the __suffix##n arguments expand before pasting. */
#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
        ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3)   \
        ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3,  \
                                        __suffix##n1, __suffix##n2, __suffix##n3)
244
/*
 * __constant_copy_from_user() - inlined copy-from-user for compile-time
 * constant sizes.
 *
 * Each handled size is decomposed into at most three moves of 4/2/1
 * bytes; anything not in the table (11, or > 12 bytes) falls back to the
 * out-of-line __generic_copy_from_user().
 *
 * Returns the number of bytes that could not be copied (0 on success).
 */
static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
        unsigned long res = 0, tmp;

        switch (n) {
        case 1:
                __constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
                break;
        case 2:
                __constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
                break;
        case 3:
                __constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
                break;
        case 4:
                __constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
                break;
        case 5:
                __constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
                break;
        case 6:
                __constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
                break;
        case 7:
                __constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
                break;
        case 8:
                __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
                break;
        case 9:
                __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
                break;
        case 10:
                __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
                break;
        case 12:
                __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
                break;
        default:
                /* we limit the inlined version to 3 moves */
                return __generic_copy_from_user(to, from, n);
        }

        return res;
}
291
/*
 * Emit two (optionally three) kernel-space loads paired with user-space
 * stores of sizes @s1/@s2/@s3; @s3 may be empty, in which case the third
 * pair is assembled out via .ifnc.  Only the user-space stores (and the
 * labels following them) carry __ex_table entries; all of them share the
 * single fixup at 5:, which sets @res to the full @n — i.e. a fault
 * anywhere reports the whole copy as not done — and resumes at 4:.
 *
 * @res: unsigned long result variable (0 on success, @n on fault)
 * @to:  user-space destination (post-incremented)
 * @from: kernel-space source (post-incremented)
 * @tmp: scratch data register
 * @n:   total byte count, used as the fixup value
 */
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)  \
        asm volatile ("\n"                                              \
                "       move."#s1"      (%2)+,%3\n"                     \
                "11:    "MOVES"."#s1"   %3,(%1)+\n"                     \
                "12:    move."#s2"      (%2)+,%3\n"                     \
                "21:    "MOVES"."#s2"   %3,(%1)+\n"                     \
                "22:\n"                                                 \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "       move."#s3"      (%2)+,%3\n"                     \
                "31:    "MOVES"."#s3"   %3,(%1)+\n"                     \
                "32:\n"                                                 \
                "       .endif\n"                                       \
                "4:\n"                                                  \
                "\n"                                                    \
                "       .section __ex_table,\"a\"\n"                    \
                "       .align  4\n"                                    \
                "       .long   11b,5f\n"                               \
                "       .long   12b,5f\n"                               \
                "       .long   21b,5f\n"                               \
                "       .long   22b,5f\n"                               \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "       .long   31b,5f\n"                               \
                "       .long   32b,5f\n"                               \
                "       .endif\n"                                       \
                "       .previous\n"                                    \
                "\n"                                                    \
                "       .section .fixup,\"ax\"\n"                      \
                "       .even\n"                                        \
                "5:     moveq.l #"#n",%0\n"                             \
                "       jra     4b\n"                                   \
                "       .previous\n"                                    \
                : "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)       \
                : : "memory")
325
/*
 * __constant_copy_to_user() - inlined copy-to-user for compile-time
 * constant sizes.
 *
 * Sizes 1, 2 and 4 reuse __put_user_asm() with the byte count passed as
 * the "error" immediate, so on a fault @res becomes the number of bytes
 * not copied.  Other handled sizes use up to three move pairs; anything
 * not in the table falls back to the out-of-line __generic_copy_to_user().
 *
 * Returns the number of bytes that could not be copied (0 on success).
 */
static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
        unsigned long res = 0, tmp;

        switch (n) {
        case 1:
                __put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
                break;
        case 2:
                __put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, r, 2);
                break;
        case 3:
                __constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
                break;
        case 4:
                __put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
                break;
        case 5:
                __constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
                break;
        case 6:
                __constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
                break;
        case 7:
                __constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
                break;
        case 8:
                __constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
                break;
        case 9:
                __constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
                break;
        case 10:
                __constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
                break;
        case 12:
                __constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
                break;
        default:
                /* limit the inlined version to 3 moves */
                return __generic_copy_to_user(to, from, n);
        }

        return res;
}
372
373 static inline unsigned long
374 raw_copy_from_user(void *to, const void __user *from, unsigned long n)
375 {
376         if (__builtin_constant_p(n))
377                 return __constant_copy_from_user(to, from, n);
378         return __generic_copy_from_user(to, from, n);
379 }
380
381 static inline unsigned long
382 raw_copy_to_user(void __user *to, const void *from, unsigned long n)
383 {
384         if (__builtin_constant_p(n))
385                 return __constant_copy_to_user(to, from, n);
386         return __generic_copy_to_user(to, from, n);
387 }
/* Tell the generic uaccess code to use the inline raw_copy_*_user() above. */
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER

/*
 * Highest address usable as a "user" pointer: unlimited when running
 * with a kernel address limit, otherwise the task's address-space limit.
 */
#define user_addr_max() \
        (uaccess_kernel() ? ~0UL : TASK_SIZE)

extern long strncpy_from_user(char *dst, const char __user *src, long count);
extern __must_check long strnlen_user(const char __user *str, long n);

/* Zero @n bytes of user memory; returns the number of bytes not cleared. */
unsigned long __clear_user(void __user *to, unsigned long n);

#define clear_user      __clear_user
400
401 #else /* !CONFIG_MMU */
402 #include <asm-generic/uaccess.h>
403 #endif
404
405 #endif /* __M68K_UACCESS_H */