/* arch/x86/include/asm/uaccess_64.h */
#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <linux/lockdep.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
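
/*
 * The copy_{to,from,in}_user() entry points above are the checked
 * variants; the __copy_* inlines below skip the user range check and
 * assume the caller has already validated the pointer (access_ok()).
 *
 * Illustrative caller ("struct foo" and "uarg" are placeholders, not
 * part of this header):
 *
 *	struct foo karg;
 *
 *	if (copy_from_user(&karg, uarg, sizeof(karg)))
 *		return -EFAULT;
 *
 * A nonzero return value is the number of bytes left uncopied.
 */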

static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:
		__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		return ret;
	case 2:
		__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		return ret;
	case 4:
		__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		return ret;
	case 8:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
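
/*
 * In the 10- and 16-byte cases above, the copy is issued as an 8-byte
 * move followed by a 2- or 8-byte tail. The final __get_user_asm()
 * argument is the value stored into ret on a fault, so a fault in the
 * first half reports the entire object as uncopied.
 */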

static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:
		__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			       ret, "b", "b", "iq", 1);
		return ret;
	case 2:
		__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 4:
		__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			       ret, "l", "k", "ir", 4);
		return ret;
	case 8:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 10);
		if (unlikely(ret))
			return ret;
		asm("" : : : "memory");
		__put_user_asm(((u16 *)src)[4], (u16 __user *)dst + 4,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 16);
		if (unlikely(ret))
			return ret;
		asm("" : : : "memory");
		__put_user_asm(((u64 *)src)[1], (u64 __user *)dst + 1,
			       ret, "q", "", "ir", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
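
/*
 * The bare asm("" : : : "memory") in the 10- and 16-byte cases is a
 * compiler barrier: it keeps the compiler from reordering or merging
 * the second store with the first across the intervening fault check.
 */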

static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "ir", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
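
/*
 * For small constant sizes, __copy_in_user() bounces the data through
 * a kernel register (tmp): one fault-checked load from the source user
 * buffer, then, only if that succeeded, one fault-checked store to the
 * destination user buffer.
 */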

__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);
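
/*
 * The string and clear helpers above are implemented out of line (in
 * arch/x86/lib at this point in history). As with the copy routines,
 * the __-prefixed variants skip the user range check. clear_user()
 * returns the number of bytes that could not be cleared, i.e. 0 on
 * success.
 */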

__must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
					    unsigned size);

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
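
/*
 * The _inatomic variants omit the might_fault()/might_sleep() checks,
 * so they may be used where page faults are not allowed to sleep; the
 * caller must be prepared for a nonzero (bytes-uncopied) return.
 */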

extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int __copy_from_user_nocache(void *dst, const void __user *src,
					   unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int __copy_from_user_inatomic_nocache(void *dst,
						    const void __user *src,
						    unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
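
/*
 * The nocache routines copy with non-temporal stores to avoid cache
 * pollution on large, write-once buffers. The zerorest flag selects
 * whether the remaining destination bytes are zeroed after a partial
 * fault, matching regular copy_from_user() semantics.
 */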

unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* _ASM_X86_UACCESS_64_H */