Added exception table based safe MSR access APIs
[akaros.git] / kern / arch / x86 / uaccess.h
1 /* Copyright (c) 2015 Google Inc
2  * Davide Libenzi <dlibenzi@google.com>
3  * See LICENSE for details.
4  *
5  * Part of this code coming from a Linux kernel file:
6  *
7  * linux/arch/x86/include/asm/uaccess.h
8  *
9  * Which, even though missing specific copyright, it is supposed to be
10  * ruled by the overall Linux copyright.
11  */
12
13 #pragma once
14
15 #include <ros/errno.h>
16 #include <compiler.h>
17 #include <stdint.h>
18 #include <umem.h>
19
/* stac/clac placeholders: expand to nothing since SMAP is not enabled here.
 * NOTE(review): presumably kept so the asm templates below stay structurally
 * identical to Linux's SMAP-aware versions -- confirm. */
#define ASM_STAC
#define ASM_CLAC
/* Dereference wrapper so a pointer expression can feed an "m" constraint. */
#define __m(x) *(x)
23
/* One exception ("fixup") table entry.  Both fields store *self-relative*
 * offsets (see the ".quad (x) - ." directives in _ASM_EXTABLE): adding the
 * field's own address to its value yields the absolute address, as done by
 * ex_insn_addr()/ex_fixup_addr() below.  'insn' locates the instruction
 * allowed to fault; 'fixup' locates the recovery code to jump to. */
struct extable_ip_fixup {
	uint64_t insn;
	uint64_t fixup;
};
28
/* Emit an empty, 16-byte-aligned "__ex_table" section so the section exists
 * (with the alignment the 16-byte entries below require) even in a
 * translation unit that adds no fixup entries of its own. */
#define _ASM_EXTABLE_INIT()						\
	asm volatile(							\
	" .pushsection \"__ex_table\",\"a\"\n"				\
	" .balign 16\n"							\
	" .popsection\n"						\
	: :)
35
/* Append one struct extable_ip_fixup entry (two .quads = 16 bytes, hence the
 * .balign 16) to the "__ex_table" section, mapping the instruction at label
 * 'from' to the fixup code at label 'to'.  Each ".quad (x) - ." stores the
 * address relative to the entry field itself, matching the decoding in
 * ex_insn_addr()/ex_fixup_addr(). */
#define _ASM_EXTABLE(from, to)						\
	" .pushsection \"__ex_table\",\"a\"\n"				\
	" .balign 16\n"							\
	" .quad (" #from ") - .\n"					\
	" .quad (" #to ") - .\n"					\
	" .popsection\n"
42
/* Read the MSR selected by ECX='addr' with rdmsr into EDX:EAX ('edx':'eax').
 * If rdmsr faults (e.g. the MSR does not exist), the exception table entry
 * sends execution to the fixup at 3:, which stores the immediate 'errret'
 * (%4) into 'err' (%0) and jumps back past the asm.  The "0" (err) input
 * ties the caller's pre-initialized err (expected 0) to operand %0 so the
 * success path leaves it untouched.
 * NOTE(review): the mfence after rdmsr looks like a deliberate ordering
 * fence, but nothing visible here establishes why it is needed -- confirm. */
#define __read_msr_asm(eax, edx, addr, err, errret)			\
	asm volatile(ASM_STAC "\n"					\
		     "1:	rdmsr\n"				\
		     "		mfence\n"				\
		     "2: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "3:	mov %4,%0\n"				\
		     "	jmp 2b\n"					\
		     ".previous\n"					\
		     _ASM_EXTABLE(1b, 3b)				\
		     : "=r" (err), "=d" (edx), "=a" (eax)		\
		     : "c" (addr), "i" (errret), "0" (err))
55
/* Write the 64-bit 'val' to the MSR selected by ECX='addr' via wrmsr, with
 * the high half in EDX and the low half in EAX as wrmsr requires.  On a
 * fault the exception table routes to 3:, which stores the immediate
 * 'errret' (%4) into 'err' (%0) and resumes after the asm; on success 'err'
 * keeps the caller's pre-initialized value (tied in via "0" (err)). */
#define __write_msr_asm(val, addr, err, errret)				\
	asm volatile(ASM_STAC "\n"					\
		     "1:	wrmsr\n"				\
		     "2: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "3:	mov %4,%0\n"				\
		     "	jmp 2b\n"					\
		     ".previous\n"					\
		     _ASM_EXTABLE(1b, 3b)				\
		     : "=r" (err)					\
		     : "d" ((uint32_t) (val >> 32)),			\
		       "a" ((uint32_t) (val & 0xffffffff)), "c" (addr),	\
		       "i" (errret), "0" (err))
69
/* Store value 'x' to user memory at 'addr' with a single, fault-protected
 * mov.  'itype' is the instruction size suffix (b/w/l/q), 'rtype' the
 * operand-modifier for the register (e.g. "k" for 32-bit), 'ltype' the
 * constraint for 'x' (e.g. "iq", "er").  If the store at 1: faults, the
 * fixup at 3: sets 'err' to the immediate 'errret' and control resumes at
 * 2:; otherwise 'err' keeps the caller's pre-set value (via "0" (err)). */
#define __put_user_asm(x, addr, err, itype, rtype, ltype, errret)	\
	asm volatile(ASM_STAC "\n"					\
		     "1:	mov"itype" %"rtype"1,%2\n"		\
		     "2: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "3:	mov %3,%0\n"				\
		     "	jmp 2b\n"					\
		     ".previous\n"					\
		     _ASM_EXTABLE(1b, 3b)				\
		     : "=r"(err)					\
		     : ltype(x), "m" (__m(addr)), "i" (errret), "0" (err))
81
/* Load value 'x' from user memory at 'addr' with a single, fault-protected
 * mov.  'itype'/'rtype'/'ltype' are as in __put_user_asm (size suffix,
 * register modifier, constraint for the destination).  If the load at 1:
 * faults, the fixup at 3: sets 'err' to 'errret' AND zeroes the destination
 * register (so the caller never sees uninitialized data), then resumes at
 * 2:.  On success 'err' keeps the caller's pre-set value (via "0" (err)). */
#define __get_user_asm(x, addr, err, itype, rtype, ltype, errret)	\
	asm volatile(ASM_STAC "\n"					\
		     "1:	mov"itype" %2,%"rtype"1\n"		\
		     "2: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "3:	mov %3,%0\n"				\
		     "	xor"itype" %"rtype"1,%"rtype"1\n"		\
		     "	jmp 2b\n"					\
		     ".previous\n"					\
		     _ASM_EXTABLE(1b, 3b)				\
		     : "=r" (err), ltype(x)				\
		     : "m" (__m(addr)), "i" (errret), "0" (err))
94
/* Fault-protected byte copy of 'count' bytes from 'src' (RSI) to 'dst'
 * (RDI) via rep movsb.  A fault mid-copy lands in the fixup at 3:, which
 * sets 'err' to 'errret' (a partial copy may already have happened).
 * NOTE(review): rep movsb modifies RDI/RSI/RCX, yet "D"/"S"/"c" are
 * declared as inputs only -- GCC's extended-asm contract requires modified
 * registers to be outputs (e.g. "+D"/"+S"/"+c" dummies).  This likely works
 * in practice because the asm is volatile with a "memory" clobber, but it
 * should be confirmed/fixed against the GCC docs. */
#define __user_memcpy(dst, src, count, err, errret)			\
	asm volatile(ASM_STAC "\n"					\
		     "1:	rep movsb\n"				\
		     "2: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "3:	mov %4,%0\n"				\
		     "	jmp 2b\n"					\
		     ".previous\n"					\
		     _ASM_EXTABLE(1b, 3b)				\
		     : "=r"(err)					\
		     : "D" (dst), "S" (src), "c" (count), "i" (errret), "0" (err) \
		     : "memory")
107
108 static inline int __put_user(void *dst, const void *src, unsigned int count)
109 {
110         int err = 0;
111
112         switch (count) {
113         case 1:
114                 __put_user_asm(*(const uint8_t *) src, (uint8_t *) dst, err, "b",
115                                            "b", "iq", -EFAULT);
116                 break;
117         case 2:
118                 __put_user_asm(*(const uint16_t *) src, (uint16_t *) dst, err, "w",
119                                            "w", "ir", -EFAULT);
120                 break;
121         case 4:
122                 __put_user_asm(*(const uint32_t *) src, (uint32_t *) dst, err, "l",
123                                            "k", "ir", -EFAULT);
124                 break;
125         case 8:
126                 __put_user_asm(*(const uint64_t *) src, (uint64_t *) dst, err, "q",
127                                            "", "er", -EFAULT);
128                 break;
129         default:
130                 __user_memcpy(dst, src, count, err, -EFAULT);
131         }
132
133         return err;
134 }
135
136 static inline int copy_to_user(void *dst, const void *src, unsigned int count)
137 {
138         int err = 0;
139
140         if (unlikely(!is_user_rwaddr(dst, count))) {
141                 err = -EFAULT;
142         } else if (!__builtin_constant_p(count)) {
143                 __user_memcpy(dst, src, count, err, -EFAULT);
144         } else {
145                 err = __put_user(dst, src, count);
146         }
147
148         return err;
149 }
150
151 static inline int __get_user(void *dst, const void *src, unsigned int count)
152 {
153         int err = 0;
154
155         switch (count) {
156         case 1:
157                 __get_user_asm(*(uint8_t *) dst, (const uint8_t *) src, err, "b",
158                                            "b", "=q", -EFAULT);
159                 break;
160         case 2:
161                 __get_user_asm(*(uint16_t *) dst, (const uint16_t *) src, err, "w",
162                                            "w", "=r", -EFAULT);
163                 break;
164         case 4:
165                 __get_user_asm(*(uint32_t *) dst, (const uint32_t *) src, err, "l",
166                                            "k", "=r", -EFAULT);
167                 break;
168         case 8:
169                 __get_user_asm(*(uint64_t *) dst, (const uint64_t *) src, err, "q",
170                                            "", "=r", -EFAULT);
171                 break;
172         default:
173                 __user_memcpy(dst, src, count, err, -EFAULT);
174         }
175
176         return err;
177 }
178
179 static inline int copy_from_user(void *dst, const void *src,
180                                                                  unsigned int count)
181 {
182         int err = 0;
183
184         if (unlikely(!is_user_raddr((void *) src, count))) {
185                 err = -EFAULT;
186         } else if (!__builtin_constant_p(count)) {
187                 __user_memcpy(dst, src, count, err, -EFAULT);
188         } else {
189                 err = __get_user(dst, src, count);
190         }
191
192         return err;
193 }
194
195 static inline int safe_read_msr(uint32_t addr, uint64_t *value)
196 {
197         int err = 0;
198         uint32_t edx, eax;
199
200         __read_msr_asm(eax, edx, addr, err, -EFAULT);
201         if (likely(err == 0))
202                 *value = ((uint64_t) edx << 32) | eax;
203
204         return err;
205 }
206
207 static inline int safe_write_msr(uint32_t addr, uint64_t value)
208 {
209         int err = 0;
210
211         __write_msr_asm(value, addr, err, -EFAULT);
212
213         return err;
214 }
215
216 static inline uintptr_t ex_insn_addr(const struct extable_ip_fixup *x)
217 {
218         return (uintptr_t) &x->insn + x->insn;
219 }
220
221 static inline uintptr_t ex_fixup_addr(const struct extable_ip_fixup *x)
222 {
223         return (uintptr_t) &x->fixup + x->fixup;
224 }