77b03dac2b3f1b672219977d065a9e750744faeb
[akaros.git] / kern / include / ros / common.h
1 #ifndef ROS_COMMON_H
2 #define ROS_COMMON_H
3
4 #ifndef __ASSEMBLER__
5 #ifndef __IVY__
6 #include <ros/noivy.h>
7 #endif
8
9 #include <stddef.h>
10 #include <stdint.h>
11 #include <sys/types.h>
12 #include <stdbool.h>
13
/* Physical address (same width as a virtual-address integer). */
typedef uintptr_t physaddr_t;
/* Signed/unsigned integers sized to a machine register (ssize_t/size_t). */
typedef ssize_t intreg_t;
typedef size_t uintreg_t;
17
/* Fallback definitions; libc or earlier headers may already provide these. */
#ifndef NULL
#define NULL ((void*) 0)
#endif

#ifndef TRUE
#define TRUE    1
#endif

#ifndef FALSE
#define FALSE   0
#endif
29
/* Test bit 'bit' (0-based) of 'flags'.  Shifts 1UL rather than the int
 * literal 1: '1 << 31' is signed-shift UB, so bit 31 (and higher, on 64 bit)
 * now works. */
#define CHECK_FLAG(flags,bit)   ((flags) & (1UL << (bit)))
31
/* Iterate 'var' over 'size' slots of a circular buffer, starting at slot
 * 'next' and wrapping modulo 'size'.  Note the comma declaration makes both
 * the hidden counter _var and 'var' ints scoped to the loop, so 'var' must be
 * a fresh name; 'size' is evaluated every iteration. */
#define FOR_CIRC_BUFFER(next, size, var) \
        for (int _var = 0, var = (next); _var < (size); _var++, var = (var + 1) % (size))
34
// Efficient min and max operations
#ifdef ROS_KERNEL /* Glibc has their own */
/* Single-evaluation MIN/MAX via GCC statement expressions.  The temporaries
 * use distinct per-macro names instead of the implementation-reserved
 * __a/__b, so MIN and MAX can nest (e.g. MIN(MAX(a, b), c)) without
 * shadowing each other's locals. */
#define MIN(_a, _b)                                             \
({                                                              \
        typeof(_a) _min_a = (_a);                               \
        typeof(_b) _min_b = (_b);                               \
        _min_a <= _min_b ? _min_a : _min_b;                     \
})
#define MAX(_a, _b)                                             \
({                                                              \
        typeof(_a) _max_a = (_a);                               \
        typeof(_b) _max_b = (_b);                               \
        _max_a >= _max_b ? _max_a : _max_b;                     \
})
#endif
50
// Rounding operations (efficient when n is a power of 2)
// Round a down to the nearest multiple of n.  Works on integers and, via the
// uintptr_t round-trip, on pointer-sized values; result has a's type.
#define ROUNDDOWN(a, n)                                         \
({                                                              \
        uintptr_t __rd_addr = (uintptr_t) (a);                  \
        (__typeof__(a)) (__rd_addr - __rd_addr % (n));          \
})
// Round a up to the nearest multiple of n (adds n-1, then rounds down).
#define ROUNDUP(a, n)                                           \
({                                                              \
        uintptr_t __ru_n = (uintptr_t) (n);                     \
        (__typeof__(a)) (ROUNDDOWN((uintptr_t) (a) + __ru_n - 1, __ru_n)); \
})
64
// Round pointer a down to the nearest multiple of n; result keeps a's type.
#define PTRROUNDDOWN(a, n)                                              \
({                                                              \
        char *__prd_p = (char *) (a);                           \
        (__typeof__(a)) (__prd_p - (uintptr_t) __prd_p % (n));  \
})
// Round pointer a up to the nearest multiple of n (adds n-1, rounds down).
#define PTRROUNDUP(a, n)                                                \
({                                                              \
        uintptr_t __pru_n = (uintptr_t) (n);                    \
        (__typeof__(a)) (PTRROUNDDOWN((char *) (a) + __pru_n - 1, __pru_n)); \
})
77
// Return the integer logarithm of the value provided rounded down.
// LOG2_DOWN(0) and LOG2_DOWN(1) both return 0.
static inline uintptr_t LOG2_DOWN(uintptr_t value)
{
        uintptr_t l = 0;
        while ((value >> l) > 1)
                ++l;
        return l;
}

// Return the integer logarithm of the value provided rounded up.
// Exact powers of two map to themselves; LOG2_UP(0) returns 1 (historical).
static inline uintptr_t LOG2_UP(uintptr_t value)
{
        uintptr_t _v = LOG2_DOWN(value);
        /* Shift a uintptr_t, not the int literal 1: '1 << _v' is UB once
         * _v >= 31, i.e. for any value > 2^31 on 64 bit. */
        if (value ^ ((uintptr_t) 1 << _v))
                return _v + 1;
        else
                return _v;
}

// Round value up to the nearest power of two.
static inline uintptr_t ROUNDUPPWR2(uintptr_t value)
{
        /* Same fix as in LOG2_UP: a uintptr_t shift keeps results >= 2^31
         * from overflowing an int. */
        return (uintptr_t) 1 << LOG2_UP(value);
}
100
/* Returns true iff a * b would wrap around a uint64_t: we wrap when
 * UINT64_MAX < a * b, which is equivalent to UINT64_MAX / a < b.
 * The a == 0 guard avoids dividing by zero (and 0 * b never overflows).
 * Uses stdbool's false and stdint's UINT64_MAX, both included above. */
static inline bool mult_will_overflow_u64(uint64_t a, uint64_t b)
{
        if (!a)
                return false;
        return UINT64_MAX / a < b;
}
108
// Return the offset of 'member' relative to the beginning of a struct type
/* Classic null-pointer formulation; only defined if the compiler/libc
 * headers did not already provide offsetof. */
#ifndef offsetof
#define offsetof(type, member)  ((size_t) (&((type*)0)->member))
#endif
113
/* Return the container/struct holding the object 'ptr' points to.
 * 'ptr' is now parenthesized: the old expansion '(char*)ptr' bound the cast
 * before any operator in the argument, so container_of(p + 1, ...) computed
 * '(char*)p + 1' instead of '(char*)(p + 1)'. */
#define container_of(ptr, type, member) ({                                     \
        (type*)((char*)(ptr) - offsetof(type, member));                           \
})
118
/* Force the reading exactly once of x.  You may still need mbs().  See
 * http://lwn.net/Articles/508991/ for more info.
 * The access through a volatile-qualified lvalue keeps the compiler from
 * caching, re-fetching, or tearing the load/store of x. */
#define ACCESS_ONCE(x) (*(volatile typeof(x) *)&(x))
122
// Ivy currently can only handle 63 bits (OCaml thing), so use this to make
// a uint64_t programatically
/* 'lower' is masked to 32 bits via the uint32_t cast: without it, a negative
 * int lower half sign-extends and clobbers the upper 32 bits. */
#define UINT64(upper, lower) ( (((uint64_t)(upper)) << 32) | (uint32_t)(lower) )
126
/* Makes sure func is run exactly once.  Can handle concurrent callers, and
 * other callers spin til the func is complete.
 *
 * Double-checked pattern: 'ran_once' is the unlocked fast-path flag, the
 * atomic_swap on 'is_running' elects a single winner to run func, and the
 * wmb() keeps the ran_once store from passing func's writes.  Losers spin on
 * ran_once with cpu_relax() until the winner finishes.  Each textual use of
 * the macro gets its own pair of static flags.  NOTE(review): atomic_t,
 * atomic_swap, wmb, and cpu_relax come from the kernel's atomic/arch
 * headers, not this file. */
#define run_once(func)                                                         \
{                                                                              \
        static bool ran_once = FALSE;                                              \
        static atomic_t is_running = FALSE;                                        \
        if (!ran_once) {                                                           \
                if (!atomic_swap(&is_running, TRUE)) {                                 \
                        /* we won the race and get to run the func */                      \
                        func;                                                              \
                        wmb();  /* don't let the ran_once write pass previous writes */    \
                        ran_once = TRUE;                                                   \
                } else {                                                               \
                        /* someone else won, wait til they are done to break out */        \
                        while (!ran_once)                                                  \
                                cpu_relax();                                                   \
                                                                               \
                }                                                                      \
        }                                                                          \
}
147
/* Unprotected, single-threaded version, makes sure func is run exactly once.
 * Each textual use of the macro gets its own static flag; this is NOT safe
 * against concurrent callers (use run_once for that).  Uses stdbool's
 * false/true, which <stdbool.h> (included above) guarantees, instead of the
 * fallback TRUE/FALSE macros. */
#define run_once_racy(func)                                                    \
{                                                                              \
        static bool ran_once = false;                                          \
        if (!ran_once) {                                                       \
                func;                                                          \
                ran_once = true;                                               \
        }                                                                      \
}
157
/* Aborts with 'retcmd' if this function has already been called.  Compared to
 * run_once, this is put at the top of a function that can be called from
 * multiple sources but should only execute once.
 * Each textual use gets its own static flag; not safe against concurrent
 * callers (deliberately racy, per the name).  Uses stdbool's false/true,
 * which <stdbool.h> (included above) guarantees. */
#define init_once_racy(retcmd)                                                 \
{                                                                              \
        static bool initialized = false;                                       \
        if (initialized) {                                                     \
                retcmd;                                                        \
        }                                                                      \
        initialized = true;                                                    \
}
169
170 #endif /* __ASSEMBLER__ */
171
172 #endif /* ROS_COMMON_H */