mm: 9ns: Add support for mmap
[akaros.git] / kern / include / common.h
1 #pragma once
2
3 #include <ros/common.h>
4 #include <compiler.h>
5
6 /* Force a rebuild of the whole kernel if 64BIT-ness changed */
7 #ifdef CONFIG_64BIT
8 #endif
9
/* Single-evaluation min/max: each argument is computed exactly once into a
 * temporary, so side-effecting arguments (e.g. MIN(i++, limit)) behave
 * sanely, unlike the naive ternary macro. */
#define MIN(_a, _b)                                             \
({                                                              \
	__typeof__(_a) __min_a = (_a);                          \
	__typeof__(_b) __min_b = (_b);                          \
	__min_a <= __min_b ? __min_a : __min_b;                 \
})
#define MAX(_a, _b)                                             \
({                                                              \
	__typeof__(_a) __max_a = (_a);                          \
	__typeof__(_b) __max_b = (_b);                          \
	__max_a >= __max_b ? __max_a : __max_b;                 \
})
23
/* Test whether p is aligned to a, where a is a power of 2 (e.g. 2^6). */
#define ALIGNED(p, a)	(!(((uintptr_t)(p)) & ((a)-1)))
/* Aligns x up to the mask, e.g. (2^6 - 1) (round up if any mask bits are set)*/
#define __ALIGN_MASK(x, mask) (((uintptr_t)(x) + (mask)) & ~(mask))
/* Aligns x up to the alignment, e.g. 2^6. */
#define ALIGN(x, a) ((__typeof__(x)) __ALIGN_MASK(x, (a) - 1))
/* Aligns x down to the mask, e.g. (2^6 - 1)
 * (round down if any mask bits are set)*/
#define __ALIGN_MASK_DOWN(x, mask) ((uintptr_t)(x) & ~(mask))
/* Aligns x down to the alignment, e.g. 2^6. */
#define ALIGN_DOWN(x, a) ((__typeof__(x)) __ALIGN_MASK_DOWN(x, (a) - 1))
/* Will return false for 0.  Debatable, based on what you want.
 * Note x is fully parenthesized: the old ((x) & (x - 1)) mis-expanded for
 * low-precedence arguments, e.g. IS_PWR2(a ^ b) tested (a ^ (b - 1)). */
#define IS_PWR2(x) ((x) && !((x) & ((x) - 1)))
37
38 #define ARRAY_SIZE(x) COUNT_OF(x)
39
/* Makes sure func is run exactly once.  Can handle concurrent callers, and
 * other callers spin til the func is complete.
 *
 * How it works: is_running elects a single winner via the atomic
 * fetch-and-or; ran_once publishes completion.  The winner issues wmb()
 * before setting ran_once so that spinners which observe ran_once == TRUE
 * also observe func's prior stores.  Losers busy-wait on ran_once, so func
 * must not depend on the spinning callers making progress (deadlock risk).
 * NOTE(review): wmb(), cpu_relax(), bool, TRUE/FALSE come from headers
 * included elsewhere (e.g. ros/common.h / arch headers) -- not defined here. */
#define run_once(func)                                                         \
do {                                                                           \
	static bool ran_once = FALSE;                                          \
	static bool is_running = FALSE;                                        \
	if (!ran_once) {                                                       \
		/* fetch and set TRUE, without a header or test_and_set weirdness */   \
		if (!__sync_fetch_and_or(&is_running, TRUE)) {                         \
			/* we won the race and get to run the func */                      \
			func;                                                              \
			wmb();	/* don't let the ran_once write pass previous writes */    \
			ran_once = TRUE;                                                   \
		} else {                                                               \
			/* someone else won, wait til they are done to break out */        \
			while (!ran_once)                                                  \
				cpu_relax();                                                   \
		}                                                                      \
	}                                                                          \
} while (0)
60
/* Unprotected, single-threaded version, makes sure func is run exactly once */
#define run_once_racy(func)                                                    \
do {                                                                           \
	static bool ran_once = FALSE;                                          \
	if (ran_once)                                                          \
		break;                                                         \
	func;                                                                  \
	ran_once = TRUE;                                                       \
} while (0)
70
71 #ifndef __ASSEMBLER__
72
/* Returns the low 32 bits of a 64-bit value; the narrowing cast performs
 * the truncation that the explicit mask did. */
static inline uint32_t low32(uint64_t val)
{
	return (uint32_t)val;
}
77
/* Returns the high 32 bits of a 64-bit value. */
static inline uint32_t high32(uint64_t val)
{
	return (uint32_t)(val >> 32);
}
82
83 #endif /* !__ASSEMBLER__ */