/* Copyright (c) 2009 The Regents of the University of California.
 * See the COPYRIGHT files at the top of this source tree for full
 * license information.
 *
 * Kevin Klues <klueska@cs.berkeley.edu>
 */

#ifndef ROS_KERN_KMALLOC_H
#define ROS_KERN_KMALLOC_H

#include <ros/common.h>

#define NUM_KMALLOC_CACHES 13
#define KMALLOC_ALIGNMENT 16
#define KMALLOC_SMALLEST 32
#define KMALLOC_LARGEST (KMALLOC_SMALLEST << NUM_KMALLOC_CACHES)
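
/* Editor's sketch, not from the original header: these constants imply
 * power-of-two size classes starting at KMALLOC_SMALLEST, with
 * KMALLOC_LARGEST = KMALLOC_SMALLEST << NUM_KMALLOC_CACHES = 32 << 13 = 256 KB
 * as the upper bound.  A request is presumably rounded up to the next class:
 *
 *	size_t class = KMALLOC_SMALLEST;
 *	while (class < size)
 *		class <<= 1;	// e.g. size == 100 lands in the 128-byte cache
 *
 * Requests beyond the largest class would come from the page allocator
 * instead of a slab cache (see KMALLOC_TAG_PAGES below). */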

void kmalloc_init(void);
void* (DALLOC(size) kmalloc)(size_t size, int flags);
void* (DALLOC(size) kzmalloc)(size_t size, int flags);
void *kmalloc_align(size_t size, int flags, size_t align);
void *kzmalloc_align(size_t size, int flags, size_t align);
void* (DALLOC(size) krealloc)(void* buf, size_t size, int flags);
void (DFREE(addr) kfree)(void *addr);
void kmalloc_canary_check(char *str);
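
/* Typical usage (editor's sketch; the flag value and the failure handling
 * shown here are assumptions, not taken from this header):
 *
 *	struct foo *f = kzmalloc(sizeof(struct foo), 0);
 *	if (!f)
 *		return -ENOMEM;
 *	...
 *	kfree(f);
 *
 * kmalloc_canary_check() takes a string, presumably a label identifying the
 * caller, and can be dropped into debug paths to verify allocator canaries. */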

/* Flags to pass to kmalloc */
/* Not implemented yet.  Block until memory is available. */
#define KMALLOC_WAIT 4

/* Kmalloc tag flags look like this:
 *
 * +--------------28---------------+-----4------+
 * |      Flag specific data       |   Flags    |
 * +-------------------------------+------------+
 */
#define KMALLOC_TAG_CACHE 1	/* memory came from slabs */
#define KMALLOC_TAG_PAGES 2	/* memory came from page allocator */
#define KMALLOC_TAG_UNALIGN 3	/* not a real tag; jump back by offset */
#define KMALLOC_ALIGN_SHIFT 4	/* flags use the low 4 bits; max of 16 flag values */
#define KMALLOC_FLAG_MASK ((1 << KMALLOC_ALIGN_SHIFT) - 1)
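
/* Editor's sketch of how the tag flags word appears to be packed, per the
 * diagram above: the low KMALLOC_ALIGN_SHIFT bits hold the tag type, and the
 * upper 28 bits hold tag-specific data (e.g. the offset used by
 * KMALLOC_TAG_UNALIGN).  For a hypothetical tag_flags value:
 *
 *	int tag_type = tag_flags & KMALLOC_FLAG_MASK;
 *	int tag_data = tag_flags >> KMALLOC_ALIGN_SHIFT;
 */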

#define KMALLOC_CANARY 0xdeadbabe

/* The kmalloc align/free paths require that flags is at the end of this
 * struct, and that it is not padded. */
struct kmalloc_tag {
	union {
		struct kmem_cache *my_cache WHEN(flags == KMALLOC_TAG_CACHE);
		size_t num_pages WHEN(flags == KMALLOC_TAG_PAGES);
		uint64_t unused_force_align;
	};
	uint32_t canary;
	int flags;
};
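
/* Editor's sketch of the free path implied by the comment above: the tag is
 * assumed to sit directly before the buffer handed to the caller, so kfree()
 * can walk back from the buffer, check the canary, and dispatch on the tag
 * type:
 *
 *	struct kmalloc_tag *tag = (struct kmalloc_tag*)addr - 1;
 *	assert(tag->canary == KMALLOC_CANARY);
 *	switch (tag->flags & KMALLOC_FLAG_MASK) {
 *	case KMALLOC_TAG_CACHE:   // free back to tag->my_cache
 *	case KMALLOC_TAG_PAGES:   // free tag->num_pages pages
 *	case KMALLOC_TAG_UNALIGN: // jump back by the stored offset and retry
 *	}
 */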

#endif //ROS_KERN_KMALLOC_H