1 /* Copyright (c) 2009 The Regents of the University of California.
2 * See the COPYRIGHT files at the top of this source tree for full
5 * Kevin Klues <klueska@cs.berkeley.edu>
8 #ifndef ROS_KERN_KMALLOC_H
9 #define ROS_KERN_KMALLOC_H
11 #include <ros/common.h>
/* Size-class parameters for the kmalloc front-end: NUM_KMALLOC_CACHES
 * power-of-two caches, starting at KMALLOC_SMALLEST (twice the tag size, so
 * the per-allocation kmalloc_tag header always fits in front of the buffer)
 * and doubling up to KMALLOC_LARGEST. */
#define NUM_KMALLOC_CACHES 13
#define KMALLOC_ALIGNMENT 16
#define KMALLOC_SMALLEST (sizeof(struct kmalloc_tag) << 1)
/* Parenthesized: the bare expansion `KMALLOC_SMALLEST << NUM_KMALLOC_CACHES`
 * mis-associates in expressions such as `KMALLOC_LARGEST * 2` (which would
 * become a shift by NUM_KMALLOC_CACHES * 2) or `x % KMALLOC_LARGEST`. */
#define KMALLOC_LARGEST (KMALLOC_SMALLEST << NUM_KMALLOC_CACHES)
/* One-time setup of the kmalloc size-class caches; call before any kmalloc. */
void kmalloc_init(void);
/* Allocate size bytes; flags are the KMALLOC_* flags below.  DALLOC(size) is
 * a checker annotation tying the returned region to `size` — presumably a
 * no-op in plain builds; confirm against the annotation macro's definition. */
void* (DALLOC(size) kmalloc)(size_t size, int flags);
/* As kmalloc, but the returned memory is zero-filled. */
void* (DALLOC(size) kzmalloc)(size_t size, int flags);
/* Aligned variants: result address is aligned to `align` bytes. */
void *kmalloc_align(size_t size, int flags, size_t align);
void *kzmalloc_align(size_t size, int flags, size_t align);
/* Resize buf to size bytes; may move the allocation — use the return value. */
void *krealloc(void *buf, size_t size, int flags);
/* Reference counting on kmalloc'd buffers; kfree drops a reference and the
 * memory is reclaimed at zero.  NOTE(review): semantics inferred from the
 * names — confirm against the kmalloc implementation. */
int kmalloc_refcnt(void *buf);
void kmalloc_incref(void *buf);
void kfree(void *buf);
/* Debug aid: checks allocation canaries, reporting via `str` on corruption.
 * NOTE(review): exact failure behavior not visible here — confirm. */
void kmalloc_canary_check(char *str);
/* Flags to pass to kmalloc */
/* Not implemented yet. Block until it is available. */
#define KMALLOC_WAIT 4

/* Kmalloc tag flags looks like this:
 *
 * +--------------28---------------+-----4------+
 * | Flag specific data | Flags |
 * +-------------------------------+------------+
 *
 * The low KMALLOC_ALIGN_SHIFT bits select the tag kind; the rest of the word
 * carries kind-specific data (e.g. the unalign back-offset). */
#define KMALLOC_TAG_CACHE 1 /* memory came from slabs */
#define KMALLOC_TAG_PAGES 2 /* memory came from page allocator */
#define KMALLOC_TAG_UNALIGN 3 /* not a real tag, jump back by offset */
#define KMALLOC_ALIGN_SHIFT 4 /* max flag is 16 */
/* Extracts the 4-bit tag kind from a flags word. */
#define KMALLOC_FLAG_MASK ((1 << KMALLOC_ALIGN_SHIFT) - 1)

/* Sentinel stored in each tag; kmalloc_canary_check looks for corruption. */
#define KMALLOC_CANARY 0xdeadbabe
/* The kmalloc align/free paths require that flags is at the end of this
 * struct, and that it is not padded. */
	/* Per-allocation header fields: record where the memory came from so
	 * kfree can return it to the right allocator.  NOTE(review): the
	 * struct/union opening and the trailing `flags` field are outside this
	 * view — the WHEN() annotations tie each field to a tag kind. */
	struct kmem_cache *my_cache WHEN(flags == KMALLOC_TAG_CACHE); /* slab origin */
	size_t num_pages WHEN(flags == KMALLOC_TAG_PAGES); /* page-allocator origin */
	uint64_t unused_force_align; /* presumably forces 8-byte size/alignment — confirm */
62 #endif //ROS_KERN_KMALLOC_H