1
#include "memory/easy_mem_slab.h"
3
/* Terminator for a slab's free-slot chain stored in next_pos[] (0xffff). */
#define EASY_MEM_POS_END (((uint16_t)(~0U))-0)
5
/* Global slab-allocator state; all-zero until easy_mem_slab_init() runs. */
easy_mem_mgr_t easy_mem_mgr_var = {0};
6
/* Internal helpers, defined later in this file. */
static void *easy_mem_slab_get_obj(easy_mem_cache_t *cache, easy_mem_slab_t *slab);
7
static void *easy_mem_cache_grow(easy_mem_cache_t *cache);
8
static void easy_mem_slab_put_obj(easy_mem_cache_t *cache, easy_mem_slab_t *slab, void *obj);
9
static inline easy_mem_cache_t *easy_mem_get_cache(unsigned char *obj);
10
static inline easy_mem_slab_t *easy_mem_virt_to_slab(int order, const void *obj);
11
static inline easy_mem_cache_t *easy_mem_cache_size(uint32_t size);
14
/*
 * easy_mem_slab_init - initialize the global slab allocator state
 * (easy_mem_mgr_var): create the backing zone, carve the cache descriptor
 * array out of the zone's spare area, and pre-create caches for buffer
 * sizes from EASY_MEM_SLAB_MIN up to start_alloc_size.
 * NOTE(review): this chunk is extraction-garbled -- the bare numeric lines
 * below are residue of original line numbers, and several statements
 * (returns, loop increment, braces) are missing from view.
 */
int easy_mem_slab_init(int start_alloc_size, int64_t max_size)
18
if (easy_mem_mgr_var.started)                    /* already initialized: do not re-init */
22
memset(&easy_mem_mgr_var, 0, sizeof(easy_mem_mgr_var));
23
easy_mem_mgr_var.max_size = max_size;
24
easy_mem_mgr_var.zone = easy_mem_zone_create(max_size);   /* backing memory zone */
26
if (easy_mem_mgr_var.zone == NULL)               /* zone creation failed */
30
/* How many cache descriptors fit in the zone's spare [curr, curr_end) area. */
cache_num = (easy_mem_mgr_var.zone->curr_end - easy_mem_mgr_var.zone->curr);
31
cache_num /= sizeof(easy_mem_cache_t);
34
size = EASY_MEM_SLAB_MIN;                        /* smallest cache buffer size */
35
easy_list_init(&easy_mem_mgr_var.list);
36
easy_mem_mgr_var.cache_max_num = easy_max(cache_num, 1024);
37
/* The cache descriptor array lives at the start of the zone's spare area. */
easy_mem_mgr_var.caches = (easy_mem_cache_t *)easy_mem_mgr_var.zone->curr;
40
/* Pre-create one cache per size up to start_alloc_size
 * (the size increment per iteration is not visible in this chunk). */
while(size <= start_alloc_size) {
41
if (easy_mem_cache_create(size) == NULL)         /* cache creation failed */
47
easy_mem_mgr_var.cache_fix_num = easy_mem_mgr_var.cache_num;   /* remember the pre-created count */
48
easy_mem_mgr_var.started = 1;                    /* allocator is now usable */
53
void easy_mem_slab_destroy()
55
if (easy_mem_mgr_var.started) {
56
easy_mem_zone_destroy(easy_mem_mgr_var.zone);
57
memset(&easy_mem_mgr_var, 0, sizeof(easy_mem_mgr_var));
62
/*
 * easy_mem_slab_realloc - malloc/realloc/free front end over the slab caches.
 *   size == 0, ptr != NULL : free ptr back to its cache (if it came from one).
 *   ptr != NULL            : keep ptr when its current cache already fits
 *                            size; otherwise obtain new storage (best-fit
 *                            cache, or easy_malloc/easy_realloc fallback),
 *                            copy the old contents, and free the old object.
 *   ptr == NULL            : fresh allocation from the best-fit cache, or
 *                            plain malloc() when no cache is big enough.
 * NOTE(review): several lines/braces are missing from this chunk, so the
 * exact branch nesting is partly inferred; bare numeric lines are residue.
 */
void *easy_mem_slab_realloc(void *ptr, size_t size)
64
easy_mem_cache_t *cache, *ncache;
68
obj = (unsigned char *)ptr;
70
if (size == 0 && obj != NULL) {                   /* acts as free() */
71
if ((cache = easy_mem_get_cache(obj)) != NULL)    /* only if obj belongs to a slab cache */
72
easy_mem_cache_free(cache, obj);
78
} else if (obj != NULL) {                         /* acts as realloc() */
79
if ((cache = easy_mem_get_cache(obj)) != NULL && size <= cache->buffer_size)
83
ncache = easy_mem_cache_size(size);               /* best-fit cache for the new size */
86
ptr = easy_mem_cache_alloc(ncache);
88
ptr = easy_malloc(size);                          /* no cache large enough: plain allocation */
90
ptr = easy_realloc(ptr, size);                    /* obj was not slab-managed */
95
memcpy(ptr, obj, cache->buffer_size);             /* carry old contents into the new block */
96
easy_mem_cache_free(cache, obj);
102
cache = easy_mem_cache_size(size);                /* acts as malloc() */
103
return (cache ? easy_mem_cache_alloc(cache) : malloc(size));
108
/*
 * easy_mem_cache_alloc - take one buffer from the cache.
 * Scans slabs_partial first, then slabs_free; the chosen slab is re-filed
 * onto slabs_full or slabs_partial depending on whether its free chain ran
 * out (slab->free == EASY_MEM_POS_END).  When no slab has space, falls
 * through to easy_mem_cache_grow() outside the lock.
 * NOTE(review): bare numeric lines are extraction residue; some statements
 * and braces of the original are missing from this chunk.
 */
void *easy_mem_cache_alloc(easy_mem_cache_t *cache)
111
easy_mem_slab_t *slab;
115
easy_spin_lock(&cache->lock);
116
entry = cache->slabs_partial.next;               /* prefer a partially used slab */
118
if (entry == &cache->slabs_partial) {            /* partial list empty */
119
entry = cache->slabs_free.next;                  /* fall back to a fully free slab */
121
if (entry == &cache->slabs_free) {               /* free list empty too: must grow */
127
slab = easy_list_entry(entry, easy_mem_slab_t, list);
128
obj = easy_mem_slab_get_obj(cache, slab);        /* pop one object off the slab */
131
easy_list_del(&slab->list);                      /* re-file the slab below */
133
if (slab->free == EASY_MEM_POS_END) {            /* free chain exhausted */
134
easy_list_add_head(&slab->list, &cache->slabs_full);
136
easy_list_add_head(&slab->list, &cache->slabs_partial);
140
easy_spin_unlock(&cache->lock);
142
if (obj == NULL) obj = easy_mem_cache_grow(cache);   /* no slab had room: grow the cache */
148
/*
 * easy_mem_cache_free - return obj to the slab it came from.
 * Chains obj back onto the slab's free list, then re-files the slab: a
 * now-empty slab is kept on slabs_free, or released back to the zone when
 * the cache already holds more than free_limit free objects; a still-used
 * slab goes to the tail of slabs_partial.  The page release itself runs
 * after cache->lock is dropped, under the manager lock.
 * NOTE(review): the statement that marks a slab for release (slab->mem is
 * tested against NULL below) is among the lines missing from this chunk;
 * bare numeric lines are extraction residue.
 */
void easy_mem_cache_free(easy_mem_cache_t *cache, void *obj)
150
easy_mem_slab_t *slab;
152
easy_spin_lock(&cache->lock);
153
slab = easy_mem_virt_to_slab(cache->order, obj);   /* slab header from object address */
154
easy_mem_slab_put_obj(cache, slab, obj);           /* push obj onto the slab's free chain */
157
easy_list_del(&slab->list);                        /* re-file the slab below */
159
if (slab->inuse == 0) {                            /* slab is now completely free */
160
if (cache->free_objects > cache->free_limit) {     /* too many cached free objects */
161
cache->free_objects -= cache->num;                 /* give this slab back to the zone */
164
easy_list_add_head(&slab->list, &cache->slabs_free);
167
easy_list_add_tail(&slab->list, &cache->slabs_partial);
170
easy_spin_unlock(&cache->lock);
172
if (slab->mem == NULL) {                           /* slab was marked for release (see NOTE above) */
173
easy_spin_lock(&easy_mem_mgr_var.lock);
174
easy_mem_free_pages(easy_mem_mgr_var.zone, (easy_mem_page_t *)slab);
175
easy_spin_unlock(&easy_mem_mgr_var.lock);
180
/*
 * easy_mem_cache_create - create a cache of fixed-size buffers, using the
 * next free descriptor in the global caches[] table.  Chooses the smallest
 * page order for which the per-slab waste (left_over) is at most a quarter
 * of the slab, where each object costs buffer_size bytes plus one uint16_t
 * free-chain entry, then initializes the three slab lists empty.  Guarded
 * by the manager lock.
 * NOTE(review): the return statement(s), the `cache->num` assignment, and
 * some braces are missing from this chunk; bare numeric lines are residue.
 */
easy_mem_cache_t *easy_mem_cache_create(int buffer_size)
182
easy_mem_cache_t *cache = NULL;
183
int order, size, left_over, num;
185
easy_spin_lock(&easy_mem_mgr_var.lock);
187
if (easy_mem_mgr_var.cache_num < easy_mem_mgr_var.cache_max_num) {   /* descriptor table not full */
188
cache = easy_mem_mgr_var.caches + easy_mem_mgr_var.cache_num;
189
memset(cache, 0, sizeof(easy_mem_cache_t));
194
/* Pick the smallest order whose waste is at most size/4. */
for (order = 0; order <= EASY_MEM_MAX_ORDER; order++) {
195
size = (EASY_MEM_PAGE_SIZE << order);             /* slab size in bytes */
196
/* Each object needs buffer_size bytes plus a uint16_t next_pos slot. */
num = (size - sizeof(easy_mem_slab_t)) / (buffer_size + sizeof(uint16_t));
197
left_over = size - num * buffer_size - sizeof(easy_mem_slab_t);
199
cache->order = order;
201
if (left_over * 4 <= size) break;                 /* waste small enough: accept this order */
204
cache->buffer_size = buffer_size;
205
cache->offset = (size - cache->num * buffer_size);   /* objects are packed at the slab's end */
206
cache->free_limit = cache->num * 2;               /* keep at most two slabs' worth of free objects */
207
cache->free_objects = 0;
208
cache->idx = easy_mem_mgr_var.cache_num;          /* index back into caches[] */
209
easy_list_init(&cache->slabs_full);
210
easy_list_init(&cache->slabs_partial);
211
easy_list_init(&cache->slabs_free);
213
easy_mem_mgr_var.cache_num ++;
216
easy_spin_unlock(&easy_mem_mgr_var.lock);
222
/*
 * easy_mem_slab_get_obj - pop the object at index slab->free off the slab.
 * Objects live at fixed buffer_size strides from slab->mem; next_pos[]
 * chains the free slot indices.  NOTE(review): the statements that advance
 * slab->free to `next`, update the in-use count, and return obj are
 * missing from this chunk.
 */
static void *easy_mem_slab_get_obj(easy_mem_cache_t *cache, easy_mem_slab_t *slab)
227
obj = slab->mem + cache->buffer_size * slab->free;   /* address of the current free slot */
229
next = slab->next_pos[slab->free];                   /* index of the next free slot */
235
/*
 * easy_mem_cache_grow - add a new slab to the cache and return its first
 * object.  Allocates 2^order pages from the zone (under the manager lock),
 * places the object area at cache->offset past the slab header, tags the
 * zone's page_flags with (0x80 | order) so easy_mem_get_cache() can map
 * objects back to their slab, builds the next_pos free chain, then files
 * the slab into the cache lists under cache->lock.
 * NOTE(review): several lines (NULL checks, returns, closing braces) are
 * missing from this chunk; bare numeric lines are extraction residue.
 */
static void *easy_mem_cache_grow(easy_mem_cache_t *cache)
238
easy_mem_slab_t *slab;
241
unsigned long page_idx;
244
z = easy_mem_mgr_var.zone;
245
easy_spin_lock(&easy_mem_mgr_var.lock);
246
slab = (easy_mem_slab_t *)easy_mem_alloc_pages(z, cache->order);
247
easy_spin_unlock(&easy_mem_mgr_var.lock);
254
slab->mem = (unsigned char *)slab + cache->offset;   /* object area follows the header */
257
slab->cache_idx = cache->idx;                        /* lets objects map back to this cache */
260
if (cache->buffer_size > EASY_MEM_PAGE_SIZE) {
261
ptr = (unsigned char *)slab->mem;
263
/* Large buffers: tag the first page of every object individually. */
for (i = 0; i < cache->num; i++) {
264
page_idx = (ptr - z->mem_start) >> EASY_MEM_PAGE_SHIFT;
265
z->page_flags[page_idx] = (0x80 | cache->order);
266
ptr += cache->buffer_size;
269
/* Small buffers: tag every page of the slab in one sweep. */
page_idx = ((unsigned char *)slab - z->mem_start) >> EASY_MEM_PAGE_SHIFT;
270
memset(z->page_flags + page_idx, (0x80 | cache->order), (1 << cache->order));
274
/* Free chain: slot i points at slot i+1; the last slot ends the chain. */
for (i = 0; i < cache->num; i++) {
275
slab->next_pos[i] = i + 1;
278
slab->next_pos[i - 1] = EASY_MEM_POS_END;
279
cache->free_objects += cache->num;
282
obj = easy_mem_slab_get_obj(cache, slab);            /* hand out the first object immediately */
284
easy_spin_lock(&cache->lock);
286
if (slab->free == EASY_MEM_POS_END) {                /* chain already empty (single-object slab) */
287
easy_list_add_head(&slab->list, &cache->slabs_full);
289
easy_list_add_head(&slab->list, &cache->slabs_partial);
292
easy_spin_unlock(&cache->lock);
298
static inline easy_mem_slab_t *easy_mem_virt_to_slab(int order, const void *obj)
300
unsigned long a = (1 << (EASY_MEM_PAGE_SHIFT + order));
301
return (easy_mem_slab_t *)(((unsigned long)obj) & ~(a - 1));
304
/*
 * easy_mem_slab_put_obj - push obj back onto the slab's free chain.
 * idx is obj's slot number within slab->mem.  NOTE(review): the statements
 * that set slab->free = idx and decrement the in-use count are missing
 * from this chunk.
 */
static void easy_mem_slab_put_obj(easy_mem_cache_t *cache, easy_mem_slab_t *slab, void *obj)
308
idx = (unsigned)((unsigned char *)obj - slab->mem) / cache->buffer_size;   /* slot index of obj */
309
slab->next_pos[idx] = slab->free;                 /* chain the old head behind obj's slot */
314
/*
 * easy_mem_get_cache - map an object pointer back to its owning cache; a
 * pointer outside the zone yields NULL (the object was not slab-managed).
 * The zone's per-page flag byte, written as (0x80 | order) in
 * easy_mem_cache_grow(), carries the slab order in its low 4 bits; masking
 * the address by that order locates the slab header, whose cache_idx
 * indexes the global caches[] table.
 * NOTE(review): the body of the range check (presumably `return NULL`) is
 * among the lines missing from this chunk.
 */
static inline easy_mem_cache_t *easy_mem_get_cache(unsigned char *obj)
317
easy_mem_slab_t *slab;
319
if (obj < easy_mem_mgr_var.zone->mem_start || obj >= easy_mem_mgr_var.zone->mem_end)
323
order = (obj - easy_mem_mgr_var.zone->mem_start) >> EASY_MEM_PAGE_SHIFT;   /* page index first... */
324
order = (easy_mem_mgr_var.zone->page_flags[order] & 0x0f);                 /* ...then that page's slab order */
325
slab = easy_mem_virt_to_slab(order, obj);
326
return &easy_mem_mgr_var.caches[slab->cache_idx];
329
/*
 * easy_mem_cache_size - find a cache able to hold `size` bytes via binary
 * search over caches[] (presumably sorted ascending by buffer_size, given
 * the creation order in easy_mem_slab_init -- TODO confirm).  When even the
 * largest cache is too small, the caller falls back to plain allocation.
 * NOTE(review): the branch bodies of the loop and the initialization of
 * `start` are missing from this chunk; bare numeric lines are residue.
 */
static inline easy_mem_cache_t *easy_mem_cache_size(uint32_t size)
331
int flag, start, mid, end;
335
end = easy_mem_mgr_var.cache_num - 1;
338
if (size > easy_mem_mgr_var.caches[end].buffer_size)   /* larger than the biggest cache */
341
while (start != end) {                                 /* binary search for the best fit */
342
mid = ((start + end) >> 1);
343
flag = easy_mem_mgr_var.caches[mid].buffer_size - size;   /* <0: mid too small; >=0: big enough */
355
return &easy_mem_mgr_var.caches[start];