/*
 * malloc.c
 *
 * Very simple linked-list based malloc()/free().
 */

#include <syslinux/firmware.h>
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <string.h>
#include <dprintf.h>
#include <minmax.h>

#include "malloc.h"
#include "thread.h"

DECLARE_INIT_SEMAPHORE(__malloc_semaphore, 1);

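/*
 * Carve an allocation of 'size' bytes (arena header included) out of the
 * free block 'fp'.  If the block is big enough to leave a usable remainder,
 * split it and keep the tail on the free list; otherwise consume the whole
 * block.  Returns a pointer to the usable memory just past the header.
 */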
static void *__malloc_from_block(struct free_arena_header *fp,
                                 size_t size, malloc_tag_t tag)
{
    size_t fsize;
    struct free_arena_header *nfp, *na;
    unsigned int heap = ARENA_HEAP_GET(fp->a.attrs);

    fsize = ARENA_SIZE_GET(fp->a.attrs);

    /* We need the 2* to account for the larger requirements of a free block */
    if ( fsize >= size+2*sizeof(struct arena_header) ) {
        /* Bigger block than required -- split block */
        nfp = (struct free_arena_header *)((char *)fp + size);
        na = fp->a.next;

        ARENA_TYPE_SET(nfp->a.attrs, ARENA_TYPE_FREE);
        ARENA_HEAP_SET(nfp->a.attrs, heap);
        ARENA_SIZE_SET(nfp->a.attrs, fsize-size);
        nfp->a.tag = MALLOC_FREE;
#ifdef DEBUG_MALLOC
        nfp->a.magic = ARENA_MAGIC;
#endif
        ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
        ARENA_SIZE_SET(fp->a.attrs, size);
        fp->a.tag = tag;

        /* Insert into all-block chain */
        nfp->a.prev = fp;
        nfp->a.next = na;
        na->a.prev = nfp;
        fp->a.next = nfp;

        /* Replace current block on free chain */
        nfp->next_free = fp->next_free;
        nfp->prev_free = fp->prev_free;
        fp->next_free->prev_free = nfp;
        fp->prev_free->next_free = nfp;
    } else {
        /* Allocate the whole block */
        ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
        fp->a.tag = tag;

        /* Remove from free chain */
        fp->next_free->prev_free = fp->prev_free;
        fp->prev_free->next_free = fp->next_free;
    }

    return (void *)(&fp->a + 1);
}

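/*
 * BIOS backend for malloc(): first-fit scan of the free list for the
 * requested heap.  The requested size is padded for the arena header and
 * rounded to the arena granularity; returns NULL for a zero-size request
 * or when no free block is large enough.
 */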
void *bios_malloc(size_t size, enum heap heap, malloc_tag_t tag)
{
    struct free_arena_header *fp;
    struct free_arena_header *head = &__core_malloc_head[heap];
    void *p = NULL;

    if (size) {
        /* Add the obligatory arena header, and round up */
        size = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;

        for ( fp = head->next_free ; fp != head ; fp = fp->next_free ) {
            if ( ARENA_SIZE_GET(fp->a.attrs) >= size ) {
                /* Found fit -- allocate out of this block */
                p = __malloc_from_block(fp, size, tag);
                break;
            }
        }
    }

    return p;
}

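/*
 * Common allocation path: serialize on the malloc semaphore and dispatch
 * to the firmware-specific backend, with debug tracing of the request.
 */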
static void *_malloc(size_t size, enum heap heap, malloc_tag_t tag)
{
    void *p;

    dprintf("_malloc(%zu, %u, %u) @ %p = ",
            size, heap, tag, __builtin_return_address(0));

    sem_down(&__malloc_semaphore, 0);
    p = firmware->mem->malloc(size, heap, tag);
    sem_up(&__malloc_semaphore);

    dprintf("%p\n", p);
    return p;
}

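/*
 * Standard malloc(): allocate from the main heap on behalf of the core.
 */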
__export void *malloc(size_t size)
{
    return _malloc(size, HEAP_MAIN, MALLOC_CORE);
}

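/*
 * Allocate from the low-memory heap; sets errno to ENOMEM on failure.
 */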
__export void *lmalloc(size_t size)
{
    void *p;

    p = _malloc(size, HEAP_LOWMEM, MALLOC_CORE);
    if (!p)
        errno = ENOMEM;
    return p;
}

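/*
 * Low-memory allocation made on behalf of a module (MALLOC_MODULE tag).
 */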
void *pmapi_lmalloc(size_t size)
{
    return _malloc(size, HEAP_LOWMEM, MALLOC_MODULE);
}

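/*
 * BIOS backend for realloc().  Keeps the block in place when the new size
 * is close enough, when shrinking, or when the following free block can be
 * merged in; otherwise falls back to malloc() + memcpy() + free().
 */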
void *bios_realloc(void *ptr, size_t size)
{
    struct free_arena_header *ah, *nah;
    struct free_arena_header *head;

    void *newptr;
    size_t newsize, oldsize, xsize;

    if (!ptr)
        return malloc(size);

    if (size == 0) {
        free(ptr);
        return NULL;
    }

    ah = (struct free_arena_header *)
        ((struct arena_header *)ptr - 1);

    head = &__core_malloc_head[ARENA_HEAP_GET(ah->a.attrs)];

#ifdef DEBUG_MALLOC
    if (ah->a.magic != ARENA_MAGIC)
        dprintf("failed realloc() magic check: %p\n", ptr);
#endif

    /* Actual size of the old block */
    //oldsize = ah->a.size;
    oldsize = ARENA_SIZE_GET(ah->a.attrs);

    /* Add the obligatory arena header, and round up */
    newsize = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;

    if (oldsize >= newsize && newsize >= (oldsize >> 2) &&
        oldsize - newsize < 4096) {
        /* This allocation is close enough already. */
        return ptr;
    } else {
        xsize = oldsize;

        nah = ah->a.next;
        if ((char *)nah == (char *)ah + ARENA_SIZE_GET(ah->a.attrs) &&
            ARENA_TYPE_GET(nah->a.attrs) == ARENA_TYPE_FREE &&
            ARENA_SIZE_GET(nah->a.attrs) + oldsize >= newsize) {
            //nah->a.type == ARENA_TYPE_FREE &&
            //oldsize + nah->a.size >= newsize) {
            /* Merge in subsequent free block */
            ah->a.next = nah->a.next;
            ah->a.next->a.prev = ah;
            nah->next_free->prev_free = nah->prev_free;
            nah->prev_free->next_free = nah->next_free;
            ARENA_SIZE_SET(ah->a.attrs, ARENA_SIZE_GET(ah->a.attrs) +
                           ARENA_SIZE_GET(nah->a.attrs));
            xsize = ARENA_SIZE_GET(ah->a.attrs);
        }

        if (xsize >= newsize) {
            /* We can reallocate in place */
            if (xsize >= newsize + 2 * sizeof(struct arena_header)) {
                /* Residual free block at end */
                nah = (struct free_arena_header *)((char *)ah + newsize);
                ARENA_TYPE_SET(nah->a.attrs, ARENA_TYPE_FREE);
                ARENA_SIZE_SET(nah->a.attrs, xsize - newsize);
                ARENA_SIZE_SET(ah->a.attrs, newsize);
                ARENA_HEAP_SET(nah->a.attrs, ARENA_HEAP_GET(ah->a.attrs));

#ifdef DEBUG_MALLOC
                nah->a.magic = ARENA_MAGIC;
#endif

                //nah->a.type = ARENA_TYPE_FREE;
                //nah->a.size = xsize - newsize;
                //ah->a.size = newsize;

                /* Insert into block list */
                nah->a.next = ah->a.next;
                ah->a.next = nah;
                nah->a.next->a.prev = nah;
                nah->a.prev = ah;

                /* Insert into free list */
                if (newsize > oldsize) {
                    /* Hack: this free block is in the path of a memory object
                       which has already been grown at least once.  As such, put
                       it at the *end* of the freelist instead of the beginning;
                       trying to save it for future realloc()s of the same block. */
                    nah->prev_free = head->prev_free;
                    nah->next_free = head;
                    head->prev_free = nah;
                    nah->prev_free->next_free = nah;
                } else {
                    nah->next_free = head->next_free;
                    nah->prev_free = head;
                    head->next_free = nah;
                    nah->next_free->prev_free = nah;
                }
            }
            /* otherwise, use up the whole block */
            return ptr;
        } else {
            /* Last resort: need to allocate a new block and copy */
            oldsize -= sizeof(struct arena_header);
            newptr = malloc(size);
            if (newptr) {
                memcpy(newptr, ptr, min(size, oldsize));
                free(ptr);
            }
            return newptr;
        }
    }
}

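/*
 * Standard realloc(): dispatch to the firmware-specific backend.
 */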
__export void *realloc(void *ptr, size_t size)
{
    return firmware->mem->realloc(ptr, size);
}

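/*
 * Allocate zero-initialized memory.
 */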
__export void *zalloc(size_t size)
{
    void *ptr;

    ptr = malloc(size);
    if (ptr)
        memset(ptr, 0, size);

    return ptr;
}