/* GLIB - Library of useful routines for C programming
 * Copyright (C) 1995-1997  Peter Mattis, Spencer Kimball and Josh MacDonald
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

/*
 * Modified by the GLib Team and others 1997-2000.  See the AUTHORS
 * file for a list of people on the GLib Team.  See the ChangeLog
 * files for a list of changes.  These files are distributed with
 * GLib at ftp://ftp.gtk.org/pub/gtk/.
 */

/*
 * MT safe
 */

#include "config.h"

#include <stdlib.h>
#include <string.h>
#include <signal.h>

#include "glib.h"

/* notes on macros:
 * if ENABLE_GC_FRIENDLY is defined, freed memory should be 0-wiped.
 */

#define MEM_PROFILE_TABLE_SIZE 4096

#define MEM_AREA_SIZE 4L

static guint mem_chunk_recursion = 0;
#  define MEM_CHUNK_ROUTINE_COUNT()     (mem_chunk_recursion)
#  define ENTER_MEM_CHUNK_ROUTINE()     (mem_chunk_recursion = MEM_CHUNK_ROUTINE_COUNT () + 1)
#  define LEAVE_MEM_CHUNK_ROUTINE()     (mem_chunk_recursion = MEM_CHUNK_ROUTINE_COUNT () - 1)

/* --- old memchunk prototypes --- */
GMemChunk*      old_mem_chunk_new       (const gchar  *name,
                                         gint          atom_size,
                                         gulong        area_size,
                                         gint          type);
void            old_mem_chunk_destroy   (GMemChunk *mem_chunk);
gpointer        old_mem_chunk_alloc     (GMemChunk *mem_chunk);
gpointer        old_mem_chunk_alloc0    (GMemChunk *mem_chunk);
void            old_mem_chunk_free      (GMemChunk *mem_chunk,
                                         gpointer   mem);
void            old_mem_chunk_clean     (GMemChunk *mem_chunk);
void            old_mem_chunk_reset     (GMemChunk *mem_chunk);
void            old_mem_chunk_print     (GMemChunk *mem_chunk);
void            old_mem_chunk_info      (void);

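/* Typical usage of this old memchunk API (an illustrative sketch only;
 * "MyAtom" is a hypothetical caller-defined struct, not part of this file):
 *
 *   GMemChunk *chunk = old_mem_chunk_new ("my atoms", sizeof (MyAtom),
 *                                         1024, G_ALLOC_AND_FREE);
 *   MyAtom *atom = old_mem_chunk_alloc0 (chunk);
 *   ...use atom...
 *   old_mem_chunk_free (chunk, atom);
 *   old_mem_chunk_destroy (chunk);
 */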

/* --- MemChunks --- */
#ifndef G_ALLOC_AND_FREE
typedef struct _GAllocator GAllocator;
typedef struct _GMemChunk  GMemChunk;
#define G_ALLOC_ONLY      1
#define G_ALLOC_AND_FREE  2
#endif

typedef struct _GFreeAtom      GFreeAtom;
typedef struct _GMemArea       GMemArea;

struct _GFreeAtom
{
  GFreeAtom *next;
};

struct _GMemArea
{
  GMemArea *next;            /* the next mem area */
  GMemArea *prev;            /* the previous mem area */
  gulong index;              /* the current index into the "mem" array */
  gulong free;               /* the number of free bytes in this mem area */
  gulong allocated;          /* the number of atoms allocated from this area */
  gulong mark;               /* is this mem area marked for deletion */
  gchar mem[MEM_AREA_SIZE];  /* the mem array from which atoms get allocated.
                              * The actual size of this array is determined by
                              *  the mem chunk "area_size". ANSI says that it
                              *  must be declared to be the maximum size it
                              *  can possibly be (even though the actual size
                              *  may be less).
                              */
};

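/* Note that a GMemArea is over-allocated so that "mem" actually extends to
 * mem_chunk->area_size bytes; see the allocation in old_mem_chunk_alloc ()
 * below:
 *
 *   g_malloc (sizeof (GMemArea) - MEM_AREA_SIZE + mem_chunk->area_size)
 */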
struct _GMemChunk
{
  const gchar *name;         /* name of this MemChunk...used for debugging output */
  gint type;                 /* the type of MemChunk: ALLOC_ONLY or ALLOC_AND_FREE */
  gint num_mem_areas;        /* the number of memory areas */
  gint num_marked_areas;     /* the number of areas marked for deletion */
  guint atom_size;           /* the size of an atom */
  gulong area_size;          /* the size of a memory area */
  GMemArea *mem_area;        /* the current memory area */
  GMemArea *mem_areas;       /* a list of all the mem areas owned by this chunk */
  GMemArea *free_mem_area;   /* the free area...which is about to be destroyed */
  GFreeAtom *free_atoms;     /* the free atoms list */
  GTree *mem_tree;           /* tree of mem areas sorted by memory address */
  GMemChunk *next;           /* pointer to the next chunk */
  GMemChunk *prev;           /* pointer to the previous chunk */
};


static gulong old_mem_chunk_compute_size (gulong    size,
                                          gulong    min_size) G_GNUC_CONST;
static gint   old_mem_chunk_area_compare (GMemArea *a,
                                          GMemArea *b);
static gint   old_mem_chunk_area_search  (GMemArea *a,
                                          gchar    *addr);

/* here we can't use StaticMutexes, as they depend upon a working
 * g_malloc; the same holds true for StaticPrivate
 */
static GMutex         mem_chunks_lock;
static GMemChunk     *mem_chunks = NULL;

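/* old_mem_chunk_new () first rounds area_size up to a multiple of atom_size.
 * A worked example with illustrative numbers (atom_size = 24, area_size = 100):
 *
 *   area_size = ((100 + 24 - 1) / 24) * 24 = (123 / 24) * 24 = 5 * 24 = 120
 *
 * The rounded size is then adjusted for the GMemArea header and pushed
 * towards a power of two by old_mem_chunk_compute_size () below.
 */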
GMemChunk*
old_mem_chunk_new (const gchar  *name,
                   gint          atom_size,
                   gulong        area_size,
                   gint          type)
{
  GMemChunk *mem_chunk;
  gulong rarea_size;

  g_return_val_if_fail (atom_size > 0, NULL);
  g_return_val_if_fail (area_size >= atom_size, NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  area_size = (area_size + atom_size - 1) / atom_size;
  area_size *= atom_size;

  mem_chunk = g_new (GMemChunk, 1);
  mem_chunk->name = name;
  mem_chunk->type = type;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->num_marked_areas = 0;
  mem_chunk->mem_area = NULL;
  mem_chunk->free_mem_area = NULL;
  mem_chunk->free_atoms = NULL;
  mem_chunk->mem_tree = NULL;
  mem_chunk->mem_areas = NULL;
  mem_chunk->atom_size = atom_size;

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    mem_chunk->mem_tree = g_tree_new ((GCompareFunc) old_mem_chunk_area_compare);

  if (mem_chunk->atom_size % G_MEM_ALIGN)
    mem_chunk->atom_size += G_MEM_ALIGN - (mem_chunk->atom_size % G_MEM_ALIGN);

  rarea_size = area_size + sizeof (GMemArea) - MEM_AREA_SIZE;
  rarea_size = old_mem_chunk_compute_size (rarea_size, atom_size + sizeof (GMemArea) - MEM_AREA_SIZE);
  mem_chunk->area_size = rarea_size - (sizeof (GMemArea) - MEM_AREA_SIZE);

  g_mutex_lock (&mem_chunks_lock);
  mem_chunk->next = mem_chunks;
  mem_chunk->prev = NULL;
  if (mem_chunks)
    mem_chunks->prev = mem_chunk;
  mem_chunks = mem_chunk;
  g_mutex_unlock (&mem_chunks_lock);

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem_chunk;
}

void
old_mem_chunk_destroy (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  g_mutex_lock (&mem_chunks_lock);
  if (mem_chunk->next)
    mem_chunk->next->prev = mem_chunk->prev;
  if (mem_chunk->prev)
    mem_chunk->prev->next = mem_chunk->next;

  if (mem_chunk == mem_chunks)
    mem_chunks = mem_chunks->next;
  g_mutex_unlock (&mem_chunks_lock);

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    g_tree_destroy (mem_chunk->mem_tree);

  g_free (mem_chunk);

  LEAVE_MEM_CHUNK_ROUTINE ();
}

gpointer
old_mem_chunk_alloc (GMemChunk *mem_chunk)
{
  GMemArea *temp_area;
  gpointer mem;

  ENTER_MEM_CHUNK_ROUTINE ();

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  while (mem_chunk->free_atoms)
    {
      /* Take the first piece of memory off the "free_atoms" list.
       * The GFreeAtom node is embedded in the atom itself, so we simply
       *  update the "free_atoms" list to point to its next element.
       */
      mem = mem_chunk->free_atoms;
      mem_chunk->free_atoms = mem_chunk->free_atoms->next;

      /* Determine which area this piece of memory is allocated from */
      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) old_mem_chunk_area_search,
                                 mem);

      /* If the area has been marked, then it is being destroyed
       *  (ie marked to be destroyed).
       * We check whether all of the atoms on the free list that
       *  reference this area have now been removed; that is the case
       *  once the area's free byte count reaches the full area size.
       * If the area should be freed, we place it in "free_mem_area"
       *  so that we don't free the mem area here and then
       *  allocate it again a few lines down.
       * If we don't allocate a new area a few lines down, the
       *  "free_mem_area" will be freed.
       * If there is already a "free_mem_area" then we just free this mem area.
       */
      if (temp_area->mark)
        {
          /* Update the "free" memory available in that area */
          temp_area->free += mem_chunk->atom_size;

          if (temp_area->free == mem_chunk->area_size)
            {
              if (temp_area == mem_chunk->mem_area)
                mem_chunk->mem_area = NULL;

              if (mem_chunk->free_mem_area)
                {
                  mem_chunk->num_mem_areas -= 1;

                  if (temp_area->next)
                    temp_area->next->prev = temp_area->prev;
                  if (temp_area->prev)
                    temp_area->prev->next = temp_area->next;
                  if (temp_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, temp_area);
                  g_free (temp_area);
                }
              else
                mem_chunk->free_mem_area = temp_area;

              mem_chunk->num_marked_areas -= 1;
            }
        }
      else
        {
          /* Update the count of allocated atoms.
           */
          temp_area->allocated += 1;

          /* The area wasn't marked...return the memory
           */
          goto outa_here;
        }
    }

  /* If there isn't a current mem area or the current mem area is out of space
   *  then allocate a new mem area. We'll first check and see if we can use
   *  the "free_mem_area". Otherwise we'll just malloc the mem area.
   */
  if ((!mem_chunk->mem_area) ||
      ((mem_chunk->mem_area->index + mem_chunk->atom_size) > mem_chunk->area_size))
    {
      if (mem_chunk->free_mem_area)
        {
          mem_chunk->mem_area = mem_chunk->free_mem_area;
          mem_chunk->free_mem_area = NULL;
        }
      else
        {
#ifdef ENABLE_GC_FRIENDLY
          mem_chunk->mem_area = (GMemArea*) g_malloc0 (sizeof (GMemArea) -
                                                       MEM_AREA_SIZE +
                                                       mem_chunk->area_size);
#else /* !ENABLE_GC_FRIENDLY */
          mem_chunk->mem_area = (GMemArea*) g_malloc (sizeof (GMemArea) -
                                                      MEM_AREA_SIZE +
                                                      mem_chunk->area_size);
#endif /* ENABLE_GC_FRIENDLY */

          mem_chunk->num_mem_areas += 1;
          mem_chunk->mem_area->next = mem_chunk->mem_areas;
          mem_chunk->mem_area->prev = NULL;

          if (mem_chunk->mem_areas)
            mem_chunk->mem_areas->prev = mem_chunk->mem_area;
          mem_chunk->mem_areas = mem_chunk->mem_area;

          if (mem_chunk->type == G_ALLOC_AND_FREE)
            g_tree_insert (mem_chunk->mem_tree, mem_chunk->mem_area, mem_chunk->mem_area);
        }

      mem_chunk->mem_area->index = 0;
      mem_chunk->mem_area->free = mem_chunk->area_size;
      mem_chunk->mem_area->allocated = 0;
      mem_chunk->mem_area->mark = 0;
    }

  /* Get the memory and modify the state variables appropriately.
   */
  mem = (gpointer) &mem_chunk->mem_area->mem[mem_chunk->mem_area->index];
  mem_chunk->mem_area->index += mem_chunk->atom_size;
  mem_chunk->mem_area->free -= mem_chunk->atom_size;
  mem_chunk->mem_area->allocated += 1;

 outa_here:

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem;
}

gpointer
old_mem_chunk_alloc0 (GMemChunk *mem_chunk)
{
  gpointer mem;

  mem = old_mem_chunk_alloc (mem_chunk);
  if (mem)
    {
      memset (mem, 0, mem_chunk->atom_size);
    }

  return mem;
}

void
old_mem_chunk_free (GMemChunk *mem_chunk,
                    gpointer   mem)
{
  GMemArea *temp_area;
  GFreeAtom *free_atom;

  g_return_if_fail (mem_chunk != NULL);
  g_return_if_fail (mem != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

#ifdef ENABLE_GC_FRIENDLY
  memset (mem, 0, mem_chunk->atom_size);
#endif /* ENABLE_GC_FRIENDLY */

  /* Don't do anything if this is an ALLOC_ONLY chunk
   */
  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      /* Place the memory on the "free_atoms" list
       */
      free_atom = (GFreeAtom*) mem;
      free_atom->next = mem_chunk->free_atoms;
      mem_chunk->free_atoms = free_atom;

      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) old_mem_chunk_area_search,
                                 mem);

      temp_area->allocated -= 1;

      if (temp_area->allocated == 0)
        {
          temp_area->mark = 1;
          mem_chunk->num_marked_areas += 1;
        }
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

/* This doesn't free the free_area if there is one */
void
old_mem_chunk_clean (GMemChunk *mem_chunk)
{
  GMemArea *mem_area;
  GFreeAtom *prev_free_atom;
  GFreeAtom *temp_free_atom;
  gpointer mem;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      prev_free_atom = NULL;
      temp_free_atom = mem_chunk->free_atoms;

      while (temp_free_atom)
        {
          mem = (gpointer) temp_free_atom;

          mem_area = g_tree_search (mem_chunk->mem_tree,
                                    (GCompareFunc) old_mem_chunk_area_search,
                                    mem);

          /* If this mem area is marked for destruction then remove the
           *  atom from the free list and credit its size back to the area;
           *  the area itself is freed once it is completely unused.
           */
          if (mem_area->mark)
            {
              if (prev_free_atom)
                prev_free_atom->next = temp_free_atom->next;
              else
                mem_chunk->free_atoms = temp_free_atom->next;
              temp_free_atom = temp_free_atom->next;

              mem_area->free += mem_chunk->atom_size;
              if (mem_area->free == mem_chunk->area_size)
                {
                  mem_chunk->num_mem_areas -= 1;
                  mem_chunk->num_marked_areas -= 1;

                  if (mem_area->next)
                    mem_area->next->prev = mem_area->prev;
                  if (mem_area->prev)
                    mem_area->prev->next = mem_area->next;
                  if (mem_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;
                  if (mem_area == mem_chunk->mem_area)
                    mem_chunk->mem_area = NULL;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, mem_area);
                  g_free (mem_area);
                }
            }
          else
            {
              prev_free_atom = temp_free_atom;
              temp_free_atom = temp_free_atom->next;
            }
        }
    }
  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
old_mem_chunk_reset (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->mem_areas = NULL;
  mem_chunk->mem_area = NULL;

  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  mem_chunk->free_atoms = NULL;

  if (mem_chunk->mem_tree)
    {
      g_tree_destroy (mem_chunk->mem_tree);
      mem_chunk->mem_tree = g_tree_new ((GCompareFunc) old_mem_chunk_area_compare);
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
old_mem_chunk_print (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  gulong mem;

  g_return_if_fail (mem_chunk != NULL);

  mem_areas = mem_chunk->mem_areas;
  mem = 0;

  while (mem_areas)
    {
      mem += mem_chunk->area_size - mem_areas->free;
      mem_areas = mem_areas->next;
    }

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO,
         "%s: %ld bytes using %d mem areas",
         mem_chunk->name, mem, mem_chunk->num_mem_areas);
}

void
old_mem_chunk_info (void)
{
  GMemChunk *mem_chunk;
  gint count;

  count = 0;
  g_mutex_lock (&mem_chunks_lock);
  mem_chunk = mem_chunks;
  while (mem_chunk)
    {
      count += 1;
      mem_chunk = mem_chunk->next;
    }
  g_mutex_unlock (&mem_chunks_lock);

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO, "%d mem chunks", count);

  g_mutex_lock (&mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (&mem_chunks_lock);

  while (mem_chunk)
    {
      old_mem_chunk_print ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
    }
}

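/* old_mem_chunk_compute_size () rounds a requested size towards a power of
 * two. A worked example with illustrative numbers: for size = 600 the loop
 * stops at power_of_2 = 1024, so lower = 512 and upper = 1024; because
 * 600 - 512 < 1024 - 600 (and provided min_size <= 512), 512 is returned.
 * For size = 1000 the gap to 1024 is smaller, so 1024 is returned.
 */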
static gulong
old_mem_chunk_compute_size (gulong size,
                            gulong min_size)
{
  gulong power_of_2;
  gulong lower, upper;

  power_of_2 = 16;
  while (power_of_2 < size)
    power_of_2 <<= 1;

  lower = power_of_2 >> 1;
  upper = power_of_2;

  if (size - lower < upper - size && lower >= min_size)
    return lower;
  else
    return upper;
}

static gint
old_mem_chunk_area_compare (GMemArea *a,
                            GMemArea *b)
{
  if (a->mem > b->mem)
    return 1;
  else if (a->mem < b->mem)
    return -1;
  return 0;
}

static gint
old_mem_chunk_area_search (GMemArea *a,
                           gchar    *addr)
{
  if (a->mem <= addr)
    {
      if (addr < &a->mem[a->index])
        return 0;
      return 1;
    }
  return -1;
}
604