1 /*
2
3 Copyright (C) 2000-2005 Silicon Graphics, Inc. All Rights Reserved.
4 Portions Copyright (C) 2007-2010 David Anderson. All Rights Reserved.
5
6 This program is free software; you can redistribute it and/or modify it
7 under the terms of version 2.1 of the GNU Lesser General Public License
8 as published by the Free Software Foundation.
9
10 This program is distributed in the hope that it would be useful, but
11 WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
13
14 Further, this software is distributed without any warranty that it is
15 free of the rightful claim of any third person regarding infringement
16 or the like. Any license provided herein, whether implied or
17 otherwise, applies only to this software file. Patent licenses, if
18 any, provided herein do not apply to combinations of this program with
19 other software, or any other product whatsoever.
20
21 You should have received a copy of the GNU Lesser General Public
22 License along with this program; if not, write the Free Software
23 Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston MA 02110-1301,
24 USA.
25
26 Contact information: Silicon Graphics, Inc., 1500 Crittenden Lane,
27 Mountain View, CA 94043, or:
28
29 http://www.sgi.com
30
31 For further information regarding this notice, see:
32
33 http://oss.sgi.com/projects/GenInfo/NoticeExplan
34
35 */
36 /* The address of the Free Software Foundation is
37 Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
38 Boston, MA 02110-1301, USA.
39 SGI has moved from the Crittenden Lane address.
40 */
41
42
43 #undef DEBUG
44
45 #include "config.h"
46 #include "dwarf_incl.h"
47 #include <sys/types.h>
48
49 #include <stdlib.h>
50 #include <stdio.h>
51 #include "malloc_check.h"
52
53 /*
54 These files are included to get the sizes
55 of structs to set the ah_bytes_one_struct field
56 of the Dwarf_Alloc_Hdr_s structs for each
57 allocation type.
58 */
59 #include "dwarf_line.h"
60 #include "dwarf_global.h"
61 #include "dwarf_arange.h"
62 #include "dwarf_abbrev.h"
63 #include "dwarf_die_deliv.h"
64 #include "dwarf_frame.h"
65 #include "dwarf_loc.h"
66 #include "dwarf_funcs.h"
67 #include "dwarf_types.h"
68 #include "dwarf_vars.h"
69 #include "dwarf_weaks.h"
70
71
72 static void _dwarf_free_special_error(Dwarf_Ptr space);
73
74 #ifdef DWARF_SIMPLE_MALLOC
75 static void _dwarf_simple_malloc_add_to_list(Dwarf_Debug dbg,
76 Dwarf_Ptr addr,
77 unsigned long size,
78 short alloc_type);
79 static void _dwarf_simple_malloc_delete_from_list(Dwarf_Debug dbg,
80 Dwarf_Ptr space,
81 short alloc_type);
82 void _dwarf_simple_malloc_botch(int err);
83
84 #endif /* DWARF_SIMPLE_MALLOC */
85
86
87
88
89 /*
90 This macro adds the size of a pointer to the size of a
91 struct that is given to it. It rounds up the size to
92 be a multiple of the size of a pointer. This is done
93 so that every struct returned by _dwarf_get_alloc()
94 can be preceded by a pointer to the chunk it came from.
95 Before allocating, it checks if the size of the struct is less
96 than the size of a pointer. If so, it returns the size
97 of 2 pointers. The returned size should be at least
98 the size of 2 pointers, since the first points to the
99 chunk the struct was allocated from, and the second
100 is used to link the free list.
101 
102 We want DW_RESERVE to be at least the size of
103 a long long and at least the size of a pointer because
104 our struct has a long long and we want that aligned right.
105 Standard C requires long long to be at least 8 bytes, so
106 we use 8 here. This becomes unworkable if
107 long long or a pointer grows beyond 8 bytes.
108 It is unclear what to do with weird requirements, like
109 36-bit pointers.
110
111
112 */
113 #define DW_RESERVE 8
114
115 /* Round size up to the next multiple of DW_RESERVE bytes
116 */
117 #define ROUND_SIZE(inputsize) \
118 (((inputsize) % (DW_RESERVE)) == 0 ? \
119 (inputsize): \
120 ((inputsize) + \
121 (DW_RESERVE) - ((inputsize) % (DW_RESERVE)) ))
122
123 #define ROUND_SIZE_WITH_POINTER(i_size) (ROUND_SIZE(i_size) + DW_RESERVE)
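/* Illustrative sketch (not part of the build): how the macros above
   behave when DW_RESERVE is 8. The struct size 20 is an arbitrary
   example value, not taken from any real struct in this file.

       ROUND_SIZE(20)              == 24   (20 + 8 - (20 % 8))
       ROUND_SIZE(24)              == 24   (already a multiple of 8)
       ROUND_SIZE_WITH_POINTER(20) == 32   (24 plus DW_RESERVE for the
                                            chunk-pointer prefix)

   So each allocation slot is the rounded struct size plus DW_RESERVE
   bytes in front for the pointer back to its chunk. */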
124
125 /* SMALL_ALLOC is for trivia where allocation is a waste.
126 Things that should be removed, really. */
127 #define SMALL_ALLOC 2
128
129 /* BASE_ALLOC is where a basic allocation makes sense, but 'not too large'.
130 No thorough evaluation of this value has been done, though
131 it was found wasteful of memory to have BASE_ALLOC be as large as
132 BIG_ALLOC. */
133 #define BASE_ALLOC 64
134
135 /* BIG_ALLOC is where a larger-than-BASE_ALLOC
136 allocation makes sense, but still 'not too large'.
137 No thorough evaluation of this value has been done. */
138 #define BIG_ALLOC 128
139
140 /* This table translates a DW_DLA allocation type into a
141 ** de_alloc_hdr index. The 0,1,1 entries are special: they don't
142 ** use the table values at all.
143 ** Rearranging the DW_DLA values would break binary compatibility,
144 ** so that is not an option. (A sketch of how to read one entry
145 ** follows the table.) */
146 struct ial_s {
147 int ia_al_num; /* Index into de_alloc_hdr table. */
148
149 /* In bytes, one struct instance. This does not account for extra
150 space needed per block, but that (DW_RESERVE) will be added in
151 later where it is needed (DW_RESERVE space never added in here).
152 */
153 int ia_struct_size;
154
155
156 /* Number of instances per alloc block. MUST be > 0. */
157 int ia_base_count;
158
159 int (*specialconstructor) (Dwarf_Debug, void *);
160 void (*specialdestructor) (void *);
161 };
162
163 static const
164 struct ial_s index_into_allocated[ALLOC_AREA_INDEX_TABLE_MAX] = {
165 {0, 1, 1, 0, 0}, /* none */
166 {0, 1, 1, 0, 0}, /* 1 DW_DLA_STRING */
167 {1, sizeof(Dwarf_Loc), BASE_ALLOC, 0, 0}
168 , /* 2 DW_DLA_LOC */
169 {2, sizeof(Dwarf_Locdesc), BASE_ALLOC, 0, 0}
170 , /* 3 DW_DLA_LOCDESC */
171 {0, 1, 1, 0, 0}
172 , /* not used *//* 4 DW_DLA_ELLIST */
173 {0, 1, 1, 0, 0}
174 , /* not used *//* 5 DW_DLA_BOUNDS */
175 {3, sizeof(Dwarf_Block), BASE_ALLOC, 0, 0}
176 , /* 6 DW_DLA_BLOCK */
177 {0, 1, 1, 0, 0}
178 , /* the actual dwarf_debug structure *//* 7 DW_DLA_DEBUG */
179 {4, sizeof(struct Dwarf_Die_s), BIG_ALLOC, 0, 0}, /* 8 DW_DLA_DIE
180 */
181 {5, sizeof(struct Dwarf_Line_s), BIG_ALLOC, 0, 0}, /* 9
182 DW_DLA_LINE */
183 {6, sizeof(struct Dwarf_Attribute_s), BIG_ALLOC * 2, 0, 0},
184 /* 10 DW_DLA_ATTR */
185 {0, 1, 1, 0, 0}, /* not used *//* 11 DW_DLA_TYPE */
186 {0, 1, 1, 0, 0}, /* not used *//* 12 DW_DLA_SUBSCR */
187 {7, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0}, /* 13
188 DW_DLA_GLOBAL
189 */
190 {8, sizeof(struct Dwarf_Error_s), BASE_ALLOC, 0, 0}, /* 14
191 DW_DLA_ERROR
192 */
193 {0, 1, 1, 0, 0}, /* 15 DW_DLA_LIST */
194 {0, 1, 1, 0, 0}, /* not used *//* 16 DW_DLA_LINEBUF */
195 {9, sizeof(struct Dwarf_Arange_s), BASE_ALLOC, 0, 0}, /* 17
196 DW_DLA_ARANGE
197 */
198 {10, sizeof(struct Dwarf_Abbrev_s), BIG_ALLOC, 0, 0}, /* 18
199 DW_DLA_ABBREV
200 */
201 {11, sizeof(Dwarf_Frame_Op), BIG_ALLOC, 0, 0}
202 , /* 19 DW_DLA_FRAME_OP */
203 {12, sizeof(struct Dwarf_Cie_s), BASE_ALLOC, 0, 0}, /* 20
204 DW_DLA_CIE */
205 {13, sizeof(struct Dwarf_Fde_s), BASE_ALLOC, 0, 0}, /* 21 DW_DLA_FDE */
206 {0, 1, 1, 0, 0}, /* 22 DW_DLA_LOC_BLOCK */
207 {0, 1, 1, 0, 0}, /* 23 DW_DLA_FRAME_BLOCK */
208 {14, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0}, /* 24 DW_DLA_FUNC
209 UNUSED */
210 {15, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0}, /* 25
211 DW_DLA_TYPENAME
212 UNUSED */
213 {16, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0}, /* 26 DW_DLA_VAR
214 UNUSED */
215 {17, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0}, /* 27 DW_DLA_WEAK
216 UNUSED */
217 {0, 1, 1, 0, 0}, /* 28 DW_DLA_ADDR */
218 {0, 1,1,0,0 }, /* 29 DW_DLA_RANGES */
219
220 /* The following DW_DLA data types
221 are known only inside libdwarf. */
222
223 {18, sizeof(struct Dwarf_Abbrev_List_s), BIG_ALLOC, 0, 0},
224 /* 30 DW_DLA_ABBREV_LIST */
225
226 {19, sizeof(struct Dwarf_Chain_s), BIG_ALLOC, 0, 0}, /* 31 DW_DLA_CHAIN */
227 {20, sizeof(struct Dwarf_CU_Context_s), BASE_ALLOC, 0, 0},
228 /* 32 DW_DLA_CU_CONTEXT */
229 {21, sizeof(struct Dwarf_Frame_s), BASE_ALLOC,
230 _dwarf_frame_constructor,
231 _dwarf_frame_destructor}, /* 33 DW_DLA_FRAME */
232 {22, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
233 /* 34 DW_DLA_GLOBAL_CONTEXT */
234 {23, sizeof(struct Dwarf_File_Entry_s), BASE_ALLOC, 0, 0},
235 /* 35 DW_DLA_FILE_ENTRY */
236 {24, sizeof(struct Dwarf_Line_Context_s), BASE_ALLOC, 0, 0},
237 /* 36 DW_DLA_LINE_CONTEXT */
238 {25, sizeof(struct Dwarf_Loc_Chain_s), BASE_ALLOC, 0, 0},
239 /* 37 DW_DLA_LOC_CHAIN */
240
241 {26, sizeof(struct Dwarf_Hash_Table_s), BASE_ALLOC, 0, 0},
242 /* 38 DW_DLA_HASH_TABLE */
243
244 /* The following really use Global struct: used to be unique struct
245 per type, but now merged (11/99). The opaque types
246 are visible in the interface. The types for
247 DW_DLA_FUNC,
248 DW_DLA_TYPENAME, DW_DLA_VAR, DW_DLA_WEAK also use
249 the global types.
250
251 */
252 {27, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
253 /* 39 DW_DLA_FUNC_CONTEXT */
254 {28, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
255 /* 40 DW_DLA_TYPENAME_CONTEXT */
256 {29, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
257 /* 41 DW_DLA_VAR_CONTEXT */
258 {30, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
259 /* 42 DW_DLA_WEAK_CONTEXT */
260 {31, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
261 /* 43 DW_DLA_PUBTYPES_CONTEXT DWARF3 */
262
263 {0,1,1,0,0 },
264 /* 44 DW_DLA_HASH_TABLE_ENTRY */
265
266
267 };
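/* Sketch of how to read one entry of the table above: allocation
   type DW_DLA_DIE (8) maps to de_alloc_hdr index 4, one struct is
   sizeof(struct Dwarf_Die_s) bytes before rounding, a fresh chunk
   holds BIG_ALLOC (128) of them, and no special constructor or
   destructor is involved. The {0, 1, 1, 0, 0} entries mark types
   that either bypass this scheme entirely (strings, lists and blocks
   are malloc'ed directly) or that are unused. */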
268
269 #ifndef DWARF_SIMPLE_MALLOC
270
271 /*
272 This function is given a pointer to the header
273 structure that is used to allocate 1 struct of
274 the type given by alloc_type. It first checks
275 if a struct is available in its free list. If
276 not, it checks if 1 is available in its blob,
277 which is a chunk of memory that is reserved for
278 its use. If not, it malloc's a chunk. The
279 initial part of it is used to store the end
280 address of the chunk, and also to keep track
281 of the number of free structs in that chunk.
282 This information is used for freeing the chunk
283 when all the structs in it are free.
284
285 Assume all input arguments have been validated.
286
287 This function can be used only to allocate 1
288 struct of the given type.
289
290 It returns a pointer to the struct that the
291 user can use. It returns NULL only when it
292 is out of free structs, and cannot malloc
293 any more. The struct returned is zeroed.
294
295 A pointer to the chunk that the struct belongs
296 to is stored in the bytes preceding the
297 returned address. Since this pointer is
298 never overwritten, when a struct is allocated
299 from the free_list this pointer does not
300 have to be written. In the 2 other cases,
301 where the struct is allocated from a new
302 chunk, or the blob, a pointer to the chunk
303 is written.
304 */
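/* Rough picture of one malloc'ed chunk, following the description
   above (sizes not to scale):

       +----------------------------+  <- mem_block
       | struct Dwarf_Alloc_Area_s  |     (size rounded to DW_RESERVE)
       +-------------+--------------+  <- first slot
       | chunk ptr   | struct data  |     DW_RESERVE bytes, then struct
       +-------------+--------------+
       | chunk ptr   | struct data  |     ... more slots ...
       +-------------+--------------+  <- aa_blob_end

   Each slot begins with a pointer back to its Dwarf_Alloc_Area and
   the caller is handed the address just past that pointer. */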
305 static Dwarf_Ptr
306 _dwarf_find_memory(Dwarf_Alloc_Hdr alloc_hdr)
307 {
308 /* Pointer to the struct allocated. */
309 Dwarf_Small *ret_mem = 0;
310
311 /* Pointer to info about chunks allocated. */
312 Dwarf_Alloc_Area alloc_area;
313
314 /* Size of chunk malloc'ed when no free structs left. */
315 Dwarf_Signed mem_block_size;
316
317 /* Pointer to block malloc'ed. */
318 Dwarf_Small *mem_block;
319
320 /*
321 Check the alloc_area from which the last allocation was made
322 (most recent new block). If that is not successful, then search
323 the list of alloc_area's from alloc_header. */
324 alloc_area = alloc_hdr->ah_last_alloc_area;
325 if (alloc_area == NULL || alloc_area->aa_free_structs_in_chunk == 0)
326 for (alloc_area = alloc_hdr->ah_alloc_area_head;
327 alloc_area != NULL; alloc_area = alloc_area->aa_next) {
328
329 if (alloc_area->aa_free_structs_in_chunk > 0) {
330 break; /* found a free entry! */
331 }
332
333 }
334
335 if (alloc_area != NULL) {
336 alloc_area->aa_free_structs_in_chunk--;
337
338 if (alloc_area->aa_free_list != NULL) {
339 ret_mem = alloc_area->aa_free_list;
340
341 /*
342 Update the free list. The initial part of the struct is
343 used to hold a pointer to the next struct on the free
344 list. In this way, the free list chain is maintained at
345 0 memory cost. */
346 alloc_area->aa_free_list =
347 ((Dwarf_Free_List) ret_mem)->fl_next;
348 } else if (alloc_area->aa_blob_start < alloc_area->aa_blob_end) {
349 ret_mem = alloc_area->aa_blob_start;
350
351 /*
352 Store pointer to chunk this struct belongs to in the
353 first few bytes. Return pointer to bytes after this
354 pointer storage. */
355 *(Dwarf_Alloc_Area *) ret_mem = alloc_area;
356 ret_mem += DW_RESERVE;
357
358 alloc_area->aa_blob_start += alloc_hdr->ah_bytes_one_struct;
359 } else {
360 /* else fall thru , though it should be impossible to fall
361 thru. And represents a disastrous programming error if
362 we get here. */
363 #ifdef DEBUG
364 fprintf(stderr, "libdwarf Internal error start %p end %p\n",
365 (void *) alloc_area->aa_blob_start,
366 (void *) alloc_area->aa_blob_end);
367 #endif
368 }
369 }
370
371 /* New memory has to malloc'ed since there are no free structs. */
372 if (ret_mem == 0) {
373 Dwarf_Word rounded_area_hdr_size;
374
375 alloc_hdr->ah_chunks_allocated++;
376
377 { /* this nonsense avoids a warning */
378 /* CONSTCOND would be better */
379 unsigned long v = sizeof(struct Dwarf_Alloc_Area_s);
380
381 rounded_area_hdr_size = ROUND_SIZE(v);
382 }
383
384 /*
385 Allocate memory to contain the required number of structs
386 and the Dwarf_Alloc_Area_s to control it. */
387 mem_block_size = alloc_hdr->ah_bytes_malloc_per_chunk +
388 rounded_area_hdr_size;
389
390 mem_block = malloc(mem_block_size);
391 if (mem_block == NULL) {
392 return (NULL);
393 }
394
395
396 /*
397 Attach the Dwarf_Alloc_Area_s struct to the list of chunks
398 malloc'ed for this struct type. Also initialize the fields
399 of the Dwarf_Alloc_Area_s. */
400 alloc_area = (Dwarf_Alloc_Area) mem_block;
401 alloc_area->aa_prev = 0;
402 if (alloc_hdr->ah_alloc_area_head != NULL) {
403 alloc_hdr->ah_alloc_area_head->aa_prev = alloc_area;
404 }
405 alloc_area->aa_free_list = 0;
406 alloc_area->aa_next = alloc_hdr->ah_alloc_area_head;
407 alloc_hdr->ah_alloc_area_head = alloc_area;
408
409 alloc_area->aa_alloc_hdr = alloc_hdr;
410 alloc_area->aa_free_structs_in_chunk =
411 (Dwarf_Sword) alloc_hdr->ah_structs_per_chunk - 1;
412 if (alloc_area->aa_free_structs_in_chunk < 1) {
413 /* If we get here, there is a disastrous programming error
414 somewhere. */
415 #ifdef DEBUG
416 fprintf(stderr,
417 "libdwarf Internal error: free structs in chunk %d\n",
418 (int) alloc_area->aa_free_structs_in_chunk);
419 #endif
420 return NULL;
421 }
422
423 /*
424 The struct returned begins immediately after the
425 Dwarf_Alloc_Area_s struct. */
426 ret_mem = mem_block + rounded_area_hdr_size;
427 alloc_area->aa_blob_start =
428 ret_mem + alloc_hdr->ah_bytes_one_struct;
429 alloc_area->aa_blob_end = mem_block + mem_block_size;
430
431 /*
432 Store pointer to chunk this struct belongs to in the first
433 few bytes. Return pointer to bytes after this pointer
434 storage. */
435 *(Dwarf_Alloc_Area *) ret_mem = alloc_area;
436 ret_mem += DW_RESERVE;
437 }
438
439 alloc_hdr->ah_last_alloc_area = alloc_area;
440 alloc_hdr->ah_struct_user_holds++;
441 memset(ret_mem, 0, alloc_hdr->ah_bytes_one_struct - DW_RESERVE);
442 return (ret_mem);
443 }
444
445 #endif /* ndef DWARF_SIMPLE_MALLOC */
446
447 /*
448 This function returns a pointer to a region
449 of memory. For alloc_types that are not
450 strings or lists of pointers, only 1 struct
451 can be requested at a time. This is indicated
452 by an input count of 1. For strings, count
453 equals the length of the string it will
454 contain, i.e. the length of the string
455 plus 1 for the terminating NUL. For lists
456 of pointers, count is equal to the number of
457 pointers. For the DW_DLA_FRAME_BLOCK, DW_DLA_RANGES,
458 and DW_DLA_LOC_BLOCK allocation types, count is
459 likewise the number of structs needed.
460
461 This function cannot be used to allocate a
462 Dwarf_Debug_s struct.
463
464 */
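/* Internal usage sketch (the variable names here are hypothetical,
   for illustration only): a single struct is requested with a count
   of 1, while string space is requested by length.

       Dwarf_Die die = (Dwarf_Die)
           _dwarf_get_alloc(dbg, DW_DLA_DIE, 1);
       char *copy = (char *)
           _dwarf_get_alloc(dbg, DW_DLA_STRING, strlen(name) + 1);

   Both calls return zeroed memory, or NULL if malloc fails. */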
465 Dwarf_Ptr
466 _dwarf_get_alloc(Dwarf_Debug dbg,
467 Dwarf_Small alloc_type, Dwarf_Unsigned count)
468 {
469 Dwarf_Alloc_Hdr alloc_hdr;
470
471 Dwarf_Ptr ret_mem;
472
473 Dwarf_Signed size = 0;
474 unsigned int index;
475 unsigned int type = alloc_type;
476
477 if (dbg == NULL) {
478 return (NULL);
479 }
480
481 if (type >= ALLOC_AREA_INDEX_TABLE_MAX) {
482 /* internal error */
483 return NULL;
484 }
485 index = index_into_allocated[type].ia_al_num;
486 /* zero also illegal but not tested for */
487
488 /* If the Dwarf_Debug is not fully set up, we will get index 0 for
489 any type and must do something. 'Not fully set up' can only
490 happen for DW_DLA_ERROR, I (davea) believe, and for that we call
491 special code here. */
492
493 if (index == 0) {
494 if (alloc_type == DW_DLA_STRING) {
495 size = count;
496 } else if (alloc_type == DW_DLA_LIST) {
497 size = count * sizeof(Dwarf_Ptr);
498 } else if (alloc_type == DW_DLA_FRAME_BLOCK) {
499 size = count * sizeof(Dwarf_Frame_Op);
500 } else if (alloc_type == DW_DLA_LOC_BLOCK) {
501 size = count * sizeof(Dwarf_Loc);
502 } else if (alloc_type == DW_DLA_HASH_TABLE_ENTRY) {
503 size = count * sizeof(struct Dwarf_Hash_Table_Entry_s);
504 } else if (alloc_type == DW_DLA_ADDR) {
505 size = count *
506 (sizeof(Dwarf_Addr) > sizeof(Dwarf_Off) ?
507 sizeof(Dwarf_Addr) : sizeof(Dwarf_Off));
508 } else if (alloc_type == DW_DLA_RANGES) {
509 size = count * sizeof(Dwarf_Ranges);
510 } else if (alloc_type == DW_DLA_ERROR) {
511 void *m = _dwarf_special_no_dbg_error_malloc();
512
513 dwarf_malloc_check_alloc_data(m, DW_DLA_ERROR);
514 return m;
515
516 } else {
517 /* If we get here, there is a disastrous programming error
518 somewhere. */
519 #ifdef DEBUG
520 fprintf(stderr,
521 "libdwarf Internal error: type %d unexpected\n",
522 (int) type);
523 #endif
524 }
525 } else {
526 alloc_hdr = &dbg->de_alloc_hdr[index];
527 if (alloc_hdr->ah_bytes_one_struct > 0) {
528 #ifdef DWARF_SIMPLE_MALLOC
529 size = alloc_hdr->ah_bytes_one_struct;
530 #else
531 {
532 void *m = _dwarf_find_memory(alloc_hdr);
533
534 dwarf_malloc_check_alloc_data(m, type);
535 if (index_into_allocated[type].specialconstructor) {
536 int res =
537 index_into_allocated[type].
538 specialconstructor(dbg, m);
539 if (res != DW_DLV_OK) {
540 /* We leak what we allocated in
541 _dwarf_find_memory when constructor fails. */
542 return NULL;
543 }
544 }
545 return m;
546 }
547 #endif
548
549 } else {
550 /* Special case: should not really happen at all. */
551 if (type == DW_DLA_ERROR) {
552 /* dwarf_init failure. Because dbg is incomplete we
553 won't use it to record the malloc. */
554 void *m = _dwarf_special_no_dbg_error_malloc();
555
556 dwarf_malloc_check_alloc_data(m, DW_DLA_ERROR);
557 return m;
558 } else {
559 /* If we get here, there is a disastrous programming
560 error somewhere. */
561 #ifdef DWARF_SIMPLE_MALLOC
562 _dwarf_simple_malloc_botch(3);
563 #endif
564 #ifdef DEBUG
565 fprintf(stderr,
566 "libdwarf Internal error: Type %d unexpected\n",
567 (int) type);
568 #endif
569 }
570 }
571 }
572
573 ret_mem = malloc(size);
574 #ifdef DWARF_SIMPLE_MALLOC
575 _dwarf_simple_malloc_add_to_list(dbg, ret_mem, (unsigned long) size,
576 type);
577 #endif
578 if (ret_mem != NULL)
579 memset(ret_mem, 0, size);
580
581 dwarf_malloc_check_alloc_data(ret_mem, type);
582 if (index_into_allocated[type].specialconstructor) {
583 int res =
584 index_into_allocated[type].specialconstructor(dbg, ret_mem);
585 if (res != DW_DLV_OK) {
586 /* We leak what we malloc'd just above when the
587 constructor fails. */
588 return NULL;
589 }
590 }
591
592 return (ret_mem);
593 }
594
595
596
597 /*
598 This function is used to deallocate a region of memory
599 that was obtained by a call to _dwarf_get_alloc. Note
600 that though dwarf_dealloc() is a public function,
601 _dwarf_get_alloc() isn't.
602
603 For lists, typically arrays of pointers, it is assumed
604 that the space was allocated by a direct call to malloc,
605 and so a straight free() is done. This is also the case
606 for variable length blocks such as DW_DLA_FRAME_BLOCK
607 and DW_DLA_LOC_BLOCK and DW_DLA_RANGES.
608
609 For strings, the pointer might point into one of the
610 cached sections (.debug_info, .debug_str, and so on). After
611 this is checked, and if found not to be the case, a free()
612 is done, again on the assumption that a malloc was used to
613 obtain the space.
614 
615 For other types of structs, a pointer to the chunk that
616 the struct was allocated out of is present in the bytes
617 preceding the pointer passed in. For this chunk it is
618 checked whether all the structs in that chunk are now free.
619 If so, the entire chunk is freed. Otherwise, the space
620 is added to the free list for that chunk, and the free count
621 is incremented.
622
623 This function does not return anything.
624 */
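/* Caller-side sketch (hypothetical variable names): memory handed
   out by libdwarf is returned through dwarf_dealloc() with the
   DW_DLA type documented for the call that produced it, e.g.

       dwarf_dealloc(dbg, die, DW_DLA_DIE);
       dwarf_dealloc(dbg, attrlist, DW_DLA_LIST);
       dwarf_dealloc(dbg, err, DW_DLA_ERROR);

   Passing the wrong type can leak memory or corrupt the free lists
   maintained below. */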
625 void
626 dwarf_dealloc(Dwarf_Debug dbg,
627 Dwarf_Ptr space, Dwarf_Unsigned alloc_type)
628 {
629 Dwarf_Alloc_Hdr alloc_hdr;
630 Dwarf_Alloc_Area alloc_area;
631 unsigned int type = alloc_type;
632 unsigned int index;
633
634 if (space == NULL) {
635 return;
636 }
637 if (type == DW_DLA_ERROR) {
638 /* Get pointer to Dwarf_Alloc_Area this struct came from. See
639 dwarf_alloc.h ROUND_SIZE_WITH_POINTER stuff */
640 alloc_area =
641 *(Dwarf_Alloc_Area *) ((char *) space - DW_RESERVE);
642 if (alloc_area == 0) {
643 /* This is the special case of a failed dwarf_init(). Also
644 (and more significantly) there are a variety of other
645 situations where libdwarf does not *know* what dbg is
646 involved (because of a libdwarf-caller-error) so
647 libdwarf uses NULL as the dbg. Those too wind up here. */
648 _dwarf_free_special_error(space);
649 dwarf_malloc_check_dealloc_data(space, type);
650 return;
651 }
652
653 }
654 if (dbg == NULL) {
655 /* App error, or an app that failed to succeed in a
656 dwarf_init() call. */
657 return;
658 }
659 if (type >= ALLOC_AREA_INDEX_TABLE_MAX) {
660 /* internal or user app error */
661 return;
662 }
663
664 index = index_into_allocated[type].ia_al_num;
665 /*
666 A string pointer may point into .debug_info or another cached
667 section. Otherwise, it was directly malloc'ed. */
668 dwarf_malloc_check_dealloc_data(space, type);
669 if (index == 0) {
670 if (type == DW_DLA_STRING) {
671 if ((Dwarf_Small *) space >= dbg->de_debug_info.dss_data &&
672 (Dwarf_Small *) space <
673 dbg->de_debug_info.dss_data + dbg->de_debug_info.dss_size)
674 return;
675
676 if (dbg->de_debug_line.dss_data != NULL &&
677 (Dwarf_Small *) space >= dbg->de_debug_line.dss_data &&
678 (Dwarf_Small *) space <
679 dbg->de_debug_line.dss_data + dbg->de_debug_line.dss_size)
680 return;
681
682 if (dbg->de_debug_pubnames.dss_data != NULL &&
683 (Dwarf_Small *) space >= dbg->de_debug_pubnames.dss_data &&
684 (Dwarf_Small *) space <
685 dbg->de_debug_pubnames.dss_data +
686 dbg->de_debug_pubnames.dss_size)
687 return;
688
689 if (dbg->de_debug_frame.dss_data != NULL &&
690 (Dwarf_Small *) space >= dbg->de_debug_frame.dss_data &&
691 (Dwarf_Small *) space <
692 dbg->de_debug_frame.dss_data + dbg->de_debug_frame.dss_size)
693 return;
694
695 if (dbg->de_debug_str.dss_data != NULL &&
696 (Dwarf_Small *) space >= dbg->de_debug_str.dss_data &&
697 (Dwarf_Small *) space <
698 dbg->de_debug_str.dss_data + dbg->de_debug_str.dss_size)
699 return;
700
701 if (dbg->de_debug_funcnames.dss_data != NULL &&
702 (Dwarf_Small *) space >= dbg->de_debug_funcnames.dss_data &&
703 (Dwarf_Small *) space <
704 dbg->de_debug_funcnames.dss_data +
705 dbg->de_debug_funcnames.dss_size)
706 return;
707
708 if (dbg->de_debug_typenames.dss_data != NULL &&
709 (Dwarf_Small *) space >= dbg->de_debug_typenames.dss_data &&
710 (Dwarf_Small *) space <
711 dbg->de_debug_typenames.dss_data +
712 dbg->de_debug_typenames.dss_size)
713 return;
714 if (dbg->de_debug_pubtypes.dss_data != NULL &&
715 (Dwarf_Small *) space >= dbg->de_debug_pubtypes.dss_data &&
716 (Dwarf_Small *) space <
717 dbg->de_debug_pubtypes.dss_data +
718 dbg->de_debug_pubtypes.dss_size)
719 return;
720
721 if (dbg->de_debug_varnames.dss_data != NULL &&
722 (Dwarf_Small *) space >= dbg->de_debug_varnames.dss_data &&
723 (Dwarf_Small *) space <
724 dbg->de_debug_varnames.dss_data +
725 dbg->de_debug_varnames.dss_size)
726 return;
727
728 if (dbg->de_debug_weaknames.dss_data != NULL &&
729 (Dwarf_Small *) space >= dbg->de_debug_weaknames.dss_data &&
730 (Dwarf_Small *) space <
731 dbg->de_debug_weaknames.dss_data +
732 dbg->de_debug_weaknames.dss_size)
733 return;
734
735 #ifdef DWARF_SIMPLE_MALLOC
736 _dwarf_simple_malloc_delete_from_list(dbg, space, type);
737 #endif
738 free(space);
739 return;
740 }
741
742 if (type == DW_DLA_LIST ||
743 type == DW_DLA_FRAME_BLOCK ||
744 type == DW_DLA_LOC_BLOCK || type == DW_DLA_ADDR ||
745 type == DW_DLA_RANGES ||
746 type == DW_DLA_HASH_TABLE_ENTRY) {
747
748 #ifdef DWARF_SIMPLE_MALLOC
749 _dwarf_simple_malloc_delete_from_list(dbg, space, type);
750 #endif
751 free(space);
752 return;
753 }
754 /* else is an alloc type that is not used */
755 /* app or internal error */
756 #ifdef DWARF_SIMPLE_MALLOC
757 _dwarf_simple_malloc_botch(4);
758 #endif
759 return;
760
761 }
762 if (index_into_allocated[type].specialdestructor) {
763 index_into_allocated[type].specialdestructor(space);
764 }
765 #ifdef DWARF_SIMPLE_MALLOC
766 _dwarf_simple_malloc_delete_from_list(dbg, space, type);
767 free(space);
768 #else /* !DWARF_SIMPLE_MALLOC */
769 alloc_hdr = &dbg->de_alloc_hdr[index];
770
771 /* Get pointer to Dwarf_Alloc_Area this struct came from. See
772 dwarf_alloc.h ROUND_SIZE_WITH_POINTER stuff */
773 alloc_area = *(Dwarf_Alloc_Area *) ((char *) space - DW_RESERVE);
774
775 /* ASSERT: alloc_area != NULL. If it is NULL we could abort, let
776 the code below coredump, or return, pretending all is well. We go
777 on, letting the program crash: it is a caller error. */
778
779 /*
780 Check that the alloc_hdr field of the alloc_area we have is
781 pointing to the right alloc_hdr. This is used to catch use of
782 incorrect deallocation code by the user. */
783 if (alloc_area->aa_alloc_hdr != alloc_hdr) {
784 /* If we get here, the user has called dwarf_dealloc wrongly or
785 there is some other disastrous error. By leaking mem here we
786 try to be safe... */
787 #ifdef DEBUG
788 fprintf(stderr,
789 "libdwarf Internal error: type %d hdr mismatch %lx %lx "
790 "area ptr %lx\n",
791 (int) type,
792 (long) alloc_area->aa_alloc_hdr,
793 (long) alloc_hdr, (long) alloc_area);
794 #endif
795 return;
796 }
797
798 alloc_hdr->ah_struct_user_holds--;
799 alloc_area->aa_free_structs_in_chunk++;
800
801 /*
802 Give chunk back to malloc only when every struct is freed */
803 if (alloc_area->aa_free_structs_in_chunk ==
804 alloc_hdr->ah_structs_per_chunk) {
805 if (alloc_area->aa_prev != NULL) {
806 alloc_area->aa_prev->aa_next = alloc_area->aa_next;
807 } else {
808 alloc_hdr->ah_alloc_area_head = alloc_area->aa_next;
809 }
810
811 if (alloc_area->aa_next != NULL) {
812 alloc_area->aa_next->aa_prev = alloc_area->aa_prev;
813 }
814
815 alloc_hdr->ah_chunks_allocated--;
816
817 if (alloc_area == alloc_hdr->ah_last_alloc_area) {
818 alloc_hdr->ah_last_alloc_area = NULL;
819 }
820 memset(alloc_area, 0, sizeof(*alloc_area));
821 free(alloc_area);
822 }
823
824 else {
825 ((Dwarf_Free_List) space)->fl_next = alloc_area->aa_free_list;
826 alloc_area->aa_free_list = space;
827 }
828 #endif /* !DWARF_SIMPLE_MALLOC */
829 }
830
831
832 /*
833 Allocates space for a Dwarf_Debug_s struct,
834 since one does not exist.
835 */
836 Dwarf_Debug
837 _dwarf_get_debug(void)
839 {
840 Dwarf_Debug dbg;
841
842 dbg = (Dwarf_Debug) malloc(sizeof(struct Dwarf_Debug_s));
843 if (dbg == NULL)
844 return (NULL);
845 else
846 memset(dbg, 0, sizeof(struct Dwarf_Debug_s));
847 return (dbg);
848 }
849
850
851 /*
852 Sets up the Dwarf_Debug_s struct for all the
853 allocation types currently defined.
854 Allocation types DW_DLA_STRING, DW_DLA_LIST,
855 DW_DLA_FRAME_BLOCK, DW_DLA_LOC_BLOCK, DW_DLA_RANGES are
856 malloc'ed directly.
857
858 This routine should be called after _dwarf_setup(),
859 so that information about the sizes of the Dwarf
860 sections can be used to decide the number of
861 structs of each type malloc'ed.
862
863 Also DW_DLA_ELLIST, DW_DLA_BOUNDS, DW_DLA_TYPE,
864 DW_DLA_SUBSCR, DW_DLA_LINEBUF allocation types
865 are currently not used.
866 The ah_bytes_one_struct and ah_structs_per_chunk fields for
867 these types have been set to 1 for efficiency
868 in _dwarf_get_alloc().
869 
870 The ia_al_num index is nonzero for all
871 types that are currently being used.
872 
873 Therefore, for those unused allocation types the
874 ah_bytes_one_struct and ah_structs_per_chunk fields do not
875 need to be meaningfully initialized.
876
877 Being an internal routine, assume proper dbg.
878 */
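/* Worked example of the loop below, assuming DW_RESERVE is 8 and,
   purely for illustration, sizeof(struct Dwarf_Die_s) == 60:

       rnded_size                = ROUND_SIZE_WITH_POINTER(60) = 64 + 8 = 72
       ah_bytes_one_struct       = 72
       ah_structs_per_chunk      = BIG_ALLOC (128)
       ah_bytes_malloc_per_chunk = 72 * 128 = 9216

   The real numbers depend on the struct sizes of the target build. */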
879
880 Dwarf_Debug
881 _dwarf_setup_debug(Dwarf_Debug dbg)
882 {
883 int i;
884
885 for (i = 1; i <= MAX_DW_DLA; i++) {
886 const struct ial_s *ialp = &index_into_allocated[i];
887 unsigned int hdr_index = ialp->ia_al_num;
888 Dwarf_Word str_size = ialp->ia_struct_size;
889 Dwarf_Word str_count = ialp->ia_base_count;
890 Dwarf_Word rnded_size = ROUND_SIZE_WITH_POINTER(str_size);
891
892 Dwarf_Alloc_Hdr alloc_hdr = &dbg->de_alloc_hdr[hdr_index];
893
894 alloc_hdr->ah_bytes_one_struct = (Dwarf_Half) rnded_size;
895
896 /* ah_structs_per_chunk must be >0 else we are in trouble */
897 alloc_hdr->ah_structs_per_chunk = str_count;
898 alloc_hdr->ah_bytes_malloc_per_chunk = rnded_size * str_count;
899 }
900 return (dbg);
901 }
902
903 /*
904 This function prints out the statistics
905 collected on allocation of memory chunks.
906 */
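/* Usage sketch (illustration only): an application or test driver
   may call this while the Dwarf_Debug is still live, for example
   just before tearing it down:

       dwarf_print_memory_stats(dbg);
       dwarf_finish(dbg, &err);
*/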
907 void
908 dwarf_print_memory_stats(Dwarf_Debug dbg)
909 {
910 Dwarf_Alloc_Hdr alloc_hdr;
911 Dwarf_Shalf i;
912
913 /*
914 Alloc types start at 1, not 0. Hence the first entry is an empty
915 string, and the array size is MAX_DW_DLA + 1. */
916 char *alloc_type_name[MAX_DW_DLA + 1] = {
917 "",
918 "DW_DLA_STRING",
919 "DW_DLA_LOC",
920 "DW_DLA_LOCDESC",
921 "DW_DLA_ELLIST",
922 "DW_DLA_BOUNDS",
923 "DW_DLA_BLOCK",
924 "DW_DLA_DEBUG",
925 "DW_DLA_DIE",
926 "DW_DLA_LINE",
927 "DW_DLA_ATTR",
928 "DW_DLA_TYPE",
929 "DW_DLA_SUBSCR",
930 "DW_DLA_GLOBAL",
931 "DW_DLA_ERROR",
932 "DW_DLA_LIST",
933 "DW_DLA_LINEBUF",
934 "DW_DLA_ARANGE",
935 "DW_DLA_ABBREV",
936 "DW_DLA_FRAME_OP",
937 "DW_DLA_CIE",
938 "DW_DLA_FDE",
939 "DW_DLA_LOC_BLOCK",
940 "DW_DLA_FRAME_BLOCK",
941 "DW_DLA_FUNC",
942 "DW_DLA_TYPENAME",
943 "DW_DLA_VAR",
944 "DW_DLA_WEAK",
945 "DW_DLA_ADDR",
946 "DW_DLA_RANGES",
947 "DW_DLA_ABBREV_LIST",
948 "DW_DLA_CHAIN",
949 "DW_DLA_CU_CONTEXT",
950 "DW_DLA_FRAME",
951 "DW_DLA_GLOBAL_CONTEXT",
952 "DW_DLA_FILE_ENTRY",
953 "DW_DLA_LINE_CONTEXT",
954 "DW_DLA_LOC_CHAIN",
955 "DW_DLA_HASH_TABLE",
956 "DW_DLA_FUNC_CONTEXT",
957 "DW_DLA_TYPENAME_CONTEXT",
958 "DW_DLA_VAR_CONTEXT",
959 "DW_DLA_WEAK_CONTEXT",
960 "DW_DLA_PUBTYPES_CONTEXT",
961 "DW_DLA_HASH_TABLE_ENTRY",
962 };
963
964 if (dbg == NULL)
965 return;
966
967 printf("Size of Dwarf_Debug %4ld bytes\n",
968 (long) sizeof(*dbg));
969 printf("Size of Dwarf_Alloc_Hdr_s %4ld bytes\n",
970 (long) sizeof(struct Dwarf_Alloc_Hdr_s));
971 printf("size of Dwarf_Alloc_Area_s %4ld bytes\n",
972 (long) sizeof(struct Dwarf_Alloc_Area_s));
973
974 printf(" Alloc Type                  Chunks  Held  bytes/chunk  structs/chunk\n");
975 printf(" ----------                  ------  ----  -----------  -------------\n");
976 for (i = 1; i <= MAX_DW_DLA; i++) {
977 int indx = index_into_allocated[i].ia_al_num;
978
979 alloc_hdr = &dbg->de_alloc_hdr[indx];
980 if (alloc_hdr->ah_bytes_one_struct != 1) {
981 printf("%2d %-25s %6d %8d %6d %6d\n",
982 (int) i,
983 alloc_type_name[i],
984 (int) alloc_hdr->ah_chunks_allocated,
985 (int) alloc_hdr->ah_struct_user_holds,
986 (int) alloc_hdr->ah_bytes_malloc_per_chunk,
987 (int) alloc_hdr->ah_structs_per_chunk);
988 }
989 }
990 }
991
992
993 #ifndef DWARF_SIMPLE_MALLOC
994 /*
995 This recursively frees
996 the chunks still allocated, and
997 forward chained through the aa_next
998 pointer.
999 */
1000 static void
1001 _dwarf_recursive_free(Dwarf_Alloc_Area alloc_area)
1002 {
1003 if (alloc_area->aa_next != NULL) {
1004 _dwarf_recursive_free(alloc_area->aa_next);
1005 }
1006
1007 alloc_area->aa_next = 0;
1008 alloc_area->aa_prev = 0;
1009 free(alloc_area);
1010 }
1011 #endif
1012
1013 /* In the 'rela' relocation case we might have malloc'd
1014 space to ensure it is read-write. In that case, free the space. */
1015 static void
1016 rela_free(struct Dwarf_Section_s * sec)
1017 {
1018 if (sec->dss_data_was_malloc) {
1019 free(sec->dss_data);
1020 }
1021 sec->dss_data = 0;
1022 sec->dss_data_was_malloc = 0;
1023 }
1024
1025 /*
1026 Used to free all space allocated for this Dwarf_Debug.
1027 The caller should assume that the Dwarf_Debug pointer
1028 itself is no longer valid upon return from this function.
1029
1030 In case of difficulty, this function simply returns quietly.
1031 */
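/* Applications do not normally call this directly; it is reached on
   their behalf through dwarf_finish(). A typical teardown, sketched
   with hypothetical variable names:

       Dwarf_Error err = 0;
       int res = dwarf_finish(dbg, &err);

   After that call the dbg pointer must not be used again. */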
1032 int
1033 _dwarf_free_all_of_one_debug(Dwarf_Debug dbg)
1034 {
1035 Dwarf_Alloc_Hdr alloc_hdr;
1036 Dwarf_Shalf i;
1037 Dwarf_CU_Context context = 0;
1038 Dwarf_CU_Context nextcontext = 0;
1039
1040 if (dbg == NULL)
1041 return (DW_DLV_ERROR);
1042
1043 /* To do complete validation that we have no surprising missing or
1044 erroneous deallocs it is advisable to do the dwarf_deallocs here
1045 that are not things the user can otherwise request.
1046 Housecleaning. */
1047
1048 for (context = dbg->de_cu_context_list;
1049 context; context = nextcontext) {
1050 Dwarf_Hash_Table hash_table = context->cc_abbrev_hash_table;
1051 _dwarf_free_abbrev_hash_table_contents(dbg,hash_table);
1052 nextcontext = context->cc_next;
1053 dwarf_dealloc(dbg, hash_table, DW_DLA_HASH_TABLE);
1054 dwarf_dealloc(dbg, context, DW_DLA_CU_CONTEXT);
1055 }
1056
1057 /* Housecleaning done. Now really free all the space. */
1058 #ifdef DWARF_SIMPLE_MALLOC
1059 if (dbg->de_simple_malloc_base) {
1060 struct simple_malloc_record_s *smp = dbg->de_simple_malloc_base;
1061
1062 while (smp) {
1063 int i;
1064 struct simple_malloc_record_s *prev_smp = 0;
1065
1066 for (i = 0; i < smp->sr_used; ++i) {
1067 struct simple_malloc_entry_s *cur;
1068
1069 cur = &smp->sr_entry[i];
1070 if (cur->se_addr != 0) {
1071 free(cur->se_addr);
1072 cur->se_addr = 0;
1073 }
1074 }
1075 prev_smp = smp;
1076 smp = smp->sr_next;
1077 free(prev_smp);
1078 }
1079 dbg->de_simple_malloc_base = 0;
1080 }
1081 #else
1082 for (i = 1; i < ALLOC_AREA_REAL_TABLE_MAX; i++) {
1083 int indx = i;
1084
1085 alloc_hdr = &dbg->de_alloc_hdr[indx];
1086 if (alloc_hdr->ah_alloc_area_head != NULL) {
1087 _dwarf_recursive_free(alloc_hdr->ah_alloc_area_head);
1088 }
1089 }
1090
1091 #endif
1092 rela_free(&dbg->de_debug_info);
1093 rela_free(&dbg->de_debug_abbrev);
1094 rela_free(&dbg->de_debug_line);
1095 rela_free(&dbg->de_debug_loc);
1096 rela_free(&dbg->de_debug_aranges);
1097 rela_free(&dbg->de_debug_macinfo);
1098 rela_free(&dbg->de_debug_pubnames);
1099 rela_free(&dbg->de_debug_str);
1100 rela_free(&dbg->de_debug_frame);
1101 rela_free(&dbg->de_debug_frame_eh_gnu);
1102 rela_free(&dbg->de_debug_pubtypes);
1103 rela_free(&dbg->de_debug_funcnames);
1104 rela_free(&dbg->de_debug_typenames);
1105 rela_free(&dbg->de_debug_varnames);
1106 rela_free(&dbg->de_debug_weaknames);
1107 rela_free(&dbg->de_debug_ranges);
1108 dwarf_harmless_cleanout(&dbg->de_harmless_errors);
1109
1110 memset(dbg, 0, sizeof(*dbg)); /* Prevent accidental use later. */
1111 free(dbg);
1112 return (DW_DLV_OK);
1113 }
1114
1115 /* A special case: we have no dbg, no alloc header etc.
1116 So create something out of thin air that we can recognize
1117 in dwarf_dealloc.
1118 Something with the prefix (prefix space hidden from caller).
1119
1120 Only applies to DW_DLA_ERROR, making up an error record.
1121 */
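/* Layout sketch of the block built below. The first DW_RESERVE
   bytes stay zero, standing in for the chunk pointer that a normal
   allocation carries, which is how dwarf_dealloc() recognizes the
   special case (the stored "chunk pointer" reads back as NULL):

       +------------+----------------------------+
       | DW_RESERVE |  struct Dwarf_Error_s ...  |
       |  (zeroed)  |  (returned to the caller)  |
       +------------+----------------------------+
*/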
1122
1123 struct Dwarf_Error_s *
1124 _dwarf_special_no_dbg_error_malloc(void)
1125 {
1126 /* The unused union members exist only to guarantee proper alignment. */
1127 union u {
1128 Dwarf_Alloc_Area ptr_not_used;
1129 struct Dwarf_Error_s base_not_used;
1130 char data_space[sizeof(struct Dwarf_Error_s) +
1131 (DW_RESERVE * 2)];
1132 };
1133 char *mem;
1134
1135 mem = malloc(sizeof(union u));
1136
1137 if (mem == 0) {
1138 return 0;
1139
1140 }
1141 memset(mem, 0, sizeof(union u));
1142 mem += DW_RESERVE;
1143 return (struct Dwarf_Error_s *) mem;
1144 }
1145
1146 /* The free side of _dwarf_special_no_dbg_error_malloc()
1147 */
1148 static void
1149 _dwarf_free_special_error(Dwarf_Ptr space)
1150 {
1151 char *mem = (char *) space;
1152
1153 mem -= DW_RESERVE;
1154 free(mem);
1155 }
1156
1157
1158 #ifdef DWARF_SIMPLE_MALLOC
1159 /* here solely for planting a breakpoint. */
1160 /* ARGSUSED */
1161 void
1162 _dwarf_simple_malloc_botch(int err)
1163 {
1164 fprintf(stderr,"simple malloc botch %d\n",err);
1165 }
1166 static void
1167 _dwarf_simple_malloc_add_to_list(Dwarf_Debug dbg,
1168 Dwarf_Ptr addr,
1169 unsigned long size, short alloc_type)
1170 {
1171 struct simple_malloc_record_s *cur;
1172 struct simple_malloc_entry_s *newentry;
1173
1174 if (!dbg->de_simple_malloc_base) {
1175 /* First entry to this routine. */
1176 dbg->de_simple_malloc_base =
1177 malloc(sizeof(struct simple_malloc_record_s));
1178 if (!dbg->de_simple_malloc_base) {
1179 _dwarf_simple_malloc_botch(7);
1180 return; /* no memory, give up */
1181 }
1182 memset(dbg->de_simple_malloc_base,
1183 0, sizeof(struct simple_malloc_record_s));
1184 }
1185 cur = dbg->de_simple_malloc_base;
1186
1187 if (cur->sr_used >= DSM_BLOCK_COUNT) {
1188 /* It had better not be greater than that, as that would mean chaos. */
1189
1190 /* Create a new block to link at the head. */
1191
1192 struct simple_malloc_record_s *newblock =
1193 malloc(sizeof(struct simple_malloc_record_s));
1194 if (!newblock) {
1195 _dwarf_simple_malloc_botch(8);
1196 return; /* Can do nothing, out of memory */
1197 }
1198 memset(newblock, 0, sizeof(struct simple_malloc_record_s));
1199 /* Link the new block at the head of the chain, and make it
1200 'current' */
1201 dbg->de_simple_malloc_base = newblock;
1202 newblock->sr_next = cur;
1203 cur = newblock;
1204 }
1205 newentry = &cur->sr_entry[cur->sr_used];
1206 newentry->se_addr = addr;
1207 newentry->se_size = size;
1208 newentry->se_type = alloc_type;
1209 ++cur->sr_used;
1210 }
1211
1212 /*
1213 DWARF_SIMPLE_MALLOC: testing the hypothesis that the existing
1214 malloc scheme here (see _dwarf_get_alloc()) is pointless complexity.
1215
1216 DWARF_SIMPLE_MALLOC also makes it easy for a malloc-tracing
1217 tool to verify libdwarf malloc has no botches (though of course
1218 such does not test the complicated standard-libdwarf-alloc code).
1219
1220 To properly answer the question, the simple-malloc allocate
1221 and delete should be something other than a simple list.
1222 Perhaps a heap, or perhaps a red-black tree.
1223
1224 */
1225 static void
1226 _dwarf_simple_malloc_delete_from_list(Dwarf_Debug dbg,
1227 Dwarf_Ptr space, short alloc_type)
1228 {
1229 if (space == 0) {
1230 _dwarf_simple_malloc_botch(6);
1231 }
1232 if (dbg->de_simple_malloc_base) {
1233 struct simple_malloc_record_s *smp = dbg->de_simple_malloc_base;
1234
1235 while (smp) {
1236 int i;
1237
1238 for (i = 0; i < smp->sr_used; ++i) {
1239 struct simple_malloc_entry_s *cur;
1240
1241 cur = &smp->sr_entry[i];
1242 if (cur->se_addr == space) {
1243 if (cur->se_type != alloc_type) {
1244 _dwarf_simple_malloc_botch(0);
1245 }
1246 cur->se_addr = 0;
1247 return;
1248 }
1249 }
1250 smp = smp->sr_next;
1251 }
1252 }
1253 /* Never found the space. */
1254 _dwarf_simple_malloc_botch(1);
1255 return;
1256
1257 }
1258 #endif
1259