/* Functions to support a pool of allocatable objects
   Copyright (C) 1997-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@cgsoftware.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef ALLOC_POOL_H
#define ALLOC_POOL_H

#include "hash-map.h"

extern void dump_alloc_pool_statistics (void);

typedef unsigned long ALLOC_POOL_ID_TYPE;
/* Pool allocator memory usage.  */
struct pool_usage: public mem_usage
{
  /* Default constructor.  */
  pool_usage (): m_element_size (0), m_pool_name ("") {}
  /* Constructor.  */
  pool_usage (size_t allocated, size_t times, size_t peak,
	      size_t instances, size_t element_size,
	      const char *pool_name)
    : mem_usage (allocated, times, peak, instances),
      m_element_size (element_size),
      m_pool_name (pool_name) {}
  /* Sum the usage with SECOND usage.  */
  pool_usage
  operator+ (const pool_usage &second)
  {
    return pool_usage (m_allocated + second.m_allocated,
		       m_times + second.m_times,
		       m_peak + second.m_peak,
		       m_instances + second.m_instances,
		       m_element_size, m_pool_name);
  }
  /* Dump usage coupled to LOC location, where TOTAL is sum of all rows.  */
  inline void
  dump (mem_location *loc, mem_usage &total) const
  {
    char *location_string = loc->to_string ();

    fprintf (stderr, "%-32s%-48s %6li%10li:%5.1f%%%10li%10li:%5.1f%%%12li\n",
	     m_pool_name, location_string, (long)m_instances,
	     (long)m_allocated, get_percent (m_allocated, total.m_allocated),
	     (long)m_peak, (long)m_times,
	     get_percent (m_times, total.m_times),
	     (long)m_element_size);

    free (location_string);
  }
  /* Dump header with NAME.  */
  static inline void
  dump_header (const char *name)
  {
    fprintf (stderr, "%-32s%-48s %6s%11s%16s%17s%12s\n", "Pool name", name,
	     "Pools", "Leak", "Peak", "Times", "Elt size");
    print_dash_line ();
  }
  /* Dump footer.  */
  inline void
  dump_footer ()
  {
    print_dash_line ();
    fprintf (stderr, "%s%82li%10li\n", "Total", (long)m_instances,
	     (long)m_allocated);
    print_dash_line ();
  }
  /* Element size.  */
  size_t m_element_size;
  /* Pool name.  */
  const char *m_pool_name;
};

extern mem_alloc_description<pool_usage> pool_allocator_usage;
/* Type based memory pool allocator.  */
template <typename T>
class pool_allocator
{
public:
  /* Default constructor for a pool allocator called NAME.  Each block
     has NUM elements.  The allocator supports EXTRA_SIZE and can
     potentially IGNORE_TYPE_SIZE.  */
  pool_allocator (const char *name, size_t num, size_t extra_size = 0,
		  bool ignore_type_size = false CXX_MEM_STAT_INFO);
  ~pool_allocator ();
  void release ();
  void release_if_empty ();
  T *allocate () ATTRIBUTE_MALLOC;
  void remove (T *object);

private:
  struct allocation_pool_list
  {
    allocation_pool_list *next;
  };

  /* Initialize a pool allocator.  */
  void initialize ();
  template <typename U>
  struct allocation_object
  {
    /* The ID of alloc pool which the object was allocated from.  */
    ALLOC_POOL_ID_TYPE id;

    union
      {
	/* The data of the object.  */
	char data[1];

	/* Because we want any type of data to be well aligned after the ID,
	   the following elements are here.  They are never accessed so
	   the allocated object may be even smaller than this structure.
	   We do not care about alignment for floating-point types.  */
	char *align_p;
	int64_t align_i;
      } u;
    static inline allocation_object<U> *
    get_instance (void *data_ptr)
    {
      return (allocation_object<U> *)(((char *)(data_ptr))
				      - offsetof (allocation_object<U>,
						  u.data));
    }
    static inline U *
    get_data (void *instance_ptr)
    {
      return (U*)(((allocation_object<U> *) instance_ptr)->u.data);
    }
  };
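  /* Note that get_instance and get_data above are inverses of each other:
     get_instance recovers the enclosing allocation_object from a pointer to
     its user-visible data, so get_data (get_instance (p)) == p for any
     element pointer P handed out by the pool.  */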
  /* Align X to 8.  */
  size_t
  align_eight (size_t x)
  {
    return (((x+7) >> 3) << 3);
  }
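  /* For instance, align_eight (1) == 8, align_eight (8) == 8 and
     align_eight (13) == 16.  */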
  const char *m_name;
  ALLOC_POOL_ID_TYPE m_id;
  size_t m_elts_per_block;

  /* These are the elements that have been allocated at least once
     and freed.  */
  allocation_pool_list *m_returned_free_list;

  /* These are the elements that have not yet been allocated out of
     the last block obtained from XNEWVEC.  */
  char* m_virgin_free_list;

  /* The number of elements in the virgin_free_list that can be
     allocated before needing another block.  */
  size_t m_virgin_elts_remaining;
  /* The number of elements that are allocated.  */
  size_t m_elts_allocated;
  /* The number of elements that are released.  */
  size_t m_elts_free;
  /* The number of allocated blocks.  */
  size_t m_blocks_allocated;
  /* List of blocks that are used to allocate new objects.  */
  allocation_pool_list *m_block_list;
  /* Size of a block in bytes.  */
  size_t m_block_size;
  /* Size of a pool element in bytes.  */
  size_t m_elt_size;
  /* Flag if we should ignore the size of the type.  */
  bool m_ignore_type_size;
  /* Extra size in bytes that should be allocated for each element.  */
  size_t m_extra_size;
  /* Flag if a pool allocator is initialized.  */
  bool m_initialized;
  /* Memory allocation location.  */
  mem_location m_location;
};
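/* A minimal usage sketch (the element type my_elt and the pool below are
   hypothetical, for illustration only):

     static pool_allocator<my_elt> my_pool ("my_elt pool", 256);

     my_elt *e = my_pool.allocate ();   // take an element from the pool
     ...
     my_pool.remove (e);                // put it back on the free list
     my_pool.release_if_empty ();       // free the blocks once nothing is live

   Note that allocate () only hands back raw storage; it does not run the
   element's constructor, so callers that need one must use placement new
   themselves.  */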
/* Last used ID.  */
extern ALLOC_POOL_ID_TYPE last_id;

/* Store information about each particular alloc_pool.  Note that this
   will underestimate the amount of storage used by a small amount:
   1) The overhead in a pool is not accounted for.
   2) The unallocated elements in a block are not accounted for.  Note
   that in the worst case this is one element short of the block size
   for that pool.  */
struct alloc_pool_descriptor
{
  /* Number of pools allocated.  */
  unsigned long created;
  /* Gross allocated storage.  */
  unsigned long allocated;
  /* Amount of currently active storage.  */
  unsigned long current;
  /* Peak amount of storage used.  */
  unsigned long peak;
  /* Size of element in the pool.  */
  int elt_size;
};

/* Hashtable mapping alloc_pool names to descriptors.  */
extern hash_map<const char *, alloc_pool_descriptor> *alloc_pool_hash;
template <typename T>
inline
pool_allocator<T>::pool_allocator (const char *name, size_t num,
				   size_t extra_size, bool ignore_type_size
				   MEM_STAT_DECL):
  m_name (name), m_id (0), m_elts_per_block (num), m_returned_free_list (NULL),
  m_virgin_free_list (NULL), m_virgin_elts_remaining (0), m_elts_allocated (0),
  m_elts_free (0), m_blocks_allocated (0), m_block_list (NULL),
  m_block_size (0), m_ignore_type_size (ignore_type_size),
  m_extra_size (extra_size), m_initialized (false),
  m_location (ALLOC_POOL, false PASS_MEM_STAT) {}
/* Initialize a pool allocator.  */

template <typename T>
void
pool_allocator<T>::initialize ()
{
  gcc_checking_assert (!m_initialized);

  m_initialized = true;

  size_t header_size;
  size_t size = (m_ignore_type_size ? 0 : sizeof (T)) + m_extra_size;

  gcc_checking_assert (m_name);

  /* Make size large enough to store the list header.  */
  if (size < sizeof (allocation_pool_list*))
    size = sizeof (allocation_pool_list*);

  /* Now align the size to a multiple of 8.  */
  size = align_eight (size);

  /* Add the aligned size of ID.  */
  size += offsetof (allocation_object<T>, u.data);

  /* Um, we can't really allocate 0 elements per block.  */
  gcc_checking_assert (m_elts_per_block);

  m_elt_size = size;
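  /* For illustration: on a typical LP64 host with a 20-byte T and no extra
     size, the 20 bytes are rounded up to 24 by align_eight and the 8-byte
     offset of u.data (the ID plus padding) is added, giving a 32-byte
     element.  */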
  if (GATHER_STATISTICS)
    {
      pool_usage *u = pool_allocator_usage.register_descriptor
	(this, new mem_location (m_location));

      u->m_element_size = m_elt_size;
      u->m_pool_name = m_name;
    }

  /* List header size should be a multiple of 8.  */
  header_size = align_eight (sizeof (allocation_pool_list));

  m_block_size = (size * m_elts_per_block) + header_size;
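  /* Continuing the illustration above: with 32-byte elements, 256 elements
     per block and an 8-byte aligned list header, each block is
     256 * 32 + 8 = 8200 bytes.  */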
#ifdef ENABLE_CHECKING
  /* Increase the last used ID and use it for this pool.
     ID == 0 is used for free elements of pool so skip it.  */
  last_id++;
  if (last_id == 0)
    last_id++;

  m_id = last_id;
#endif
}
/* Free all memory allocated for the given memory pool.  */
template <typename T>
inline void
pool_allocator<T>::release ()
{
  if (!m_initialized)
    return;

  allocation_pool_list *block, *next_block;

  /* Free each block allocated to the pool.  */
  for (block = m_block_list; block != NULL; block = next_block)
    {
      next_block = block->next;
      free (block);
    }

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.release_instance_overhead
	(this, (m_elts_allocated - m_elts_free) * m_elt_size);
    }

  m_returned_free_list = NULL;
  m_virgin_free_list = NULL;
  m_virgin_elts_remaining = 0;
  m_elts_allocated = 0;
  m_elts_free = 0;
  m_blocks_allocated = 0;
  m_block_list = NULL;
}
template <typename T>
inline void
pool_allocator<T>::release_if_empty ()
{
  if (m_elts_free == m_elts_allocated)
    release ();
}

template <typename T>
inline pool_allocator<T>::~pool_allocator ()
{
  release ();
}
/* Allocates one element from the pool specified.  */
template <typename T>
inline T *
pool_allocator<T>::allocate ()
{
  if (!m_initialized)
    initialize ();

  allocation_pool_list *header;
#ifdef ENABLE_VALGRIND_ANNOTATIONS
  int size;
#endif

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.register_instance_overhead (m_elt_size, this);
    }

#ifdef ENABLE_VALGRIND_ANNOTATIONS
  size = m_elt_size - offsetof (allocation_object<T>, u.data);
#endif
  /* If there are no more free elements, make some more!  */
  if (!m_returned_free_list)
    {
      char *block;
      if (!m_virgin_elts_remaining)
	{
	  allocation_pool_list *block_header;

	  /* Make the block.  */
	  block = XNEWVEC (char, m_block_size);
	  block_header = (allocation_pool_list*) block;
	  block += align_eight (sizeof (allocation_pool_list));

	  /* Throw it on the block list.  */
	  block_header->next = m_block_list;
	  m_block_list = block_header;

	  /* Make the block available for allocation.  */
	  m_virgin_free_list = block;
	  m_virgin_elts_remaining = m_elts_per_block;

	  /* Also update the number of elements we have free/allocated, and
	     increment the allocated block count.  */
	  m_elts_allocated += m_elts_per_block;
	  m_elts_free += m_elts_per_block;
	  m_blocks_allocated += 1;
	}

      /* We now know that we can take the first elt off the virgin list and
	 put it on the returned list.  */
      block = m_virgin_free_list;
      header = (allocation_pool_list*) allocation_object<T>::get_data (block);
      header->next = NULL;
#ifdef ENABLE_CHECKING
      /* Mark the element to be free.  */
      ((allocation_object<T> *) block)->id = 0;
#endif
      VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (header, size));
      m_returned_free_list = header;
      m_virgin_free_list += m_elt_size;
      m_virgin_elts_remaining--;
    }
  /* Pull the first free element from the free list, and return it.  */
  header = m_returned_free_list;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_DEFINED (header, sizeof (*header)));
  m_returned_free_list = header->next;
  m_elts_free--;

#ifdef ENABLE_CHECKING
  /* Set the ID for element.  */
  allocation_object<T>::get_instance (header)->id = m_id;
#endif
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_UNDEFINED (header, size));

  /* Hand back the raw element; no constructor is run here.  */
  return (T *)(header);
}
/* Puts OBJECT back on the pool's free list.  */
template <typename T>
void
pool_allocator<T>::remove (T *object)
{
  gcc_checking_assert (m_initialized);

  allocation_pool_list *header;
  int size ATTRIBUTE_UNUSED;
  size = m_elt_size - offsetof (allocation_object<T>, u.data);

#ifdef ENABLE_CHECKING
  gcc_assert (object
	      /* Check if we free more than we allocated, which is Bad (TM).  */
	      && m_elts_free < m_elts_allocated
	      /* Check whether OBJECT was allocated from this pool.  */
	      && m_id == allocation_object<T>::get_instance (object)->id);

  memset (object, 0xaf, size);

  /* Mark the element to be free.  */
  allocation_object<T>::get_instance (object)->id = 0;
#endif

  header = (allocation_pool_list*) object;
  header->next = m_returned_free_list;
  m_returned_free_list = header;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (object, size));
  m_elts_free++;

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.release_instance_overhead (this, m_elt_size);
    }
}

#endif