/* gcc/value-range-storage.cc  */
/* Support routines for vrange storage.
   Copyright (C) 2022-2024 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "gimple-range.h"
#include "value-range-storage.h"
33 // Generic memory allocator to share one interface between GC and
34 // obstack allocators.
36 class vrange_internal_alloc
38 public:
39 vrange_internal_alloc () { }
40 virtual ~vrange_internal_alloc () { }
41 virtual void *alloc (size_t size) = 0;
42 virtual void free (void *) = 0;
43 private:
44 DISABLE_COPY_AND_ASSIGN (vrange_internal_alloc);
47 class vrange_obstack_alloc final: public vrange_internal_alloc
49 public:
50 vrange_obstack_alloc ()
52 obstack_init (&m_obstack);
54 virtual ~vrange_obstack_alloc () final override
56 obstack_free (&m_obstack, NULL);
58 virtual void *alloc (size_t size) final override
60 return obstack_alloc (&m_obstack, size);
62 virtual void free (void *) final override { }
63 private:
64 obstack m_obstack;
67 class vrange_ggc_alloc final: public vrange_internal_alloc
69 public:
70 vrange_ggc_alloc () { }
71 virtual ~vrange_ggc_alloc () final override { }
72 virtual void *alloc (size_t size) final override
74 return ggc_internal_alloc (size);
76 virtual void free (void *p) final override
78 return ggc_free (p);
82 vrange_allocator::vrange_allocator (bool gc)
84 if (gc)
85 m_alloc = new vrange_ggc_alloc;
86 else
87 m_alloc = new vrange_obstack_alloc;
90 vrange_allocator::~vrange_allocator ()
92 delete m_alloc;
95 void *
96 vrange_allocator::alloc (size_t size)
98 return m_alloc->alloc (size);
101 void
102 vrange_allocator::free (void *p)
104 m_alloc->free (p);
107 // Allocate a new vrange_storage object initialized to R and return
108 // it.
110 vrange_storage *
111 vrange_allocator::clone (const vrange &r)
113 return vrange_storage::alloc (*m_alloc, r);
116 vrange_storage *
117 vrange_allocator::clone_varying (tree type)
119 if (irange::supports_p (type))
120 return irange_storage::alloc (*m_alloc, int_range <1> (type));
121 if (prange::supports_p (type))
122 return prange_storage::alloc (*m_alloc, prange (type));
123 if (frange::supports_p (type))
124 return frange_storage::alloc (*m_alloc, frange (type));
125 return NULL;
128 vrange_storage *
129 vrange_allocator::clone_undefined (tree type)
131 if (irange::supports_p (type))
132 return irange_storage::alloc (*m_alloc, int_range<1> ());
133 if (prange::supports_p (type))
134 return prange_storage::alloc (*m_alloc, prange ());
135 if (frange::supports_p (type))
136 return frange_storage::alloc (*m_alloc, frange ());
137 return NULL;
140 // Allocate a new vrange_storage object initialized to R and return
141 // it. Return NULL if R is unsupported.
143 vrange_storage *
144 vrange_storage::alloc (vrange_internal_alloc &allocator, const vrange &r)
146 if (is_a <irange> (r))
147 return irange_storage::alloc (allocator, as_a <irange> (r));
148 if (is_a <prange> (r))
149 return prange_storage::alloc (allocator, as_a <prange> (r));
150 if (is_a <frange> (r))
151 return frange_storage::alloc (allocator, as_a <frange> (r));
152 return NULL;
155 // Set storage to R.
157 void
158 vrange_storage::set_vrange (const vrange &r)
160 if (is_a <irange> (r))
162 irange_storage *s = static_cast <irange_storage *> (this);
163 gcc_checking_assert (s->fits_p (as_a <irange> (r)));
164 s->set_irange (as_a <irange> (r));
166 else if (is_a <prange> (r))
168 prange_storage *s = static_cast <prange_storage *> (this);
169 gcc_checking_assert (s->fits_p (as_a <prange> (r)));
170 s->set_prange (as_a <prange> (r));
172 else if (is_a <frange> (r))
174 frange_storage *s = static_cast <frange_storage *> (this);
175 gcc_checking_assert (s->fits_p (as_a <frange> (r)));
176 s->set_frange (as_a <frange> (r));
178 else
179 gcc_unreachable ();
181 // Verify that reading back from the cache didn't drop bits.
182 if (flag_checking
183 // FIXME: Avoid checking frange, as it currently pessimizes some ranges:
185 // gfortran.dg/pr49472.f90 pessimizes [0.0, 1.0] into [-0.0, 1.0].
186 && !is_a <frange> (r)
187 && !r.undefined_p ())
189 Value_Range tmp (r);
190 get_vrange (tmp, r.type ());
191 gcc_checking_assert (tmp == r);
195 // Restore R from storage.
197 void
198 vrange_storage::get_vrange (vrange &r, tree type) const
200 if (is_a <irange> (r))
202 const irange_storage *s = static_cast <const irange_storage *> (this);
203 s->get_irange (as_a <irange> (r), type);
205 else if (is_a <prange> (r))
207 const prange_storage *s = static_cast <const prange_storage *> (this);
208 s->get_prange (as_a <prange> (r), type);
210 else if (is_a <frange> (r))
212 const frange_storage *s = static_cast <const frange_storage *> (this);
213 s->get_frange (as_a <frange> (r), type);
215 else
216 gcc_unreachable ();
219 // Return TRUE if storage can fit R.
221 bool
222 vrange_storage::fits_p (const vrange &r) const
224 if (is_a <irange> (r))
226 const irange_storage *s = static_cast <const irange_storage *> (this);
227 return s->fits_p (as_a <irange> (r));
229 if (is_a <prange> (r))
231 const prange_storage *s = static_cast <const prange_storage *> (this);
232 return s->fits_p (as_a <prange> (r));
234 if (is_a <frange> (r))
236 const frange_storage *s = static_cast <const frange_storage *> (this);
237 return s->fits_p (as_a <frange> (r));
239 gcc_unreachable ();
240 return false;
243 // Return TRUE if the range in storage is equal to R. It is the
244 // caller's responsibility to verify that the type of the range in
245 // storage matches that of R.
247 bool
248 vrange_storage::equal_p (const vrange &r) const
250 if (is_a <irange> (r))
252 const irange_storage *s = static_cast <const irange_storage *> (this);
253 return s->equal_p (as_a <irange> (r));
255 if (is_a <prange> (r))
257 const prange_storage *s = static_cast <const prange_storage *> (this);
258 return s->equal_p (as_a <prange> (r));
260 if (is_a <frange> (r))
262 const frange_storage *s = static_cast <const frange_storage *> (this);
263 return s->equal_p (as_a <frange> (r));
265 gcc_unreachable ();
268 //============================================================================
269 // irange_storage implementation
270 //============================================================================
272 unsigned short *
273 irange_storage::write_lengths_address ()
275 return (unsigned short *)&m_val[(m_num_ranges * 2 + 2)
276 * WIDE_INT_MAX_HWIS (m_precision)];
279 const unsigned short *
280 irange_storage::lengths_address () const
282 return const_cast <irange_storage *> (this)->write_lengths_address ();
285 // Allocate a new irange_storage object initialized to R.
287 irange_storage *
288 irange_storage::alloc (vrange_internal_alloc &allocator, const irange &r)
290 size_t size = irange_storage::size (r);
291 irange_storage *p = static_cast <irange_storage *> (allocator.alloc (size));
292 new (p) irange_storage (r);
293 return p;
296 // Initialize the storage with R.
298 irange_storage::irange_storage (const irange &r)
299 : m_max_ranges (r.num_pairs ())
301 m_num_ranges = m_max_ranges;
302 set_irange (r);
305 static inline void
306 write_wide_int (HOST_WIDE_INT *&val, unsigned short *&len, const wide_int &w)
308 *len = w.get_len ();
309 for (unsigned i = 0; i < *len; ++i)
310 *val++ = w.elt (i);
311 ++len;
314 // Store R into the current storage.
316 void
317 irange_storage::set_irange (const irange &r)
319 gcc_checking_assert (fits_p (r));
321 if (r.undefined_p ())
323 m_kind = VR_UNDEFINED;
324 return;
326 if (r.varying_p ())
328 m_kind = VR_VARYING;
329 return;
332 m_precision = TYPE_PRECISION (r.type ());
333 m_num_ranges = r.num_pairs ();
334 m_kind = VR_RANGE;
336 HOST_WIDE_INT *val = &m_val[0];
337 unsigned short *len = write_lengths_address ();
339 for (unsigned i = 0; i < r.num_pairs (); ++i)
341 write_wide_int (val, len, r.lower_bound (i));
342 write_wide_int (val, len, r.upper_bound (i));
345 // TODO: We could avoid streaming out the value if the mask is -1.
346 irange_bitmask bm = r.m_bitmask;
347 write_wide_int (val, len, bm.value ());
348 write_wide_int (val, len, bm.mask ());
351 static inline void
352 read_wide_int (wide_int &w,
353 const HOST_WIDE_INT *val, unsigned short len, unsigned prec)
355 trailing_wide_int_storage stow (prec, &len,
356 const_cast <HOST_WIDE_INT *> (val));
357 w = trailing_wide_int (stow);
360 // Restore a range of TYPE from storage into R.
362 void
363 irange_storage::get_irange (irange &r, tree type) const
365 if (m_kind == VR_UNDEFINED)
367 r.set_undefined ();
368 return;
370 if (m_kind == VR_VARYING)
372 r.set_varying (type);
373 return;
376 gcc_checking_assert (TYPE_PRECISION (type) == m_precision);
377 const HOST_WIDE_INT *val = &m_val[0];
378 const unsigned short *len = lengths_address ();
380 // Handle the common case where R can fit the new range.
381 if (r.m_max_ranges >= m_num_ranges)
383 r.m_kind = VR_RANGE;
384 r.m_num_ranges = m_num_ranges;
385 r.m_type = type;
386 for (unsigned i = 0; i < m_num_ranges * 2; ++i)
388 read_wide_int (r.m_base[i], val, *len, m_precision);
389 val += *len++;
392 // Otherwise build the range piecewise.
393 else
395 r.set_undefined ();
396 for (unsigned i = 0; i < m_num_ranges; ++i)
398 wide_int lb, ub;
399 read_wide_int (lb, val, *len, m_precision);
400 val += *len++;
401 read_wide_int (ub, val, *len, m_precision);
402 val += *len++;
403 int_range<1> tmp (type, lb, ub);
404 r.union_ (tmp);
408 wide_int bits_value, bits_mask;
409 read_wide_int (bits_value, val, *len, m_precision);
410 val += *len++;
411 read_wide_int (bits_mask, val, *len, m_precision);
412 r.m_bitmask = irange_bitmask (bits_value, bits_mask);
413 if (r.m_kind == VR_VARYING)
414 r.m_kind = VR_RANGE;
416 if (flag_checking)
417 r.verify_range ();
420 bool
421 irange_storage::equal_p (const irange &r) const
423 if (m_kind == VR_UNDEFINED || r.undefined_p ())
424 return m_kind == r.m_kind;
425 if (m_kind == VR_VARYING || r.varying_p ())
426 return m_kind == r.m_kind;
428 // ?? We could make this faster by doing the comparison in place,
429 // without going through get_irange.
430 int_range_max tmp;
431 get_irange (tmp, r.type ());
432 return tmp == r;
435 // Return the size in bytes to allocate storage that can hold R.
437 size_t
438 irange_storage::size (const irange &r)
440 if (r.undefined_p ())
441 return sizeof (irange_storage);
443 unsigned prec = TYPE_PRECISION (r.type ());
444 unsigned n = r.num_pairs () * 2 + 2;
445 unsigned hwi_size = ((n * WIDE_INT_MAX_HWIS (prec) - 1)
446 * sizeof (HOST_WIDE_INT));
447 unsigned len_size = n * sizeof (unsigned short);
448 return sizeof (irange_storage) + hwi_size + len_size;
451 // Return TRUE if R fits in the current storage.
453 bool
454 irange_storage::fits_p (const irange &r) const
456 return m_max_ranges >= r.num_pairs ();
459 void
460 irange_storage::dump () const
462 fprintf (stderr, "irange_storage (prec=%d, ranges=%d):\n",
463 m_precision, m_num_ranges);
465 if (m_num_ranges == 0)
466 return;
468 const HOST_WIDE_INT *val = &m_val[0];
469 const unsigned short *len = lengths_address ();
470 int i, j;
472 fprintf (stderr, " lengths = [ ");
473 for (i = 0; i < m_num_ranges * 2 + 2; ++i)
474 fprintf (stderr, "%d ", len[i]);
475 fprintf (stderr, "]\n");
477 for (i = 0; i < m_num_ranges; ++i)
479 for (j = 0; j < *len; ++j)
480 fprintf (stderr, " [PAIR %d] LB " HOST_WIDE_INT_PRINT_DEC "\n", i,
481 *val++);
482 ++len;
483 for (j = 0; j < *len; ++j)
484 fprintf (stderr, " [PAIR %d] UB " HOST_WIDE_INT_PRINT_DEC "\n", i,
485 *val++);
486 ++len;
489 // Dump value/mask pair.
490 for (j = 0; j < *len; ++j)
491 fprintf (stderr, " [VALUE] " HOST_WIDE_INT_PRINT_DEC "\n", *val++);
492 ++len;
493 for (j = 0; j < *len; ++j)
494 fprintf (stderr, " [MASK] " HOST_WIDE_INT_PRINT_DEC "\n", *val++);
497 DEBUG_FUNCTION void
498 debug (const irange_storage &storage)
500 storage.dump ();
501 fprintf (stderr, "\n");
504 //============================================================================
505 // frange_storage implementation
506 //============================================================================
508 // Allocate a new frange_storage object initialized to R.
510 frange_storage *
511 frange_storage::alloc (vrange_internal_alloc &allocator, const frange &r)
513 size_t size = sizeof (frange_storage);
514 frange_storage *p = static_cast <frange_storage *> (allocator.alloc (size));
515 new (p) frange_storage (r);
516 return p;
519 void
520 frange_storage::set_frange (const frange &r)
522 gcc_checking_assert (fits_p (r));
524 m_kind = r.m_kind;
525 m_min = r.m_min;
526 m_max = r.m_max;
527 m_pos_nan = r.m_pos_nan;
528 m_neg_nan = r.m_neg_nan;
531 void
532 frange_storage::get_frange (frange &r, tree type) const
534 gcc_checking_assert (r.supports_type_p (type));
536 // Handle explicit NANs.
537 if (m_kind == VR_NAN)
539 if (HONOR_NANS (type))
541 if (m_pos_nan && m_neg_nan)
542 r.set_nan (type);
543 else
544 r.set_nan (type, m_neg_nan);
546 else
547 r.set_undefined ();
548 return;
550 if (m_kind == VR_UNDEFINED)
552 r.set_undefined ();
553 return;
556 // We use the constructor to create the new range instead of writing
557 // out the bits into the frange directly, because the global range
558 // being read may be being inlined into a function with different
559 // restrictions as when it was originally written. We want to make
560 // sure the resulting range is canonicalized correctly for the new
561 // consumer.
562 r = frange (type, m_min, m_max, m_kind);
564 // The constructor will set the NAN bits for HONOR_NANS, but we must
565 // make sure to set the NAN sign if known.
566 if (HONOR_NANS (type) && (m_pos_nan ^ m_neg_nan) == 1)
567 r.update_nan (m_neg_nan);
568 else if (!m_pos_nan && !m_neg_nan)
569 r.clear_nan ();
572 bool
573 frange_storage::equal_p (const frange &r) const
575 if (r.undefined_p ())
576 return m_kind == VR_UNDEFINED;
578 frange tmp;
579 get_frange (tmp, r.type ());
580 return tmp == r;
583 bool
584 frange_storage::fits_p (const frange &) const
586 return true;
589 //============================================================================
590 // prange_storage implementation
591 //============================================================================
593 prange_storage *
594 prange_storage::alloc (vrange_internal_alloc &allocator, const prange &r)
596 // Assume all pointers are the same size.
597 unsigned prec = TYPE_PRECISION (TREE_TYPE (null_pointer_node));
598 gcc_checking_assert (r.undefined_p () || TYPE_PRECISION (r.type ()) == prec);
600 typedef trailing_wide_ints<NINTS> twi;
601 size_t size = sizeof (prange_storage) + twi::extra_size (prec);
602 prange_storage *p = static_cast <prange_storage *> (allocator.alloc (size));
603 new (p) prange_storage (r);
604 return p;
607 // Initialize the storage with R.
609 prange_storage::prange_storage (const prange &r)
611 // It is the caller's responsibility to allocate enough space such
612 // that the precision fits.
613 unsigned prec = TYPE_PRECISION (TREE_TYPE (null_pointer_node));
614 m_trailing_ints.set_precision (prec);
616 set_prange (r);
619 void
620 prange_storage::set_prange (const prange &r)
622 if (r.undefined_p ())
623 m_kind = VR_UNDEFINED;
624 else if (r.varying_p ())
625 m_kind = VR_VARYING;
626 else
628 m_kind = VR_RANGE;
629 set_low (r.lower_bound ());
630 set_high (r.upper_bound ());
631 irange_bitmask bm = r.m_bitmask;
632 set_value (bm.value ());
633 set_mask (bm.mask ());
637 void
638 prange_storage::get_prange (prange &r, tree type) const
640 gcc_checking_assert (r.supports_type_p (type));
642 if (m_kind == VR_UNDEFINED)
643 r.set_undefined ();
644 else if (m_kind == VR_VARYING)
645 r.set_varying (type);
646 else
648 gcc_checking_assert (m_kind == VR_RANGE);
649 gcc_checking_assert (TYPE_PRECISION (type) == m_trailing_ints.get_precision ());
650 r.m_kind = VR_RANGE;
651 r.m_type = type;
652 r.m_min = get_low ();
653 r.m_max = get_high ();
654 r.m_bitmask = irange_bitmask (get_value (), get_mask ());
655 if (flag_checking)
656 r.verify_range ();
660 bool
661 prange_storage::equal_p (const prange &r) const
663 if (r.undefined_p ())
664 return m_kind == VR_UNDEFINED;
666 prange tmp;
667 get_prange (tmp, r.type ());
668 return tmp == r;
671 bool
672 prange_storage::fits_p (const prange &) const
674 // All pointers are the same size.
675 return true;
679 static vrange_allocator ggc_vrange_allocator (true);
681 vrange_storage *ggc_alloc_vrange_storage (tree type)
683 return ggc_vrange_allocator.clone_varying (type);
686 vrange_storage *ggc_alloc_vrange_storage (const vrange &r)
688 return ggc_vrange_allocator.clone (r);