analyzer: deal with -fshort-enums
gcc/analyzer/bounds-checking.cc

/* Bounds-checking of reads and writes to memory regions.
   Copyright (C) 2019-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #define INCLUDE_MEMORY
22 #define INCLUDE_VECTOR
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "intl.h"
30 #include "gimple.h"
31 #include "gimple-iterator.h"
32 #include "diagnostic-core.h"
33 #include "diagnostic-diagram.h"
34 #include "analyzer/analyzer.h"
35 #include "analyzer/analyzer-logging.h"
36 #include "analyzer/region-model.h"
37 #include "analyzer/checker-event.h"
38 #include "analyzer/checker-path.h"
39 #include "analyzer/access-diagram.h"
41 #if ENABLE_ANALYZER
43 namespace ana {

/* Abstract base class for all out-of-bounds warnings.  */

class out_of_bounds : public pending_diagnostic
{
public:
  class oob_region_creation_event_capacity : public region_creation_event_capacity
  {
  public:
    oob_region_creation_event_capacity (tree capacity,
                                        const event_loc_info &loc_info,
                                        out_of_bounds &oob)
    : region_creation_event_capacity (capacity,
                                      loc_info),
      m_oob (oob)
    {
    }
    void prepare_for_emission (checker_path *path,
                               pending_diagnostic *pd,
                               diagnostic_event_id_t emission_id) override
    {
      region_creation_event_capacity::prepare_for_emission (path,
                                                            pd,
                                                            emission_id);
      m_oob.m_region_creation_event_id = emission_id;
    }
  private:
    out_of_bounds &m_oob;
  };

  out_of_bounds (const region_model &model,
                 const region *reg,
                 tree diag_arg,
                 const svalue *sval_hint)
  : m_model (model), m_reg (reg), m_diag_arg (diag_arg), m_sval_hint (sval_hint)
  {}

  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    const out_of_bounds &other
      (static_cast <const out_of_bounds &>(base_other));
    return (m_reg == other.m_reg
            && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg->get_base_region ());
  }

  void add_region_creation_events (const region *,
                                   tree capacity,
                                   const event_loc_info &loc_info,
                                   checker_path &emission_path) override
  {
    /* The memory space is described in the diagnostic message itself,
       so we don't need an event for that.  */
    if (capacity)
      emission_path.add_event
        (make_unique<oob_region_creation_event_capacity> (capacity, loc_info,
                                                          *this));
  }

  virtual enum access_direction get_dir () const = 0;

protected:
  enum memory_space get_memory_space () const
  {
    return m_reg->get_memory_space ();
  }

  void
  maybe_show_notes (diagnostic_emission_context &ctxt) const
  {
    maybe_describe_array_bounds (ctxt.get_location ());
    maybe_show_diagram (ctxt.get_logger ());
  }

  /* Potentially add a note about valid ways to index this array, such
     as (given "int arr[10];"):
       note: valid subscripts for 'arr' are '[0]' to '[9]'
     We print the '[' and ']' characters so as to express the valid
     subscripts using C syntax, rather than just as byte ranges,
     which hopefully is more clear to the user.  */
  void
  maybe_describe_array_bounds (location_t loc) const
  {
    if (!m_diag_arg)
      return;
    tree t = TREE_TYPE (m_diag_arg);
    if (!t)
      return;
    if (TREE_CODE (t) != ARRAY_TYPE)
      return;
    tree domain = TYPE_DOMAIN (t);
    if (!domain)
      return;
    tree max_idx = TYPE_MAX_VALUE (domain);
    if (!max_idx)
      return;
    tree min_idx = TYPE_MIN_VALUE (domain);
    inform (loc,
            "valid subscripts for %qE are %<[%E]%> to %<[%E]%>",
            m_diag_arg, min_idx, max_idx);
  }

  void
  maybe_show_diagram (logger *logger) const
  {
    access_operation op (m_model, get_dir (), *m_reg, m_sval_hint);

    /* Don't attempt to make a diagram if there's no valid way of
       accessing the base region (e.g. a 0-element array).  */
    if (op.get_valid_bits ().empty_p ())
      return;

    if (const text_art::theme *theme = global_dc->get_diagram_theme ())
      {
        text_art::style_manager sm;
        text_art::canvas canvas (make_access_diagram (op, sm, *theme, logger));
        if (canvas.get_size ().w == 0 && canvas.get_size ().h == 0)
          {
            /* In lieu of exceptions, return a zero-sized diagram if there's
               a problem.  Give up if that's happened.  */
            return;
          }
        diagnostic_diagram diagram
          (canvas,
           /* Alt text.  */
           _("Diagram visualizing the predicted out-of-bounds access"));
        global_dc->emit_diagram (diagram);
      }
  }

  text_art::canvas
  make_access_diagram (const access_operation &op,
                       text_art::style_manager &sm,
                       const text_art::theme &theme,
                       logger *logger) const
  {
    access_diagram d (op, m_region_creation_event_id, sm, theme, logger);
    return d.to_canvas (sm);
  }

  region_model m_model;
  const region *m_reg;
  tree m_diag_arg;
  const svalue *m_sval_hint;
  diagnostic_event_id_t m_region_creation_event_id;
};

/* Abstract base class for all out-of-bounds warnings where the
   out-of-bounds range is concrete.  */

class concrete_out_of_bounds : public out_of_bounds
{
public:
  concrete_out_of_bounds (const region_model &model,
                          const region *reg, tree diag_arg,
                          byte_range out_of_bounds_range,
                          const svalue *sval_hint)
  : out_of_bounds (model, reg, diag_arg, sval_hint),
    m_out_of_bounds_range (out_of_bounds_range)
  {}

  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    const concrete_out_of_bounds &other
      (static_cast <const concrete_out_of_bounds &>(base_other));
    return (out_of_bounds::subclass_equal_p (other)
            && m_out_of_bounds_range == other.m_out_of_bounds_range);
  }

protected:
  byte_range m_out_of_bounds_range;
};

/* Abstract subclass to complain about concrete out-of-bounds
   accesses past the end of the buffer.  */

class concrete_past_the_end : public concrete_out_of_bounds
{
public:
  concrete_past_the_end (const region_model &model,
                         const region *reg, tree diag_arg, byte_range range,
                         tree byte_bound,
                         const svalue *sval_hint)
  : concrete_out_of_bounds (model, reg, diag_arg, range, sval_hint),
    m_byte_bound (byte_bound)
  {}

  bool
  subclass_equal_p (const pending_diagnostic &base_other) const final override
  {
    const concrete_past_the_end &other
      (static_cast <const concrete_past_the_end &>(base_other));
    return (concrete_out_of_bounds::subclass_equal_p (other)
            && pending_diagnostic::same_tree_p (m_byte_bound,
                                                other.m_byte_bound));
  }

  void add_region_creation_events (const region *,
                                   tree,
                                   const event_loc_info &loc_info,
                                   checker_path &emission_path) final override
  {
    if (m_byte_bound && TREE_CODE (m_byte_bound) == INTEGER_CST)
      emission_path.add_event
        (make_unique<oob_region_creation_event_capacity> (m_byte_bound,
                                                          loc_info,
                                                          *this));
  }

protected:
  tree m_byte_bound;
};

/* Concrete subclass to complain about buffer overflows.  */

class concrete_buffer_overflow : public concrete_past_the_end
{
public:
  concrete_buffer_overflow (const region_model &model,
                            const region *reg, tree diag_arg,
                            byte_range range, tree byte_bound,
                            const svalue *sval_hint)
  : concrete_past_the_end (model, reg, diag_arg, range, byte_bound, sval_hint)
  {}

  const char *get_kind () const final override
  {
    return "concrete_buffer_overflow";
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    bool warned;
    switch (get_memory_space ())
      {
      default:
        ctxt.add_cwe (787);
        warned = ctxt.warn ("buffer overflow");
        break;
      case MEMSPACE_STACK:
        ctxt.add_cwe (121);
        warned = ctxt.warn ("stack-based buffer overflow");
        break;
      case MEMSPACE_HEAP:
        ctxt.add_cwe (122);
        warned = ctxt.warn ("heap-based buffer overflow");
        break;
      }

    if (warned)
      {
        if (wi::fits_uhwi_p (m_out_of_bounds_range.m_size_in_bytes))
          {
            unsigned HOST_WIDE_INT num_bad_bytes
              = m_out_of_bounds_range.m_size_in_bytes.to_uhwi ();
            if (m_diag_arg)
              inform_n (ctxt.get_location (),
                        num_bad_bytes,
                        "write of %wu byte to beyond the end of %qE",
                        "write of %wu bytes to beyond the end of %qE",
                        num_bad_bytes,
                        m_diag_arg);
            else
              inform_n (ctxt.get_location (),
                        num_bad_bytes,
                        "write of %wu byte to beyond the end of the region",
                        "write of %wu bytes to beyond the end of the region",
                        num_bad_bytes);
          }
        else if (m_diag_arg)
          inform (ctxt.get_location (),
                  "write to beyond the end of %qE",
                  m_diag_arg);

        maybe_show_notes (ctxt);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
                                     " ends at byte %E", start_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds write at byte %s but region"
                                   " ends at byte %E", start_buf,
                                   m_byte_bound);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write from byte %s till"
                                     " byte %s but %qE ends at byte %E",
                                     start_buf, end_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds write from byte %s till"
                                   " byte %s but region ends at byte %E",
                                   start_buf, end_buf, m_byte_bound);
      }
  }

  enum access_direction get_dir () const final override { return DIR_WRITE; }
};
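
/* Illustrative example (hypothetical; not taken from the GCC testsuite) of
   the kind of concrete out-of-bounds write this class reports under
   -fanalyzer, e.g. as "stack-based buffer overflow" with the note
   "write of 1 byte to beyond the end of 'buf'":

     void test_concrete_overflow (void)
     {
       char buf[10];
       buf[10] = 'x';
     }
*/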

/* Concrete subclass to complain about buffer over-reads.  */

class concrete_buffer_over_read : public concrete_past_the_end
{
public:
  concrete_buffer_over_read (const region_model &model,
                             const region *reg, tree diag_arg,
                             byte_range range, tree byte_bound)
  : concrete_past_the_end (model, reg, diag_arg, range, byte_bound, NULL)
  {}

  const char *get_kind () const final override
  {
    return "concrete_buffer_over_read";
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    bool warned;
    ctxt.add_cwe (126);
    switch (get_memory_space ())
      {
      default:
        warned = ctxt.warn ("buffer over-read");
        break;
      case MEMSPACE_STACK:
        warned = ctxt.warn ("stack-based buffer over-read");
        break;
      case MEMSPACE_HEAP:
        warned = ctxt.warn ("heap-based buffer over-read");
        break;
      }

    if (warned)
      {
        if (wi::fits_uhwi_p (m_out_of_bounds_range.m_size_in_bytes))
          {
            unsigned HOST_WIDE_INT num_bad_bytes
              = m_out_of_bounds_range.m_size_in_bytes.to_uhwi ();
            if (m_diag_arg)
              inform_n (ctxt.get_location (),
                        num_bad_bytes,
                        "read of %wu byte from after the end of %qE",
                        "read of %wu bytes from after the end of %qE",
                        num_bad_bytes,
                        m_diag_arg);
            else
              inform_n (ctxt.get_location (),
                        num_bad_bytes,
                        "read of %wu byte from after the end of the region",
                        "read of %wu bytes from after the end of the region",
                        num_bad_bytes);
          }
        else if (m_diag_arg)
          inform (ctxt.get_location (),
                  "read from after the end of %qE",
                  m_diag_arg);

        maybe_show_notes (ctxt);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
                                     " ends at byte %E", start_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds read at byte %s but region"
                                   " ends at byte %E", start_buf,
                                   m_byte_bound);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read from byte %s till"
                                     " byte %s but %qE ends at byte %E",
                                     start_buf, end_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds read from byte %s till"
                                   " byte %s but region ends at byte %E",
                                   start_buf, end_buf, m_byte_bound);
      }
  }

  enum access_direction get_dir () const final override { return DIR_READ; }
};
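
/* Illustrative example (hypothetical) of a concrete over-read that this
   class is intended to report, e.g. as "stack-based buffer over-read" with
   the note "read of 4 bytes from after the end of 'arr'":

     int test_concrete_over_read (void)
     {
       int arr[4] = { 1, 2, 3, 4 };
       return arr[4];
     }
*/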

/* Concrete subclass to complain about buffer underwrites.  */

class concrete_buffer_underwrite : public concrete_out_of_bounds
{
public:
  concrete_buffer_underwrite (const region_model &model,
                              const region *reg, tree diag_arg,
                              byte_range range,
                              const svalue *sval_hint)
  : concrete_out_of_bounds (model, reg, diag_arg, range, sval_hint)
  {}

  const char *get_kind () const final override
  {
    return "concrete_buffer_underwrite";
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    bool warned;
    ctxt.add_cwe (124);
    switch (get_memory_space ())
      {
      default:
        warned = ctxt.warn ("buffer underwrite");
        break;
      case MEMSPACE_STACK:
        warned = ctxt.warn ("stack-based buffer underwrite");
        break;
      case MEMSPACE_HEAP:
        warned = ctxt.warn ("heap-based buffer underwrite");
        break;
      }
    if (warned)
      maybe_show_notes (ctxt);
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
                                     " starts at byte 0", start_buf,
                                     m_diag_arg);
        return ev.formatted_print ("out-of-bounds write at byte %s but region"
                                   " starts at byte 0", start_buf);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write from byte %s till"
                                     " byte %s but %qE starts at byte 0",
                                     start_buf, end_buf, m_diag_arg);
        return ev.formatted_print ("out-of-bounds write from byte %s till"
                                   " byte %s but region starts at byte 0",
                                   start_buf, end_buf);
      }
  }

  enum access_direction get_dir () const final override { return DIR_WRITE; }
};
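
/* Illustrative example (hypothetical) of a concrete underwrite, i.e. a
   write before byte 0 of the buffer, e.g. reported as "stack-based buffer
   underwrite":

     void test_concrete_underwrite (void)
     {
       char buf[10];
       buf[-1] = 'x';
     }
*/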

/* Concrete subclass to complain about buffer under-reads.  */

class concrete_buffer_under_read : public concrete_out_of_bounds
{
public:
  concrete_buffer_under_read (const region_model &model,
                              const region *reg, tree diag_arg,
                              byte_range range)
  : concrete_out_of_bounds (model, reg, diag_arg, range, NULL)
  {}

  const char *get_kind () const final override
  {
    return "concrete_buffer_under_read";
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    bool warned;
    ctxt.add_cwe (127);
    switch (get_memory_space ())
      {
      default:
        warned = ctxt.warn ("buffer under-read");
        break;
      case MEMSPACE_STACK:
        warned = ctxt.warn ("stack-based buffer under-read");
        break;
      case MEMSPACE_HEAP:
        warned = ctxt.warn ("heap-based buffer under-read");
        break;
      }
    if (warned)
      maybe_show_notes (ctxt);
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
                                     " starts at byte 0", start_buf,
                                     m_diag_arg);
        return ev.formatted_print ("out-of-bounds read at byte %s but region"
                                   " starts at byte 0", start_buf);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read from byte %s till"
                                     " byte %s but %qE starts at byte 0",
                                     start_buf, end_buf, m_diag_arg);
        return ev.formatted_print ("out-of-bounds read from byte %s till"
                                   " byte %s but region starts at byte 0",
                                   start_buf, end_buf);
      }
  }

  enum access_direction get_dir () const final override { return DIR_READ; }
};
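
/* Illustrative example (hypothetical) of a concrete under-read, i.e. a
   read before byte 0 of the buffer, e.g. reported as "stack-based buffer
   under-read":

     char test_concrete_under_read (void)
     {
       char buf[10] = "123456789";
       return buf[-1];
     }
*/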

/* Abstract class to complain about out-of-bounds read/writes where
   the values are symbolic.  */

class symbolic_past_the_end : public out_of_bounds
{
public:
  symbolic_past_the_end (const region_model &model,
                         const region *reg, tree diag_arg, tree offset,
                         tree num_bytes, tree capacity,
                         const svalue *sval_hint)
  : out_of_bounds (model, reg, diag_arg, sval_hint),
    m_offset (offset),
    m_num_bytes (num_bytes),
    m_capacity (capacity)
  {}

  bool
  subclass_equal_p (const pending_diagnostic &base_other) const final override
  {
    const symbolic_past_the_end &other
      (static_cast <const symbolic_past_the_end &>(base_other));
    return (out_of_bounds::subclass_equal_p (other)
            && pending_diagnostic::same_tree_p (m_offset, other.m_offset)
            && pending_diagnostic::same_tree_p (m_num_bytes, other.m_num_bytes)
            && pending_diagnostic::same_tree_p (m_capacity, other.m_capacity));
  }

protected:
  tree m_offset;
  tree m_num_bytes;
  tree m_capacity;
};

/* Concrete subclass to complain about overflows with symbolic values.  */

class symbolic_buffer_overflow : public symbolic_past_the_end
{
public:
  symbolic_buffer_overflow (const region_model &model,
                            const region *reg, tree diag_arg, tree offset,
                            tree num_bytes, tree capacity,
                            const svalue *sval_hint)
  : symbolic_past_the_end (model, reg, diag_arg, offset, num_bytes, capacity,
                           sval_hint)
  {
  }

  const char *get_kind () const final override
  {
    return "symbolic_buffer_overflow";
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    bool warned;
    switch (get_memory_space ())
      {
      default:
        ctxt.add_cwe (787);
        warned = ctxt.warn ("buffer overflow");
        break;
      case MEMSPACE_STACK:
        ctxt.add_cwe (121);
        warned = ctxt.warn ("stack-based buffer overflow");
        break;
      case MEMSPACE_HEAP:
        ctxt.add_cwe (122);
        warned = ctxt.warn ("heap-based buffer overflow");
        break;
      }
    if (warned)
      maybe_show_notes (ctxt);
    return warned;
  }

  label_text
  describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_offset)
      {
        /* Known offset.  */
        if (m_num_bytes)
          {
            /* Known offset, known size.  */
            if (TREE_CODE (m_num_bytes) == INTEGER_CST)
              {
                /* Known offset, known constant size.  */
                if (pending_diagnostic::same_tree_p (m_num_bytes,
                                                     integer_one_node))
                  {
                    /* Singular m_num_bytes.  */
                    if (m_diag_arg)
                      return ev.formatted_print
                        ("write of %E byte at offset %qE exceeds %qE",
                         m_num_bytes, m_offset, m_diag_arg);
                    else
                      return ev.formatted_print
                        ("write of %E byte at offset %qE exceeds the buffer",
                         m_num_bytes, m_offset);
                  }
                else
                  {
                    /* Plural m_num_bytes.  */
                    if (m_diag_arg)
                      return ev.formatted_print
                        ("write of %E bytes at offset %qE exceeds %qE",
                         m_num_bytes, m_offset, m_diag_arg);
                    else
                      return ev.formatted_print
                        ("write of %E bytes at offset %qE exceeds the buffer",
                         m_num_bytes, m_offset);
                  }
              }
            else
              {
                /* Known offset, known symbolic size.  */
                if (m_diag_arg)
                  return ev.formatted_print
                    ("write of %qE bytes at offset %qE exceeds %qE",
                     m_num_bytes, m_offset, m_diag_arg);
                else
                  return ev.formatted_print
                    ("write of %qE bytes at offset %qE exceeds the buffer",
                     m_num_bytes, m_offset);
              }
          }
        else
          {
            /* Known offset, unknown size.  */
            if (m_diag_arg)
              return ev.formatted_print ("write at offset %qE exceeds %qE",
                                         m_offset, m_diag_arg);
            else
              return ev.formatted_print ("write at offset %qE exceeds the"
                                         " buffer", m_offset);
          }
      }
    /* Unknown offset.  */
    if (m_diag_arg)
      return ev.formatted_print ("out-of-bounds write on %qE",
                                 m_diag_arg);
    return ev.formatted_print ("out-of-bounds write");
  }

  enum access_direction get_dir () const final override { return DIR_WRITE; }
};
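
/* Illustrative example (hypothetical) of an off-by-one write at a symbolic
   offset that this class is intended to report, e.g. as "heap-based buffer
   overflow" with an event such as "write of 1 byte at offset 'n' exceeds
   'buf'" (here both the capacity and the offset are the symbolic value n):

     #include <stdlib.h>

     void test_symbolic_overflow (size_t n)
     {
       char *buf = (char *) malloc (n);
       if (!buf)
         return;
       buf[n] = '\0';
       free (buf);
     }
*/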

/* Concrete subclass to complain about over-reads with symbolic values.  */

class symbolic_buffer_over_read : public symbolic_past_the_end
{
public:
  symbolic_buffer_over_read (const region_model &model,
                             const region *reg, tree diag_arg, tree offset,
                             tree num_bytes, tree capacity)
  : symbolic_past_the_end (model, reg, diag_arg, offset, num_bytes, capacity,
                           NULL)
  {
  }

  const char *get_kind () const final override
  {
    return "symbolic_buffer_over_read";
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    ctxt.add_cwe (126);
    bool warned;
    switch (get_memory_space ())
      {
      default:
        ctxt.add_cwe (787);
        warned = ctxt.warn ("buffer over-read");
        break;
      case MEMSPACE_STACK:
        ctxt.add_cwe (121);
        warned = ctxt.warn ("stack-based buffer over-read");
        break;
      case MEMSPACE_HEAP:
        ctxt.add_cwe (122);
        warned = ctxt.warn ("heap-based buffer over-read");
        break;
      }
    if (warned)
      maybe_show_notes (ctxt);
    return warned;
  }

  label_text
  describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_offset)
      {
        /* Known offset.  */
        if (m_num_bytes)
          {
            /* Known offset, known size.  */
            if (TREE_CODE (m_num_bytes) == INTEGER_CST)
              {
                /* Known offset, known constant size.  */
                if (pending_diagnostic::same_tree_p (m_num_bytes,
                                                     integer_one_node))
                  {
                    /* Singular m_num_bytes.  */
                    if (m_diag_arg)
                      return ev.formatted_print
                        ("read of %E byte at offset %qE exceeds %qE",
                         m_num_bytes, m_offset, m_diag_arg);
                    else
                      return ev.formatted_print
                        ("read of %E byte at offset %qE exceeds the buffer",
                         m_num_bytes, m_offset);
                  }
                else
                  {
                    /* Plural m_num_bytes.  */
                    if (m_diag_arg)
                      return ev.formatted_print
                        ("read of %E bytes at offset %qE exceeds %qE",
                         m_num_bytes, m_offset, m_diag_arg);
                    else
                      return ev.formatted_print
                        ("read of %E bytes at offset %qE exceeds the buffer",
                         m_num_bytes, m_offset);
                  }
              }
            else
              {
                /* Known offset, known symbolic size.  */
                if (m_diag_arg)
                  return ev.formatted_print
                    ("read of %qE bytes at offset %qE exceeds %qE",
                     m_num_bytes, m_offset, m_diag_arg);
                else
                  return ev.formatted_print
                    ("read of %qE bytes at offset %qE exceeds the buffer",
                     m_num_bytes, m_offset);
              }
          }
        else
          {
            /* Known offset, unknown size.  */
            if (m_diag_arg)
              return ev.formatted_print ("read at offset %qE exceeds %qE",
                                         m_offset, m_diag_arg);
            else
              return ev.formatted_print ("read at offset %qE exceeds the"
                                         " buffer", m_offset);
          }
      }
    /* Unknown offset.  */
    if (m_diag_arg)
      return ev.formatted_print ("out-of-bounds read on %qE",
                                 m_diag_arg);
    return ev.formatted_print ("out-of-bounds read");
  }

  enum access_direction get_dir () const final override { return DIR_READ; }
};
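
/* Illustrative example (hypothetical) of the corresponding off-by-one read
   at a symbolic offset, e.g. reported as "heap-based buffer over-read":

     #include <stdlib.h>
     #include <string.h>

     char test_symbolic_over_read (size_t n)
     {
       char *buf = (char *) malloc (n);
       if (!buf)
         return 0;
       memset (buf, 0, n);
       char c = buf[n];
       free (buf);
       return c;
     }
*/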

/* Check whether an access is past the end of the BASE_REG.
   Return TRUE if the access was valid, FALSE otherwise.  */

bool
region_model::check_symbolic_bounds (const region *base_reg,
                                     const svalue *sym_byte_offset,
                                     const svalue *num_bytes_sval,
                                     const svalue *capacity,
                                     enum access_direction dir,
                                     const svalue *sval_hint,
                                     region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  const svalue *next_byte
    = m_mgr->get_or_create_binop (num_bytes_sval->get_type (), PLUS_EXPR,
                                  sym_byte_offset, num_bytes_sval);

  if (eval_condition (next_byte, GT_EXPR, capacity).is_true ())
    {
      tree diag_arg = get_representative_tree (base_reg);
      tree offset_tree = get_representative_tree (sym_byte_offset);
      tree num_bytes_tree = get_representative_tree (num_bytes_sval);
      tree capacity_tree = get_representative_tree (capacity);
      const region *offset_reg = m_mgr->get_offset_region (base_reg,
                                                           NULL_TREE,
                                                           sym_byte_offset);
      const region *sized_offset_reg = m_mgr->get_sized_region (offset_reg,
                                                                NULL_TREE,
                                                                num_bytes_sval);
      switch (dir)
        {
        default:
          gcc_unreachable ();
          break;
        case DIR_READ:
          gcc_assert (sval_hint == nullptr);
          ctxt->warn (make_unique<symbolic_buffer_over_read> (*this,
                                                              sized_offset_reg,
                                                              diag_arg,
                                                              offset_tree,
                                                              num_bytes_tree,
                                                              capacity_tree));
          return false;
          break;
        case DIR_WRITE:
          ctxt->warn (make_unique<symbolic_buffer_overflow> (*this,
                                                             sized_offset_reg,
                                                             diag_arg,
                                                             offset_tree,
                                                             num_bytes_tree,
                                                             capacity_tree,
                                                             sval_hint));
          return false;
          break;
        }
    }
  return true;
}
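
/* For example (a hypothetical walk-through of the check above): for an
   access of NUM_BYTES_SVAL == 4 bytes starting at SYM_BYTE_OFFSET == 'i'
   within a buffer whose CAPACITY is 'n', NEXT_BYTE is 'i + 4', and a
   diagnostic is only emitted if eval_condition can prove 'i + 4 > n' along
   the current path (e.g. after a guard such as 'if (i >= n)').  */

/* Return SVAL's constant value as an INTEGER_CST tree,
   or NULL_TREE if SVAL is not an integer constant.  */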

static tree
maybe_get_integer_cst_tree (const svalue *sval)
{
  tree cst_tree = sval->maybe_get_constant ();
  if (cst_tree && TREE_CODE (cst_tree) == INTEGER_CST)
    return cst_tree;

  return NULL_TREE;
}

/* May complain when the access on REG is out-of-bounds.
   Return TRUE if the access was valid, FALSE otherwise.  */

bool
region_model::check_region_bounds (const region *reg,
                                   enum access_direction dir,
                                   const svalue *sval_hint,
                                   region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  /* Get the offset.  */
  region_offset reg_offset = reg->get_offset (m_mgr);
  const region *base_reg = reg_offset.get_base_region ();

  /* Find out how many bytes were accessed.  */
  const svalue *num_bytes_sval = reg->get_byte_size_sval (m_mgr);
  tree num_bytes_tree = maybe_get_integer_cst_tree (num_bytes_sval);
  /* Bail out if 0 bytes are accessed.  */
  if (num_bytes_tree && zerop (num_bytes_tree))
    return true;

  /* Get the capacity of the buffer.  */
  const svalue *capacity = get_capacity (base_reg);
  tree cst_capacity_tree = maybe_get_integer_cst_tree (capacity);

  /* The constant offset from a pointer is represented internally as a sizetype
     but should be interpreted as a signed value here.  The statement below
     converts the offset from bits to bytes and then to a signed integer with
     the same precision the sizetype has on the target system.

     For example, this is needed for out-of-bounds-3.c test1 to pass when
     compiled with a 64-bit gcc build targeting 32-bit systems.  */
  byte_offset_t offset;
  if (!reg_offset.symbolic_p ())
    offset = wi::sext (reg_offset.get_bit_offset () >> LOG2_BITS_PER_UNIT,
                       TYPE_PRECISION (size_type_node));

  /* If any of the base region, the offset, or the number of bytes accessed
     are symbolic, we have to reason about symbolic values.  */
  if (base_reg->symbolic_p () || reg_offset.symbolic_p () || !num_bytes_tree)
    {
      const svalue* byte_offset_sval;
      if (!reg_offset.symbolic_p ())
        {
          tree offset_tree = wide_int_to_tree (integer_type_node, offset);
          byte_offset_sval
            = m_mgr->get_or_create_constant_svalue (offset_tree);
        }
      else
        byte_offset_sval = reg_offset.get_symbolic_byte_offset ();
      return check_symbolic_bounds (base_reg, byte_offset_sval, num_bytes_sval,
                                    capacity, dir, sval_hint, ctxt);
    }

  /* Otherwise continue to check with concrete values.  */
  byte_range out (0, 0);
  bool oob_safe = true;
  /* NUM_BYTES_TREE should always be interpreted as unsigned.  */
  byte_offset_t num_bytes_unsigned = wi::to_offset (num_bytes_tree);
  byte_range read_bytes (offset, num_bytes_unsigned);
  /* If read_bytes has a subset < 0, we do have an underwrite.  */
  if (read_bytes.falls_short_of_p (0, &out))
    {
      tree diag_arg = get_representative_tree (base_reg);
      switch (dir)
        {
        default:
          gcc_unreachable ();
          break;
        case DIR_READ:
          gcc_assert (sval_hint == nullptr);
          ctxt->warn (make_unique<concrete_buffer_under_read> (*this, reg,
                                                               diag_arg,
                                                               out));
          oob_safe = false;
          break;
        case DIR_WRITE:
          ctxt->warn (make_unique<concrete_buffer_underwrite> (*this,
                                                               reg, diag_arg,
                                                               out,
                                                               sval_hint));
          oob_safe = false;
          break;
        }
    }

  /* For accesses past the end, we do need a concrete capacity.  No need to
     do a symbolic check here because the inequality check does not reason
     whether constants are greater than symbolic values.  */
  if (!cst_capacity_tree)
    return oob_safe;

  byte_range buffer (0, wi::to_offset (cst_capacity_tree));
  /* If READ_BYTES exceeds BUFFER, we do have an overflow.  */
  if (read_bytes.exceeds_p (buffer, &out))
    {
      tree byte_bound = wide_int_to_tree (size_type_node,
                                          buffer.get_next_byte_offset ());
      tree diag_arg = get_representative_tree (base_reg);

      switch (dir)
        {
        default:
          gcc_unreachable ();
          break;
        case DIR_READ:
          gcc_assert (sval_hint == nullptr);
          ctxt->warn (make_unique<concrete_buffer_over_read> (*this,
                                                              reg, diag_arg,
                                                              out, byte_bound));
          oob_safe = false;
          break;
        case DIR_WRITE:
          ctxt->warn (make_unique<concrete_buffer_overflow> (*this,
                                                             reg, diag_arg,
                                                             out, byte_bound,
                                                             sval_hint));
          oob_safe = false;
          break;
        }
    }
  return oob_safe;
}
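
/* A hypothetical concrete walk-through of the above: for a 4-byte write at
   byte offset 10 into a base region whose capacity is 12 bytes, READ_BYTES
   is the byte range [10, 14), falls_short_of_p (0, &out) is false, and
   exceeds_p (buffer, &out) sets OUT to [12, 14), so a
   concrete_buffer_overflow diagnostic covering the 2 bytes past
   BYTE_BOUND 12 is reported.  */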

} // namespace ana

#endif /* #if ENABLE_ANALYZER */