1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #define INCLUDE_MEMORY
24 #include "coretypes.h"
25 #include "make-unique.h"
28 #include "basic-block.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
36 #include "stringpool.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
41 #include "diagnostic-color.h"
42 #include "diagnostic-metadata.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
51 #include "analyzer/supergraph.h"
53 #include "analyzer/call-string.h"
54 #include "analyzer/program-point.h"
55 #include "analyzer/store.h"
56 #include "analyzer/region-model.h"
57 #include "analyzer/constraint-manager.h"
58 #include "diagnostic-event-id.h"
59 #include "analyzer/sm.h"
60 #include "diagnostic-event-id.h"
61 #include "analyzer/sm.h"
62 #include "analyzer/pending-diagnostic.h"
63 #include "analyzer/region-model-reachability.h"
64 #include "analyzer/analyzer-selftests.h"
65 #include "analyzer/program-state.h"
66 #include "analyzer/call-summary.h"
67 #include "stor-layout.h"
69 #include "tree-object-size.h"
70 #include "gimple-ssa.h"
71 #include "tree-phinodes.h"
72 #include "tree-ssa-operands.h"
73 #include "ssa-iterators.h"
76 #include "gcc-rich-location.h"
77 #include "analyzer/checker-event.h"
78 #include "analyzer/checker-path.h"
84 /* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  Uses TDF_SLIM so the output stays terse.  */
88 dump_tree (pretty_printer *pp, tree t)
90 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
93 /* Dump T to PP in language-independent form in quotes, for
94 debugging/logging/dumping purposes. */
97 dump_quoted_tree (pretty_printer *pp, tree t)
99 pp_begin_quote (pp, pp_show_color (pp));
/* NOTE(review): the actual dump of T between the quote calls is not visible
   in this chunk; presumably dump_tree (pp, t) — confirm against full source.  */
101 pp_end_quote (pp, pp_show_color (pp));
104 /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
105 calls within other pp_printf calls.
107 default_tree_printer handles 'T' and some other codes by calling
108 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
109 dump_generic_node calls pp_printf in various places, leading to
   reentrancy problems when already inside a pp_printf.
112 Ideally pp_printf could be made to be reentrant, but in the meantime
113 this function provides a workaround. */
116 print_quoted_type (pretty_printer *pp, tree t)
/* Emit the opening quote (colorized if the pp has color enabled),
   the type itself, then the closing quote.  */
118 pp_begin_quote (pp, pp_show_color (pp));
119 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
120 pp_end_quote (pp, pp_show_color (pp));
123 /* class region_to_value_map. */
125 /* Assignment operator for region_to_value_map.
   Copies every (region, svalue) mapping from OTHER into this map.
   NOTE(review): the statement clearing m_hash_map before the copy is not
   visible in this chunk — confirm it exists, otherwise stale entries from
   the previous contents would survive the assignment.  */
127 region_to_value_map &
128 region_to_value_map::operator= (const region_to_value_map &other)
131 for (auto iter : other.m_hash_map)
133 const region *reg = iter.first;
134 const svalue *sval = iter.second;
135 m_hash_map.put (reg, sval);
140 /* Equality operator for region_to_value_map.
   Two maps are equal iff they have the same number of entries and every
   (region, svalue) pair in this map appears identically in OTHER.
   Pointer equality suffices since regions/svalues are manager-interned.  */
143 region_to_value_map::operator== (const region_to_value_map &other) const
/* Fast reject: differing sizes cannot be equal.  */
145 if (m_hash_map.elements () != other.m_hash_map.elements ())
148 for (auto iter : *this)
150 const region *reg = iter.first;
151 const svalue *sval = iter.second;
152 const svalue * const *other_slot = other.get (reg);
/* Key missing from OTHER => unequal.  */
153 if (other_slot == NULL)
/* Same key but different value => unequal.  */
155 if (sval != *other_slot)
162 /* Dump this object to PP.
   SIMPLE controls how regions/svalues print themselves; MULTILINE
   selects between one-entry-per-line and a single "{...}" rendering.  */
165 region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
166 bool multiline) const
/* Collect and sort the keys so the dump order is deterministic,
   independent of hash-map iteration order.  */
168 auto_vec<const region *> regs;
169 for (iterator iter = begin (); iter != end (); ++iter)
170 regs.safe_push ((*iter).first);
171 regs.qsort (region::cmp_ptr_ptr);
175 pp_string (pp, " {");
178 FOR_EACH_VEC_ELT (regs, i, reg)
183 pp_string (pp, ", ");
184 reg->dump_to_pp (pp, simple);
185 pp_string (pp, ": ");
186 const svalue *sval = *get (reg);
187 sval->dump_to_pp (pp, true);
195 /* Dump this object to stderr.  Debugging entry point; builds a
   throwaway pretty_printer wired to stderr and delegates to dump_to_pp.  */
198 region_to_value_map::dump (bool simple) const
201 pp_format_decoder (&pp) = default_tree_printer;
/* Inherit the global diagnostic color setting.  */
202 pp_show_color (&pp) = pp_show_color (global_dc->printer);
203 pp.buffer->stream = stderr;
204 dump_to_pp (&pp, simple, true);
210 /* Attempt to merge THIS with OTHER, writing the result
   to OUT.
213 For now, write (region, value) mappings that are in common between THIS
214 and OTHER to OUT, effectively taking the intersection.
216 Reject merger of different values. */
219 region_to_value_map::can_merge_with_p (const region_to_value_map &other,
220 region_to_value_map *out) const
222 for (auto iter : *this)
224 const region *iter_reg = iter.first;
225 const svalue *iter_sval = iter.second;
226 const svalue * const * other_slot = other.get (iter_reg);
/* Only keep mappings present in both maps with identical values.  */
229 if (iter_sval == *other_slot)
230 out->put (iter_reg, iter_sval);
238 /* Purge any state involving SVAL: drop every mapping whose key region
   or mapped value involves SVAL.  */
241 region_to_value_map::purge_state_involving (const svalue *sval)
/* Two-phase removal: collect the doomed keys first, since removing
   entries while iterating the hash map would invalidate the iterator.  */
243 auto_vec<const region *> to_purge;
244 for (auto iter : *this)
246 const region *iter_reg = iter.first;
247 const svalue *iter_sval = iter.second;
248 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
249 to_purge.safe_push (iter_reg);
251 for (auto iter : to_purge)
252 m_hash_map.remove (iter);
255 /* class region_model. */
257 /* Ctor for region_model: construct an "empty" model.
   The constraint manager is heap-allocated and owned by this object
   (freed in the dtor).  */
259 region_model::region_model (region_model_manager *mgr)
260 : m_mgr (mgr), m_store (), m_current_frame (NULL),
263 m_constraints = new constraint_manager (mgr);
266 /* region_model's copy ctor.
   Deep-copies the constraint manager (owned pointer); the store,
   frame pointer and dynamic extents are copied by value.  */
268 region_model::region_model (const region_model &other)
269 : m_mgr (other.m_mgr), m_store (other.m_store),
270 m_constraints (new constraint_manager (*other.m_constraints)),
271 m_current_frame (other.m_current_frame),
272 m_dynamic_extents (other.m_dynamic_extents)
276 /* region_model's dtor.  Releases the owned constraint manager.  */
278 region_model::~region_model ()
280 delete m_constraints;
283 /* region_model's assignment operator.
   Only valid between models sharing the same manager; deep-copies the
   constraint manager, replacing (and freeing) the old one.  */
286 region_model::operator= (const region_model &other)
288 /* m_mgr is const. */
289 gcc_assert (m_mgr == other.m_mgr);
291 m_store = other.m_store;
/* Delete-then-clone the owned constraint manager.  */
293 delete m_constraints;
294 m_constraints = new constraint_manager (*other.m_constraints);
296 m_current_frame = other.m_current_frame;
298 m_dynamic_extents = other.m_dynamic_extents;
303 /* Equality operator for region_model.
305 Amongst other things this directly compares the stores and the constraint
306 managers, so for this to be meaningful both this and OTHER should
307 have been canonicalized. */
310 region_model::operator== (const region_model &other) const
312 /* We can only compare instances that use the same manager. */
313 gcc_assert (m_mgr == other.m_mgr);
315 if (m_store != other.m_store)
318 if (*m_constraints != *other.m_constraints)
321 if (m_current_frame != other.m_current_frame)
324 if (m_dynamic_extents != other.m_dynamic_extents)
/* Sanity check: equal models must hash equally.  */
327 gcc_checking_assert (hash () == other.hash ());
332 /* Generate a hash value for this region_model.
   Combines the store hash with the constraint-manager hash via XOR.
   NOTE(review): m_current_frame/m_dynamic_extents do not visibly
   contribute here; hash collisions across those are resolved by
   operator==.  */
335 region_model::hash () const
337 hashval_t result = m_store.hash ();
338 result ^= m_constraints->hash ();
342 /* Dump a representation of this model to PP, showing the
343 stack, the store, and any constraints.
344 Use SIMPLE to control how svalues and regions are printed. */
347 region_model::dump_to_pp (pretty_printer *pp, bool simple,
348 bool multiline) const
/* Dump the stack: walk frames from innermost (m_current_frame)
   outwards via each frame's calling frame.  */
351 pp_printf (pp, "stack depth: %i", get_stack_depth ());
355 pp_string (pp, " {");
356 for (const frame_region *iter_frame = m_current_frame; iter_frame;
357 iter_frame = iter_frame->get_calling_frame ())
361 else if (iter_frame != m_current_frame)
362 pp_string (pp, ", ");
363 pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
364 iter_frame->dump_to_pp (pp, simple);
/* Dump the store.  */
373 pp_string (pp, ", {");
374 m_store.dump_to_pp (pp, simple, multiline,
375 m_mgr->get_store_manager ());
379 /* Dump constraints. */
380 pp_string (pp, "constraint_manager:");
384 pp_string (pp, " {");
385 m_constraints->dump_to_pp (pp, multiline);
389 /* Dump sizes of dynamic regions, if any are known. */
390 if (!m_dynamic_extents.is_empty ())
392 pp_string (pp, "dynamic_extents:");
393 m_dynamic_extents.dump_to_pp (pp, simple, multiline);
397 /* Dump a representation of this model to FILE.
   Builds a local pretty_printer targeting FP and delegates to
   dump_to_pp.  */
400 region_model::dump (FILE *fp, bool simple, bool multiline) const
403 pp_format_decoder (&pp) = default_tree_printer;
/* Inherit the global diagnostic color setting.  */
404 pp_show_color (&pp) = pp_show_color (global_dc->printer);
405 pp.buffer->stream = fp;
406 dump_to_pp (&pp, simple, multiline);
411 /* Dump a multiline representation of this model to stderr.
   Convenience wrapper around the FILE* overload.  */
414 region_model::dump (bool simple) const
416 dump (stderr, simple, true);
419 /* Dump a multiline representation of this model to stderr.
   Debugger-friendly entry point (callable from gdb as "call model->debug()").  */
422 region_model::debug () const
427 /* Assert that this object is valid.
   Body not visible in this chunk; presumably delegates to the store's
   validation — confirm against full source.  */
430 region_model::validate () const
435 /* Canonicalize the store and constraints, to maximize the chance of
436 equality between region_model instances. */
439 region_model::canonicalize ()
441 m_store.canonicalize (m_mgr->get_store_manager ());
442 m_constraints->canonicalize ();
445 /* Return true if this region_model is in canonical form.
   Checked by canonicalizing a copy and comparing: canonicalization of
   an already-canonical model must be a no-op.  */
448 region_model::canonicalized_p () const
450 region_model copy (*this);
451 copy.canonicalize ();
452 return *this == copy;
455 /* See the comment for store::loop_replay_fixup.
   Thin forwarding wrapper: applies the store-level fixup against
   DST_STATE's store.  */
458 region_model::loop_replay_fixup (const region_model *dst_state)
460 m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
463 /* A subclass of pending_diagnostic for complaining about uses of
   poisoned values (uninitialized, freed, or within a popped stack frame).  */
466 class poisoned_value_diagnostic
467 : public pending_diagnostic_subclass<poisoned_value_diagnostic>
/* EXPR: the tree to report; PKIND: which flavor of poison;
   SRC_REGION: optional region used for "region created here" events.  */
470 poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
471 const region *src_region)
472 : m_expr (expr), m_pkind (pkind),
473 m_src_region (src_region)
476 const char *get_kind () const final override { return "poisoned_value_diagnostic"; }
478 bool use_of_uninit_p () const final override
480 return m_pkind == POISON_KIND_UNINIT;
/* Deduplication: diagnostics are "the same" if expr, kind and source
   region all match.  */
483 bool operator== (const poisoned_value_diagnostic &other) const
485 return (m_expr == other.m_expr
486 && m_pkind == other.m_pkind
487 && m_src_region == other.m_src_region);
/* Map each poison kind to the -Wanalyzer-* option controlling it.  */
490 int get_controlling_option () const final override
496 case POISON_KIND_UNINIT:
497 return OPT_Wanalyzer_use_of_uninitialized_value;
498 case POISON_KIND_FREED:
499 return OPT_Wanalyzer_use_after_free;
500 case POISON_KIND_POPPED_STACK:
501 return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
/* Emit the warning, with a CWE annotation where one applies.  */
505 bool emit (rich_location *rich_loc) final override
511 case POISON_KIND_UNINIT:
513 diagnostic_metadata m;
514 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
515 return warning_meta (rich_loc, m, get_controlling_option (),
516 "use of uninitialized value %qE",
520 case POISON_KIND_FREED:
522 diagnostic_metadata m;
523 m.add_cwe (416); /* "CWE-416: Use After Free". */
524 return warning_meta (rich_loc, m, get_controlling_option (),
525 "use after %<free%> of %qE",
529 case POISON_KIND_POPPED_STACK:
531 /* TODO: which CWE? */
533 (rich_loc, get_controlling_option (),
534 "dereferencing pointer %qE to within stale stack frame",
/* Describe the final event in the diagnostic path, per poison kind.  */
541 label_text describe_final_event (const evdesc::final_event &ev) final override
547 case POISON_KIND_UNINIT:
548 return ev.formatted_print ("use of uninitialized value %qE here",
550 case POISON_KIND_FREED:
551 return ev.formatted_print ("use after %<free%> of %qE here",
553 case POISON_KIND_POPPED_STACK:
554 return ev.formatted_print
555 ("dereferencing pointer %qE to within stale stack frame",
/* Highlight the source region (if any) so its creation is shown.  */
560 void mark_interesting_stuff (interesting_t *interest) final override
563 interest->add_region_creation (m_src_region);
/* NOTE(review): the m_expr member declaration is not visible in this
   chunk; presumably "tree m_expr;" precedes these.  */
568 enum poison_kind m_pkind;
569 const region *m_src_region;
572 /* A subclass of pending_diagnostic for complaining about shifts
573 by negative counts. */
575 class shift_count_negative_diagnostic
576 : public pending_diagnostic_subclass<shift_count_negative_diagnostic>
/* ASSIGN: the offending shift stmt; COUNT_CST: the negative shift count.  */
579 shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
580 : m_assign (assign), m_count_cst (count_cst)
583 const char *get_kind () const final override
585 return "shift_count_negative_diagnostic";
/* Deduplicate on stmt identity plus structural equality of the count.  */
588 bool operator== (const shift_count_negative_diagnostic &other) const
590 return (m_assign == other.m_assign
591 && same_tree_p (m_count_cst, other.m_count_cst));
594 int get_controlling_option () const final override
596 return OPT_Wanalyzer_shift_count_negative;
599 bool emit (rich_location *rich_loc) final override
601 return warning_at (rich_loc, get_controlling_option (),
602 "shift by negative count (%qE)", m_count_cst);
605 label_text describe_final_event (const evdesc::final_event &ev) final override
607 return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
611 const gassign *m_assign;
615 /* A subclass of pending_diagnostic for complaining about shifts
616 by counts >= the width of the operand type. */
618 class shift_count_overflow_diagnostic
619 : public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
/* ASSIGN: the offending shift stmt; OPERAND_PRECISION: bit width of the
   shifted operand's type; COUNT_CST: the out-of-range shift count.  */
622 shift_count_overflow_diagnostic (const gassign *assign,
623 int operand_precision,
625 : m_assign (assign), m_operand_precision (operand_precision),
626 m_count_cst (count_cst)
629 const char *get_kind () const final override
631 return "shift_count_overflow_diagnostic";
/* Deduplicate on stmt, precision, and structural count equality.  */
634 bool operator== (const shift_count_overflow_diagnostic &other) const
636 return (m_assign == other.m_assign
637 && m_operand_precision == other.m_operand_precision
638 && same_tree_p (m_count_cst, other.m_count_cst));
641 int get_controlling_option () const final override
643 return OPT_Wanalyzer_shift_count_overflow;
646 bool emit (rich_location *rich_loc) final override
648 return warning_at (rich_loc, get_controlling_option (),
649 "shift by count (%qE) >= precision of type (%qi)",
650 m_count_cst, m_operand_precision);
653 label_text describe_final_event (const evdesc::final_event &ev) final override
655 return ev.formatted_print ("shift by count %qE here", m_count_cst);
659 const gassign *m_assign;
660 int m_operand_precision;
664 /* If ASSIGN is a stmt that can be modelled via
665 set_value (lhs_reg, SVALUE, CTXT)
666 for some SVALUE, get the SVALUE.
667 Otherwise return NULL.
   Dispatches on the rhs code of ASSIGN; CTXT (may be NULL) is used for
   reporting diagnostics such as the shift-count warnings below.  */
670 region_model::get_gassign_result (const gassign *assign,
671 region_model_context *ctxt)
673 tree lhs = gimple_assign_lhs (assign);
674 tree rhs1 = gimple_assign_rhs1 (assign);
675 enum tree_code op = gimple_assign_rhs_code (assign);
681 case POINTER_PLUS_EXPR:
683 /* e.g. "_1 = a_10(D) + 12;" */
685 tree offset = gimple_assign_rhs2 (assign);
687 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
688 const svalue *offset_sval = get_rvalue (offset, ctxt);
689 /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
690 is an integer of type sizetype". */
691 offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);
693 const svalue *sval_binop
694 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
695 ptr_sval, offset_sval);
700 case POINTER_DIFF_EXPR:
702 /* e.g. "_1 = p_2(D) - q_3(D);". */
703 tree rhs2 = gimple_assign_rhs2 (assign);
704 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
705 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
707 // TODO: perhaps fold to zero if they're known to be equal?
709 const svalue *sval_binop
710 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
711 rhs1_sval, rhs2_sval);
716 /* Assignments of the form
717 set_value (lvalue (LHS), rvalue (EXPR))
719 We already have the lvalue for the LHS above, as "lhs_reg". */
720 case ADDR_EXPR: /* LHS = &RHS; */
722 case COMPONENT_REF: /* LHS = op0.op1; */
729 case SSA_NAME: /* LHS = VAR; */
730 case VAR_DECL: /* LHS = VAR; */
731 case PARM_DECL:/* LHS = VAR; */
/* Simple copies: the result is just the rvalue of rhs1.  */
734 return get_rvalue (rhs1, ctxt);
/* Unary ops: wrap the operand's svalue in a unaryop svalue.  */
744 case VIEW_CONVERT_EXPR:
747 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
748 const svalue *sval_unaryop
749 = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
/* Comparisons: try to resolve via constraints when the result is
   boolean; otherwise fall back to a symbolic binop.  */
762 tree rhs2 = gimple_assign_rhs2 (assign);
764 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
765 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
767 if (TREE_TYPE (lhs) == boolean_type_node)
769 /* Consider constraints between svalues. */
770 tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
772 return m_mgr->get_or_create_constant_svalue
773 (t.is_true () ? boolean_true_node : boolean_false_node);
776 /* Otherwise, generate a symbolic binary op. */
777 const svalue *sval_binop
778 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
779 rhs1_sval, rhs2_sval);
/* General arithmetic binops, including shifts (which get extra
   diagnostics for negative/oversized counts).  */
787 case MULT_HIGHPART_EXPR:
810 tree rhs2 = gimple_assign_rhs2 (assign);
812 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
813 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
815 if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
817 /* "INT34-C. Do not shift an expression by a negative number of bits
818 or by greater than or equal to the number of bits that exist in
820 if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
821 if (TREE_CODE (rhs2_cst) == INTEGER_CST)
823 if (tree_int_cst_sgn (rhs2_cst) < 0)
825 (make_unique<shift_count_negative_diagnostic>
827 else if (compare_tree_int (rhs2_cst,
828 TYPE_PRECISION (TREE_TYPE (rhs1)))
831 (make_unique<shift_count_overflow_diagnostic>
833 int (TYPE_PRECISION (TREE_TYPE (rhs1))),
838 const svalue *sval_binop
839 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
840 rhs1_sval, rhs2_sval);
844 /* Vector expressions. In theory we could implement these elementwise,
845 but for now, simply return unknown values. */
846 case VEC_DUPLICATE_EXPR:
847 case VEC_SERIES_EXPR:
850 case VEC_WIDEN_MULT_HI_EXPR:
851 case VEC_WIDEN_MULT_LO_EXPR:
852 case VEC_WIDEN_MULT_EVEN_EXPR:
853 case VEC_WIDEN_MULT_ODD_EXPR:
854 case VEC_UNPACK_HI_EXPR:
855 case VEC_UNPACK_LO_EXPR:
856 case VEC_UNPACK_FLOAT_HI_EXPR:
857 case VEC_UNPACK_FLOAT_LO_EXPR:
858 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
859 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
860 case VEC_PACK_TRUNC_EXPR:
861 case VEC_PACK_SAT_EXPR:
862 case VEC_PACK_FIX_TRUNC_EXPR:
863 case VEC_PACK_FLOAT_EXPR:
864 case VEC_WIDEN_LSHIFT_HI_EXPR:
865 case VEC_WIDEN_LSHIFT_LO_EXPR:
866 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
870 /* Workaround for discarding certain false positives from
871 -Wanalyzer-use-of-uninitialized-value
   in compound conditionals of the form:
873 ((A OR-IF B) OR-IF C)
   or:
875 ((A AND-IF B) AND-IF C)
876 where evaluating B is redundant, but could involve simple accesses of
877 uninitialized locals.
879 When optimization is turned on the FE can immediately fold compound
880 conditionals. Specifically, c_parser_condition parses this condition:
881 ((A OR-IF B) OR-IF C)
882 and calls c_fully_fold on the condition.
883 Within c_fully_fold, fold_truth_andor is called, which bails when
884 optimization is off, but if any optimization is turned on can convert the
885 ((A OR-IF B) OR-IF C)
888 for sufficiently simple B
889 i.e. the inner OR-IF becomes an OR.
890 At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
891 giving this for the inner condition:
894 thus effectively synthesizing a redundant access of B when optimization
895 is turned on, when compared to:
896 if (A) goto L1; else goto L4;
897 L1: if (B) goto L2; else goto L4;
898 L2: if (C) goto L3; else goto L4;
899 for the unoptimized case.
901 Return true if CTXT appears to be handling such a short-circuitable stmt,
902 such as the def-stmt for B for the:
904 case above, for the case where A is true and thus B would have been
905 short-circuited without optimization, using MODEL for the value of A. */
908 within_short_circuited_stmt_p (const region_model *model,
909 const gassign *assign_stmt)
911 /* We must have an assignment to a temporary of _Bool type. */
912 tree lhs = gimple_assign_lhs (assign_stmt);
913 if (TREE_TYPE (lhs) != boolean_type_node)
915 if (TREE_CODE (lhs) != SSA_NAME)
/* An anonymous SSA temporary (no underlying user variable).  */
917 if (SSA_NAME_VAR (lhs) != NULL_TREE)
920 /* The temporary bool must be used exactly once: as the second arg of
921 a BIT_IOR_EXPR or BIT_AND_EXPR. */
922 use_operand_p use_op;
924 if (!single_imm_use (lhs, &use_op, &use_stmt))
926 const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
929 enum tree_code op = gimple_assign_rhs_code (use_assign);
930 if (!(op == BIT_IOR_EXPR ||op == BIT_AND_EXPR))
932 if (!(gimple_assign_rhs1 (use_assign) != lhs
933 && gimple_assign_rhs2 (use_assign) == lhs))
936 /* The first arg of the bitwise stmt must have a known value in MODEL
937 that implies that the value of the second arg doesn't matter, i.e.
938 1 for bitwise or, 0 for bitwise and. */
939 tree other_arg = gimple_assign_rhs1 (use_assign);
940 /* Use a NULL ctxt here to avoid generating warnings. */
941 const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
942 tree other_arg_cst = other_arg_sval->maybe_get_constant ();
/* For BIT_IOR_EXPR: a zero first arg means B's value DOES matter.  */
950 if (zerop (other_arg_cst))
/* For BIT_AND_EXPR: a nonzero first arg means B's value DOES matter.  */
954 if (!zerop (other_arg_cst))
959 /* All tests passed. We appear to be in a stmt that generates a boolean
960 temporary with a value that won't matter. */
964 /* Workaround for discarding certain false positives from
965 -Wanalyzer-use-of-uninitialized-value
966 seen with -ftrivial-auto-var-init=.
968 -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.
970 If the address of the var is taken, gimplification will give us
   something like:
973 _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
976 The result of DEFERRED_INIT will be an uninit value; we don't
977 want to emit a false positive for "len = _1;"
979 Return true if ASSIGN_STMT is such a stmt. */
982 due_to_ifn_deferred_init_p (const gassign *assign_stmt)
985 /* We must have an assignment to a decl from an SSA name that's the
986 result of a IFN_DEFERRED_INIT call. */
987 if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
989 tree lhs = gimple_assign_lhs (assign_stmt);
990 if (TREE_CODE (lhs) != VAR_DECL)
992 tree rhs = gimple_assign_rhs1 (assign_stmt);
993 if (TREE_CODE (rhs) != SSA_NAME)
/* Walk to the def-stmt of the SSA name and check it is the
   internal-fn call IFN_DEFERRED_INIT.  */
995 const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
996 const gcall *call = dyn_cast <const gcall *> (def_stmt);
999 if (gimple_call_internal_p (call)
1000 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1005 /* Check for SVAL being poisoned, adding a warning to CTXT.
1006 Return SVAL, or, if a warning is added, another value, to avoid
1007 repeatedly complaining about the same poisoned value in followup code. */
1010 region_model::check_for_poison (const svalue *sval,
1012 region_model_context *ctxt) const
1017 if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
1019 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
1021 /* Ignore uninitialized uses of empty types; there's nothing
   to initialize in them. */
1023 if (pkind == POISON_KIND_UNINIT
1024 && sval->get_type ()
1025 && is_empty_type (sval->get_type ()))
1028 if (pkind == POISON_KIND_UNINIT)
1029 if (const gimple *curr_stmt = ctxt->get_stmt ())
1030 if (const gassign *assign_stmt
1031 = dyn_cast <const gassign *> (curr_stmt))
1033 /* Special case to avoid certain false positives. */
1034 if (within_short_circuited_stmt_p (this, assign_stmt))
1037 /* Special case to avoid false positive on
1038 -ftrivial-auto-var-init=. */
1039 if (due_to_ifn_deferred_init_p (assign_stmt))
1043 /* If we have an SSA name for a temporary, we don't want to print
   '<unknown>'.
1045 Poisoned values are shared by type, and so we can't reconstruct
1046 the tree other than via the def stmts, using
1047 fixup_tree_for_diagnostic. */
1048 tree diag_arg = fixup_tree_for_diagnostic (expr);
1049 const region *src_region = NULL;
1050 if (pkind == POISON_KIND_UNINIT)
1051 src_region = get_region_for_poisoned_expr (expr);
1052 if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
1056 /* We only want to report use of a poisoned value at the first
1057 place it gets used; return an unknown value to avoid generating
1058 a chain of followup warnings. */
1059 sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
1068 /* Attempt to get a region for describing EXPR, the source of region of
1069 a poisoned_svalue for use in a poisoned_value_diagnostic.
1070 Return NULL if there is no good region to use. */
1073 region_model::get_region_for_poisoned_expr (tree expr) const
/* For an SSA name, prefer the underlying user-visible decl (if any)
   over the anonymous temporary itself.  */
1075 if (TREE_CODE (expr) == SSA_NAME)
1077 tree decl = SSA_NAME_VAR (expr);
1078 if (decl && DECL_P (decl))
/* Use a NULL ctxt: this lookup is for diagnostics only and must not
   itself trigger warnings.  */
1083 return get_lvalue (expr, NULL);
1086 /* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics. */
1090 region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
1092 tree lhs = gimple_assign_lhs (assign);
1093 tree rhs1 = gimple_assign_rhs1 (assign);
1095 const region *lhs_reg = get_lvalue (lhs, ctxt);
1097 /* Most assignments are handled by:
1098 set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE.  */
1100 if (const svalue *sval = get_gassign_result (assign, ctxt))
1102 tree expr = get_diagnostic_tree_for_gassign (assign);
1103 check_for_poison (sval, expr, ctxt);
1104 set_value (lhs_reg, sval, ctxt);
/* Fall through to ops that get_gassign_result doesn't handle.  */
1108 enum tree_code op = gimple_assign_rhs_code (assign);
/* Unhandled op: report a "sorry" once and treat the lhs as unknown so
   analysis can continue.  */
1114 sorry_at (assign->location, "unhandled assignment op: %qs",
1115 get_tree_code_name (op));
1116 const svalue *unknown_sval
1117 = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
1118 set_value (lhs_reg, unknown_sval, ctxt);
/* CONSTRUCTOR handling.  */
1124 if (TREE_CLOBBER_P (rhs1))
1126 /* e.g. "x ={v} {CLOBBER};" */
1127 clobber_region (lhs_reg);
1131 /* Any CONSTRUCTOR that survives to this point is either
1132 just a zero-init of everything, or a vector. */
1133 if (!CONSTRUCTOR_NO_CLEARING (rhs1))
1134 zero_fill_region (lhs_reg);
/* Write each vector element into the corresponding element region.  */
1138 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
1140 gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
/* Synthesize an index when the constructor elt has none.  */
1142 index = build_int_cst (integer_type_node, ix);
1143 gcc_assert (TREE_CODE (index) == INTEGER_CST);
1144 const svalue *index_sval
1145 = m_mgr->get_or_create_constant_svalue (index);
1146 gcc_assert (index_sval);
1147 const region *sub_reg
1148 = m_mgr->get_element_region (lhs_reg,
1151 const svalue *val_sval = get_rvalue (val, ctxt);
1152 set_value (sub_reg, val_sval, ctxt);
1160 /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};". */
1161 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
1162 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
1163 ctxt ? ctxt->get_uncertainty () : NULL);
1169 /* Handle the pre-sm-state part of STMT, modifying this object in-place.
1170 Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects. */
1174 region_model::on_stmt_pre (const gimple *stmt,
1175 bool *out_unknown_side_effects,
1176 region_model_context *ctxt)
/* Dispatch on the kind of gimple stmt.  */
1178 switch (gimple_code (stmt))
1181 /* No-op for now. */
1186 const gassign *assign = as_a <const gassign *> (stmt);
1187 on_assignment (assign, ctxt);
1193 const gasm *asm_stmt = as_a <const gasm *> (stmt);
1194 on_asm_stmt (asm_stmt, ctxt);
1200 /* Track whether we have a gcall to a function that's not recognized by
1201 anything, for which we don't have a function body, or for which we
1202 don't know the fndecl. */
1203 const gcall *call = as_a <const gcall *> (stmt);
1204 *out_unknown_side_effects = on_call_pre (call, ctxt);
1210 const greturn *return_ = as_a <const greturn *> (stmt);
1211 on_return (return_, ctxt);
1217 /* Ensure that all arguments at the call described by CD are checked
1218 for poisoned values, by calling get_rvalue on each argument.
   The rvalues themselves are discarded; the point is the side effect of
   get_arg_svalue triggering poison checks.  */
1221 region_model::check_call_args (const call_details &cd) const
1223 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1224 cd.get_arg_svalue (arg_idx);
1227 /* Return true if CD is known to be a call to a function with
1228 __attribute__((const)).
   (TREE_READONLY on a FUNCTION_DECL encodes the "const" attribute.)  */
1231 const_fn_p (const call_details &cd)
1233 tree fndecl = cd.get_fndecl_for_call ();
1236 gcc_assert (DECL_P (fndecl));
1237 return TREE_READONLY (fndecl);
1240 /* If this CD is known to be a call to a function with
1241 __attribute__((const)), attempt to get a const_fn_result_svalue
1242 based on the arguments, or return NULL otherwise. */
1244 static const svalue *
1245 maybe_get_const_fn_result (const call_details &cd)
1247 if (!const_fn_p (cd))
1250 unsigned num_args = cd.num_args ();
1251 if (num_args > const_fn_result_svalue::MAX_INPUTS)
1252 /* Too many arguments. */
/* Gather the argument svalues; bail if any can't carry associated
   state (e.g. unknown/poisoned values).  */
1255 auto_vec<const svalue *> inputs (num_args);
1256 for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
1258 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
1259 if (!arg_sval->can_have_associated_state_p ())
1261 inputs.quick_push (arg_sval);
1264 region_model_manager *mgr = cd.get_manager ();
1266 = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
1267 cd.get_fndecl_for_call (),
1272 /* Update this model for an outcome of a call that returns a specific
   integer constant RETVAL.
1274 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1275 the state-merger code from merging success and failure outcomes. */
1278 region_model::update_for_int_cst_return (const call_details &cd,
/* No-op if the call's lhs is absent or not of integer type.  */
1282 if (!cd.get_lhs_type ())
1284 if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1286 const svalue *result
1287 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
1289 result = m_mgr->get_or_create_unmergeable (result);
1290 set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1293 /* Update this model for an outcome of a call that returns zero.
1294 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1295 the state-merger code from merging success and failure outcomes.
   Thin wrapper around update_for_int_cst_return with retval == 0.  */
1298 region_model::update_for_zero_return (const call_details &cd,
1301 update_for_int_cst_return (cd, 0, unmergeable);
1304 /* Update this model for an outcome of a call that returns non-zero.
   Rather than binding a specific value, adds the constraint
   "result != 0" on the value already stored in the lhs region.  */
1307 region_model::update_for_nonzero_return (const call_details &cd)
1309 if (!cd.get_lhs_type ())
1311 if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1314 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1315 const svalue *result
1316 = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
1317 add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
1320 /* Subroutine of region_model::maybe_get_copy_bounds.
1321 The Linux kernel commonly uses
1322 min_t([unsigned] long, VAR, sizeof(T));
1323 to set an upper bound on the size of a copy_to_user.
1324 Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
1326 Return the simplified svalue if possible, or NULL otherwise. */
1328 static const svalue *
1329 maybe_simplify_upper_bound (const svalue *num_bytes_sval,
1330 region_model_manager *mgr)
1332 tree type = num_bytes_sval->get_type ();
/* Strip any casts to look at the underlying expression.  */
1333 while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
1334 num_bytes_sval = raw;
/* For MIN_EXPR (VAR, CST), use the constant arm as the bound, cast
   back to the original type.  */
1335 if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
1336 if (binop_sval->get_op () == MIN_EXPR)
1337 if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
1339 return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
1340 /* TODO: we might want to also capture the constraint
1341 when recording the diagnostic, or note that we're using
   an upper bound. */
1347 /* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.
1350 NUM_BYTES_SVAL is the symbolic value for the size of the copy.
1351 Use it if it's constant, otherwise try to simplify it. Failing
1352 that, use the size of SRC_REG if constant.
1354 Return a symbolic value for an upper limit on the number of bytes
1355 copied, or NULL if no such value could be determined. */
1358 region_model::maybe_get_copy_bounds (const region *src_reg,
1359 const svalue *num_bytes_sval)
/* Already constant: use as-is.  */
1361 if (num_bytes_sval->maybe_get_constant ())
1362 return num_bytes_sval;
/* Try the min_t-style simplification.  */
1364 if (const svalue *simplified
1365 = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
1366 num_bytes_sval = simplified;
1368 if (num_bytes_sval->maybe_get_constant ())
1369 return num_bytes_sval;
1371 /* For now, try just guessing the size as the capacity of the
1372 base region of the src.
1373 This is a hack; we might get too large a value. */
1374 const region *src_base_reg = src_reg->get_base_region ();
1375 num_bytes_sval = get_capacity (src_base_reg);
1377 if (num_bytes_sval->maybe_get_constant ())
1378 return num_bytes_sval;
1380 /* Non-constant: give up. */
1384 /* Get any known_function for FNDECL for call CD.
1386 The call must match all assumptions made by the known_function (such as
1387 e.g. "argument 1's type must be a pointer type").
1389 Return NULL if no known_function is found, or it does not match the
1392 const known_function *
1393 region_model::get_known_function (tree fndecl, const call_details &cd) const
/* Delegate to the known_function_manager, which checks that CD matches
   all of the known_function's assumptions.  */
1395 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
1396 return known_fn_mgr->get_match (fndecl, cd);
1399 /* Get any known_function for IFN, or NULL. */
1401 const known_function *
1402 region_model::get_known_function (enum internal_fn ifn) const
/* Internal functions are looked up directly by their enum value.  */
1404 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
1405 return known_fn_mgr->get_internal_fn (ifn);
1408 /* Update this model for the CALL stmt, using CTXT to report any
1409 diagnostics - the first half.
1411 Updates to the region_model that should be made *before* sm-states
1412 are updated are done here; other updates to the region_model are done
1413 in region_model::on_call_post.
1415 Return true if the function call has unknown side effects (it wasn't
1416 recognized and we don't have a body for it, or are unable to tell which
1420 region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
1422 call_details cd (call, this, ctxt);
/* Assume no unknown side effects until proven otherwise below.  */
1424 bool unknown_side_effects = false;
1426 /* Special-case for IFN_DEFERRED_INIT.
1427 We want to report uninitialized variables with -fanalyzer (treating
1428 -ftrivial-auto-var-init= as purely a mitigation feature).
1429 Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
1430 lhs of the call, so that it is still uninitialized from the point of
1431 view of the analyzer. */
1432 if (gimple_call_internal_p (call)
1433 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1436 /* Get svalues for all of the arguments at the callsite, to ensure that we
1437 complain about any uninitialized arguments. This might lead to
1438 duplicates if any of the handling below also looks up the svalues,
1439 but the deduplication code should deal with that. */
1441 check_call_args (cd);
1443 /* Some of the cases below update the lhs of the call based on the
1444 return value, but not all. Provide a default value, which may
1445 get overwritten below. */
1446 if (tree lhs = gimple_call_lhs (call))
1448 const region *lhs_region = get_lvalue (lhs, ctxt);
/* maybe_get_const_fn_result handles __attribute__((const)) functions;
   otherwise fall back to a conjured value below.  */
1449 const svalue *sval = maybe_get_const_fn_result (cd);
1452 /* For the common case of functions without __attribute__((const)),
1453 use a conjured value, and purge any prior state involving that
1454 value (in case this is in a loop). */
1455 sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
1457 conjured_purge (this,
1460 set_value (lhs_region, sval, ctxt);
/* Dispatch internal functions that have a known_function handler.  */
1463 if (gimple_call_internal_p (call))
1464 if (const known_function *kf
1465 = get_known_function (gimple_call_internal_fn (call)))
1467 kf->impl_call_pre (cd);
/* Ordinary calls: try known_function handlers, then builtins, then
   classify anything without a GIMPLE body as having unknown side
   effects (unless const/pure).  */
1471 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1473 int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
1475 if (const known_function *kf = get_known_function (callee_fndecl, cd))
1477 kf->impl_call_pre (cd);
1480 else if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
1481 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1483 if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
1484 unknown_side_effects = true;
1486 else if (!fndecl_has_gimple_body_p (callee_fndecl)
1487 && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
1488 && !fndecl_built_in_p (callee_fndecl))
1489 unknown_side_effects = true;
/* No fndecl (e.g. a call through a function pointer): be conservative.  */
1492 unknown_side_effects = true;
1494 return unknown_side_effects;
1497 /* Update this model for the CALL stmt, using CTXT to report any
1498 diagnostics - the second half.
1500 Updates to the region_model that should be made *after* sm-states
1501 are updated are done here; other updates to the region_model are done
1502 in region_model::on_call_pre.
1504 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
1508 region_model::on_call_post (const gcall *call,
1509 bool unknown_side_effects,
1510 region_model_context *ctxt)
1512 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1514 call_details cd (call, this, ctxt);
/* Run any post-call handler registered for this fndecl.  */
1515 if (const known_function *kf = get_known_function (callee_fndecl, cd))
1517 kf->impl_call_post (cd);
1520 /* Was this fndecl referenced by
1521 __attribute__((malloc(FOO)))? */
1522 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
1524 impl_deallocation_call (cd);
/* Finally, model the effects of a call we couldn't recognize.  */
1529 if (unknown_side_effects)
1530 handle_unrecognized_call (call, ctxt);
1533 /* Purge state involving SVAL from this region_model, using CTXT
1534 (if non-NULL) to purge other state in a program_state.
1536 For example, if we're at the def-stmt of an SSA name, then we need to
1537 purge any state for svalues that involve that SSA name. This avoids
1538 false positives in loops, since a symbolic value referring to the
1539 SSA name will be referring to the previous value of that SSA name.
1542 while ((e = hashmap_iter_next(&iter))) {
1543 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
1544 free (e_strbuf->value);
1546 at the def-stmt of e_8:
1547 e_8 = hashmap_iter_next (&iter);
1548 we should purge the "freed" state of:
1549 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
1550 which is the "e_strbuf->value" value from the previous iteration,
1551 or we will erroneously report a double-free - the "e_8" within it
1552 refers to the previous value. */
1555 region_model::purge_state_involving (const svalue *sval,
1556 region_model_context *ctxt)
/* Nothing to do for svalues that can't have sm-state associated.  */
1558 if (!sval->can_have_associated_state_p ())
/* Purge from the store, from the constraints, and from the recorded
   dynamic extents, then let CTXT purge any program_state state.  */
1560 m_store.purge_state_involving (sval, m_mgr);
1561 m_constraints->purge_state_involving (sval);
1562 m_dynamic_extents.purge_state_involving (sval);
1564 ctxt->purge_state_involving (sval);
1567 /* A pending_note subclass for adding a note about an
1568 __attribute__((access, ...)) to a diagnostic. */
1570 class reason_attr_access : public pending_note_subclass<reason_attr_access>
1573 reason_attr_access (tree callee_fndecl, const attr_access &access)
1574 : m_callee_fndecl (callee_fndecl),
1575 m_ptr_argno (access.ptrarg),
1576 m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
1580 const char *get_kind () const final override { return "reason_attr_access"; }
/* Emit a note pointing at the callee's declaration, describing the
   attribute on the relevant parameter.  */
1582 void emit () const final override
1584 inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
1585 "parameter %i of %qD marked with attribute %qs",
1586 m_ptr_argno + 1, m_callee_fndecl, m_access_str);
/* Deduplication: two notes are the same if they refer to the same decl,
   argument index, and attribute string.  */
1589 bool operator== (const reason_attr_access &other) const
1591 return (m_callee_fndecl == other.m_callee_fndecl
1592 && m_ptr_argno == other.m_ptr_argno
1593 && !strcmp (m_access_str, other.m_access_str));
1597 tree m_callee_fndecl;
1598 unsigned m_ptr_argno;
1599 const char *m_access_str;
1602 /* Check CALL a call to external function CALLEE_FNDECL based on
1603 any __attribute__ ((access, ....) on the latter, complaining to
1604 CTXT about any issues.
1606 Currently we merely call check_region_for_write on any regions
1607 pointed to by arguments marked with a "write_only" or "read_write"
1612 check_external_function_for_access_attr (const gcall *call,
1614 region_model_context *ctxt) const
1617 gcc_assert (callee_fndecl);
1620 tree fntype = TREE_TYPE (callee_fndecl);
/* Bail out early if the function type has no attributes at all.  */
1624 if (!TYPE_ATTRIBUTES (fntype))
1627 /* Initialize a map of attribute access specifications for arguments
1628 to the function call. */
1630 init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
/* Walk the parameter types, checking each argument that has an
   access specification.  */
1634 for (tree iter = TYPE_ARG_TYPES (fntype); iter;
1635 iter = TREE_CHAIN (iter), ++argno)
1637 const attr_access* access = rdwr_idx.get (argno);
1641 /* Ignore any duplicate entry in the map for the size argument. */
1642 if (access->ptrarg != argno)
/* Only write_only and read_write imply that the callee writes
   through the pointer, and hence that we should check for
   writability.  */
1645 if (access->mode == access_write_only
1646 || access->mode == access_read_write)
1648 /* Subclass of decorated_region_model_context that
1649 adds a note about the attr access to any saved diagnostics. */
1650 class annotating_ctxt : public note_adding_context
1653 annotating_ctxt (tree callee_fndecl,
1654 const attr_access &access,
1655 region_model_context *ctxt)
1656 : note_adding_context (ctxt),
1657 m_callee_fndecl (callee_fndecl),
1661 std::unique_ptr<pending_note> make_note () final override
1663 return make_unique<reason_attr_access>
1664 (m_callee_fndecl, m_access);
1667 tree m_callee_fndecl;
1668 const attr_access &m_access;
1671 /* Use this ctxt below so that any diagnostics get the
1672 note added to them. */
1673 annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
/* Dereference the pointer argument and check that the pointed-to
   region is writable.  */
1675 tree ptr_tree = gimple_call_arg (call, access->ptrarg);
1676 const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
1677 const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
1678 check_region_for_write (reg, &my_ctxt);
1679 /* We don't use the size arg for now. */
1684 /* Handle a call CALL to a function with unknown behavior.
1686 Traverse the regions in this model, determining what regions are
1687 reachable from pointer arguments to CALL and from global variables,
1690 Set all reachable regions to new unknown values and purge sm-state
1691 from their values, and from values that point to them. */
1694 region_model::handle_unrecognized_call (const gcall *call,
1695 region_model_context *ctxt)
1697 tree fndecl = get_fndecl_for_call (call, ctxt);
/* Check any __attribute__((access, ...)) on the callee first, so that
   diagnostics about writes via its pointer args get annotated.  */
1700 check_external_function_for_access_attr (call, fndecl, ctxt);
1702 reachable_regions reachable_regs (this);
1704 /* Determine the reachable regions and their mutability. */
1706 /* Add globals and regions that already escaped in previous
1708 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1711 /* Params that are pointers. */
1712 tree iter_param_types = NULL_TREE;
1714 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
1715 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
1717 /* Track expected param type, where available. */
1718 tree param_type = NULL_TREE;
1719 if (iter_param_types)
1721 param_type = TREE_VALUE (iter_param_types);
1722 gcc_assert (param_type);
1723 iter_param_types = TREE_CHAIN (iter_param_types);
1726 tree parm = gimple_call_arg (call, arg_idx);
1727 const svalue *parm_sval = get_rvalue (parm, ctxt);
1728 reachable_regs.handle_parm (parm_sval, param_type);
1732 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
1734 /* Purge sm-state for the svalues that were reachable,
1735 both in non-mutable and mutable form. */
1736 for (svalue_set::iterator iter
1737 = reachable_regs.begin_reachable_svals ();
1738 iter != reachable_regs.end_reachable_svals (); ++iter)
1740 const svalue *sval = (*iter);
1742 ctxt->on_unknown_change (sval, false);
1744 for (svalue_set::iterator iter
1745 = reachable_regs.begin_mutable_svals ();
1746 iter != reachable_regs.end_mutable_svals (); ++iter)
1748 const svalue *sval = (*iter);
1750 ctxt->on_unknown_change (sval, true);
/* Record that the mutable svalue may have been rebound by the call.  */
1752 uncertainty->on_mutable_sval_at_unknown_call (sval);
1755 /* Mark any clusters that have escaped. */
1756 reachable_regs.mark_escaped_clusters (ctxt);
1758 /* Update bindings for all clusters that have escaped, whether above,
1760 m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
1761 conjured_purge (this, ctxt));
1763 /* Purge dynamic extents from any regions that have escaped mutably:
1764 realloc could have been called on them. */
1765 for (hash_set<const region *>::iterator
1766 iter = reachable_regs.begin_mutable_base_regs ();
1767 iter != reachable_regs.end_mutable_base_regs ();
1770 const region *base_reg = (*iter);
1771 unset_dynamic_extents (base_reg);
1775 /* Traverse the regions in this model, determining what regions are
1776 reachable from the store and populating *OUT.
1778 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1779 for reachability (for handling return values from functions when
1780 analyzing return of the only function on the stack).
1782 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1783 within it as being maybe-bound as additional "roots" for reachability.
1785 Find svalues that haven't leaked. */
1788 region_model::get_reachable_svalues (svalue_set *out,
1789 const svalue *extra_sval,
1790 const uncertainty_t *uncertainty)
1792 reachable_regions reachable_regs (this);
1794 /* Add globals and regions that already escaped in previous
1796 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
/* EXTRA_SVAL, when supplied, is an additional reachability root
   (e.g. a function's return value).  */
1800 reachable_regs.handle_sval (extra_sval);
/* Treat maybe-bound svalues recorded in UNCERTAINTY as roots too.  */
1803 for (uncertainty_t::iterator iter
1804 = uncertainty->begin_maybe_bound_svals ();
1805 iter != uncertainty->end_maybe_bound_svals (); ++iter)
1806 reachable_regs.handle_sval (*iter);
1808 /* Get regions for locals that have explicitly bound values. */
1809 for (store::cluster_map_t::iterator iter = m_store.begin ();
1810 iter != m_store.end (); ++iter)
1812 const region *base_reg = (*iter).first;
1813 if (const region *parent = base_reg->get_parent_region ())
1814 if (parent->get_kind () == RK_FRAME)
1815 reachable_regs.add (base_reg, false);
1818 /* Populate *OUT based on the values that were reachable. */
1819 for (svalue_set::iterator iter
1820 = reachable_regs.begin_reachable_svals ();
1821 iter != reachable_regs.end_reachable_svals (); ++iter)
1825 /* Update this model for the RETURN_STMT, using CTXT to report any
1829 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
/* DECL_RESULT of the current function is the lvalue for the return
   value; the returned expression (if any) is the rvalue.  */
1831 tree callee = get_current_function ()->decl;
1832 tree lhs = DECL_RESULT (callee);
1833 tree rhs = gimple_return_retval (return_stmt);
1837 const svalue *sval = get_rvalue (rhs, ctxt);
1838 const region *ret_reg = get_lvalue (lhs, ctxt);
1839 set_value (ret_reg, sval, ctxt);
1843 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1844 ENODE, using CTXT to report any diagnostics.
1846 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1847 0), as opposed to any second return due to longjmp/sigsetjmp. */
1850 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1851 region_model_context *ctxt)
/* Arg 0 is the jmp_buf; dereference it to get the buffer region.  */
1853 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1854 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1857 /* Create a setjmp_svalue for this call and store it in BUF_REG's
/* The record captures the exploded_node and call stmt so that a later
   longjmp can rewind to this point.  */
1861 setjmp_record r (enode, call);
1863 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1864 set_value (buf_reg, sval, ctxt);
1867 /* Direct calls to setjmp return 0. */
1868 if (tree lhs = gimple_call_lhs (call))
1870 const svalue *new_sval
1871 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
1872 const region *lhs_reg = get_lvalue (lhs, ctxt);
1873 set_value (lhs_reg, new_sval, ctxt);
1877 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1878 to a "setjmp" at SETJMP_CALL where the final stack depth should be
1879 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1880 done, and should be done by the caller. */
1883 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
1884 int setjmp_stack_depth, region_model_context *ctxt)
1886 /* Evaluate the val, using the frame of the "longjmp". */
1887 tree fake_retval = gimple_call_arg (longjmp_call, 1);
1888 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
1890 /* Pop any frames until we reach the stack depth of the function where
1891 setjmp was called. */
1892 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1893 while (get_stack_depth () > setjmp_stack_depth)
1894 pop_frame (NULL, NULL, ctxt);
1896 gcc_assert (get_stack_depth () == setjmp_stack_depth);
1898 /* Assign to LHS of "setjmp" in new_state. */
1899 if (tree lhs = gimple_call_lhs (setjmp_call))
1901 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1902 const svalue *zero_sval
1903 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
1904 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
1905 /* If we have 0, use 1. */
1906 if (eq_zero.is_true ())
1908 const svalue *one_sval
1909 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
1910 fake_retval_sval = one_sval;
1914 /* Otherwise note that the value is nonzero. */
1915 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
1918 /* Decorate the return value from setjmp as being unmergeable,
1919 so that we don't attempt to merge states with it as zero
1920 with states in which it's nonzero, leading to a clean distinction
1921 in the exploded_graph between the first return and the second
1923 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
1925 const region *lhs_reg = get_lvalue (lhs, ctxt);
1926 set_value (lhs_reg, fake_retval_sval, ctxt);
1930 /* Update this region_model for a phi stmt of the form
1931 LHS = PHI <...RHS...>.
1932 where RHS is for the appropriate edge.
1933 Get state from OLD_STATE so that all of the phi stmts for a basic block
1934 are effectively handled simultaneously. */
1937 region_model::handle_phi (const gphi *phi,
1939 const region_model &old_state,
1940 region_model_context *ctxt)
1942 /* For now, don't bother tracking the .MEM SSA names. */
1943 if (tree var = SSA_NAME_VAR (lhs))
1944 if (TREE_CODE (var) == VAR_DECL)
1945 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
/* Read both sides from OLD_STATE so that all phis of a basic block see
   the pre-phi values (i.e. are handled "simultaneously").  */
1948 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
1949 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
1951 set_value (dst_reg, src_sval, ctxt);
/* Let the context (e.g. sm-state) know about the phi.  */
1954 ctxt->on_phi (phi, rhs);
1957 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
1959 Get the id of the region for PV within this region_model,
1960 emitting any diagnostics to CTXT. */
1963 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
1965 tree expr = pv.m_tree;
1969 switch (TREE_CODE (expr))
/* Unhandled tree codes get a placeholder region (and a complaint).  */
1972 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
1973 dump_location_t ());
/* ARRAY[INDEX]: an element region within the array's region.  */
1977 tree array = TREE_OPERAND (expr, 0);
1978 tree index = TREE_OPERAND (expr, 1);
1980 const region *array_reg = get_lvalue (array, ctxt);
1981 const svalue *index_sval = get_rvalue (index, ctxt);
1982 return m_mgr->get_element_region (array_reg,
1983 TREE_TYPE (TREE_TYPE (array)),
/* BIT_FIELD_REF: a bit-range region within the inner object.  */
1990 tree inner_expr = TREE_OPERAND (expr, 0);
1991 const region *inner_reg = get_lvalue (inner_expr, ctxt);
1992 tree num_bits = TREE_OPERAND (expr, 1);
1993 tree first_bit_offset = TREE_OPERAND (expr, 2);
1994 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
1995 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
1996 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
1997 TREE_INT_CST_LOW (num_bits));
1998 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
/* *(PTR + OFFSET): dereference the pointer, then take an offset region.  */
2004 tree ptr = TREE_OPERAND (expr, 0);
2005 tree offset = TREE_OPERAND (expr, 1);
2006 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2007 const svalue *offset_sval = get_rvalue (offset, ctxt);
2008 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2009 return m_mgr->get_offset_region (star_ptr,
2016 return m_mgr->get_region_for_fndecl (expr);
2019 return m_mgr->get_region_for_label (expr);
2022 /* Handle globals. */
2023 if (is_global_var (expr))
2024 return m_mgr->get_region_for_global (expr);
/* Otherwise this must be a local (or SSA name); find it within the
   frame given by the path_var's stack depth.  */
2032 gcc_assert (TREE_CODE (expr) == SSA_NAME
2033 || TREE_CODE (expr) == PARM_DECL
2034 || TREE_CODE (expr) == VAR_DECL
2035 || TREE_CODE (expr) == RESULT_DECL);
2037 int stack_index = pv.m_stack_depth;
2038 const frame_region *frame = get_frame_at_index (stack_index);
2040 return frame->get_region_for_local (m_mgr, expr, ctxt);
/* OBJ.FIELD: a field region within the object's region.  */
2046 tree obj = TREE_OPERAND (expr, 0);
2047 tree field = TREE_OPERAND (expr, 1);
2048 const region *obj_reg = get_lvalue (obj, ctxt);
2049 return m_mgr->get_field_region (obj_reg, field);
/* String literals get their own kind of region.  */
2054 return m_mgr->get_region_for_string (expr);
2058 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2061 assert_compat_types (tree src_type, tree dst_type)
/* A conversion to void is always acceptable; otherwise require a
   useless (no-op) conversion, and ICE if it isn't one.  */
2063 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2066 if (!(useless_type_conversion_p (src_type, dst_type)))
2067 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2072 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2075 compat_types_p (tree src_type, tree dst_type)
/* Predicate form of assert_compat_types: same check, but reports
   rather than ICEing.  */
2077 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2078 if (!(useless_type_conversion_p (src_type, dst_type)))
2083 /* Get the region for PV within this region_model,
2084 emitting any diagnostics to CTXT. */
2087 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
2089 if (pv.m_tree == NULL_TREE)
/* Wrap get_lvalue_1, checking that the region's type matches the
   tree's type.  */
2092 const region *result_reg = get_lvalue_1 (pv, ctxt);
2093 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2097 /* Get the region for EXPR within this region_model (assuming the most
2098 recent stack frame if it's a local). */
2101 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
/* Convenience overload: assume the innermost (current) stack frame.  */
2103 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2106 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
2108 Get the value of PV within this region_model,
2109 emitting any diagnostics to CTXT. */
2112 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
2114 gcc_assert (pv.m_tree);
2116 switch (TREE_CODE (pv.m_tree))
/* Unhandled codes evaluate to an unknown value of the right type.  */
2119 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
/* &EXPR: a pointer svalue to EXPR's region.  */
2124 tree expr = pv.m_tree;
2125 tree op0 = TREE_OPERAND (expr, 0);
2126 const region *expr_reg = get_lvalue (op0, ctxt);
2127 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
/* BIT_FIELD_REF: extract the given bit-range from the object.  */
2133 tree expr = pv.m_tree;
2134 tree op0 = TREE_OPERAND (expr, 0);
2135 const region *reg = get_lvalue (op0, ctxt);
2136 tree num_bits = TREE_OPERAND (expr, 1);
2137 tree first_bit_offset = TREE_OPERAND (expr, 2);
2138 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2139 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2140 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2141 TREE_INT_CST_LOW (num_bits));
2142 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
/* lvalue-like codes: read the current value from the store.  */
2151 const region *reg = get_lvalue (pv, ctxt);
2152 return get_store_value (reg, ctxt);
2157 case VIEW_CONVERT_EXPR:
/* Unary conversions become unaryop svalues.  */
2159 tree expr = pv.m_tree;
2160 tree arg = TREE_OPERAND (expr, 0);
2161 const svalue *arg_sval = get_rvalue (arg, ctxt);
2162 const svalue *sval_unaryop
2163 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
2165 return sval_unaryop;
/* Constants map to constant svalues.  */
2173 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
2175 case POINTER_PLUS_EXPR:
2177 tree expr = pv.m_tree;
2178 tree ptr = TREE_OPERAND (expr, 0);
2179 tree offset = TREE_OPERAND (expr, 1);
2180 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2181 const svalue *offset_sval = get_rvalue (offset, ctxt);
2182 const svalue *sval_binop
2183 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
2184 ptr_sval, offset_sval);
/* Other binary expressions become binop svalues.  */
2192 tree expr = pv.m_tree;
2193 tree arg0 = TREE_OPERAND (expr, 0);
2194 tree arg1 = TREE_OPERAND (expr, 1);
2195 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
2196 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
2197 const svalue *sval_binop
2198 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
2199 arg0_sval, arg1_sval);
/* References: read the referenced region's current value.  */
2206 const region *ref_reg = get_lvalue (pv, ctxt);
2207 return get_store_value (ref_reg, ctxt);
/* OBJ_TYPE_REF: evaluate the underlying expression.  */
2211 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
2212 return get_rvalue (expr, ctxt);
2217 /* Get the value of PV within this region_model,
2218 emitting any diagnostics to CTXT. */
2221 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
2223 if (pv.m_tree == NULL_TREE)
/* Wrap get_rvalue_1 with a type-compatibility check, and complain via
   CTXT about uses of poisoned (e.g. uninitialized) values.  */
2226 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
2228 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
2230 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
2235 /* Get the value of EXPR within this region_model (assuming the most
2236 recent stack frame if it's a local). */
2239 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
/* Convenience overload: assume the innermost (current) stack frame.  */
2241 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2244 /* Return true if this model is on a path with "main" as the entrypoint
2245 (as opposed to one in which we're merely analyzing a subset of the
2246 path through the code). */
2249 region_model::called_from_main_p () const
/* With no frames at all we can't be on a path from "main".  */
2251 if (!m_current_frame)
2253 /* Determine if the oldest stack frame in this model is for "main". */
2254 const frame_region *frame0 = get_frame_at_index (0);
2255 gcc_assert (frame0);
2256 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
2259 /* Subroutine of region_model::get_store_value for when REG is (or is within)
2260 a global variable that hasn't been touched since the start of this path
2261 (or was implicitly touched due to a call to an unknown function). */
2264 region_model::get_initial_value_for_global (const region *reg) const
2266 /* Get the decl that REG is for (or is within). */
2267 const decl_region *base_reg
2268 = reg->get_base_region ()->dyn_cast_decl_region ();
2269 gcc_assert (base_reg);
2270 tree decl = base_reg->get_decl ();
2272 /* Special-case: to avoid having to explicitly update all previously
2273 untracked globals when calling an unknown fn, they implicitly have
2274 an unknown value if an unknown call has occurred, unless this is
2275 static to-this-TU and hasn't escaped. Globals that have escaped
2276 are explicitly tracked, so we shouldn't hit this case for them. */
2277 if (m_store.called_unknown_fn_p ()
2278 && TREE_PUBLIC (decl)
2279 && !TREE_READONLY (decl))
2280 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2282 /* If we are on a path from the entrypoint from "main" and we have a
2283 global decl defined in this TU that hasn't been touched yet, then
2284 the initial value of REG can be taken from the initialization value
2286 if (called_from_main_p () || TREE_READONLY (decl))
2288 /* Attempt to get the initializer value for base_reg. */
2289 if (const svalue *base_reg_init
2290 = base_reg->get_svalue_for_initializer (m_mgr))
2292 if (reg == base_reg)
2293 return base_reg_init;
2296 /* Get the value for REG within base_reg_init. */
/* Bind the initializer into a scratch cluster so that we can extract
   the sub-binding corresponding to REG.  */
2297 binding_cluster c (base_reg);
2298 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
2300 = c.get_any_binding (m_mgr->get_store_manager (), reg);
/* Cast to REG's type, if it has one.  */
2303 if (reg->get_type ())
2304 sval = m_mgr->get_or_create_cast (reg->get_type (),
2312 /* Otherwise, return INIT_VAL(REG). */
2313 return m_mgr->get_or_create_initial_value (reg);
2316 /* Get a value for REG, looking it up in the store, or otherwise falling
2317 back to "initial" or "unknown" values.
2318 Use CTXT to report any warnings associated with reading from REG. */
2321 region_model::get_store_value (const region *reg,
2322 region_model_context *ctxt) const
/* Report any problems with reading from REG (e.g. out-of-bounds).  */
2324 check_region_for_read (reg, ctxt);
2326 /* Special-case: handle var_decls in the constant pool. */
2327 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
2328 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
/* Try an explicit binding in the store first.  */
2332 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
2335 if (reg->get_type ())
2336 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
2340 /* Special-case: read at a constant index within a STRING_CST. */
2341 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
2342 if (tree byte_offset_cst
2343 = offset_reg->get_byte_offset ()->maybe_get_constant ())
2344 if (const string_region *str_reg
2345 = reg->get_parent_region ()->dyn_cast_string_region ())
2347 tree string_cst = str_reg->get_string_cst ();
2348 if (const svalue *char_sval
2349 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2351 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2354 /* Special-case: read the initial char of a STRING_CST. */
2355 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2356 if (const string_region *str_reg
2357 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2359 tree string_cst = str_reg->get_string_cst ();
2360 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2361 if (const svalue *char_sval
2362 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2364 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2367 /* Otherwise we implicitly have the initial value of the region
2368 (if the cluster had been touched, binding_cluster::get_any_binding,
2369 would have returned UNKNOWN, and we would already have returned
2372 /* Handle globals. */
2373 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2375 return get_initial_value_for_global (reg);
2377 return m_mgr->get_or_create_initial_value (reg);
2380 /* Return false if REG does not exist, true if it may do.
2381 This is for detecting regions within the stack that don't exist anymore
2382 after frames are popped. */
2385 region_model::region_exists_p (const region *reg) const
2387 /* If within a stack frame, check that the stack frame is live. */
2388 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
2390 /* Check that the current frame is the enclosing frame, or is called
/* Walk up the chain of calling frames looking for ENCLOSING_FRAME.  */
2392 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2393 iter_frame = iter_frame->get_calling_frame ())
2394 if (iter_frame == enclosing_frame)
2402 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
2403 potentially generating warnings via CTXT.
2404 PTR_SVAL must be of pointer type.
2405 PTR_TREE if non-NULL can be used when emitting diagnostics. */
2408 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
2409 region_model_context *ctxt) const
2411 gcc_assert (ptr_sval);
2412 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
2414 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2415 as a constraint. This suppresses false positives from
2416 -Wanalyzer-null-dereference for the case where we later have an
2417 if (PTR_SVAL) that would occur if we considered the false branch
2418 and transitioned the malloc state machine from start->null. */
2419 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2420 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2421 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2423 switch (ptr_sval->get_kind ())
/* A pointer that points directly at a known region.  */
2430 const region_svalue *region_sval
2431 = as_a <const region_svalue *> (ptr_sval);
2432 return region_sval->get_pointee ();
2437 const binop_svalue *binop_sval
2438 = as_a <const binop_svalue *> (ptr_sval);
2439 switch (binop_sval->get_op ())
2441 case POINTER_PLUS_EXPR:
2443 /* If we have a symbolic value expressing pointer arithmetic,
2444 try to convert it to a suitable region. */
2445 const region *parent_region
2446 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2447 const svalue *offset = binop_sval->get_arg1 ();
2448 tree type= TREE_TYPE (ptr_sval->get_type ());
2449 return m_mgr->get_offset_region (parent_region, type, offset);
/* Dereferencing a poisoned (e.g. uninitialized) pointer: warn.  */
2461 tree ptr = get_representative_tree (ptr_sval);
2462 /* If we can't get a representative tree for PTR_SVAL
2463 (e.g. if it hasn't been bound into the store), then
2464 fall back on PTR_TREE, if non-NULL. */
2469 const poisoned_svalue *poisoned_sval
2470 = as_a <const poisoned_svalue *> (ptr_sval);
2471 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
2472 ctxt->warn (make_unique<poisoned_value_diagnostic>
2473 (ptr, pkind, NULL));
/* Fallback: a symbolic region based on the pointer value.  */
2480 return m_mgr->get_symbolic_region (ptr_sval);
2483 /* Attempt to get BITS within any value of REG, as TYPE.
2484 In particular, extract values from compound_svalues for the case
2485 where there's a concrete binding at BITS.
2486 Return an unknown svalue if we can't handle the given case.
2487 Use CTXT to report any warnings associated with reading from REG. */
const svalue *
region_model::get_rvalue_for_bits (tree type,
				   const region *reg,
				   const bit_range &bits,
				   region_model_context *ctxt) const
{
  /* Read the current value bound to REG (CTXT is used to report any
     problems with the read, e.g. uses of uninitialized values).  */
  const svalue *sval = get_store_value (reg, ctxt);
  /* Delegate extraction of BITS (as TYPE) to the region_model_manager;
     this handles e.g. concrete bindings within compound_svalues, and
     falls back to an unknown svalue for cases it can't handle.  */
  return m_mgr->get_or_create_bits_within (type, bits, sval);
}
2499 /* A subclass of pending_diagnostic for complaining about writes to
2500 constant regions of memory. */
class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  /* REG is the region being written to; DECL is the const decl
     (object, function, or label) it corresponds to.  */
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  /* Deduplication: two instances are the same diagnostic iff they
     refer to the same region and decl.  */
  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    bool warned;
    /* Tailor the message to the kind of region being written to.  */
    switch (m_reg->get_kind ())
      {
      default:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to label %qE", m_decl);
	break;
      }
    /* Add a note pointing at the declaration of the const entity.  */
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_reg->get_kind ())
      {
      default:
	return ev.formatted_print ("write to %<const%> object %qE here",
				   m_decl);
      case RK_FUNCTION:
	return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
	return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;	/* The region written to.  */
  tree m_decl;		/* The const decl involved.  */
};
2568 /* A subclass of pending_diagnostic for complaining about writes to
class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  /* REG is the string_region (or region within one) being written to.  */
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const final override
  {
    return "write_to_string_literal_diagnostic";
  }

  /* Deduplicate by the region being written to.  */
  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_string_literal;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;	/* The region within the string literal.  */
};
2611 /* Use CTXT to warn if DEST_REG is a region that shouldn't be written to.  */
void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Classify by the base region, so that e.g. a write to a field of a
     const global is still caught.  */
  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg
	  = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
      break;
    }
}
2661 /* Get the capacity of REG in bytes. */
const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    /* SSA names have no DECL_INITIAL; use the size of their type.  */
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, NULL);
	  }
	else
	  {
	    /* Prefer the initializer's size, which can be larger than the
	       type's (e.g. flexible array members).  */
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, NULL);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    }

  /* Fall back on any dynamically-recorded extent (e.g. from malloc).  */
  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  /* Otherwise the capacity is unknown.  */
  return m_mgr->get_or_create_unknown_svalue (sizetype);
}
2700 /* Return the string size, including the 0-terminator, if SVAL is a
2701 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
const svalue *
region_model::get_string_size (const svalue *sval) const
{
  /* Only constant_svalues wrapping a STRING_CST have a known size.  */
  tree cst = sval->maybe_get_constant ();
  if (!cst || TREE_CODE (cst) != STRING_CST)
    return m_mgr->get_or_create_unknown_svalue (size_type_node);

  /* TREE_STRING_LENGTH includes the terminating NUL.  */
  tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
  return m_mgr->get_or_create_constant_svalue (out);
}
2714 /* Return the string size, including the 0-terminator, if REG is a
2715 string_region. Otherwise, return an unknown_svalue. */
const svalue *
region_model::get_string_size (const region *reg) const
{
  /* Only string_regions have a known string size.  */
  const string_region *str_reg = dyn_cast <const string_region *> (reg);
  if (!str_reg)
    return m_mgr->get_or_create_unknown_svalue (size_type_node);

  /* TREE_STRING_LENGTH includes the terminating NUL.  */
  tree cst = str_reg->get_string_cst ();
  tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
  return m_mgr->get_or_create_constant_svalue (out);
}
2729 /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2730 using DIR to determine if this access is a read or write. */
void
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Checks common to both reads and writes.  */
  check_region_for_taint (reg, dir, ctxt);
  check_region_bounds (reg, dir, ctxt);

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      /* Writes additionally check for read-only destinations.  */
      check_for_writable_region (reg, ctxt);
      break;
    }
}
2757 /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
void
region_model::check_region_for_write (const region *dest_reg,
				      region_model_context *ctxt) const
{
  /* Thin wrapper: a write-direction access check on DEST_REG.  */
  check_region_access (dest_reg, DIR_WRITE, ctxt);
}
2766 /* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
void
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  /* Thin wrapper: a read-direction access check on SRC_REG.  */
  check_region_access (src_reg, DIR_READ, ctxt);
}
2775 /* Concrete subclass for casts of pointers that lead to trailing bytes. */
class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  /* LHS is the pointer region assigned to; RHS is the allocated buffer.  */
  dubious_allocation_size (const region *lhs, const region *rhs)
  : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE),
    m_has_allocation_event (false)
  {}

  /* As above, but with EXPR: a tree expressing the allocated size,
     used to make the final-event message more precise.  */
  dubious_allocation_size (const region *lhs, const region *rhs,
			   tree expr)
  : m_lhs (lhs), m_rhs (rhs), m_expr (expr),
    m_has_allocation_event (false)
  {}

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  bool operator== (const dubious_allocation_size &other) const
  {
    return m_lhs == other.m_lhs && m_rhs == other.m_rhs
	   && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;

    return warning_meta (rich_loc, m, get_controlling_option (),
			 "allocated buffer size is not a multiple"
			 " of the pointee's size");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    /* If an allocation event was already added to the path, the size has
       been shown there; just describe the assignment.  */
    if (m_has_allocation_event)
      return ev.formatted_print ("assigned to %qT here;"
				 " %<sizeof (%T)%> is %qE",
				 m_lhs->get_type (), pointee_type,
				 size_in_bytes (pointee_type));
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
	/* Use %E for constants, %qE for expressions.  */
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  return ev.formatted_print ("allocated %E bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
	else
	  return ev.formatted_print ("allocated %qE bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
      }
    /* No size expression available at all.  */
    return ev.formatted_print ("allocated and assigned to %qT here;"
			       " %<sizeof (%T)%> is %qE",
			       m_lhs->get_type (), pointee_type,
			       size_in_bytes (pointee_type));
  }

  /* Add an event describing the allocation, recording that one was
     emitted so describe_final_event can avoid repeating the size.  */
  void
  add_region_creation_events (const region *,
			      tree capacity,
			      location_t loc,
			      tree fndecl, int depth,
			      checker_path &emission_path) final override
  {
    emission_path.add_event
      (make_unique<region_creation_event_allocation_size> (capacity,
							   loc, fndecl, depth));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

private:
  const region *m_lhs;		/* The pointer assigned to.  */
  const region *m_rhs;		/* The allocated buffer.  */
  const tree m_expr;		/* Size expression, or NULL_TREE.  */
  bool m_has_allocation_event;	/* Whether a creation event was emitted.  */
};
2875 /* Return true on dubious allocation sizes for constant sizes. */
static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree,
			       bool is_struct)
{
  gcc_assert (TREE_CODE (cst) == INTEGER_CST);
  gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);

  unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
  unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);

  /* For structs, any buffer at least as large as one element is
     acceptable (e.g. flexible array members); zero is also allowed.  */
  if (is_struct)
    return alloc_size == 0 || alloc_size >= pointee_size;
  /* Otherwise, the buffer must hold a whole number of elements.  */
  return alloc_size % pointee_size == 0;
}
/* Convenience overload for the non-struct case.  */

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree)
{
  return capacity_compatible_with_type (cst, pointee_size_tree, false);
}
2898 /* Checks whether SVAL could be a multiple of SIZE_CST.
2900 It works by visiting all svalues inside SVAL until it reaches
2901 atomic nodes. From those, it goes back up again and adds each
2902 node that might be a multiple of SIZE_CST to the RESULT_SET. */
class size_visitor : public visitor
{
public:
  /* Visit all of ROOT_SVAL's descendants, populating result_set with
     every node that might be a multiple of SIZE_CST.  CM is consulted
     for constants implied by constraints on conjured svalues.  */
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    m_root_sval->accept (this);
  }

  /* True if the root svalue might be a multiple of SIZE_CST.  */
  bool get_result ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  /* Unknown values might be anything, including a valid multiple.  */
  void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  /* A conversion preserves "multiple-ness" of its argument.  */
  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    const svalue *arg = sval->get_arg ();
    if (result_set.contains (arg))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    if (sval->get_op () == MULT_EXPR)
      {
	/* A product is a multiple if either factor is.  */
	if (result_set.contains (arg0) || result_set.contains (arg1))
	  result_set.add (sval);
      }
    else
      {
	/* For other ops (e.g. PLUS), both operands must be multiples.  */
	if (result_set.contains (arg0) && result_set.contains (arg1))
	  result_set.add (sval);
      }
  }

  void visit_repeated_svalue (const repeated_svalue *sval) final override
  {
    sval->get_inner_svalue ()->accept (this);
    if (result_set.contains (sval->get_inner_svalue ()))
      result_set.add (sval);
  }

  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    sval->get_arg ()->accept (this);
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) && result_set.contains (iter))
      result_set.add (sval);
  }

  /* For conjured svalues, consult the constraint manager: if the value
     is known to equal a constant, check that constant; otherwise treat
     it as potentially compatible.  */
  void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    equiv_class_id id (-1);
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
	if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	  check_constant (cst, sval);
	else
	  result_set.add (sval);
      }
  }

  void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  void visit_const_fn_result_svalue (const const_fn_result_svalue
				      *sval ATTRIBUTE_UNUSED) final override
  {
    result_set.add (sval);
  }

private:
  /* Add SVAL to the result set if CST is (or might be) a multiple of
     the pointee size.  */
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
	/* Assume all unhandled operands are compatible.  */
	result_set.add (sval);
	break;
      case INTEGER_CST:
	if (capacity_compatible_with_type (cst, m_size_cst))
	  result_set.add (sval);
	break;
      }
  }

  tree m_size_cst;		/* The pointee size to test against.  */
  const svalue *m_root_sval;	/* The svalue being analyzed.  */
  constraint_manager *m_cm;	/* For constants implied by constraints.  */
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};
3029 /* Return true if a struct or union either uses the inheritance pattern,
3030 where the first field is a base struct, or the flexible array member
3031 pattern, where the last field is an array without a specified size. */
static bool
struct_or_union_with_inheritance_p (tree struc)
{
  tree iter = TYPE_FIELDS (struc);
  /* No fields at all: neither pattern applies.  */
  if (iter == NULL_TREE)
    return false;
  /* Inheritance pattern: first field is itself a struct/union.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
    return true;

  /* Walk to the last field.  */
  tree last_field = NULL_TREE;
  while (iter != NULL_TREE)
    {
      last_field = iter;
      iter = DECL_CHAIN (iter);
    }

  /* Flexible-array-member pattern: last field is an array.  */
  if (last_field != NULL_TREE
      && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
    return true;

  return false;
}
3056 /* Return true if the lhs and rhs of an assignment have different types. */
static bool
is_any_cast_p (const gimple *stmt)
{
  if (const gassign *assign = dyn_cast <const gassign *> (stmt))
    /* Either an explicit cast, or an assignment whose lhs/rhs types
       differ.  */
    return gimple_assign_cast_p (assign)
	   || !pending_diagnostic::same_tree_p (
		  TREE_TYPE (gimple_assign_lhs (assign)),
		  TREE_TYPE (gimple_assign_rhs1 (assign)));
  else if (const gcall *call = dyn_cast <const gcall *> (stmt))
    {
      /* A call result assigned to an lhs of a different type.  */
      tree lhs = gimple_call_lhs (call);
      return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
				    TREE_TYPE (gimple_call_lhs (call)),
				    gimple_call_return_type (call));
    }

  return false;
}
3077 /* On pointer assignments, check whether the buffer size of
3078 RHS_SVAL is compatible with the type of the LHS_REG.
3079 Use a non-null CTXT to report allocation size warnings. */
void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  if (!ctxt || ctxt->get_stmt () == NULL)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  /* Only pointer values to known regions are of interest.  */
  const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
  if (!reg_sval)
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand side actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = reg_sval->get_pointee ();
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	/* Concrete capacity: check divisibility directly.  */
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
							     cst_cap));
      }
      break;
    default:
      {
	if (!is_struct)
	  {
	    /* Symbolic capacity: use size_visitor to see whether it could
	       be a multiple of the pointee size.  */
	    size_visitor v (pointee_size_tree, capacity, m_constraints);
	    if (!v.get_result ())
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
								   rhs_reg,
								   expr));
	      }
	  }
	break;
      }
    }
}
3153 /* Set the value of the region given by LHS_REG to the value given
3155 Use CTXT to report any warnings associated with writing to LHS_REG. */
void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* Check for dubious allocation sizes on pointer assignments.  */
  check_region_size (lhs_reg, rhs_sval, ctxt);

  /* Check for writes to read-only regions (const, functions, labels,
     string literals).  */
  check_region_for_write (lhs_reg, ctxt);

  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
		     ctxt ? ctxt->get_uncertainty () : NULL);
}
3172 /* Set the value of the region given by LHS to the value given by RHS. */
void
region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
{
  /* Convert the trees to a region and an svalue, then delegate.  */
  const region *lhs_reg = get_lvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);
  set_value (lhs_reg, rhs_sval, ctxt);
}
3184 /* Remove all bindings overlapping REG within the store. */
void
region_model::clobber_region (const region *reg)
{
  /* Delegate to the store.  */
  m_store.clobber_region (m_mgr->get_store_manager(), reg);
}
3192 /* Remove any bindings for REG within the store. */
void
region_model::purge_region (const region *reg)
{
  /* Delegate to the store.  */
  m_store.purge_region (m_mgr->get_store_manager(), reg);
}
3200 /* Fill REG with SVAL. */
void
region_model::fill_region (const region *reg, const svalue *sval)
{
  /* Delegate to the store.  */
  m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
}
3208 /* Zero-fill REG. */
void
region_model::zero_fill_region (const region *reg)
{
  /* Delegate to the store.  */
  m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
}
3216 /* Mark REG as having unknown content. */
void
region_model::mark_region_as_unknown (const region *reg,
				      uncertainty_t *uncertainty)
{
  /* Delegate to the store.  */
  m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
				  uncertainty);
}
3226 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* See what we know based on the values.  */

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    {
      /* If we have a pair of constants, compare them.  */
      if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
	return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
      else
	{
	  /* When we have one constant, put it on the RHS.  */
	  std::swap (lhs, rhs);
	  op = swap_tree_comparison (op);
	}
    }
  gcc_assert (lhs->get_kind () != SK_CONSTANT);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practise and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
	else if (const unaryop_svalue *unaryop
		   = lhs->dyn_cast_unaryop_svalue ())
	  {
	    if (unaryop->get_op () == NEGATE_EXPR)
	      {
		/* e.g. "-X <= 0" is equivalent to X >= 0".  */
		tristate lhs_ts = eval_condition (unaryop->get_arg (),
						  swap_tree_comparison (op),
						  rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  /* Handle comparisons between two svalues with more than one operand.  */
  if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
    {
      switch (op)
	{
	default:
	  break;
	case EQ_EXPR:
	  {
	    /* TODO: binops can be equal even if they are not structurally
		     equal in case of commutative operators.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case LE_EXPR:
	  {
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GE_EXPR:
	  {
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	    res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GT_EXPR:
	  {
	    tristate res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	}
    }

  /* Otherwise, try constraints.
     Cast to const to ensure we don't change the constraint_manager as we
     do this (e.g. by creating equivalence classes).  */
  const constraint_manager *constraints = m_constraints;
  return constraints->eval_condition (lhs, op, rhs);
}
3433 /* Subroutine of region_model::eval_condition, for rejecting
3434 equality of INIT_VAL(PARM) with &LOCAL. */
tristate
region_model::compare_initial_and_pointer (const initial_svalue *init,
					   const region_svalue *ptr) const
{
  const region *pointee = ptr->get_pointee ();

  /* If we have a pointer to something within a stack frame, it can't be the
     initial value of a param.  */
  if (pointee->maybe_get_frame_region ())
    if (init->initial_value_of_param_p ())
      return tristate::TS_FALSE;

  /* Otherwise we can't tell.  */
  return tristate::TS_UNKNOWN;
}
3451 /* Return true if SVAL is definitely positive. */
static bool
is_positive_svalue (const svalue *sval)
{
  /* Constants: positive iff non-zero and known non-negative.  */
  if (tree cst = sval->maybe_get_constant ())
    return !zerop (cst) && get_range_pos_neg (cst) == 1;
  tree type = sval->get_type ();
  if (!type)
    return false;
  /* Consider a binary operation size_t + int.  The analyzer wraps the int in
     an unaryop_svalue, converting it to a size_t, but in the dynamic execution
     the result is smaller than the first operand.  Thus, we have to look if
     the argument of the unaryop_svalue is also positive.  */
  if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
    return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
	   && is_positive_svalue (un_op->get_arg ());
  /* Otherwise, fall back on the signedness of the type.  */
  return TYPE_UNSIGNED (type);
}
3471 /* Return true if A is definitely larger than B.
3473 Limitation: does not account for integer overflows and does not try to
3474 return false, so it can not be used negated. */
tristate
region_model::symbolic_greater_than (const binop_svalue *bin_a,
				     const svalue *b) const
{
  if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
    {
      /* Eliminate the right-hand side of both svalues.  */
      if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	if (bin_a->get_op () == bin_b->get_op ()
	    && eval_condition (bin_a->get_arg1 (),
			       GT_EXPR,
			       bin_b->get_arg1 ()).is_true ()
	    && eval_condition (bin_a->get_arg0 (),
			       GE_EXPR,
			       bin_b->get_arg0 ()).is_true ())
	  return tristate (tristate::TS_TRUE);

      /* Otherwise, try to remove a positive offset or factor from BIN_A.  */
      if (is_positive_svalue (bin_a->get_arg1 ())
	  && eval_condition (bin_a->get_arg0 (),
			     GE_EXPR, b).is_true ())
	return tristate (tristate::TS_TRUE);
    }
  /* Note: we never return TS_FALSE (see the function comment above).  */
  return tristate::unknown ();
}
3502 /* Return true if A and B are equal structurally.
3504 Structural equality means that A and B are equal if the svalues A and B have
3505 the same nodes at the same positions in the tree and the leafs are equal.
3506 Equality for conjured_svalues and initial_svalues is determined by comparing
3507 the pointers while constants are compared by value. That behavior is useful
3508    to check for binop_svalues that evaluate to the same concrete value but
3509 might use one operand with a different type but the same constant value.
3512 binop_svalue (mult_expr,
3513 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
3514 constant_svalue (‘size_t’, 4))
3516 binop_svalue (mult_expr,
3517 initial_svalue (‘size_t’, decl_region (..., 'some_var'),
3518 constant_svalue (‘sizetype’, 4))
3519 are structurally equal. A concrete C code example, where this occurs, can
3520 be found in test7 of out-of-bounds-5.c. */
tristate
region_model::structural_equality (const svalue *a, const svalue *b) const
{
  /* If A and B are referentially equal, they are also structurally equal.  */
  if (a == b)
    return tristate (tristate::TS_TRUE);

  switch (a->get_kind ())
    {
    default:
      return tristate::unknown ();
    /* SK_CONJURED and SK_INITIAL are already handled
       by the referential equality above.  */
    case SK_CONSTANT:
      {
	/* Leaf case: compare constants by value (ignoring type).  */
	tree a_cst = a->maybe_get_constant ();
	tree b_cst = b->maybe_get_constant ();
	if (b_cst)
	  return tristate (tree_int_cst_equal (a_cst, b_cst));
      }
      return tristate (tristate::TS_FALSE);
    case SK_UNARYOP:
      {
	/* Recurse into the argument; types and ops must also match.  */
	const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
	if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
	  return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
							    un_b->get_type ())
			   && un_a->get_op () == un_b->get_op ()
			   && structural_equality (un_a->get_arg (),
						   un_b->get_arg ()));
      }
      return tristate (tristate::TS_FALSE);
    case SK_BINOP:
      {
	/* Recurse into both operands; the op must also match.  */
	const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
	if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	  return tristate (bin_a->get_op () == bin_b->get_op ()
			   && structural_equality (bin_a->get_arg0 (),
						   bin_b->get_arg0 ())
			   && structural_equality (bin_a->get_arg1 (),
						   bin_b->get_arg1 ()));
      }
      return tristate (tristate::TS_FALSE);
    }
}
3568 /* Handle various constraints of the form:
3569 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
3573 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
3576 by adding constraints for INNER_LHS INNEROP INNER_RHS.
3578 Return true if this function can fully handle the constraint; if
3579 so, add the implied constraint(s) and write true to *OUT if they
3580 are consistent with existing constraints, or write false to *OUT
3581 if they contradicts existing constraints.
3583    Return false for cases that this function doesn't know how to handle.
3585 For example, if we're checking a stored conditional, we'll have
3587 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
3590 which this function can turn into an add_constraint of:
3591 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
3593 Similarly, optimized && and || conditionals lead to e.g.
3595 becoming gimple like this:
3599 On the "_3 is false" branch we can have constraints of the form:
3600 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3601 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
3603 which implies that both _1 and _2 are false,
3604 which this function can turn into a pair of add_constraints of
3605 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3607 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
bool
region_model::add_constraints_from_binop (const svalue *outer_lhs,
					  enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  /* Strip any casts around the outer lhs.  */
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  /* Only handle comparisons against zero.  */
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;

  switch (inner_op)
    {
    default:
      return false;

    case EQ_EXPR:
    case NE_EXPR:
      {
	/* ...and "(inner_lhs OP inner_rhs) == 0"
	   then (inner_lhs OP inner_rhs) must have the same
	   logical value as LHS.  */
	if (!is_true)
	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
	return true;
      }

    case BIT_AND_EXPR:
      if (is_true)
	{
	  /* ...and "(inner_lhs & inner_rhs) != 0"
	     then both inner_lhs and inner_rhs must be true.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;

    case BIT_IOR_EXPR:
      if (!is_true)
	{
	  /* ...and "(inner_lhs | inner_rhs) == 0"
	     i.e. "(inner_lhs | inner_rhs)" is false
	     then both inner_lhs and inner_rhs must be false.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;
    }
}
3685 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
3686 If it is consistent with existing constraints, add it, and return true.
3687 Return false if it contradicts existing constraints.
3688 Use CTXT for reporting any diagnostics associated with the accesses. */
bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;

  /* Convert the trees to svalues and delegate.  */
  const svalue *lhs_sval = get_rvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  return add_constraint (lhs_sval, op, rhs_sval, ctxt);
}
3705 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
3706 If it is consistent with existing constraints, add it, and return true.
3707 Return false if it contradicts existing constraints.
3708 Use CTXT for reporting any diagnostics associated with the accesses. */
bool
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
3752 /* As above, but when returning false, if OUT is non-NULL, write a
3753 new rejected_constraint to *OUT. */
3756 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3757 region_model_context *ctxt,
3758 rejected_constraint **out)
3760 bool sat = add_constraint (lhs, op, rhs, ctxt);
3762 *out = new rejected_op_constraint (*this, lhs, op, rhs);
3766 /* Determine what is known about the condition "LHS OP RHS" within
3768 Use CTXT for reporting any diagnostics associated with the accesses. */
/* Tree-level overload: result is a tristate (true/false/unknown);
   delegates to the svalue-based eval_condition. */
3771 region_model::eval_condition (tree lhs,
3774 region_model_context *ctxt) const
3776 /* For now, make no attempt to model constraints on floating-point
3778 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3779 return tristate::unknown ();
3781 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3784 /* Implementation of region_model::get_representative_path_var.
3785 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3786 Use VISITED to prevent infinite mutual recursion with the overload for
3790 region_model::get_representative_path_var_1 (const svalue *sval,
3791 svalue_set *visited) const
3795 /* Prevent infinite recursion. */
3796 if (visited->contains (sval))
3797 return path_var (NULL_TREE, 0);
3798 visited->add (sval);
3800 /* Handle casts by recursion into get_representative_path_var. */
3801 if (const svalue *cast_sval = sval->maybe_undo_cast ())
3803 path_var result = get_representative_path_var (cast_sval, visited);
3804 tree orig_type = sval->get_type ();
3805 /* If necessary, wrap the result in a cast. */
3806 if (result.m_tree && orig_type)
3807 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
/* Collect candidate path_vars from the store's bindings for SVAL. */
3811 auto_vec<path_var> pvs;
3812 m_store.get_representative_path_vars (this, visited, sval, &pvs);
3814 if (tree cst = sval->maybe_get_constant ())
3815 pvs.safe_push (path_var (cst, 0));
3817 /* Handle string literals and various other pointers. */
3818 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3820 const region *reg = ptr_sval->get_pointee ();
3821 if (path_var pv = get_representative_path_var (reg, visited))
3822 return path_var (build1 (ADDR_EXPR,
3828 /* If we have a sub_svalue, look for ways to represent the parent. */
3829 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
3831 const svalue *parent_sval = sub_sval->get_parent ();
3832 const region *subreg = sub_sval->get_subregion ();
3833 if (path_var parent_pv
3834 = get_representative_path_var (parent_sval, visited))
3835 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3836 return path_var (build3 (COMPONENT_REF,
3839 field_reg->get_field (),
3841 parent_pv.m_stack_depth);
3844 /* Handle binops. */
3845 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3847 = get_representative_path_var (binop_sval->get_arg0 (), visited))
3849 = get_representative_path_var (binop_sval->get_arg1 (), visited))
3850 return path_var (build2 (binop_sval->get_op (),
3852 lhs_pv.m_tree, rhs_pv.m_tree),
3853 lhs_pv.m_stack_depth);
3855 if (pvs.length () < 1)
3856 return path_var (NULL_TREE, 0);
/* Rank the candidates so that the most human-readable one is chosen. */
3858 pvs.qsort (readability_comparator);
3862 /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3863 Use VISITED to prevent infinite mutual recursion with the overload for
3866 This function defers to get_representative_path_var_1 to do the work;
3867 it adds verification that get_representative_path_var_1 returned a tree
3868 of the correct type. */
3871 region_model::get_representative_path_var (const svalue *sval,
3872 svalue_set *visited) const
3875 return path_var (NULL_TREE, 0);
3877 tree orig_type = sval->get_type ();
3879 path_var result = get_representative_path_var_1 (sval, visited);
3881 /* Verify that the result has the same type as SVAL, if any. */
3882 if (result.m_tree && orig_type)
3883 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3888 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3890 Strip off any top-level cast, to avoid messages like
3891 double-free of '(void *)ptr'
3892 from analyzer diagnostics. */
3895 region_model::get_representative_tree (const svalue *sval) const
3898 tree expr = get_representative_path_var (sval, &visited).m_tree;
3900 /* Strip off any top-level cast. */
3901 if (expr && TREE_CODE (expr) == NOP_EXPR)
3902 expr = TREE_OPERAND (expr, 0);
3904 return fixup_tree_for_diagnostic (expr);
/* As above, but for a region REG rather than an svalue. */
3908 region_model::get_representative_tree (const region *reg) const
3911 tree expr = get_representative_path_var (reg, &visited).m_tree;
3913 /* Strip off any top-level cast. */
3914 if (expr && TREE_CODE (expr) == NOP_EXPR)
3915 expr = TREE_OPERAND (expr, 0);
3917 return fixup_tree_for_diagnostic (expr);
3920 /* Implementation of region_model::get_representative_path_var.
3922 Attempt to return a path_var that represents REG, or return
3924 For example, a region for a field of a local would be a path_var
3925 wrapping a COMPONENT_REF.
3926 Use VISITED to prevent infinite mutual recursion with the overload for
/* Dispatch on the kind of region, building the corresponding tree shape
   (COMPONENT_REF, ARRAY_REF, MEM_REF, etc.) from representative parents. */
3930 region_model::get_representative_path_var_1 (const region *reg,
3931 svalue_set *visited) const
3933 switch (reg->get_kind ())
3943 case RK_THREAD_LOCAL:
3945 /* Regions that represent memory spaces are not expressible as trees. */
3946 return path_var (NULL_TREE, 0);
3950 const function_region *function_reg
3951 = as_a <const function_region *> (reg);
3952 return path_var (function_reg->get_fndecl (), 0);
3956 const label_region *label_reg = as_a <const label_region *> (reg);
3957 return path_var (label_reg->get_label (), 0);
3962 const symbolic_region *symbolic_reg
3963 = as_a <const symbolic_region *> (reg);
3964 const svalue *pointer = symbolic_reg->get_pointer ();
3965 path_var pointer_pv = get_representative_path_var (pointer, visited);
3967 return path_var (NULL_TREE, 0);
/* Express a symbolic region as "*ptr", i.e. a MEM_REF with zero offset. */
3968 tree offset = build_int_cst (pointer->get_type (), 0);
3969 return path_var (build2 (MEM_REF,
3973 pointer_pv.m_stack_depth);
3977 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3978 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
3982 const field_region *field_reg = as_a <const field_region *> (reg);
3984 = get_representative_path_var (reg->get_parent_region (), visited);
3986 return path_var (NULL_TREE, 0);
3987 return path_var (build3 (COMPONENT_REF,
3990 field_reg->get_field (),
3992 parent_pv.m_stack_depth);
3997 const element_region *element_reg
3998 = as_a <const element_region *> (reg);
4000 = get_representative_path_var (reg->get_parent_region (), visited);
4002 return path_var (NULL_TREE, 0);
4004 = get_representative_path_var (element_reg->get_index (), visited);
4006 return path_var (NULL_TREE, 0);
4007 return path_var (build4 (ARRAY_REF,
4009 parent_pv.m_tree, index_pv.m_tree,
4010 NULL_TREE, NULL_TREE),
4011 parent_pv.m_stack_depth);
4016 const offset_region *offset_reg
4017 = as_a <const offset_region *> (reg);
4019 = get_representative_path_var (reg->get_parent_region (), visited);
4021 return path_var (NULL_TREE, 0);
4023 = get_representative_path_var (offset_reg->get_byte_offset (),
/* Only constant byte offsets can be expressed as a MEM_REF. */
4025 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
4026 return path_var (NULL_TREE, 0);
4027 tree addr_parent = build1 (ADDR_EXPR,
4028 build_pointer_type (reg->get_type ()),
4030 return path_var (build2 (MEM_REF,
4032 addr_parent, offset_pv.m_tree),
4033 parent_pv.m_stack_depth);
4037 return path_var (NULL_TREE, 0);
4042 = get_representative_path_var (reg->get_parent_region (), visited);
4044 return path_var (NULL_TREE, 0);
4045 return path_var (build1 (NOP_EXPR,
4048 parent_pv.m_stack_depth);
4051 case RK_HEAP_ALLOCATED:
4053 /* No good way to express heap-allocated/alloca regions as trees. */
4054 return path_var (NULL_TREE, 0);
4058 const string_region *string_reg = as_a <const string_region *> (reg);
4059 return path_var (string_reg->get_string_cst (), 0);
4065 return path_var (NULL_TREE, 0);
4069 /* Attempt to return a path_var that represents REG, or return
4071 For example, a region for a field of a local would be a path_var
4072 wrapping a COMPONENT_REF.
4073 Use VISITED to prevent infinite mutual recursion with the overload for
4076 This function defers to get_representative_path_var_1 to do the work;
4077 it adds verification that get_representative_path_var_1 returned a tree
4078 of the correct type. */
4081 region_model::get_representative_path_var (const region *reg,
4082 svalue_set *visited) const
4084 path_var result = get_representative_path_var_1 (reg, visited);
4086 /* Verify that the result has the same type as REG, if any. */
4087 if (result.m_tree && reg->get_type ())
4088 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4093 /* Update this model for any phis in SNODE, assuming we came from
4094 LAST_CFG_SUPEREDGE. */
4097 region_model::update_for_phis (const supernode *snode,
4098 const cfg_superedge *last_cfg_superedge,
4099 region_model_context *ctxt)
4101 gcc_assert (last_cfg_superedge);
4103 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4104 are effectively handled simultaneously. */
4105 const region_model old_state (*this);
/* Iterate over the phi nodes of SNODE; the const_cast is needed because
   the gphi iteration API is non-const. */
4107 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4108 !gsi_end_p (gpi); gsi_next (&gpi))
4110 gphi *phi = gpi.phi ();
4112 tree src = last_cfg_superedge->get_phi_arg (phi);
4113 tree lhs = gimple_phi_result (phi);
4115 /* Update next_state based on phi and old_state. */
4116 handle_phi (phi, lhs, src, old_state, ctxt);
4120 /* Attempt to update this model for taking EDGE (where the last statement
4121 was LAST_STMT), returning true if the edge can be taken, false
4123 When returning false, if OUT is non-NULL, write a new rejected_constraint
4126 For CFG superedges where LAST_STMT is a conditional or a switch
4127 statement, attempt to add the relevant conditions for EDGE to this
4128 model, returning true if they are feasible, or false if they are
4131 For call superedges, push frame information and store arguments
4134 For return superedges, pop frame information and store return
4135 values into any lhs.
4137 Rejection of call/return superedges happens elsewhere, in
4138 program_point::on_edge (i.e. based on program point, rather
4139 than program state). */
4142 region_model::maybe_update_for_edge (const superedge &edge,
4143 const gimple *last_stmt,
4144 region_model_context *ctxt,
4145 rejected_constraint **out)
4147 /* Handle frame updates for interprocedural edges. */
4148 switch (edge.m_kind)
4153 case SUPEREDGE_CALL:
4155 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
4156 update_for_call_superedge (*call_edge, ctxt);
4160 case SUPEREDGE_RETURN:
4162 const return_superedge *return_edge
4163 = as_a <const return_superedge *> (&edge);
4164 update_for_return_superedge (*return_edge, ctxt);
4168 case SUPEREDGE_INTRAPROCEDURAL_CALL:
4169 /* This is a no-op for call summaries; we should already
4170 have handled the effect of the call summary at the call stmt. */
/* Without a last statement there are no conditions to apply. */
4174 if (last_stmt == NULL)
4177 /* Apply any constraints for conditionals/switch statements. */
4179 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
4181 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
4182 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
4185 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
4187 const switch_cfg_superedge *switch_sedge
4188 = as_a <const switch_cfg_superedge *> (&edge);
4189 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
4193 /* Apply any constraints due to an exception being thrown. */
4194 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
4195 if (cfg_sedge->get_flags () & EDGE_EH)
4196 return apply_constraints_for_exception (last_stmt, ctxt, out);
4201 /* Push a new frame_region on to the stack region.
4202 Populate the frame_region with child regions for the function call's
4203 parameters, using values from the arguments at the callsite in the
4207 region_model::update_for_gcall (const gcall *call_stmt,
4208 region_model_context *ctxt,
4211 /* Build a vec of argument svalues, using the current top
4212 frame for resolving tree expressions. */
4213 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
4215 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4217 tree arg = gimple_call_arg (call_stmt, i);
4218 arg_svals.quick_push (get_rvalue (arg, ctxt));
4223 /* Get the function * from the gcall. */
4224 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
4225 callee = DECL_STRUCT_FUNCTION (fn_decl);
4228 push_frame (callee, &arg_svals, ctxt);
4231 /* Pop the top-most frame_region from the stack, and copy the return
4232 region's values (if any) into the region for the lvalue of the LHS of
4233 the call (if any). */
4236 region_model::update_for_return_gcall (const gcall *call_stmt,
4237 region_model_context *ctxt)
4239 /* Get the lvalue for the result of the call, passing it to pop_frame,
4240 so that pop_frame can determine the region with respect to the
4242 tree lhs = gimple_call_lhs (call_stmt);
/* LHS may be NULL_TREE for a call whose result is unused; pop_frame
   handles that case. */
4243 pop_frame (lhs, NULL, ctxt);
4246 /* Extract calling information from the superedge and update the model for the
/* Thin wrapper: fetch the gcall from the interprocedural edge and defer
   to update_for_gcall. */
4250 region_model::update_for_call_superedge (const call_superedge &call_edge,
4251 region_model_context *ctxt)
4253 const gcall *call_stmt = call_edge.get_call_stmt ();
4254 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
4257 /* Extract calling information from the return superedge and update the model
4258 for the returning call. */
4261 region_model::update_for_return_superedge (const return_superedge &return_edge,
4262 region_model_context *ctxt)
4264 const gcall *call_stmt = return_edge.get_call_stmt ();
4265 update_for_return_gcall (call_stmt, ctxt);
4268 /* Attempt to use R to replay SUMMARY into this object.
4269 Return true if it is possible. */
4272 region_model::replay_call_summary (call_summary_replay &r,
4273 const region_model &summary)
/* Summaries are computed for a single frame. */
4275 gcc_assert (summary.get_stack_depth () == 1);
4277 m_store.replay_call_summary (r, summary.m_store);
4279 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
/* Map each dynamic-extent entry from summary space into caller space. */
4282 for (auto kv : summary.m_dynamic_extents)
4284 const region *summary_reg = kv.first;
4285 const region *caller_reg = r.convert_region_from_summary (summary_reg);
4288 const svalue *summary_sval = kv.second;
4289 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
4292 m_dynamic_extents.put (caller_reg, caller_sval);
4298 /* Given a true or false edge guarded by conditional statement COND_STMT,
4299 determine appropriate constraints for the edge to be taken.
4301 If they are feasible, add the constraints and return true.
4303 Return false if the constraints contradict existing knowledge
4304 (and so the edge should not be taken).
4305 When returning false, if OUT is non-NULL, write a new rejected_constraint
4309 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4310 const gcond *cond_stmt,
4311 region_model_context *ctxt,
4312 rejected_constraint **out)
4314 ::edge cfg_edge = sedge.get_cfg_edge ();
4315 gcc_assert (cfg_edge != NULL);
4316 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
4318 enum tree_code op = gimple_cond_code (cond_stmt);
4319 tree lhs = gimple_cond_lhs (cond_stmt);
4320 tree rhs = gimple_cond_rhs (cond_stmt);
/* On the false edge, constrain with the inverted comparison. */
4321 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4322 op = invert_tree_comparison (op, false /* honor_nans */);
4323 return add_constraint (lhs, op, rhs, ctxt, out);
4326 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
4327 for the edge to be taken.
4329 If they are feasible, add the constraints and return true.
4331 Return false if the constraints contradict existing knowledge
4332 (and so the edge should not be taken).
4333 When returning false, if OUT is non-NULL, write a new rejected_constraint
4337 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
4338 const gswitch *switch_stmt,
4339 region_model_context *ctxt,
4340 rejected_constraint **out)
/* Get the value ranges covered by the cases leading to this EDGE. */
4342 bounded_ranges_manager *ranges_mgr = get_range_manager ();
4343 const bounded_ranges *all_cases_ranges
4344 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
4345 tree index = gimple_switch_index (switch_stmt);
4346 const svalue *index_sval = get_rvalue (index, ctxt);
4347 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
4349 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
4350 if (sat && ctxt && !all_cases_ranges->empty_p ())
4351 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
4355 /* Apply any constraints due to an exception being thrown at LAST_STMT.
4357 If they are feasible, add the constraints and return true.
4359 Return false if the constraints contradict existing knowledge
4360 (and so the edge should not be taken).
4361 When returning false, if OUT is non-NULL, write a new rejected_constraint
4365 region_model::apply_constraints_for_exception (const gimple *last_stmt,
4366 region_model_context *ctxt,
4367 rejected_constraint **out)
4369 gcc_assert (last_stmt);
4370 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
4371 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
/* Only operator new/new[] (single-argument forms) are special-cased. */
4372 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
4373 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
4375 /* We have an exception thrown from operator new.
4376 Add a constraint that the result was NULL, to avoid a false
4377 leak report due to the result being lost when following
4379 if (tree lhs = gimple_call_lhs (call))
4380 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
4386 /* For use with push_frame when handling a top-level call within the analysis.
4387 PARAM has a defined but unknown initial value.
4388 Anything it points to has escaped, since the calling context "knows"
4389 the pointer, and thus calls to unknown functions could read/write into
4393 region_model::on_top_level_param (tree param,
4394 region_model_context *ctxt)
/* Only pointer-typed params require escape handling; non-pointers keep
   their initial_svalue. */
4396 if (POINTER_TYPE_P (TREE_TYPE (param)))
4398 const region *param_reg = get_lvalue (param, ctxt);
4399 const svalue *init_ptr_sval
4400 = m_mgr->get_or_create_initial_value (param_reg);
4401 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
4402 m_store.mark_as_escaped (pointee_reg);
4406 /* Update this region_model to reflect pushing a frame onto the stack
4409 If ARG_SVALS is non-NULL, use it to populate the parameters
4411 Otherwise, the params have their initial_svalues.
4413 Return the frame_region for the new frame. */
4416 region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
4417 region_model_context *ctxt)
4419 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
4422 /* Arguments supplied from a caller frame. */
4423 tree fndecl = fun->decl;
4425 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4426 iter_parm = DECL_CHAIN (iter_parm), ++idx)
4428 /* If there's a mismatching declaration, the call stmt might
4429 not have enough args. Handle this case by leaving the
4430 rest of the params as uninitialized. */
4431 if (idx >= arg_svals->length ())
/* Prefer the SSA default-def of the param, if it has one, so the
   value is bound where later reads will look for it. */
4433 tree parm_lval = iter_parm;
4434 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4435 parm_lval = parm_default_ssa;
4436 const region *parm_reg = get_lvalue (parm_lval, ctxt);
4437 const svalue *arg_sval = (*arg_svals)[idx];
4438 set_value (parm_reg, arg_sval, ctxt);
4441 /* Handle any variadic args. */
4442 unsigned va_arg_idx = 0;
4443 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
4445 const svalue *arg_sval = (*arg_svals)[idx];
4446 const region *var_arg_reg
4447 = m_mgr->get_var_arg_region (m_current_frame,
4449 set_value (var_arg_reg, arg_sval, ctxt);
4454 /* Otherwise we have a top-level call within the analysis. The params
4455 have defined but unknown initial values.
4456 Anything they point to has escaped. */
4457 tree fndecl = fun->decl;
4458 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4459 iter_parm = DECL_CHAIN (iter_parm))
4461 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4462 on_top_level_param (parm_default_ssa, ctxt);
4464 on_top_level_param (iter_parm, ctxt);
4468 return m_current_frame;
4471 /* Get the function of the top-most frame in this region_model's stack.
4472 There must be such a frame. */
4475 region_model::get_current_function () const
4477 const frame_region *frame = get_current_frame ();
4479 return frame->get_function ();
4482 /* Pop the topmost frame_region from this region_model's stack;
4484 If RESULT_LVALUE is non-null, copy any return value from the frame
4485 into the corresponding region (evaluated with respect to the *caller*
4486 frame, rather than the called frame).
4487 If OUT_RESULT is non-null, copy any return value from the frame
4490 Purge the frame region and all its descendent regions.
4491 Convert any pointers that point into such regions into
4492 POISON_KIND_POPPED_STACK svalues. */
4495 region_model::pop_frame (tree result_lvalue,
4496 const svalue **out_result,
4497 region_model_context *ctxt)
4499 gcc_assert (m_current_frame);
4501 const frame_region *frame_reg = m_current_frame;
4503 /* Notify state machines. */
4505 ctxt->on_pop_frame (frame_reg);
4507 /* Evaluate the result, within the callee frame. */
4508 tree fndecl = m_current_frame->get_function ()->decl;
4509 tree result = DECL_RESULT (fndecl);
4510 const svalue *retval = NULL;
4511 if (result && TREE_TYPE (result) != void_type_node)
4513 retval = get_rvalue (result, ctxt);
4515 *out_result = retval;
4518 /* Pop the frame. */
4519 m_current_frame = m_current_frame->get_calling_frame ();
4521 if (result_lvalue && retval)
4523 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
4524 the frame, but before poisoning pointers into the old frame. */
4525 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
4526 set_value (result_dst_reg, retval, ctxt);
/* Finally, poison any pointers into the now-dead frame. */
4529 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
4532 /* Get the number of frames in this region_model's stack. */
4535 region_model::get_stack_depth () const
4537 const frame_region *frame = get_current_frame ();
4539 return frame->get_stack_depth ();
4544 /* Get the frame_region with the given index within the stack.
4545 The frame_region must exist. */
4547 const frame_region *
4548 region_model::get_frame_at_index (int index) const
4550 const frame_region *frame = get_current_frame ();
4552 gcc_assert (index >= 0);
4553 gcc_assert (index <= frame->get_index ());
/* Walk from the innermost frame outwards until INDEX is reached. */
4554 while (index != frame->get_index ())
4556 frame = frame->get_calling_frame ();
4562 /* Unbind svalues for any regions in REG and below.
4563 Find any pointers to such regions; convert them to
4564 poisoned values of kind PKIND.
4565 Also purge any dynamic extents. */
4568 region_model::unbind_region_and_descendents (const region *reg,
4569 enum poison_kind pkind)
4571 /* Gather a set of base regions to be unbound. */
4572 hash_set<const region *> base_regs;
4573 for (store::cluster_map_t::iterator iter = m_store.begin ();
4574 iter != m_store.end (); ++iter)
4576 const region *iter_base_reg = (*iter).first;
4577 if (iter_base_reg->descendent_of_p (reg))
4578 base_regs.add (iter_base_reg);
/* Purge in a second pass, to avoid mutating m_store mid-iteration. */
4580 for (hash_set<const region *>::iterator iter = base_regs.begin ();
4581 iter != base_regs.end (); ++iter)
4582 m_store.purge_cluster (*iter);
4584 /* Find any pointers to REG or its descendents; convert to poisoned. */
4585 poison_any_pointers_to_descendents (reg, pkind);
4587 /* Purge dynamic extents of any base regions in REG and below
4588 (e.g. VLAs and alloca stack regions). */
4589 for (auto iter : m_dynamic_extents)
4591 const region *iter_reg = iter.first;
4592 if (iter_reg->descendent_of_p (reg))
4593 unset_dynamic_extents (iter_reg);
4597 /* Implementation of BindingVisitor.
4598 Update the bound svalues for regions below REG to use poisoned
4601 struct bad_pointer_finder
4603 bad_pointer_finder (const region *reg, enum poison_kind pkind,
4604 region_model_manager *mgr)
4605 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
/* Callback applied to each binding; SVAL is passed by reference so the
   binding can be rewritten in place. */
4608 void on_binding (const binding_key *, const svalue *&sval)
4610 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4612 const region *ptr_dst = ptr_sval->get_pointee ();
4613 /* Poison ptrs to descendents of REG, but not to REG itself,
4614 otherwise double-free detection doesn't work (since sm-state
4615 for "free" is stored on the original ptr svalue). */
4616 if (ptr_dst->descendent_of_p (m_reg)
4617 && ptr_dst != m_reg)
4619 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
4626 const region *m_reg;
4627 enum poison_kind m_pkind;
4628 region_model_manager *const m_mgr;
4632 /* Find any pointers to REG or its descendents; convert them to
4633 poisoned values of kind PKIND.
4634 Return the number of pointers that were poisoned. */
4637 region_model::poison_any_pointers_to_descendents (const region *reg,
4638 enum poison_kind pkind)
/* Delegate the traversal to the bad_pointer_finder visitor above. */
4640 bad_pointer_finder bv (reg, pkind, m_mgr);
4641 m_store.for_each_binding (bv);
4645 /* Attempt to merge THIS with OTHER_MODEL, writing the result
4646 to OUT_MODEL. Use POINT to distinguish values created as a
4647 result of merging. */
4650 region_model::can_merge_with_p (const region_model &other_model,
4651 const program_point &point,
4652 region_model *out_model,
4653 const extrinsic_state *ext_state,
4654 const program_state *state_a,
4655 const program_state *state_b) const
4657 gcc_assert (out_model);
4658 gcc_assert (m_mgr == other_model.m_mgr);
4659 gcc_assert (m_mgr == out_model->m_mgr);
/* Models in different frames cannot be merged. */
4661 if (m_current_frame != other_model.m_current_frame)
4663 out_model->m_current_frame = m_current_frame;
4665 model_merger m (this, &other_model, point, out_model,
4666 ext_state, state_a, state_b);
4668 if (!store::can_merge_p (&m_store, &other_model.m_store,
4669 &out_model->m_store, m_mgr->get_store_manager (),
4673 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
4674 &out_model->m_dynamic_extents))
4677 /* Merge constraints. */
4678 constraint_manager::merge (*m_constraints,
4679 *other_model.m_constraints,
4680 out_model->m_constraints);
4685 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
4689 region_model::get_fndecl_for_call (const gcall *call,
4690 region_model_context *ctxt)
4692 tree fn_ptr = gimple_call_fn (call);
4693 if (fn_ptr == NULL_TREE)
4695 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
4696 if (const region_svalue *fn_ptr_ptr
4697 = fn_ptr_sval->dyn_cast_region_svalue ())
4699 const region *reg = fn_ptr_ptr->get_pointee ();
4700 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
4702 tree fn_decl = fn_reg->get_fndecl ();
4703 cgraph_node *node = cgraph_node::get (fn_decl);
/* Resolve aliases so callers see the real callee decl. */
4706 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
4708 return ultimate_node->decl;
4715 /* Would be much simpler to use a lambda here, if it were supported. */
/* Closure-like data passed to append_regions_cb. */
4717 struct append_regions_cb_data
4719 const region_model *model;
4720 auto_vec<const decl_region *> *out;
4723 /* Populate *OUT with all decl_regions in the current
4724 frame that have clusters within the store. */
4728 get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
4730 append_regions_cb_data data;
4733 m_store.for_each_cluster (append_regions_cb, &data);
4736 /* Implementation detail of get_regions_for_current_frame. */
4739 region_model::append_regions_cb (const region *base_reg,
4740 append_regions_cb_data *cb_data)
/* Only consider regions directly within the current frame. */
4742 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4744 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
4745 cb_data->out->safe_push (decl_reg);
4749 /* Abstract class for diagnostics related to the use of
4750 floating-point arithmetic where precision is needed. */
4752 class imprecise_floating_point_arithmetic : public pending_diagnostic
4755 int get_controlling_option () const final override
4757 return OPT_Wanalyzer_imprecise_fp_arithmetic;
4761 /* Concrete diagnostic to complain about uses of floating-point arithmetic
4762 in the size argument of malloc etc. */
4764 class float_as_size_arg : public imprecise_floating_point_arithmetic
4767 float_as_size_arg (tree arg) : m_arg (arg)
4770 const char *get_kind () const final override
4772 return "float_as_size_arg_diagnostic";
4775 bool subclass_equal_p (const pending_diagnostic &other) const final override
4777 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
4780 bool emit (rich_location *rich_loc) final override
4782 diagnostic_metadata m;
4783 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
4784 "use of floating-point arithmetic here might"
4785 " yield unexpected results");
/* Only add the follow-up note if the warning itself was emitted. */
4787 inform (rich_loc->get_loc (), "only use operands of an integer type"
4788 " inside the size argument");
4792 label_text describe_final_event (const evdesc::final_event &ev) final
/* If the specific operand is known, name it; otherwise be generic. */
4796 return ev.formatted_print ("operand %qE is of type %qT",
4797 m_arg, TREE_TYPE (m_arg));
4798 return ev.formatted_print ("at least one operand of the size argument is"
4799 " of a floating-point type");
4806 /* Visitor to find uses of floating-point variables/constants in an svalue. */
4808 class contains_floating_point_visitor : public visitor
4811 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
4813 root_sval->accept (this);
4816 const svalue *get_svalue_to_report ()
4821 void visit_constant_svalue (const constant_svalue *sval) final override
4823 /* At the point the analyzer runs, constant integer operands in a floating
4824 point expression are already implicitly converted to floating-points.
4825 Thus, we do prefer to report non-constants such that the diagnostic
4826 always reports a floating-point operand. */
4827 tree type = sval->get_type ();
4828 if (type && FLOAT_TYPE_P (type) && !m_result)
4832 void visit_conjured_svalue (const conjured_svalue *sval) final override
4834 tree type = sval->get_type ();
4835 if (type && FLOAT_TYPE_P (type))
4839 void visit_initial_svalue (const initial_svalue *sval) final override
4841 tree type = sval->get_type ();
4842 if (type && FLOAT_TYPE_P (type))
4847 /* Non-null if at least one floating-point operand was found. */
4848 const svalue *m_result;
4851 /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
4854 region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
4855 region_model_context *ctxt) const
/* The visitor walks SIZE_IN_BYTES; a non-null result is the
   floating-point operand to report. */
4859 contains_floating_point_visitor v (size_in_bytes);
4860 if (const svalue *float_sval = v.get_svalue_to_report ())
4862 tree diag_arg = get_representative_tree (float_sval);
4863 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
4867 /* Return a region describing a heap-allocated block of memory.
4868 Use CTXT to complain about tainted sizes.
4870 Reuse an existing heap_allocated_region if it's not being referenced by
4871 this region_model; otherwise create a new one. */
4874 region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
4875 region_model_context *ctxt)
4877 /* Determine which regions are referenced in this region_model, so that
4878 we can reuse an existing heap_allocated_region if it's not in use on
4880 auto_sbitmap base_regs_in_use (m_mgr->get_num_regions ());
4881 get_referenced_base_regions (base_regs_in_use);
4883 = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
/* Only record an extent if the size is of a type compatible with
   size_t; otherwise leave the extent unknown.  */
4884 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
4885 set_dynamic_extents (reg, size_in_bytes, ctxt);
4889 /* Populate OUT_IDS with the set of IDs of those base regions which are
4890 reachable in this region_model. */
4893 region_model::get_referenced_base_regions (auto_sbitmap &out_ids) const
4895 reachable_regions reachable_regs (const_cast<region_model *> (this));
/* Seed reachability from every cluster in the store.  */
4896 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
4898 /* Get regions for locals that have explicitly bound values. */
4899 for (store::cluster_map_t::iterator iter = m_store.begin ();
4900 iter != m_store.end (); ++iter)
4902 const region *base_reg = (*iter).first;
/* A base region whose parent is a frame is a local variable.  */
4903 if (const region *parent = base_reg->get_parent_region ())
4904 if (parent->get_kind () == RK_FRAME)
4905 reachable_regs.add (base_reg, false);
/* Convert the reachable region set into a bitmap of region IDs.  */
4908 bitmap_clear (out_ids);
4909 for (auto iter_reg : reachable_regs)
4910 bitmap_set_bit (out_ids, iter_reg->get_id ());
4913 /* Return a new region describing a block of memory allocated within the
4915 Use CTXT to complain about tainted sizes. */
4918 region_model::create_region_for_alloca (const svalue *size_in_bytes,
4919 region_model_context *ctxt)
/* The new region lives within the current stack frame (alloca semantics).  */
4921 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
/* As with heap allocation: only track the extent when the size's type
   is compatible with size_t.  */
4922 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
4923 set_dynamic_extents (reg, size_in_bytes, ctxt);
4927 /* Record that the size of REG is SIZE_IN_BYTES.
4928 Use CTXT to complain about tainted sizes. */
4931 region_model::set_dynamic_extents (const region *reg,
4932 const svalue *size_in_bytes,
4933 region_model_context *ctxt)
4935 assert_compat_types (size_in_bytes->get_type (), size_type_node);
/* Run size sanity checks (taint and floating-point operands) before
   recording the extent.  */
4938 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
4940 check_dynamic_size_for_floats (size_in_bytes, ctxt);
4942 m_dynamic_extents.put (reg, size_in_bytes);
4945 /* Get the recording of REG in bytes, or NULL if no dynamic size was
4949 region_model::get_dynamic_extents (const region *reg) const
/* Look up the svalue previously recorded by set_dynamic_extents.  */
4951 if (const svalue * const *slot = m_dynamic_extents.get (reg))
4956 /* Unset any recorded dynamic size of REG. */
4959 region_model::unset_dynamic_extents (const region *reg)
4961 m_dynamic_extents.remove (reg);
4964 /* Information of the layout of a RECORD_TYPE, capturing it as a vector
4965 of items, where each item is either a field or padding. */
4970 /* An item within a record; either a field, or padding after a field. */
/* Construct an item covering bit-range BR; IS_PADDING distinguishes
   padding items from real fields.  */
4974 item (const bit_range &br,
4979 m_is_padding (is_padding)
/* Accessors for the item's bit-range endpoints.  */
4983 bit_offset_t get_start_bit_offset () const
4985 return m_bit_range.get_start_bit_offset ();
4987 bit_offset_t get_next_bit_offset () const
4989 return m_bit_range.get_next_bit_offset ();
/* Return true iff OFFSET falls within this item's bit-range.  */
4992 bool contains_p (bit_offset_t offset) const
4994 return m_bit_range.contains_p (offset);
/* Debug dump: "FIELD, RANGE" or "padding after FIELD, RANGE".  */
4997 void dump_to_pp (pretty_printer *pp) const
5000 pp_printf (pp, "padding after %qD", m_field);
5002 pp_printf (pp, "%qD", m_field);
5003 pp_string (pp, ", ");
5004 m_bit_range.dump_to_pp (pp);
5007 bit_range m_bit_range;
/* Build the layout for RECORD_TYPE: one item per FIELD_DECL, with
   padding items inserted wherever the fields leave gaps.  */
5012 record_layout (tree record_type)
5014 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
5016 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
5017 iter = DECL_CHAIN (iter))
5019 if (TREE_CODE (iter) == FIELD_DECL)
5021 int iter_field_offset = int_bit_position (iter);
5022 bit_size_t size_in_bits;
/* Skip fields whose size isn't known at compile time.  */
5023 if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
/* Add padding between the previous item and this field, if any.  */
5026 maybe_pad_to (iter_field_offset);
5029 m_items.safe_push (item (bit_range (iter_field_offset,
5035 /* Add any trailing padding. */
5036 bit_size_t size_in_bits;
5037 if (int_size_in_bits (record_type, &size_in_bits))
5038 maybe_pad_to (size_in_bits);
/* Dump all items, one per line.  */
5041 void dump_to_pp (pretty_printer *pp) const
5045 FOR_EACH_VEC_ELT (m_items, i, it)
5047 it->dump_to_pp (pp);
5052 DEBUG_FUNCTION void dump () const
5055 pp_format_decoder (&pp) = default_tree_printer;
5056 pp.buffer->stream = stderr;
/* Return the item containing bit OFFSET, or NULL (linear scan).  */
5061 const record_layout::item *get_item_at (bit_offset_t offset) const
5065 FOR_EACH_VEC_ELT (m_items, i, it)
5066 if (it->contains_p (offset))
5072 /* Subroutine of ctor. Add padding item to NEXT_OFFSET if necessary. */
5074 void maybe_pad_to (bit_offset_t next_offset)
5076 if (m_items.length () > 0)
5078 const item &last_item = m_items[m_items.length () - 1];
5079 bit_offset_t offset_after_last_item
5080 = last_item.get_next_bit_offset ();
5081 if (next_offset > offset_after_last_item)
5083 bit_size_t padding_size
5084 = next_offset - offset_after_last_item;
/* Padding is attributed to the field it follows.  */
5085 m_items.safe_push (item (bit_range (offset_after_last_item,
5087 last_item.m_field, true));
5092 auto_vec<item> m_items;
5095 /* A subclass of pending_diagnostic for complaining about uninitialized data
5096 being copied across a trust boundary to an untrusted output
5097 (e.g. copy_to_user infoleaks in the Linux kernel). */
5099 class exposure_through_uninit_copy
5100 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
/* SRC_REGION may be NULL; COPIED_SVAL must be poisoned or compound
   (i.e. at least partially uninitialized).  */
5103 exposure_through_uninit_copy (const region *src_region,
5104 const region *dest_region,
5105 const svalue *copied_sval)
5106 : m_src_region (src_region),
5107 m_dest_region (dest_region),
5108 m_copied_sval (copied_sval)
5110 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
5111 || m_copied_sval->get_kind () == SK_COMPOUND);
5114 const char *get_kind () const final override
5116 return "exposure_through_uninit_copy";
/* Deduplication: two diagnostics are the same if all three inputs match.  */
5119 bool operator== (const exposure_through_uninit_copy &other) const
5121 return (m_src_region == other.m_src_region
5122 && m_dest_region == other.m_dest_region
5123 && m_copied_sval == other.m_copied_sval);
5126 int get_controlling_option () const final override
5128 return OPT_Wanalyzer_exposure_through_uninit_copy;
/* Emit the warning, with wording tailored to the source memory space
   (generic / stack / heap), then add notes quantifying and locating
   the uninitialized bits, plus a fix-it hint for stack sources.  */
5131 bool emit (rich_location *rich_loc) final override
5133 diagnostic_metadata m;
5134 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
5136 enum memory_space mem_space = get_src_memory_space ();
5141 warned = warning_meta
5142 (rich_loc, m, get_controlling_option (),
5143 "potential exposure of sensitive information"
5144 " by copying uninitialized data across trust boundary");
5146 case MEMSPACE_STACK:
5147 warned = warning_meta
5148 (rich_loc, m, get_controlling_option (),
5149 "potential exposure of sensitive information"
5150 " by copying uninitialized data from stack across trust boundary");
5153 warned = warning_meta
5154 (rich_loc, m, get_controlling_option (),
5155 "potential exposure of sensitive information"
5156 " by copying uninitialized data from heap across trust boundary");
5161 location_t loc = rich_loc->get_loc ();
5162 inform_number_of_uninit_bits (loc);
5163 complain_about_uninit_ranges (loc);
/* A "= {0}" initializer only makes sense for stack decls.  */
5165 if (mem_space == MEMSPACE_STACK)
5166 maybe_emit_fixit_hint ();
/* Label for the final event in the diagnostic path, again tailored
   to the source memory space.  */
5171 label_text describe_final_event (const evdesc::final_event &) final override
5173 enum memory_space mem_space = get_src_memory_space ();
5177 return label_text::borrow ("uninitialized data copied here");
5179 case MEMSPACE_STACK:
5180 return label_text::borrow ("uninitialized data copied from stack here");
5183 return label_text::borrow ("uninitialized data copied from heap here");
5187 void mark_interesting_stuff (interesting_t *interest) final override
5190 interest->add_region_creation (m_src_region);
/* MEMSPACE_UNKNOWN when we have no source region hint.  */
5194 enum memory_space get_src_memory_space () const
5196 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
/* Count how many bits of the copied value are uninitialized:
   - fully-poisoned svalue: size of its type (0 if unknown);
   - compound svalue: sum of the sizes of the uninit bindings.  */
5199 bit_size_t calc_num_uninit_bits () const
5201 switch (m_copied_sval->get_kind ())
5208 const poisoned_svalue *poisoned_sval
5209 = as_a <const poisoned_svalue *> (m_copied_sval);
5210 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
5212 /* Give up if don't have type information. */
5213 if (m_copied_sval->get_type () == NULL_TREE)
5216 bit_size_t size_in_bits;
5217 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
5218 return size_in_bits;
5220 /* Give up if we can't get the size of the type. */
5226 const compound_svalue *compound_sval
5227 = as_a <const compound_svalue *> (m_copied_sval);
5228 bit_size_t result = 0;
5229 /* Find keys for uninit svals. */
5230 for (auto iter : *compound_sval)
5232 const svalue *sval = iter.second;
5233 if (const poisoned_svalue *psval
5234 = sval->dyn_cast_poisoned_svalue ())
5235 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5237 const binding_key *key = iter.first;
5238 const concrete_binding *ckey
5239 = key->dyn_cast_concrete_binding ();
5241 result += ckey->get_size_in_bits ();
/* Note at LOC how many bits/bytes are uninitialized, preferring bytes
   when the count is a whole number of bytes; silent when zero.  */
5249 void inform_number_of_uninit_bits (location_t loc) const
5251 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
5252 if (num_uninit_bits <= 0)
5254 if (num_uninit_bits % BITS_PER_UNIT == 0)
5256 /* Express in bytes. */
5257 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
5258 if (num_uninit_bytes == 1)
5259 inform (loc, "1 byte is uninitialized");
5262 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
5266 /* Express in bits. */
5267 if (num_uninit_bits == 1)
5268 inform (loc, "1 bit is uninitialized");
5271 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
/* For a compound copied value, emit one note per uninitialized
   binding, in sorted order, using the record layout (if the type is a
   RECORD_TYPE) to name fields and padding.  */
5275 void complain_about_uninit_ranges (location_t loc) const
5277 if (const compound_svalue *compound_sval
5278 = m_copied_sval->dyn_cast_compound_svalue ())
5280 /* Find keys for uninit svals. */
5281 auto_vec<const concrete_binding *> uninit_keys;
5282 for (auto iter : *compound_sval)
5284 const svalue *sval = iter.second;
5285 if (const poisoned_svalue *psval
5286 = sval->dyn_cast_poisoned_svalue ())
5287 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5289 const binding_key *key = iter.first;
5290 const concrete_binding *ckey
5291 = key->dyn_cast_concrete_binding ();
5293 uninit_keys.safe_push (ckey);
5296 /* Complain about them in sorted order. */
5297 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
5299 std::unique_ptr<record_layout> layout;
5301 tree type = m_copied_sval->get_type ();
5302 if (type && TREE_CODE (type) == RECORD_TYPE)
5304 // (std::make_unique is C++14)
5305 layout = std::unique_ptr<record_layout> (new record_layout (type));
5312 const concrete_binding *ckey;
5313 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
5315 bit_offset_t start_bit = ckey->get_start_bit_offset ();
5316 bit_offset_t next_bit = ckey->get_next_bit_offset ();
5317 complain_about_uninit_range (loc, start_bit, next_bit,
/* Describe the uninitialized half-open bit-range [START_BIT, NEXT_BIT).
   With a LAYOUT, walk the items covering the range and complain per
   field/padding item; otherwise fall back to raw bit/byte offsets.  */
5323 void complain_about_uninit_range (location_t loc,
5324 bit_offset_t start_bit,
5325 bit_offset_t next_bit,
5326 const record_layout *layout) const
5330 while (start_bit < next_bit)
5332 if (const record_layout::item *item
5333 = layout->get_item_at (start_bit))
5335 gcc_assert (start_bit >= item->get_start_bit_offset ());
5336 gcc_assert (start_bit < item->get_next_bit_offset ());
/* The item is fully uninit only if the range covers it exactly from
   its start; otherwise it's partially uninit.  */
5337 if (item->get_start_bit_offset () == start_bit
5338 && item->get_next_bit_offset () <= next_bit)
5339 complain_about_fully_uninit_item (*item);
5341 complain_about_partially_uninit_item (*item);
5342 start_bit = item->get_next_bit_offset ();
5350 if (start_bit >= next_bit)
5353 if (start_bit % 8 == 0 && next_bit % 8 == 0)
5355 /* Express in bytes. */
5356 byte_offset_t start_byte = start_bit / 8;
5357 byte_offset_t last_byte = (next_bit / 8) - 1;
5358 if (last_byte == start_byte)
5360 "byte %wu is uninitialized",
5361 start_byte.to_uhwi ());
5364 "bytes %wu - %wu are uninitialized",
5365 start_byte.to_uhwi (),
5366 last_byte.to_uhwi ());
5370 /* Express in bits. */
5371 bit_offset_t last_bit = next_bit - 1;
5372 if (last_bit == start_bit)
5374 "bit %wu is uninitialized",
5375 start_bit.to_uhwi ());
5378 "bits %wu - %wu are uninitialized",
5379 start_bit.to_uhwi (),
5380 last_bit.to_uhwi ());
/* Note that ITEM (a field, or padding after a field) is entirely
   uninitialized, sized in bytes where possible, else bits.  */
5385 complain_about_fully_uninit_item (const record_layout::item &item)
5387 tree field = item.m_field;
5388 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
5389 if (item.m_is_padding)
5391 if (num_bits % 8 == 0)
5393 /* Express in bytes. */
5394 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5396 inform (DECL_SOURCE_LOCATION (field),
5397 "padding after field %qD is uninitialized (1 byte)",
5400 inform (DECL_SOURCE_LOCATION (field),
5401 "padding after field %qD is uninitialized (%wu bytes)",
5402 field, num_bytes.to_uhwi ());
5406 /* Express in bits. */
5408 inform (DECL_SOURCE_LOCATION (field),
5409 "padding after field %qD is uninitialized (1 bit)",
5412 inform (DECL_SOURCE_LOCATION (field),
5413 "padding after field %qD is uninitialized (%wu bits)",
5414 field, num_bits.to_uhwi ());
5419 if (num_bits % 8 == 0)
5421 /* Express in bytes. */
5422 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5424 inform (DECL_SOURCE_LOCATION (field),
5425 "field %qD is uninitialized (1 byte)", field);
5427 inform (DECL_SOURCE_LOCATION (field),
5428 "field %qD is uninitialized (%wu bytes)",
5429 field, num_bytes.to_uhwi ());
5433 /* Express in bits. */
5435 inform (DECL_SOURCE_LOCATION (field),
5436 "field %qD is uninitialized (1 bit)", field);
5438 inform (DECL_SOURCE_LOCATION (field),
5439 "field %qD is uninitialized (%wu bits)",
5440 field, num_bits.to_uhwi ());
/* Note that ITEM is only partially uninitialized (no size detail).  */
5446 complain_about_partially_uninit_item (const record_layout::item &item)
5448 tree field = item.m_field;
5449 if (item.m_is_padding)
5450 inform (DECL_SOURCE_LOCATION (field),
5451 "padding after field %qD is partially uninitialized",
5454 inform (DECL_SOURCE_LOCATION (field),
5455 "field %qD is partially uninitialized",
5457 /* TODO: ideally we'd describe what parts are uninitialized. */
/* If the source region has a decl, suggest a "= {0}" initializer
   as a fix-it at that decl's location.  */
5460 void maybe_emit_fixit_hint () const
5462 if (tree decl = m_src_region->maybe_get_decl ())
5464 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
5465 hint_richloc.add_fixit_insert_after (" = {0}");
5466 inform (&hint_richloc,
5467 "suggest forcing zero-initialization by"
5468 " providing a %<{0}%> initializer");
5473 const region *m_src_region;
5474 const region *m_dest_region;
5475 const svalue *m_copied_sval;
5478 /* Return true if any part of SVAL is uninitialized. */
5481 contains_uninit_p (const svalue *sval)
/* Use a local visitor that latches to true on the first
   POISON_KIND_UNINIT poisoned svalue encountered.  */
5483 struct uninit_finder : public visitor
5486 uninit_finder () : m_found_uninit (false) {}
5487 void visit_poisoned_svalue (const poisoned_svalue *sval)
5489 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
5490 m_found_uninit = true;
5492 bool m_found_uninit;
5498 return v.m_found_uninit;
5501 /* Function for use by plugins when simulating writing data through a
5502 pointer to an "untrusted" region DST_REG (and thus crossing a security
5503 boundary), such as copying data to user space in an OS kernel.
5505 Check that COPIED_SVAL is fully initialized. If not, complain about
5506 an infoleak to CTXT.
5508 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
5509 as to where COPIED_SVAL came from. */
5512 region_model::maybe_complain_about_infoleak (const region *dst_reg,
5513 const svalue *copied_sval,
5514 const region *src_reg,
5515 region_model_context *ctxt)
5517 /* Check for exposure. */
5518 if (contains_uninit_p (copied_sval))
5519 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
5524 /* Set errno to a positive symbolic int, as if some error has occurred. */
5527 region_model::set_errno (const call_details &cd)
5529 const region *errno_reg = m_mgr->get_errno_region ();
5530 conjured_purge p (this, cd.get_ctxt ());
/* Conjure a fresh symbolic int for the new errno value...  */
5531 const svalue *new_errno_sval
5532 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
5533 cd.get_call_stmt (),
5536 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
/* ...constrain it to be > 0, then store it into the errno region.  */
5537 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
5538 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
5541 /* class noop_region_model_context : public region_model_context. */
/* No-op implementations: this context discards notes, bifurcations,
   and path terminations.  */
5544 noop_region_model_context::add_note (std::unique_ptr<pending_note>)
5549 noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
5554 noop_region_model_context::terminate_path ()
5558 /* struct model_merger. */
5560 /* Dump a multiline representation of this merger to PP. */
5563 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
/* Dump the two input models followed by the merged result.  */
5565 pp_string (pp, "model A:");
5567 m_model_a->dump_to_pp (pp, simple, true);
5570 pp_string (pp, "model B:");
5572 m_model_b->dump_to_pp (pp, simple, true);
5575 pp_string (pp, "merged model:");
5577 m_merged_model->dump_to_pp (pp, simple, true);
5581 /* Dump a multiline representation of this merger to FILE. */
5584 model_merger::dump (FILE *fp, bool simple) const
/* Set up a pretty-printer targeting FP, then delegate to dump_to_pp.  */
5587 pp_format_decoder (&pp) = default_tree_printer;
5588 pp_show_color (&pp) = pp_show_color (global_dc->printer);
5589 pp.buffer->stream = fp;
5590 dump_to_pp (&pp, simple);
5594 /* Dump a multiline representation of this merger to stderr. */
5597 model_merger::dump (bool simple) const
5599 dump (stderr, simple);
5602 /* Return true if it's OK to merge SVAL with other svalues. */
5605 model_merger::mergeable_svalue_p (const svalue *sval) const
5609 /* Reject merging svalues that have non-purgable sm-state,
5610 to avoid falsely reporting memory leaks by merging them
5611 with something else. For example, given a local var "p",
5612 reject the merger of a:
5613 store_a mapping "p" to a malloc-ed ptr
5615 store_b mapping "p" to a NULL ptr. */
/* SVAL must be purgable in both input states to be mergeable.  */
5617 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5620 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5628 /* Dump RMODEL fully to stderr (i.e. without summarization). */
5631 debug (const region_model &rmodel)
5633 rmodel.dump (false);
5636 /* class rejected_op_constraint : public rejected_constraint. */
/* Print the rejected constraint as "LHS OP RHS", evaluating the
   operand trees within a copy of the captured model.  */
5639 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
5641 region_model m (m_model);
5642 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
5643 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
5644 lhs_sval->dump_to_pp (pp, true);
5645 pp_printf (pp, " %s ", op_symbol_code (m_op));
5646 rhs_sval->dump_to_pp (pp, true);
5649 /* class rejected_ranges_constraint : public rejected_constraint. */
/* Print the rejected constraint as "EXPR in RANGES".  */
5652 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5654 region_model m (m_model);
5655 const svalue *sval = m.get_rvalue (m_expr, NULL);
5656 sval->dump_to_pp (pp, true);
5657 pp_string (pp, " in ");
5658 m_ranges->dump_to_pp (pp, true);
5663 /* engine's ctor.  Record the supergraph SG and pass LOGGER to the
   region_model_manager.  */
5665 engine::engine (const supergraph *sg, logger *logger)
5666 : m_sg (sg), m_mgr (logger)
5670 /* Dump the managed objects by class to LOGGER, and the per-class totals. */
5673 engine::log_stats (logger *logger) const
5675 m_mgr.log_stats (logger, true);
5682 namespace selftest {
5684 /* Build a constant tree of the given type from STR. */
5687 build_real_cst_from_string (tree type, const char *str)
/* Parse STR (e.g. "QNaN", "-Inf", "0.0") into a REAL_VALUE_TYPE and
   wrap it as a REAL_CST of TYPE.  */
5689 REAL_VALUE_TYPE real;
5690 real_from_string (&real, str);
5691 return build_real (type, real);
5694 /* Append various "interesting" constants to OUT (e.g. NaN). */
5697 append_interesting_constants (auto_vec<tree> *out)
/* Integer edge values...  */
5699 out->safe_push (build_int_cst (integer_type_node, 0));
5700 out->safe_push (build_int_cst (integer_type_node, 42));
5701 out->safe_push (build_int_cst (unsigned_type_node, 0));
5702 out->safe_push (build_int_cst (unsigned_type_node, 42));
/* ...and floating-point specials: NaNs, signed zeros, infinities.  */
5703 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
5704 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
5705 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
5706 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
5707 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
5708 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
5709 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
5710 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
5713 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
5714 if the underlying constants aren't comparable. */
5717 test_tree_cmp_on_constants ()
5719 auto_vec<tree> csts;
5720 append_interesting_constants (&csts);
5722 /* Try sorting every triple. */
5723 const unsigned num = csts.length ();
5724 for (unsigned i = 0; i < num; i++)
5725 for (unsigned j = 0; j < num; j++)
5726 for (unsigned k = 0; k < num; k++)
5728 auto_vec<tree> v (3);
5729 v.quick_push (csts[i]);
5730 v.quick_push (csts[j]);
5731 v.quick_push (csts[k]);
5736 /* Implementation detail of the ASSERT_CONDITION_* macros. */
/* Overload taking svalue operands: evaluate LHS OP RHS within MODEL
   and assert the resulting tristate matches EXPECTED.  */
5739 assert_condition (const location &loc,
5740 region_model &model,
5741 const svalue *lhs, tree_code op, const svalue *rhs,
5744 tristate actual = model.eval_condition (lhs, op, rhs);
5745 ASSERT_EQ_AT (loc, actual, expected);
5748 /* Implementation detail of the ASSERT_CONDITION_* macros. */
/* Overload taking tree operands.  */
5751 assert_condition (const location &loc,
5752 region_model &model,
5753 tree lhs, tree_code op, tree rhs,
5756 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
5757 ASSERT_EQ_AT (loc, actual, expected);
5760 /* Implementation detail of ASSERT_DUMP_TREE_EQ.  Dump T via a
   tree-aware pretty-printer and assert the text equals EXPECTED.  */
5763 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
5765 auto_fix_quotes sentinel;
5767 pp_format_decoder (&pp) = default_tree_printer;
5769 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5772 /* Assert that dump_tree (T) is EXPECTED. */
5774 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
5775 SELFTEST_BEGIN_STMT \
5776 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
5779 /* Implementation detail of ASSERT_DUMP_EQ.  Dump MODEL (optionally
   summarized) and assert the text equals EXPECTED.  */
5782 assert_dump_eq (const location &loc,
5783 const region_model &model,
5785 const char *expected)
5787 auto_fix_quotes sentinel;
5789 pp_format_decoder (&pp) = default_tree_printer;
5791 model.dump_to_pp (&pp, summarize, true);
5792 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5795 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
5797 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
5798 SELFTEST_BEGIN_STMT \
5799 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
5802 /* Smoketest for region_model::dump_to_pp.  Check the dump of an empty
   model, both unsummarized and summarized.  */
5807 region_model_manager mgr;
5808 region_model model (&mgr);
5810 ASSERT_DUMP_EQ (model, false,
5812 "m_called_unknown_fn: FALSE\n"
5813 "constraint_manager:\n"
5816 ASSERT_DUMP_EQ (model, true,
5818 "m_called_unknown_fn: FALSE\n"
5819 "constraint_manager:\n"
5824 /* Helper function for selftests. Create a struct or union type named NAME,
5825 with the fields given by the FIELD_DECLS in FIELDS.
5826 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
5827 create a UNION_TYPE. */
5830 make_test_compound_type (const char *name, bool is_struct,
5831 const auto_vec<tree> *fields)
5833 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
5834 TYPE_NAME (t) = get_identifier (name);
/* Chain the FIELD_DECLs together, preserving the caller's order
   (chainon prepends, hence the nreverse below).  */
5837 tree fieldlist = NULL;
5840 FOR_EACH_VEC_ELT (*fields, i, field)
5842 gcc_assert (TREE_CODE (field) == FIELD_DECL);
5843 DECL_CONTEXT (field) = t;
5844 fieldlist = chainon (field, fieldlist);
5846 fieldlist = nreverse (fieldlist);
5847 TYPE_FIELDS (t) = fieldlist;
5853 /* Selftest fixture for creating the type "struct coord {int x; int y; };". */
/* The ctor builds the two int FIELD_DECLs and the enclosing struct.  */
5859 auto_vec<tree> fields;
5860 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
5861 get_identifier ("x"), integer_type_node);
5862 fields.safe_push (m_x_field);
5863 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
5864 get_identifier ("y"), integer_type_node);
5865 fields.safe_push (m_y_field);
5866 m_coord_type = make_test_compound_type ("coord", true, &fields);
5874 /* Verify usage of a struct.  Build a global "struct coord c", set its
   fields, and check the bit offsets reported by region::get_offset.  */
5881 tree c = build_global_decl ("c", ct.m_coord_type);
5882 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
5883 c, ct.m_x_field, NULL_TREE);
5884 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
5885 c, ct.m_y_field, NULL_TREE);
5887 tree int_17 = build_int_cst (integer_type_node, 17);
5888 tree int_m3 = build_int_cst (integer_type_node, -3);
5890 region_model_manager mgr;
5891 region_model model (&mgr);
5892 model.set_value (c_x, int_17, NULL);
5893 model.set_value (c_y, int_m3, NULL);
5895 /* Verify get_offset for "c.x". */
5897 const region *c_x_reg = model.get_lvalue (c_x, NULL);
5898 region_offset offset = c_x_reg->get_offset (&mgr);
5899 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
5900 ASSERT_EQ (offset.get_bit_offset (), 0);
5903 /* Verify get_offset for "c.y". */
5905 const region *c_y_reg = model.get_lvalue (c_y, NULL);
5906 region_offset offset = c_y_reg->get_offset (&mgr);
5907 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
/* "y" follows "x", so its bit offset is the size of one int.  */
5908 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
5912 /* Verify usage of an array element: set a['0'] in a global
   "char a[10]" via an ARRAY_REF lvalue.  */
5917 tree tlen = size_int (10);
5918 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
5920 tree a = build_global_decl ("a", arr_type);
5922 region_model_manager mgr;
5923 region_model model (&mgr);
5924 tree int_0 = build_int_cst (integer_type_node, 0);
5925 tree a_0 = build4 (ARRAY_REF, char_type_node,
5926 a, int_0, NULL_TREE, NULL_TREE);
5927 tree char_A = build_int_cst (char_type_node, 'A');
5928 model.set_value (a_0, char_A, NULL);
5931 /* Verify that region_model::get_representative_tree works as expected. */
5934 test_get_representative_tree ()
5936 region_model_manager mgr;
/* A STRING_CST should be represented by itself.  */
5940 tree string_cst = build_string (4, "foo");
5941 region_model m (&mgr);
5942 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
5943 tree rep = m.get_representative_tree (str_sval);
5944 ASSERT_EQ (rep, string_cst);
5947 /* String literal. */
5949 tree string_cst_ptr = build_string_literal (4, "foo");
5950 region_model m (&mgr);
5951 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
5952 tree rep = m.get_representative_tree (str_sval);
5953 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
5956 /* Value of an element within an array. */
5958 tree tlen = size_int (10);
5959 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
5960 tree a = build_global_decl ("a", arr_type);
/* A placeholder stands in for "some tracked value" bound at a[3]/a[0].  */
5961 placeholder_svalue test_sval (char_type_node, "test value");
5963 /* Value of a[3]. */
5965 test_region_model_context ctxt;
5966 region_model model (&mgr);
5967 tree int_3 = build_int_cst (integer_type_node, 3);
5968 tree a_3 = build4 (ARRAY_REF, char_type_node,
5969 a, int_3, NULL_TREE, NULL_TREE);
5970 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
5971 model.set_value (a_3_reg, &test_sval, &ctxt);
5972 tree rep = model.get_representative_tree (&test_sval);
5973 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
5976 /* Value of a[0]. */
5978 test_region_model_context ctxt;
5979 region_model model (&mgr);
5980 tree idx = build_int_cst (integer_type_node, 0);
5981 tree a_0 = build4 (ARRAY_REF, char_type_node,
5982 a, idx, NULL_TREE, NULL_TREE);
5983 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
5984 model.set_value (a_0_reg, &test_sval, &ctxt);
5985 tree rep = model.get_representative_tree (&test_sval);
5986 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
5990 /* Value of a field within a struct. */
5994 tree c = build_global_decl ("c", ct.m_coord_type);
5995 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
5996 c, ct.m_x_field, NULL_TREE);
5997 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
5998 c, ct.m_y_field, NULL_TREE);
6000 test_region_model_context ctxt;
6002 /* Value of initial field. */
6004 region_model m (&mgr);
6005 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6006 placeholder_svalue test_sval_x (integer_type_node, "test x val");
6007 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6008 tree rep = m.get_representative_tree (&test_sval_x);
6009 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6012 /* Value of non-initial field. */
6014 region_model m (&mgr);
6015 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6016 placeholder_svalue test_sval_y (integer_type_node, "test y val");
6017 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6018 tree rep = m.get_representative_tree (&test_sval_y);
6019 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6024 /* Verify that calling region_model::get_rvalue repeatedly on the same
6025 tree constant retrieves the same svalue *. */
6028 test_unique_constants ()
6030 tree int_0 = build_int_cst (integer_type_node, 0);
6031 tree int_42 = build_int_cst (integer_type_node, 42);
6033 test_region_model_context ctxt;
6034 region_model_manager mgr;
6035 region_model model (&mgr);
/* Same tree constant -> same (pointer-identical) svalue.  */
6036 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
6037 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
6038 model.get_rvalue (int_42, &ctxt));
6039 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
/* None of the above should have triggered diagnostics.  */
6040 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
6042 /* A "(const int)42" will be a different tree from "(int)42)"... */
6043 tree const_int_type_node
6044 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
6045 tree const_int_42 = build_int_cst (const_int_type_node, 42);
6046 ASSERT_NE (int_42, const_int_42);
6047 /* It should have a different const_svalue. */
6048 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
6049 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
6050 ASSERT_NE (int_42_sval, const_int_42_sval);
6051 /* But they should compare as equal. */
6052 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
6053 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
6056 /* Verify that each type gets its own singleton unknown_svalue within a
6057 region_model_manager, and that NULL_TREE gets its own singleton. */
6060 test_unique_unknowns ()
6062 region_model_manager mgr;
6063 const svalue *unknown_int
6064 = mgr.get_or_create_unknown_svalue (integer_type_node);
6065 /* Repeated calls with the same type should get the same "unknown"
6067 const svalue *unknown_int_2
6068 = mgr.get_or_create_unknown_svalue (integer_type_node);
6069 ASSERT_EQ (unknown_int, unknown_int_2);
6071 /* Different types (or the NULL type) should have different
6073 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
6074 ASSERT_NE (unknown_NULL_type, unknown_int);
6076 /* Repeated calls with NULL for the type should get the same "unknown"
6078 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
6079 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
6082 /* Verify that initial_svalue are handled as expected. */
6085 test_initial_svalue_folding ()
6087 region_model_manager mgr;
6088 tree x = build_global_decl ("x", integer_type_node);
6089 tree y = build_global_decl ("y", integer_type_node);
6091 test_region_model_context ctxt;
6092 region_model model (&mgr);
/* Distinct globals get distinct initial values...  */
6093 const svalue *x_init = model.get_rvalue (x, &ctxt);
6094 const svalue *y_init = model.get_rvalue (y, &ctxt);
6095 ASSERT_NE (x_init, y_init);
/* ...and the initial value of x's region is x_init itself.  */
6096 const region *x_reg = model.get_lvalue (x, &ctxt);
6097 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6101 /* Verify that unary ops are folded as expected. */
6104 test_unaryop_svalue_folding ()
6106 region_model_manager mgr;
6107 tree x = build_global_decl ("x", integer_type_node);
6108 tree y = build_global_decl ("y", integer_type_node);
6110 test_region_model_context ctxt;
6111 region_model model (&mgr);
6112 const svalue *x_init = model.get_rvalue (x, &ctxt);
6113 const svalue *y_init = model.get_rvalue (y, &ctxt);
6114 const region *x_reg = model.get_lvalue (x, &ctxt);
6115 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6117 /* "(int)x" -> "x". */
6118 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
6120 /* "(void *)x" -> something other than "x". */
6121 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
6123 /* "!(x == y)" -> "x != y". */
6124 ASSERT_EQ (mgr.get_or_create_unaryop
6125 (boolean_type_node, TRUTH_NOT_EXPR,
6126 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
6128 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
6130 /* "!(x > y)" -> "x <= y". */
6131 ASSERT_EQ (mgr.get_or_create_unaryop
6132 (boolean_type_node, TRUTH_NOT_EXPR,
6133 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
6135 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
6139 /* Verify that binops on constant svalues are folded. */
6142 test_binop_svalue_folding ()
/* Build constant svalues for 0..NUM_CSTS-1 and check they round-trip
   back to the underlying INTEGER_CST trees.  */
6145 tree cst_int[NUM_CSTS];
6146 region_model_manager mgr;
6147 const svalue *cst_sval[NUM_CSTS];
6148 for (int i = 0; i < NUM_CSTS; i++)
6150 cst_int[i] = build_int_cst (integer_type_node, i);
6151 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
6152 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
6153 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
/* Pairwise checks: constant folding of +, -, *, ==, != must agree
   with the host arithmetic on i and j.  */
6156 for (int i = 0; i < NUM_CSTS; i++)
6157 for (int j = 0; j < NUM_CSTS; j++)
6160 ASSERT_NE (cst_sval[i], cst_sval[j]);
6161 if (i + j < NUM_CSTS)
6164 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6165 cst_sval[i], cst_sval[j]);
6166 ASSERT_EQ (sum, cst_sval[i + j]);
6170 const svalue *difference
6171 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
6172 cst_sval[i], cst_sval[j]);
6173 ASSERT_EQ (difference, cst_sval[i - j]);
6175 if (i * j < NUM_CSTS)
6177 const svalue *product
6178 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6179 cst_sval[i], cst_sval[j]);
6180 ASSERT_EQ (product, cst_sval[i * j]);
6182 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
6183 cst_sval[i], cst_sval[j]);
6184 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
6185 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
6186 cst_sval[i], cst_sval[j]);
6187 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
/* Now mix a symbolic value (the initial value of global "x") with
   constants and check the identity/absorbing-element folds.  */
6191 tree x = build_global_decl ("x", integer_type_node);
6193 test_region_model_context ctxt;
6194 region_model model (&mgr);
6195 const svalue *x_init = model.get_rvalue (x, &ctxt);
/* x + 0 and 0 + x must fold to x.  */
6197 /* PLUS_EXPR folding. */
6198 const svalue *x_init_plus_zero
6199 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6200 x_init, cst_sval[0]);
6201 ASSERT_EQ (x_init_plus_zero, x_init);
6202 const svalue *zero_plus_x_init
6203 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6204 cst_sval[0], x_init);
6205 ASSERT_EQ (zero_plus_x_init, x_init);
/* x * 0 -> 0, x * 1 -> x, in either operand order.  */
6207 /* MULT_EXPR folding. */
6208 const svalue *x_init_times_zero
6209 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6210 x_init, cst_sval[0]);
6211 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
6212 const svalue *zero_times_x_init
6213 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6214 cst_sval[0], x_init);
6215 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
6217 const svalue *x_init_times_one
6218 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6219 x_init, cst_sval[1]);
6220 ASSERT_EQ (x_init_times_one, x_init);
6221 const svalue *one_times_x_init
6222 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6223 cst_sval[1], x_init);
6224 ASSERT_EQ (one_times_x_init, x_init);
6227 // TODO: do we want to use the match-and-simplify DSL for this?
/* Commutative binops are canonicalized so that 4*x and x*4 cons to
   the same binop_svalue, with the constant as arg1.  */
6229 /* Verify that binops put any constants on the RHS. */
6230 const svalue *four_times_x_init
6231 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6232 cst_sval[4], x_init);
6233 const svalue *x_init_times_four
6234 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6235 x_init, cst_sval[4]);
6236 ASSERT_EQ (four_times_x_init, x_init_times_four);
6237 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
6238 ASSERT_EQ (binop->get_op (), MULT_EXPR);
6239 ASSERT_EQ (binop->get_arg0 (), x_init);
6240 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
/* Associativity of constant offsets must be folded.  */
6242 /* Verify that ((x + 1) + 1) == (x + 2). */
6243 const svalue *x_init_plus_one
6244 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6245 x_init, cst_sval[1]);
6246 const svalue *x_init_plus_two
6247 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6248 x_init, cst_sval[2]);
6249 const svalue *x_init_plus_one_plus_one
6250 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6251 x_init_plus_one, cst_sval[1]);
6252 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
/* Boolean short-circuit folds: true|v, false&v, etc. dominate even
   when the other operand is unknown or a placeholder (NOTE(review):
   the expected-value lines of these ASSERTs are missing from this
   listing).  */
6254 /* Verify various binops on booleans. */
6256 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
6257 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
6258 const svalue *sval_unknown
6259 = mgr.get_or_create_unknown_svalue (boolean_type_node);
6260 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
6261 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
6263 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6264 sval_true, sval_unknown),
6266 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6267 sval_false, sval_unknown),
6269 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6270 sval_false, &sval_placeholder),
6273 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
6275 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6276 sval_false, sval_unknown),
6278 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6279 sval_true, sval_unknown),
6281 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6282 sval_true, &sval_placeholder),
6288 /* Verify that sub_svalues are folded as expected. */
6291 test_sub_svalue_folding ()
/* "ct" is presumably a coord_test fixture declared on a line missing
   from this listing; it supplies a struct type with x/y fields.  */
6294 tree c = build_global_decl ("c", ct.m_coord_type);
6295 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6296 c, ct.m_x_field, NULL_TREE);
6298 region_model_manager mgr;
6299 region_model model (&mgr);
6300 test_region_model_context ctxt;
6301 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
/* Extracting a field from an unknown compound must fold to an
   unknown of the field's type, not a sub_svalue.  */
6303 /* Verify that sub_svalue of "unknown" simply
6304 yields an unknown. */
6306 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
6307 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
6309 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
6310 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
6313 /* Get BIT within VAL as a symbolic value within MGR. */
/* Helper for test_bits_within_svalue_folding: wraps VAL as an
   unsigned int constant and extracts one bit of it as a boolean
   bits_within svalue.  (The BIT parameter's declaration line is
   missing from this listing.)  */
6315 static const svalue *
6316 get_bit (region_model_manager *mgr,
6318 unsigned HOST_WIDE_INT val)
6320 const svalue *inner_svalue
6321 = mgr->get_or_create_int_cst (unsigned_type_node, val);
6322 return mgr->get_or_create_bits_within (boolean_type_node,
6327 /* Verify that bits_within_svalues are folded as expected. */
6330 test_bits_within_svalue_folding ()
6332 region_model_manager mgr;
6334 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
6335 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
/* All 16 bits of 0x0000 are zero.  */
6338 const unsigned val = 0x0000;
6339 for (unsigned bit = 0; bit < 16; bit++)
6340 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
/* Only the lowest bit of 0x0001 is set.  */
6344 const unsigned val = 0x0001;
6345 ASSERT_EQ (get_bit (&mgr, 0, val), one);
6346 for (unsigned bit = 1; bit < 16; bit++)
6347 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
/* Only the highest of the 16 bits of 0x8000 is set.  */
6351 const unsigned val = 0x8000;
6352 for (unsigned bit = 0; bit < 15; bit++)
6353 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6354 ASSERT_EQ (get_bit (&mgr, 15, val), one);
/* All 16 bits of 0xFFFF are set.  */
6358 const unsigned val = 0xFFFF;
6359 for (unsigned bit = 0; bit < 16; bit++)
6360 ASSERT_EQ (get_bit (&mgr, bit, val), one);
6364 /* Test that region::descendent_of_p works as expected. */
6367 test_descendent_of_p ()
6369 region_model_manager mgr;
/* The four top-level memory spaces are siblings of each other.  */
6370 const region *stack = mgr.get_stack_region ();
6371 const region *heap = mgr.get_heap_region ();
6372 const region *code = mgr.get_code_region ();
6373 const region *globals = mgr.get_globals_region ();
6375 /* descendent_of_p should return true when used on the region itself. */
6376 ASSERT_TRUE (stack->descendent_of_p (stack));
6377 ASSERT_FALSE (stack->descendent_of_p (heap));
6378 ASSERT_FALSE (stack->descendent_of_p (code));
6379 ASSERT_FALSE (stack->descendent_of_p (globals));
/* A global's region lives within the "globals" region.  */
6381 tree x = build_global_decl ("x", integer_type_node);
6382 const region *x_reg = mgr.get_region_for_global (x);
6383 ASSERT_TRUE (x_reg->descendent_of_p (globals));
6385 /* A cast_region should be a descendent of the original region. */
6386 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
6387 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
6390 /* Verify that bit_range_region works as expected. */
6393 test_bit_range_regions ()
6395 tree x = build_global_decl ("x", integer_type_node);
6396 region_model_manager mgr;
6397 const region *x_reg = mgr.get_region_for_global (x);
/* Carve two adjacent 8-bit subranges out of the int "x" (the
   "const region *byte0/byte1" declaration lines are missing from
   this listing).  */
6399 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
6401 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
/* Each byte's region is nested within x's region, and the two
   distinct bit ranges must not be conflated.  */
6402 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
6403 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
6404 ASSERT_NE (byte0, byte1);
6407 /* Verify that simple assignments work as expected. */
/* (The function's declaration line is missing from this listing;
   presumably "test_assignment".)  */
6412 tree int_0 = build_int_cst (integer_type_node, 0);
6413 tree x = build_global_decl ("x", integer_type_node);
6414 tree y = build_global_decl ("y", integer_type_node);
6416 /* "x == 0", then use of y, then "y = 0;". */
6417 region_model_manager mgr;
6418 region_model model (&mgr);
/* Constrain x to 0; y is still unconstrained at this point.  */
6419 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
6420 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
/* Assign 0 to y; afterwards both y == 0 and (transitively) y == x
   must be known true.  */
6421 model.set_value (model.get_lvalue (y, NULL),
6422 model.get_rvalue (int_0, NULL),
6424 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
6425 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
6428 /* Verify that compound assignments work as expected. */
6431 test_compound_assignment ()
/* Build two coord-typed globals "c" and "d" and COMPONENT_REFs for
   their x/y fields ("ct" is presumably a coord_test fixture declared
   on a line missing from this listing).  */
6435 tree c = build_global_decl ("c", ct.m_coord_type);
6436 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6437 c, ct.m_x_field, NULL_TREE);
6438 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6439 c, ct.m_y_field, NULL_TREE);
6440 tree d = build_global_decl ("d", ct.m_coord_type);
6441 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6442 d, ct.m_x_field, NULL_TREE);
6443 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6444 d, ct.m_y_field, NULL_TREE);
6446 tree int_17 = build_int_cst (integer_type_node, 17);
6447 tree int_m3 = build_int_cst (integer_type_node, -3);
6449 region_model_manager mgr;
6450 region_model model (&mgr);
/* Populate c's fields, then copy the whole struct into d.  */
6451 model.set_value (c_x, int_17, NULL);
6452 model.set_value (c_y, int_m3, NULL);
6455 const svalue *sval = model.get_rvalue (c, NULL);
6456 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
6458 /* Check that the fields have the same svalues. */
6459 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
6460 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
6463 /* Verify the details of pushing and popping stack frames. */
6466 test_stack_frames ()
6468 tree int_42 = build_int_cst (integer_type_node, 42);
6469 tree int_10 = build_int_cst (integer_type_node, 10);
6470 tree int_5 = build_int_cst (integer_type_node, 5);
6471 tree int_0 = build_int_cst (integer_type_node, 0);
/* Two fndecls, "parent" and "child", so we can stack one frame on
   top of the other.  */
6473 auto_vec <tree> param_types;
6474 tree parent_fndecl = make_fndecl (integer_type_node,
6477 allocate_struct_function (parent_fndecl, true);
6479 tree child_fndecl = make_fndecl (integer_type_node,
6482 allocate_struct_function (child_fndecl, true);
6484 /* "a" and "b" in the parent frame. */
6485 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6486 get_identifier ("a"),
6488 DECL_CONTEXT (a) = parent_fndecl;
6489 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6490 get_identifier ("b"),
6492 DECL_CONTEXT (b) = parent_fndecl;
6493 /* "x" and "y" in a child frame. */
6494 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6495 get_identifier ("x"),
6497 DECL_CONTEXT (x) = child_fndecl;
6498 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6499 get_identifier ("y"),
6501 DECL_CONTEXT (y) = child_fndecl;
/* Two global pointers, used below to observe what happens to
   pointers into a frame when that frame is popped.  */
6504 tree p = build_global_decl ("p", ptr_type_node);
6507 tree q = build_global_decl ("q", ptr_type_node);
6509 region_model_manager mgr;
6510 test_region_model_context ctxt;
6511 region_model model (&mgr);
6513 /* Push stack frame for "parent_fn". */
6514 const region *parent_frame_reg
6515 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6517 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6518 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
/* Give "a" a concrete value and check it lives in the parent
   frame.  */
6519 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6520 model.set_value (a_in_parent_reg,
6521 model.get_rvalue (int_42, &ctxt),
6523 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
/* Record a constraint on "b" (b < 10) to be re-checked after the
   child frame is popped.  */
6525 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
6526 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6527 tristate (tristate::TS_TRUE));
6529 /* Push stack frame for "child_fn". */
6530 const region *child_frame_reg
6531 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
6532 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
6533 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
6534 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
6535 model.set_value (x_in_child_reg,
6536 model.get_rvalue (int_0, &ctxt),
6538 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
6540 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
6541 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
6542 tristate (tristate::TS_TRUE));
6544 /* Point a global pointer at a local in the child frame: p = &x. */
6545 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
6546 model.set_value (p_in_globals_reg,
6547 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
6549 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
6551 /* Point another global pointer at p: q = &p. */
6552 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
6553 model.set_value (q_in_globals_reg,
6554 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
6557 /* Test region::descendent_of_p. */
6558 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
6559 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
6560 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
6562 /* Pop the "child_fn" frame from the stack. */
6563 model.pop_frame (NULL, NULL, &ctxt);
6564 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
6565 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6567 /* Verify that p (which was pointing at the local "x" in the popped
6568 frame) has been poisoned. */
6569 const svalue *new_p_sval = model.get_rvalue (p, NULL);
6570 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
6571 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
6572 POISON_KIND_POPPED_STACK);
/* q pointed at the global p, not at the popped frame, so its value
   must survive the pop intact.  */
6574 /* Verify that q still points to p, in spite of the region
6576 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
6577 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
6578 ASSERT_EQ (new_q_sval->maybe_get_region (),
6579 model.get_lvalue (p, &ctxt));
6581 /* Verify that top of stack has been updated. */
6582 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6584 /* Verify locals in parent frame. */
6585 /* Verify "a" still has its value. */
6586 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
6587 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
6588 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
6590 /* Verify "b" still has its constraint. */
6591 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6592 tristate (tristate::TS_TRUE));
6595 /* Verify that get_representative_path_var works as expected, that
6596 we can map from regions to parms and back within a recursive call
/* (The rest of this comment and the "static void" line are missing
   from this listing.)  */
6600 test_get_representative_path_var ()
6602 auto_vec <tree> param_types;
6603 tree fndecl = make_fndecl (integer_type_node,
6606 allocate_struct_function (fndecl, true);
/* Parameter "n" of the (presumably recursive "factorial") fndecl.  */
6609 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6610 get_identifier ("n"),
6612 DECL_CONTEXT (n) = fndecl;
6614 region_model_manager mgr;
6615 test_region_model_context ctxt;
6616 region_model model (&mgr);
6618 /* Push 5 stack frames for "factorial", each with a param */
/* Remember each frame's region for "n" and its initial svalue so we
   can check the reverse mapping below.  */
6619 auto_vec<const region *> parm_regs;
6620 auto_vec<const svalue *> parm_svals;
6621 for (int depth = 0; depth < 5; depth++)
6623 const region *frame_n_reg
6624 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
6625 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
6626 parm_regs.safe_push (parm_n_reg);
6628 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
6629 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
6630 parm_svals.safe_push (sval_n);
6633 /* Verify that we can recognize that the regions are the parms,
6635 for (int depth = 0; depth < 5; depth++)
/* Region -> path_var: each saved region maps back to "n" at its
   depth (NOTE(review): the "+ 1" suggests an off-by-one between the
   loop's depth and path_var's stack-depth convention; some argument
   lines are missing from this listing).  */
6639 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
6641 path_var (n, depth + 1));
6643 /* ...and that we can lookup lvalues for locals for all frames,
6644 not just the top. */
6645 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
6647 /* ...and that we can locate the svalues. */
6650 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
6652 path_var (n, depth + 1));
6657 /* Ensure that region_model::operator== works as expected. */
/* (The function's declaration line is missing from this listing;
   presumably "test_equality_1".)  */
6662 tree int_42 = build_int_cst (integer_type_node, 42);
6663 tree int_17 = build_int_cst (integer_type_node, 17);
6665 /* Verify that "empty" region_model instances are equal to each other. */
6666 region_model_manager mgr;
6667 region_model model0 (&mgr);
6668 region_model model1 (&mgr);
6669 ASSERT_EQ (model0, model1);
6671 /* Verify that setting state in model1 makes the models non-equal. */
6672 tree x = build_global_decl ("x", integer_type_node);
6673 model0.set_value (x, int_42, NULL);
6674 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6675 ASSERT_NE (model0, model1);
6677 /* Verify the copy-ctor. */
6678 region_model model2 (model0);
6679 ASSERT_EQ (model0, model2);
6680 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6681 ASSERT_NE (model1, model2);
6683 /* Verify that models obtained from copy-ctor are independently editable
6684 w/o affecting the original model. */
/* Mutating the copy must leave the original's binding for x
   untouched.  */
6685 model2.set_value (x, int_17, NULL);
6686 ASSERT_NE (model0, model2);
6687 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
6688 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6691 /* Verify that region models for
/* (The rest of this comment is missing from this listing; the test
   assigns the same two bindings in opposite orders and expects the
   models to compare equal.)  */
6698 test_canonicalization_2 ()
6700 tree int_42 = build_int_cst (integer_type_node, 42);
6701 tree int_113 = build_int_cst (integer_type_node, 113);
6702 tree x = build_global_decl ("x", integer_type_node);
6703 tree y = build_global_decl ("y", integer_type_node);
/* model0: set x first, then y.  */
6705 region_model_manager mgr;
6706 region_model model0 (&mgr);
6707 model0.set_value (model0.get_lvalue (x, NULL),
6708 model0.get_rvalue (int_42, NULL),
6710 model0.set_value (model0.get_lvalue (y, NULL),
6711 model0.get_rvalue (int_113, NULL),
/* model1: same bindings, but y first, then x.  */
6714 region_model model1 (&mgr);
6715 model1.set_value (model1.get_lvalue (y, NULL),
6716 model1.get_rvalue (int_113, NULL),
6718 model1.set_value (model1.get_lvalue (x, NULL),
6719 model1.get_rvalue (int_42, NULL),
/* Binding order must not affect model equality.  */
6722 ASSERT_EQ (model0, model1);
6725 /* Verify that constraints for
/* (Part of this comment is missing from this listing; the test adds
   the same two constraints in opposite orders.)  */
6729 are equal after canonicalization. */
6732 test_canonicalization_3 ()
6734 tree int_3 = build_int_cst (integer_type_node, 3);
6735 tree int_42 = build_int_cst (integer_type_node, 42);
6736 tree x = build_global_decl ("x", integer_type_node);
6737 tree y = build_global_decl ("y", integer_type_node);
/* model0: x > 3, then y > 42.  */
6739 region_model_manager mgr;
6740 region_model model0 (&mgr);
6741 model0.add_constraint (x, GT_EXPR, int_3, NULL);
6742 model0.add_constraint (y, GT_EXPR, int_42, NULL);
/* model1: the same constraints in the opposite order.  */
6744 region_model model1 (&mgr);
6745 model1.add_constraint (y, GT_EXPR, int_42, NULL);
6746 model1.add_constraint (x, GT_EXPR, int_3, NULL);
/* After canonicalization, constraint insertion order must not
   matter.  */
6748 model0.canonicalize ();
6749 model1.canonicalize ();
6750 ASSERT_EQ (model0, model1);
6753 /* Verify that we can canonicalize a model containing NaN and other real
/* (The end of this comment and the "static void" line are missing
   from this listing.)  */
6757 test_canonicalization_4 ()
/* Pull in the shared set of "interesting" constants (which, per the
   comment above, includes NaN and other reals) ...  */
6759 auto_vec<tree> csts;
6760 append_interesting_constants (&csts);
6762 region_model_manager mgr;
6763 region_model model (&mgr);
/* ... touch each one so it gets an svalue in the model ...  */
6765 for (tree cst : csts)
6766 model.get_rvalue (cst, NULL);
/* ... and check that canonicalization doesn't crash on them.  */
6768 model.canonicalize ();
6771 /* Assert that if we have two region_model instances
6772 with values VAL_A and VAL_B for EXPR that they are
6773 mergable. Write the merged model to *OUT_MERGED_MODEL,
6774 and the merged svalue ptr to *OUT_MERGED_SVALUE.
6775 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
6776 for that region_model. */
6779 assert_region_models_merge (tree expr, tree val_a, tree val_b,
6780 region_model *out_merged_model,
6781 const svalue **out_merged_svalue)
6783 region_model_manager *mgr = out_merged_model->get_manager ();
6784 program_point point (program_point::origin (*mgr));
6785 test_region_model_context ctxt;
/* Build the two input models, assigning VAL_A/VAL_B to EXPR in each
   (the NULL_TREE guards around these set_value calls are on lines
   missing from this listing).  */
6786 region_model model0 (mgr);
6787 region_model model1 (mgr);
6789 model0.set_value (model0.get_lvalue (expr, &ctxt),
6790 model0.get_rvalue (val_a, &ctxt),
6793 model1.set_value (model1.get_lvalue (expr, &ctxt),
6794 model1.get_rvalue (val_b, &ctxt),
6797 /* They should be mergeable. */
6798 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
/* Report EXPR's value in the merged model for the caller to
   inspect.  */
6799 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
6802 /* Verify that we can merge region_model instances. */
/* A series of independent sub-scenarios, each in its own braced
   scope, checking region_model::can_merge_with_p for: empty models,
   contradictory constraints, params, globals, various value
   combinations (via assert_region_models_merge), pointers to locals/
   globals/heap, shared placeholders, and pointers across stack
   frames.  */
6805 test_state_merging ()
6807 tree int_42 = build_int_cst (integer_type_node, 42);
6808 tree int_113 = build_int_cst (integer_type_node, 113);
6809 tree x = build_global_decl ("x", integer_type_node);
6810 tree y = build_global_decl ("y", integer_type_node);
6811 tree z = build_global_decl ("z", integer_type_node);
6812 tree p = build_global_decl ("p", ptr_type_node);
6814 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
6815 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
/* A fndecl ("test_fn") with params "a" and "q" for the frame-based
   scenarios below.  */
6817 auto_vec <tree> param_types;
6818 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
6819 allocate_struct_function (test_fndecl, true);
6822 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6823 get_identifier ("a"),
6825 DECL_CONTEXT (a) = test_fndecl;
6826 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
6828 /* Param "q", a pointer. */
6829 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6830 get_identifier ("q"),
6832 DECL_CONTEXT (q) = test_fndecl;
6834 region_model_manager mgr;
6835 program_point point (program_point::origin (mgr));
6838 region_model model0 (&mgr);
6839 region_model model1 (&mgr);
6840 region_model merged (&mgr);
6841 /* Verify empty models can be merged. */
6842 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
6843 ASSERT_EQ (model0, merged);
6846 /* Verify that we can merge two contradictory constraints on the
6847 value for a global. */
6848 /* TODO: verify that the merged model doesn't have a value for
6851 region_model model0 (&mgr);
6852 region_model model1 (&mgr);
6853 region_model merged (&mgr);
6854 test_region_model_context ctxt;
6855 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
6856 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
6857 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
/* The merge must drop the incompatible constraints, so the result
   differs from both inputs.  */
6858 ASSERT_NE (model0, merged);
6859 ASSERT_NE (model1, merged);
6862 /* Verify handling of a PARM_DECL. */
6864 test_region_model_context ctxt;
6865 region_model model0 (&mgr);
6866 region_model model1 (&mgr);
6867 ASSERT_EQ (model0.get_stack_depth (), 0);
6868 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
6869 ASSERT_EQ (model0.get_stack_depth (), 1);
6870 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
/* Give "a" the same placeholder value in both models.  */
6872 placeholder_svalue test_sval (integer_type_node, "test sval");
6873 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
6874 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
6875 ASSERT_EQ (model0, model1);
6877 /* They should be mergeable, and the result should be the same. */
6878 region_model merged (&mgr);
6879 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
6880 ASSERT_EQ (model0, merged);
6881 /* In particular, "a" should have the placeholder value. */
6882 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
6885 /* Verify handling of a global. */
6887 test_region_model_context ctxt;
6888 region_model model0 (&mgr);
6889 region_model model1 (&mgr);
6891 placeholder_svalue test_sval (integer_type_node, "test sval");
6892 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
6893 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
6894 ASSERT_EQ (model0, model1);
6896 /* They should be mergeable, and the result should be the same. */
6897 region_model merged (&mgr);
6898 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
6899 ASSERT_EQ (model0, merged);
6900 /* In particular, "x" should have the placeholder value. */
6901 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
6904 /* Use global-handling to verify various combinations of values. */
6906 /* Two equal constant values. */
6908 region_model merged (&mgr);
6909 const svalue *merged_x_sval;
6910 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
6912 /* In particular, there should be a constant value for "x". */
6913 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
6914 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
6918 /* Two non-equal constant values. */
6920 region_model merged (&mgr);
6921 const svalue *merged_x_sval;
6922 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
6924 /* In particular, there should be a "widening" value for "x". */
6925 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
6928 /* Initial and constant. */
6930 region_model merged (&mgr);
6931 const svalue *merged_x_sval;
6932 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
6934 /* In particular, there should be an unknown value for "x". */
6935 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
6938 /* Constant and initial. */
6940 region_model merged (&mgr);
6941 const svalue *merged_x_sval;
6942 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
6944 /* In particular, there should be an unknown value for "x". */
6945 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
6948 /* Unknown and constant. */
6951 /* Pointers: NULL and NULL. */
6954 /* Pointers: NULL and non-NULL. */
6957 /* Pointers: non-NULL and non-NULL: ptr to a local. */
6959 region_model model0 (&mgr);
6960 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
6961 model0.set_value (model0.get_lvalue (p, NULL),
6962 model0.get_rvalue (addr_of_a, NULL), NULL);
/* Self-merge: a model merged with a copy of itself should be
   unchanged.  */
6964 region_model model1 (model0);
6965 ASSERT_EQ (model0, model1);
6967 /* They should be mergeable, and the result should be the same. */
6968 region_model merged (&mgr);
6969 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
6970 ASSERT_EQ (model0, merged);
6973 /* Pointers: non-NULL and non-NULL: ptr to a global. */
6975 region_model merged (&mgr);
6976 /* p == &y in both input models. */
6977 const svalue *merged_p_sval;
6978 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
6981 /* We should get p == &y in the merged model. */
6982 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
6983 const region_svalue *merged_p_ptr
6984 = merged_p_sval->dyn_cast_region_svalue ();
6985 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
6986 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
6989 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
6991 region_model merged (&mgr);
6992 /* x == &y vs x == &z in the input models; these are actually casts
6993 of the ptrs to "int". */
6994 const svalue *merged_x_sval;
6996 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
6999 /* We should get x == unknown in the merged model. */
7000 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7003 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
7005 test_region_model_context ctxt;
7006 region_model model0 (&mgr);
7007 tree size = build_int_cst (size_type_node, 1024);
7008 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
7009 const region *new_reg
7010 = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
7011 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
7012 model0.set_value (model0.get_lvalue (p, &ctxt),
7015 region_model model1 (model0);
7017 ASSERT_EQ (model0, model1);
7019 region_model merged (&mgr);
7020 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7022 /* The merged model ought to be identical. */
7023 ASSERT_EQ (model0, merged);
7026 /* Two regions sharing the same placeholder svalue should continue sharing
7027 it after self-merger. */
7029 test_region_model_context ctxt;
7030 region_model model0 (&mgr);
7031 placeholder_svalue placeholder_sval (integer_type_node, "test");
7032 model0.set_value (model0.get_lvalue (x, &ctxt),
7033 &placeholder_sval, &ctxt);
7034 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
7035 region_model model1 (model0);
7037 /* They should be mergeable, and the result should be the same. */
7038 region_model merged (&mgr);
7039 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7040 ASSERT_EQ (model0, merged);
7042 /* In particular, we should have x == y. */
7043 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
7044 tristate (tristate::TS_TRUE));
/* x == 42 merged with x != 42 (the scenario comment, if any, is on a
   line missing from this listing).  */
7048 region_model model0 (&mgr);
7049 region_model model1 (&mgr);
7050 test_region_model_context ctxt;
7051 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7052 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7053 region_model merged (&mgr);
7054 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
/* As above, but model1 additionally has x == 113.  */
7058 region_model model0 (&mgr);
7059 region_model model1 (&mgr);
7060 test_region_model_context ctxt;
7061 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7062 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7063 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7064 region_model merged (&mgr);
7065 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7068 // TODO: what can't we merge? need at least one such test
7070 /* TODO: various things
7073 - every combination, but in particular
/* Self-merge of a model where x is written through a cast_region
   viewing x as a pointer.  */
7079 test_region_model_context ctxt;
7080 region_model model0 (&mgr);
7082 const region *x_reg = model0.get_lvalue (x, &ctxt);
7083 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
7084 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
7086 region_model model1 (model0);
7087 ASSERT_EQ (model1, model0);
7089 /* They should be mergeable, and the result should be the same. */
7090 region_model merged (&mgr);
7091 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7094 /* Verify that we can merge a model in which a local in an older stack
7095 frame points to a local in a more recent stack frame. */
7097 region_model model0 (&mgr);
7098 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7099 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
7101 /* Push a second frame. */
7102 const region *reg_2nd_frame
7103 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7105 /* Have a pointer in the older frame point to a local in the
7106 more recent frame. */
7107 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
7108 model0.set_value (q_in_first_frame, sval_ptr, NULL);
7110 /* Verify that it's pointing at the newer frame. */
7111 const region *reg_pointee = sval_ptr->maybe_get_region ();
7112 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
7114 model0.canonicalize ();
7116 region_model model1 (model0);
7117 ASSERT_EQ (model0, model1);
7119 /* They should be mergeable, and the result should be the same
7120 (after canonicalization, at least). */
7121 region_model merged (&mgr);
7122 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7123 merged.canonicalize ();
7124 ASSERT_EQ (model0, merged);
7127 /* Verify that we can merge a model in which a local points to a global. */
7129 region_model model0 (&mgr);
7130 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7131 model0.set_value (model0.get_lvalue (q, NULL),
7132 model0.get_rvalue (addr_of_y, NULL), NULL);
7134 region_model model1 (model0);
7135 ASSERT_EQ (model0, model1);
7137 /* They should be mergeable, and the result should be the same
7138 (after canonicalization, at least). */
7139 region_model merged (&mgr);
7140 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7141 ASSERT_EQ (model0, merged);
7145 /* Verify that constraints are correctly merged when merging region_model
/* NOTE(review): this extract is lossy — the "static void" line, braces and
   some other lines are missing; the surviving code below is kept verbatim.
   The test builds two models whose constraint sets overlap, merges them,
   and checks that only the shared constraints (0 <= x < n) survive while
   model-specific facts (x == y, z != 5) degrade to UNKNOWN.  */
7149 test_constraint_merging ()
/* Constants and global decls used as operands in the constraints.  */
7151 tree int_0 = build_int_cst (integer_type_node, 0);
7152 tree int_5 = build_int_cst (integer_type_node, 5);
7153 tree x = build_global_decl ("x", integer_type_node);
7154 tree y = build_global_decl ("y", integer_type_node);
7155 tree z = build_global_decl ("z", integer_type_node);
7156 tree n = build_global_decl ("n", integer_type_node);
7158 region_model_manager mgr;
7159 test_region_model_context ctxt;
7161 /* model0: 0 <= (x == y) < n. */
7162 region_model model0 (&mgr);
7163 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
7164 model0.add_constraint (x, GE_EXPR, int_0, NULL);
7165 model0.add_constraint (x, LT_EXPR, n, NULL);
7167 /* model1: z != 5 && (0 <= x < n). */
7168 region_model model1 (&mgr);
7169 model1.add_constraint (z, NE_EXPR, int_5, NULL);
7170 model1.add_constraint (x, GE_EXPR, int_0, NULL);
7171 model1.add_constraint (x, LT_EXPR, n, NULL);
7173 /* They should be mergeable; the merged constraints should
7174 be: (0 <= x < n). */
7175 program_point point (program_point::origin (mgr));
7176 region_model merged (&mgr);
7177 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
/* The constraints common to both inputs must hold in the merged model.  */
7179 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
7180 tristate (tristate::TS_TRUE));
7181 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
7182 tristate (tristate::TS_TRUE));
/* Constraints present in only one of the inputs must be dropped,
   so querying them yields UNKNOWN rather than TRUE/FALSE.  */
7184 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
7185 tristate (tristate::TS_UNKNOWN));
7186 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
7187 tristate (tristate::TS_UNKNOWN));
7190 /* Verify that widening_svalue::eval_condition_without_cm works as
/* NOTE(review): lossy extract — signature prefix/braces and a few
   expected-value lines are missing; code kept verbatim.
   The test builds a widening svalue representing the iteration sequence
   0, 1, ... (ascending) and checks which comparisons against constants
   can be decided without consulting the constraint manager.  */
7194 test_widening_constraints ()
7196 region_model_manager mgr;
7197 function_point point (program_point::origin (mgr).get_function_point ());
7198 tree int_0 = build_int_cst (integer_type_node, 0);
7199 tree int_m1 = build_int_cst (integer_type_node, -1);
7200 tree int_1 = build_int_cst (integer_type_node, 1);
7201 tree int_256 = build_int_cst (integer_type_node, 256);
7202 test_region_model_context ctxt;
7203 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
7204 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
/* Widening of the sequence "0 then 1" at POINT: models a loop variable
   that starts at 0 and was 1 on the next iteration.  */
7205 const svalue *w_zero_then_one_sval
7206 = mgr.get_or_create_widening_svalue (integer_type_node, point,
7207 int_0_sval, int_1_sval);
7208 const widening_svalue *w_zero_then_one
7209 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
/* 0 -> 1 is an increase, so the widened value is known to be >= 0.  */
7210 ASSERT_EQ (w_zero_then_one->get_direction (),
7211 widening_svalue::DIR_ASCENDING);
/* An ascending value starting at 0 can never be < 0...  */
7212 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
7213 tristate::TS_FALSE);
7214 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
7215 tristate::TS_FALSE);
/* ...but comparisons against values above the start are undecidable.  */
7216 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
7217 tristate::TS_UNKNOWN);
7218 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
7219 tristate::TS_UNKNOWN);
7221 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
7222 tristate::TS_FALSE);
7223 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
7224 tristate::TS_UNKNOWN);
7225 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
7226 tristate::TS_UNKNOWN);
7227 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
7228 tristate::TS_UNKNOWN);
7230 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
/* NOTE(review): the expected-tristate line (orig line 7231) is missing
   from this extract — presumably tristate::TS_TRUE; confirm upstream.  */
7232 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
7233 tristate::TS_UNKNOWN);
7234 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
7235 tristate::TS_UNKNOWN);
7236 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
7237 tristate::TS_UNKNOWN);
7239 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
/* NOTE(review): expected-value line (orig 7240) missing — presumably
   tristate::TS_TRUE; confirm upstream.  */
7241 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
/* NOTE(review): expected-value line (orig 7242) missing — presumably
   tristate::TS_TRUE; confirm upstream.  */
7243 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
7244 tristate::TS_UNKNOWN);
7245 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
7246 tristate::TS_UNKNOWN);
/* Equality: only values strictly below the start can be ruled out.  */
7248 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
7249 tristate::TS_FALSE);
7250 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
7251 tristate::TS_UNKNOWN);
7252 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
7253 tristate::TS_UNKNOWN);
7254 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
7255 tristate::TS_UNKNOWN);
7257 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
/* NOTE(review): expected-value line (orig 7258) missing — presumably
   tristate::TS_TRUE; confirm upstream.  */
7259 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
7260 tristate::TS_UNKNOWN);
7261 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
7262 tristate::TS_UNKNOWN);
7263 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
7264 tristate::TS_UNKNOWN);
7267 /* Verify merging constraints for states simulating successive iterations
/* NOTE(review): lossy extract — several comment/code lines of the
   worked example below (and the function's signature/braces) are
   missing; the surviving lines are kept verbatim.  The test simulates
   the analyzer's fixed-point iteration over a simple counting loop,
   checking that "i" widens, that the loop constraint survives merging,
   and that the analysis converges (merging becomes idempotent).  */
7270 for (i = 0; i < 256; i++)
7277 i_11 = PHI <i_15(2), i_23(3)>
7287 and thus these ops (and resultant states):
7290 add_constraint (i_11 <= 255) [for the true edge]
7291 {i_11: 0} [constraint was a no-op]
7295 {i_11: WIDENED (at phi, 0, 1)}
7296 add_constraint (i_11 <= 255) [for the true edge]
7297 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
7299 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
7300 i_11 = PHI(); merge with state at phi above
7301 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
7302 [changing meaning of "WIDENED" here]
7304 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
7311 region_model_manager mgr;
7312 program_point point (program_point::origin (mgr));
7314 tree int_0 = build_int_cst (integer_type_node, 0);
7315 tree int_1 = build_int_cst (integer_type_node, 1);
7316 tree int_256 = build_int_cst (integer_type_node, 256);
7317 tree int_257 = build_int_cst (integer_type_node, 257);
7318 tree i = build_global_decl ("i", integer_type_node);
7320 test_region_model_context ctxt;
/* model0: state on first entry to the loop header ("i == 0").  */
7323 region_model model0 (&mgr);
7324 model0.set_value (i, int_0, &ctxt);
/* model1: state after one iteration ("i == 1").  */
7327 region_model model1 (&mgr);
7328 model1.set_value (i, int_1, &ctxt);
7330 /* Should merge "i" to a widened value. */
7331 region_model model2 (&mgr);
7332 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
7333 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
7334 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
7335 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
7336 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
7338 /* Add constraint: i < 256 */
7339 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
7340 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
7341 tristate (tristate::TS_TRUE));
/* i >= 0 follows from the ascending widening starting at 0.  */
7342 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
7343 tristate (tristate::TS_TRUE));
7345 /* Try merging with the initial state. */
7346 region_model model3 (&mgr);
7347 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
7348 /* Merging the merged value with the initial value should be idempotent,
7349 so that the analysis converges. */
7350 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
7351 /* Merger of 0 and a widening value with constraint < CST
7352 should retain the constraint, even though it was implicit
7354 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
7355 tristate (tristate::TS_TRUE));
7356 /* ...and we should have equality: the analysis should have converged. */
7357 ASSERT_EQ (model3, model2);
7359 /* "i_23 = i_11 + 1;" */
7360 region_model model4 (model3);
7361 ASSERT_EQ (model4, model2);
7362 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
7363 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
7364 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
7366 /* Try merging with the "i: 1" state. */
7367 region_model model5 (&mgr);
7368 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
7369 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
7370 ASSERT_EQ (model5, model4);
7372 /* "i_11 = PHI();" merge with state at phi above.
7373 For i, we should have a merger of WIDENING with WIDENING + 1,
7374 and this should be WIDENING again. */
7375 region_model model6 (&mgr);
7376 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
7377 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
7378 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
/* After the back-edge merge the bound loosens to i < 257 (the widened
   value may now have been incremented past 255 once).  */
7380 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
7383 /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
7384 all cast pointers to that region are also known to be non-NULL. */
/* NOTE(review): lossy extract — "static void", braces and the
   "const region *reg" declaration line (orig ~7398) are missing;
   code kept verbatim.  */
7387 test_malloc_constraints ()
7389 region_model_manager mgr;
7390 region_model model (&mgr);
/* p: void*; q: char* — two differently-typed views of the same heap
   allocation (q is assigned from p below).  */
7391 tree p = build_global_decl ("p", ptr_type_node);
7392 tree char_star = build_pointer_type (char_type_node);
7393 tree q = build_global_decl ("q", char_star);
7394 tree null_ptr = build_int_cst (ptr_type_node, 0);
/* Simulate "p = malloc (<unknown size>); q = p;".  */
7396 const svalue *size_in_bytes
7397 = mgr.get_or_create_unknown_svalue (size_type_node);
7399 = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
7400 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
7401 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
7402 model.set_value (q, p, NULL);
/* Before any constraint, the allocation could have failed, so both
   pointers have unknown NULL-ness.  */
7404 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
7405 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
7406 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
7407 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
/* Constrain p != NULL; q must become non-NULL too, since it points to
   the same region.  */
7409 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
7411 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
7412 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
7413 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
7414 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
7417 /* Smoketest of getting and setting the value of a variable. */
/* NOTE(review): lossy extract — the function signature (orig ~7421,
   presumably "static void test_var ()") and braces are missing;
   code kept verbatim.  Exercises lvalue/rvalue lookup, initial
   svalues, overwriting, and get_offset for a scalar global.  */
7423 tree i = build_global_decl ("i", integer_type_node);
7425 tree int_17 = build_int_cst (integer_type_node, 17);
7426 tree int_m3 = build_int_cst (integer_type_node, -3);
7428 region_model_manager mgr;
7429 region_model model (&mgr);
7431 const region *i_reg = model.get_lvalue (i, NULL);
7432 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
7434 /* Reading "i" should give a symbolic "initial value". */
7435 const svalue *sval_init = model.get_rvalue (i, NULL);
7436 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
7437 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
7438 /* ..and doing it again should give the same "initial value". */
7439 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
/* "i = 17;" — a store should make subsequent reads yield the constant.  */
7442 model.set_value (i, int_17, NULL);
7443 ASSERT_EQ (model.get_rvalue (i, NULL),
7444 model.get_rvalue (int_17, NULL));
/* "i = -3;" — overwriting replaces the earlier binding.  */
7447 model.set_value (i, int_m3, NULL);
7448 ASSERT_EQ (model.get_rvalue (i, NULL),
7449 model.get_rvalue (int_m3, NULL));
7451 /* Verify get_offset for "i". */
/* A decl region is its own base, at bit offset zero.  */
7453 region_offset offset = i_reg->get_offset (&mgr);
7454 ASSERT_EQ (offset.get_base_region (), i_reg);
7455 ASSERT_EQ (offset.get_bit_offset (), 0);
/* NOTE(review): lossy extract — the introductory comment and function
   signature (orig lines before 7462) are missing; judging from the body
   this is the array smoketest (likely "test_array_2").  It exercises
   concrete and symbolic ARRAY_REF stores/loads, binding clobbering,
   and get_offset for array elements.  Code kept verbatim.  */
7462 /* "int arr[10];" */
7463 tree tlen = size_int (10);
7465 = build_array_type (integer_type_node, build_index_type (tlen));
7466 tree arr = build_global_decl ("arr", arr_type);
/* "int i;" — used as a symbolic index below.  */
7469 tree i = build_global_decl ("i", integer_type_node);
7471 tree int_0 = build_int_cst (integer_type_node, 0);
7472 tree int_1 = build_int_cst (integer_type_node, 1);
/* arr[0], arr[1] (concrete indices) and arr[i] (symbolic index).  */
7474 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
7475 arr, int_0, NULL_TREE, NULL_TREE);
7476 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
7477 arr, int_1, NULL_TREE, NULL_TREE);
7478 tree arr_i = build4 (ARRAY_REF, integer_type_node,
7479 arr, i, NULL_TREE, NULL_TREE);
7481 tree int_17 = build_int_cst (integer_type_node, 17);
7482 tree int_42 = build_int_cst (integer_type_node, 42);
7483 tree int_m3 = build_int_cst (integer_type_node, -3);
7485 region_model_manager mgr;
7486 region_model model (&mgr);
7487 /* "arr[0] = 17;". */
7488 model.set_value (arr_0, int_17, NULL);
7489 /* "arr[1] = -3;". */
7490 model.set_value (arr_1, int_m3, NULL);
7492 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
7493 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
7495 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
7496 model.set_value (arr_1, int_42, NULL);
7497 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
7499 /* Verify get_offset for "arr[0]". */
7501 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
7502 region_offset offset = arr_0_reg->get_offset (&mgr);
7503 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
7504 ASSERT_EQ (offset.get_bit_offset (), 0);
7507 /* Verify get_offset for "arr[1]". */
/* Element 1 lies one int's worth of bits into the array.  */
7509 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
7510 region_offset offset = arr_1_reg->get_offset (&mgr);
7511 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
7512 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
7515 /* Verify get_offset for "arr[i]". */
/* With a symbolic index the offset is symbolic: a binop (i * sizeof).  */
7517 const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
7518 region_offset offset = arr_i_reg->get_offset (&mgr);
7519 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
7520 ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
7523 /* "arr[i] = i;" - this should remove the earlier bindings. */
/* A store through a symbolic index may alias any element, so concrete
   bindings must be invalidated (arr[0] becomes unknown).  */
7524 model.set_value (arr_i, i, NULL);
7525 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
7526 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
7528 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
/* Conversely a concrete store may alias the symbolic binding.  */
7529 model.set_value (arr_0, int_17, NULL);
7530 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
7531 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
7534 /* Smoketest of dereferencing a pointer via MEM_REF. */
/* NOTE(review): lossy extract — the function signature (orig lines
   7535..7543) and braces are missing; code kept verbatim.
   Simulates "x = 17; p = &x;" then checks that reading "*p" (as a
   MEM_REF with zero offset) yields the stored constant.  */
7544 tree x = build_global_decl ("x", integer_type_node);
7545 tree int_star = build_pointer_type (integer_type_node);
7546 tree p = build_global_decl ("p", int_star);
7548 tree int_17 = build_int_cst (integer_type_node, 17);
7549 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
7550 tree offset_0 = build_int_cst (integer_type_node, 0);
/* "*p" expressed as MEM_REF (p, 0).  */
7551 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
7553 region_model_manager mgr;
7554 region_model model (&mgr);
/* "x = 17;"  */
7557 model.set_value (x, int_17, NULL);
/* "p = &x;"  */
7560 model.set_value (p, addr_of_x, NULL);
/* Reading through the pointer must see x's value.  */
7562 const svalue *sval = model.get_rvalue (star_p, NULL);
7563 ASSERT_EQ (sval->maybe_get_constant (), int_17);
7566 /* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
7567 Analogous to this code:
7568 void test_6 (int a[10])
7570 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
7572 __analyzer_eval (a[3] == 42); [should be TRUE]
7574 from data-model-1.c, which looks like this at the gimple level:
7575 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
7576 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
7577 int _2 = *_1; # MEM_REF
7578 _Bool _3 = _2 == 42;
7580 __analyzer_eval (_4);
7583 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
7586 # __analyzer_eval (a[3] == 42); [should be TRUE]
7587 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
7588 int _7 = *_6; # MEM_REF
7589 _Bool _8 = _7 == 42;
7591 __analyzer_eval (_9); */
/* NOTE(review): lossy extract — "static void" prefix and braces are
   missing; code kept verbatim.  Writes 42 through
   *(a + 12) and checks the same expression reads it back.  */
7594 test_POINTER_PLUS_EXPR_then_MEM_REF ()
7596 tree int_star = build_pointer_type (integer_type_node);
7597 tree a = build_global_decl ("a", int_star);
/* 12 bytes == offset of a[3] for 4-byte int.  */
7598 tree offset_12 = build_int_cst (size_type_node, 12);
7599 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
7600 tree offset_0 = build_int_cst (integer_type_node, 0);
7601 tree mem_ref = build2 (MEM_REF, integer_type_node,
7602 pointer_plus_expr, offset_0);
7603 region_model_manager mgr;
7604 region_model m (&mgr);
7606 tree int_42 = build_int_cst (integer_type_node, 42);
/* Store 42 via the compound lvalue, then verify the round-trip.  */
7607 m.set_value (mem_ref, int_42, NULL);
7608 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
7611 /* Verify that malloc works. */
/* NOTE(review): lossy extract — the function signature (orig ~7614,
   presumably "static void test_malloc ()"), braces, and the
   "const region *reg" declaration line (orig ~7628) are missing;
   code kept verbatim.  Simulates "p = malloc (n * 4);" and checks
   the model records the heap region's capacity.  */
7616 tree int_star = build_pointer_type (integer_type_node);
7617 tree p = build_global_decl ("p", int_star);
7618 tree n = build_global_decl ("n", integer_type_node);
7619 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
7620 n, build_int_cst (size_type_node, 4));
7622 region_model_manager mgr;
7623 test_region_model_context ctxt;
7624 region_model model (&mgr);
7626 /* "p = malloc (n * 4);". */
7627 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
7629 = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
7630 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
7631 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
/* The heap region's capacity should be the (symbolic) requested size.  */
7632 ASSERT_EQ (model.get_capacity (reg), size_sval);
7635 /* Verify that alloca works. */
/* NOTE(review): lossy extract — the function signature (presumably
   "static void test_alloca ()"), braces and a few argument lines are
   missing; code kept verbatim.  Simulates "p = alloca (n * 4);"
   inside a pushed stack frame, then pops the frame and checks the
   pointer becomes poisoned (dangling).  */
7640 auto_vec <tree> param_types;
7641 tree fndecl = make_fndecl (integer_type_node,
/* Needed so that DECL_STRUCT_FUNCTION (fndecl) below is non-NULL.  */
7644 allocate_struct_function (fndecl, true);
7647 tree int_star = build_pointer_type (integer_type_node);
7648 tree p = build_global_decl ("p", int_star);
7649 tree n = build_global_decl ("n", integer_type_node);
7650 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
7651 n, build_int_cst (size_type_node, 4));
7653 region_model_manager mgr;
7654 test_region_model_context ctxt;
7655 region_model model (&mgr);
7657 /* Push stack frame. */
7658 const region *frame_reg
7659 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
7661 /* "p = alloca (n * 4);". */
7662 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
/* The alloca region must live within the current frame.  */
7663 const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
7664 ASSERT_EQ (reg->get_parent_region (), frame_reg);
7665 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
7666 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
7667 ASSERT_EQ (model.get_capacity (reg), size_sval);
7669 /* Verify that the pointers to the alloca region are replaced by
7670 poisoned values when the frame is popped. */
7671 model.pop_frame (NULL, NULL, &ctxt);
7672 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
7675 /* Verify that svalue::involves_p works. */
/* NOTE(review): lossy extract — the function signature (presumably
   "static void test_involves_p ()") and braces are missing; code kept
   verbatim.  Checks that INIT(*p) "involves" INIT(p) (the pointee's
   initial value depends on the pointer's), but not vice versa, and
   that unrelated values don't involve each other.  */
7680 region_model_manager mgr;
7681 tree int_star = build_pointer_type (integer_type_node);
7682 tree p = build_global_decl ("p", int_star);
7683 tree q = build_global_decl ("q", int_star);
7685 test_region_model_context ctxt;
7686 region_model model (&mgr);
7687 const svalue *p_init = model.get_rvalue (p, &ctxt);
7688 const svalue *q_init = model.get_rvalue (q, &ctxt);
/* A value involves itself, but not an unrelated value.  */
7690 ASSERT_TRUE (p_init->involves_p (p_init));
7691 ASSERT_FALSE (p_init->involves_p (q_init));
/* Symbolic regions for *p and *q, and their initial values.  */
7693 const region *star_p_reg = mgr.get_symbolic_region (p_init);
7694 const region *star_q_reg = mgr.get_symbolic_region (q_init);
7696 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
7697 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
/* involves_p is directional: INIT(*p) involves INIT(p), not the
   other way round; and *p/*q stay independent of each other.  */
7699 ASSERT_TRUE (init_star_p->involves_p (p_init));
7700 ASSERT_FALSE (p_init->involves_p (init_star_p));
7701 ASSERT_FALSE (init_star_p->involves_p (q_init));
7702 ASSERT_TRUE (init_star_q->involves_p (q_init));
7703 ASSERT_FALSE (init_star_q->involves_p (p_init));
7706 /* Run all of the selftests within this file. */
/* NOTE(review): lossy extract — "void" return-type line, braces and a
   few test invocations (e.g. between orig lines 7711/7715, 7737/7741,
   7741/7747) are missing; the surviving calls are kept verbatim.
   This is the entry point invoked by the selftest framework; it simply
   runs each test_* function above in sequence.  */
7709 analyzer_region_model_cc_tests ()
7711 test_tree_cmp_on_constants ();
7715 test_get_representative_tree ();
7716 test_unique_constants ();
7717 test_unique_unknowns ();
7718 test_initial_svalue_folding ();
7719 test_unaryop_svalue_folding ();
7720 test_binop_svalue_folding ();
7721 test_sub_svalue_folding ();
7722 test_bits_within_svalue_folding ();
7723 test_descendent_of_p ();
7724 test_bit_range_regions ();
7726 test_compound_assignment ();
7727 test_stack_frames ();
7728 test_get_representative_path_var ();
7730 test_canonicalization_2 ();
7731 test_canonicalization_3 ();
7732 test_canonicalization_4 ();
7733 test_state_merging ();
7734 test_constraint_merging ();
7735 test_widening_constraints ();
7736 test_iteration_1 ();
7737 test_malloc_constraints ();
7741 test_POINTER_PLUS_EXPR_then_MEM_REF ();
7747 } // namespace selftest
7749 #endif /* CHECKING_P */
7753 #endif /* #if ENABLE_ANALYZER */