/* Classes for modeling the state of memory.
   Copyright (C) 2019-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "stringpool.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "analyzer/supergraph.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "analyzer/call-summary.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "gcc-rich-location.h"

#if ENABLE_ANALYZER

namespace ana {
/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}
/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}
/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
	return false;
      if (sval != *other_slot)
	return false;
    }

  return true;
}
/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}
/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}
/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection, rather than
   rejecting differences.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
				       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
	if (iter_sval == *other_slot)
	  out->put (iter_reg, iter_sval);
    }
  return true;
}
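
/* A hedged worked example of the intersection semantics above (an
   illustration, not from the GCC sources; the region/svalue names are
   made up):
     this:  {reg_A: sval_1, reg_B: sval_2}
     other: {reg_A: sval_1, reg_B: sval_3}
   can_merge_with_p writes only the binding common to both maps to OUT:
     out:   {reg_A: sval_1}
   since reg_B maps to different svalues in the two inputs.  */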
/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
	to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}
/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}
/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}
/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}
/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}
/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}
/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}
/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}
/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}
/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}
/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
	return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use of uninitialized value %qE",
			       m_expr);
	}
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use after %<free%> of %qE",
			       m_expr);
	}
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc, get_controlling_option (),
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
};
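
/* Sketch of analyzed C code that would trigger the POISON_KIND_UNINIT
   case above (an illustration, not part of GCC; names are made up):

     int test (void)
     {
       int i;     // never initialized
       return i;  // -Wanalyzer-use-of-uninitialized-value:
		  //   "use of uninitialized value 'i'" (CWE-457)
     }
*/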
/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
			       m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};
/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by count (%qE) >= precision of type (%qi)",
		       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};
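
/* Sketch of analyzed C code exercising the two shift diagnostics above
   (an illustration, not part of GCC; assumes 32-bit int):

     int test (int x)
     {
       int a = x << -1;  // shift_count_negative_diagnostic:
			 //   "shift by negative count (-1)"
       int b = x << 32;  // shift_count_overflow_diagnostic:
			 //   "shift by count (32) >= precision of type (32)"
       return a + b;
     }
*/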
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of bits
	       or by greater than or equal to the number of bits that exist in
	       the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn
		      (make_unique<shift_count_negative_diagnostic>
			 (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn
		      (make_unique<shift_count_overflow_diagnostic>
			 (assign,
			  int (TYPE_PRECISION (TREE_TYPE (rhs1))),
			  rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
      tmp = A | B;
      if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
      if (A) goto L1; else goto L4;
  L1: if (B) goto L2; else goto L4;
  L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
      tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */
static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;"

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of a IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}
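
/* Sketch of C source that produces the pattern handled above (an
   illustration, not part of GCC; assumes compilation with
   -fanalyzer -ftrivial-auto-var-init=zero):

     int len;
     take_address (&len);

   gimplifies to roughly:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   and "len = _1;" should not be reported as a use of an
   uninitialized value.  */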
/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      if (pkind == POISON_KIND_UNINIT)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      const region *src_region = NULL;
      if (pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
							      pkind,
							      src_region)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
/* Attempt to get a region for describing EXPR, the source of region of
   a poisoned_svalue for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
	expr = decl;
      else
	return NULL;
    }
  return get_lvalue (expr, NULL);
}
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
/* A pending_diagnostic subclass for implementing "__analyzer_dump_path".  */

class dump_path_diagnostic
  : public pending_diagnostic_subclass<dump_path_diagnostic>
{
public:
  int get_controlling_option () const final override
  {
    return 0;
  }

  bool emit (rich_location *richloc) final override
  {
    inform (richloc, "path");
    return true;
  }

  const char *get_kind () const final override { return "dump_path_diagnostic"; }

  bool operator== (const dump_path_diagnostic &) const
  {
    return true;
  }
};
/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_TERMINATE_PATH if the path should be terminated.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_terminate_path,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized by
	   anything, for which we don't have a function body, or for which we
	   don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);

	/* Debugging/test support.  */
	if (is_special_named_call_p (call, "__analyzer_describe", 2))
	  impl_call_analyzer_describe (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
	  impl_call_analyzer_dump_capacity (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
	  impl_call_analyzer_dump_escaped (call);
	else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
	  {
	    /* Handle the builtin "__analyzer_dump_path" by queuing a
	       diagnostic at this exploded_node.  */
	    ctxt->warn (make_unique<dump_path_diagnostic> ());
	  }
	else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
					  0))
	  {
	    /* Handle the builtin "__analyzer_dump_region_model" by dumping
	       the region model's state to stderr.  */
	    dump (false);
	  }
	else if (is_special_named_call_p (call, "__analyzer_eval", 1))
	  impl_call_analyzer_eval (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_break", 0))
	  {
	    /* Handle the builtin "__analyzer_break" by triggering a
	       breakpoint.  */
	    /* TODO: is there a good cross-platform way to do this?  */
	    raise (SIGINT);
	  }
	else if (is_special_named_call_p (call,
					  "__analyzer_dump_exploded_nodes",
					  1))
	  {
	    /* This is handled elsewhere.  */
	  }
	else if (is_special_named_call_p (call, "__analyzer_get_unknown_ptr",
					  0))
	  {
	    call_details cd (call, this, ctxt);
	    impl_call_analyzer_get_unknown_ptr (cd);
	  }
	else
	  *out_unknown_side_effects = on_call_pre (call, ctxt,
						   out_terminate_path);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}
/* Abstract base class for all out-of-bounds warnings with concrete values.  */

class out_of_bounds : public pending_diagnostic_subclass<out_of_bounds>
{
public:
  out_of_bounds (const region *reg, tree diag_arg,
		 byte_range out_of_bounds_range)
  : m_reg (reg), m_diag_arg (diag_arg),
    m_out_of_bounds_range (out_of_bounds_range)
  {}

  const char *get_kind () const final override
  {
    return "out_of_bounds_diagnostic";
  }

  bool operator== (const out_of_bounds &other) const
  {
    return m_reg == other.m_reg
	   && m_out_of_bounds_range == other.m_out_of_bounds_range
	   && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg);
  }

protected:
  const region *m_reg;
  tree m_diag_arg;
  byte_range m_out_of_bounds_range;
};
/* Abstract subclass to complain about out-of-bounds
   past the end of the buffer.  */

class past_the_end : public out_of_bounds
{
public:
  past_the_end (const region *reg, tree diag_arg, byte_range range,
		tree byte_bound)
  : out_of_bounds (reg, diag_arg, range), m_byte_bound (byte_bound)
  {}

  bool operator== (const past_the_end &other) const
  {
    return out_of_bounds::operator== (other)
	   && pending_diagnostic::same_tree_p (m_byte_bound,
					       other.m_byte_bound);
  }

  label_text
  describe_region_creation_event (const evdesc::region_creation &ev) final
  override
  {
    if (m_byte_bound && TREE_CODE (m_byte_bound) == INTEGER_CST)
      return ev.formatted_print ("capacity is %E bytes", m_byte_bound);

    return label_text ();
  }

protected:
  tree m_byte_bound;
};
/* Concrete subclass to complain about buffer overflows.  */

class buffer_overflow : public past_the_end
{
public:
  buffer_overflow (const region *reg, tree diag_arg,
		   byte_range range, tree byte_bound)
  : past_the_end (reg, diag_arg, range, byte_bound)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned;
    switch (m_reg->get_memory_space ())
      {
      default:
	m.add_cwe (787);
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "buffer overflow");
	break;
      case MEMSPACE_STACK:
	m.add_cwe (121);
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "stack-based buffer overflow");
	break;
      case MEMSPACE_HEAP:
	m.add_cwe (122);
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "heap-based buffer overflow");
	break;
      }

    if (warned)
      {
	char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
	print_dec (m_out_of_bounds_range.m_size_in_bytes,
		   num_bytes_past_buf, UNSIGNED);
	if (m_diag_arg)
	  inform (rich_loc->get_loc (), "write is %s bytes past the end"
		  " of %qE", num_bytes_past_buf,
		  m_diag_arg);
	else
	  inform (rich_loc->get_loc (), "write is %s bytes past the end"
		  " of the region",
		  num_bytes_past_buf);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
  final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
				     " ends at byte %E", start_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds write at byte %s but region"
				   " ends at byte %E", start_buf,
				   m_byte_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write from byte %s till"
				     " byte %s but %qE ends at byte %E",
				     start_buf, end_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds write from byte %s till"
				   " byte %s but region ends at byte %E",
				   start_buf, end_buf, m_byte_bound);
      }
  }
};
/* Concrete subclass to complain about buffer overreads.  */

class buffer_overread : public past_the_end
{
public:
  buffer_overread (const region *reg, tree diag_arg,
		   byte_range range, tree byte_bound)
  : past_the_end (reg, diag_arg, range, byte_bound)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (126);
    bool warned = warning_meta (rich_loc, m, get_controlling_option (),
				"buffer overread");

    if (warned)
      {
	char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
	print_dec (m_out_of_bounds_range.m_size_in_bytes,
		   num_bytes_past_buf, UNSIGNED);
	if (m_diag_arg)
	  inform (rich_loc->get_loc (), "read is %s bytes past the end"
		  " of %qE", num_bytes_past_buf,
		  m_diag_arg);
	else
	  inform (rich_loc->get_loc (), "read is %s bytes past the end"
		  " of the region",
		  num_bytes_past_buf);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
  final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
				     " ends at byte %E", start_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds read at byte %s but region"
				   " ends at byte %E", start_buf,
				   m_byte_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read from byte %s till"
				     " byte %s but %qE ends at byte %E",
				     start_buf, end_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds read from byte %s till"
				   " byte %s but region ends at byte %E",
				   start_buf, end_buf, m_byte_bound);
      }
  }
};
/* Concrete subclass to complain about buffer underflows.  */

class buffer_underflow : public out_of_bounds
{
public:
  buffer_underflow (const region *reg, tree diag_arg, byte_range range)
  : out_of_bounds (reg, diag_arg, range)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (124);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "buffer underflow");
  }

  label_text describe_final_event (const evdesc::final_event &ev)
  final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
				     " starts at byte 0", start_buf,
				     m_diag_arg);
	return ev.formatted_print ("out-of-bounds write at byte %s but region"
				   " starts at byte 0", start_buf);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write from byte %s till"
				     " byte %s but %qE starts at byte 0",
				     start_buf, end_buf, m_diag_arg);
	return ev.formatted_print ("out-of-bounds write from byte %s till"
				   " byte %s but region starts at byte 0",
				   start_buf, end_buf);
      }
  }
};
/* Concrete subclass to complain about buffer underreads.  */

class buffer_underread : public out_of_bounds
{
public:
  buffer_underread (const region *reg, tree diag_arg, byte_range range)
  : out_of_bounds (reg, diag_arg, range)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (127);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "buffer underread");
  }

  label_text describe_final_event (const evdesc::final_event &ev)
  final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
				     " starts at byte 0", start_buf,
				     m_diag_arg);
	return ev.formatted_print ("out-of-bounds read at byte %s but region"
				   " starts at byte 0", start_buf);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read from byte %s till"
				     " byte %s but %qE starts at byte 0",
				     start_buf, end_buf, m_diag_arg);
	return ev.formatted_print ("out-of-bounds read from byte %s till"
				   " byte %s but region starts at byte 0",
				   start_buf, end_buf);
      }
  }
};
/* Abstract class to complain about out-of-bounds read/writes where
   the values are symbolic.  */

class symbolic_past_the_end
  : public pending_diagnostic_subclass<symbolic_past_the_end>
{
public:
  symbolic_past_the_end (const region *reg, tree diag_arg, tree offset,
			 tree num_bytes, tree capacity)
  : m_reg (reg), m_diag_arg (diag_arg), m_offset (offset),
    m_num_bytes (num_bytes), m_capacity (capacity)
  {}

  const char *get_kind () const final override
  {
    return "symbolic_past_the_end";
  }

  bool operator== (const symbolic_past_the_end &other) const
  {
    return m_reg == other.m_reg
	   && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg)
	   && pending_diagnostic::same_tree_p (m_offset, other.m_offset)
	   && pending_diagnostic::same_tree_p (m_num_bytes, other.m_num_bytes)
	   && pending_diagnostic::same_tree_p (m_capacity, other.m_capacity);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg);
  }

  label_text
  describe_region_creation_event (const evdesc::region_creation &ev) final
  override
  {
    if (m_capacity)
      return ev.formatted_print ("capacity is %qE bytes", m_capacity);

    return label_text ();
  }

  label_text
  describe_final_event (const evdesc::final_event &ev) final override
  {
    const char *byte_str;
    if (pending_diagnostic::same_tree_p (m_num_bytes, integer_one_node))
      byte_str = "byte";
    else
      byte_str = "bytes";

    if (m_offset)
      {
	if (m_num_bytes && TREE_CODE (m_num_bytes) == INTEGER_CST)
	  {
	    if (m_diag_arg)
	      return ev.formatted_print ("%s of %E %s at offset %qE"
					 " exceeds %qE", m_dir_str,
					 m_num_bytes, byte_str,
					 m_offset, m_diag_arg);
	    else
	      return ev.formatted_print ("%s of %E %s at offset %qE"
					 " exceeds the buffer", m_dir_str,
					 m_num_bytes, byte_str, m_offset);
	  }
	else if (m_num_bytes)
	  {
	    if (m_diag_arg)
	      return ev.formatted_print ("%s of %qE %s at offset %qE"
					 " exceeds %qE", m_dir_str,
					 m_num_bytes, byte_str,
					 m_offset, m_diag_arg);
	    else
	      return ev.formatted_print ("%s of %qE %s at offset %qE"
					 " exceeds the buffer", m_dir_str,
					 m_num_bytes, byte_str, m_offset);
	  }
	else
	  {
	    if (m_diag_arg)
	      return ev.formatted_print ("%s at offset %qE exceeds %qE",
					 m_dir_str, m_offset, m_diag_arg);
	    else
	      return ev.formatted_print ("%s at offset %qE exceeds the"
					 " buffer", m_dir_str, m_offset);
	  }
      }
    if (m_diag_arg)
      return ev.formatted_print ("out-of-bounds %s on %qE",
				 m_dir_str, m_diag_arg);
    return ev.formatted_print ("out-of-bounds %s", m_dir_str);
  }

protected:
  const region *m_reg;
  tree m_diag_arg;
  tree m_offset;
  tree m_num_bytes;
  tree m_capacity;
  const char *m_dir_str;
};
/* Concrete subclass to complain about overflows with symbolic values.  */

class symbolic_buffer_overflow : public symbolic_past_the_end
{
public:
  symbolic_buffer_overflow (const region *reg, tree diag_arg, tree offset,
			    tree num_bytes, tree capacity)
  : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
  {
    m_dir_str = "write";
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    switch (m_reg->get_memory_space ())
      {
      default:
	m.add_cwe (787);
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "buffer overflow");
      case MEMSPACE_STACK:
	m.add_cwe (121);
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "stack-based buffer overflow");
      case MEMSPACE_HEAP:
	m.add_cwe (122);
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "heap-based buffer overflow");
      }
  }
};
/* Concrete subclass to complain about overreads with symbolic values.  */

class symbolic_buffer_overread : public symbolic_past_the_end
{
public:
  symbolic_buffer_overread (const region *reg, tree diag_arg, tree offset,
			    tree num_bytes, tree capacity)
  : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
  {
    m_dir_str = "read";
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (126);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "buffer overread");
  }
};
/* Check whether an access is past the end of the BASE_REG.  */

void
region_model::check_symbolic_bounds (const region *base_reg,
				     const svalue *sym_byte_offset,
				     const svalue *num_bytes_sval,
				     const svalue *capacity,
				     enum access_direction dir,
				     region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  const svalue *next_byte
    = m_mgr->get_or_create_binop (num_bytes_sval->get_type (), PLUS_EXPR,
				  sym_byte_offset, num_bytes_sval);

  if (eval_condition_without_cm (next_byte, GT_EXPR, capacity).is_true ())
    {
      tree diag_arg = get_representative_tree (base_reg);
      tree offset_tree = get_representative_tree (sym_byte_offset);
      tree num_bytes_tree = get_representative_tree (num_bytes_sval);
      tree capacity_tree = get_representative_tree (capacity);
      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  ctxt->warn (make_unique<symbolic_buffer_overread> (base_reg,
							     diag_arg,
							     offset_tree,
							     num_bytes_tree,
							     capacity_tree));
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<symbolic_buffer_overflow> (base_reg,
							     diag_arg,
							     offset_tree,
							     num_bytes_tree,
							     capacity_tree));
	  break;
	}
    }
}
/* If SVAL wraps a constant that is an INTEGER_CST, return the underlying
   tree; otherwise return NULL_TREE.  */

static tree
maybe_get_integer_cst_tree (const svalue *sval)
{
  tree cst_tree = sval->maybe_get_constant ();
  if (cst_tree && TREE_CODE (cst_tree) == INTEGER_CST)
    return cst_tree;

  return NULL_TREE;
}
/* May complain when the access on REG is out-of-bounds.  */

void
region_model::check_region_bounds (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  /* Get the offset.  */
  region_offset reg_offset = reg->get_offset (m_mgr);
  const region *base_reg = reg_offset.get_base_region ();

  /* Bail out on symbolic regions.
     (e.g. because the analyzer did not see previous offsets on the latter,
     it might think that a negative access is before the buffer).  */
  if (base_reg->symbolic_p ())
    return;

  /* Find out how many bytes were accessed.  */
  const svalue *num_bytes_sval = reg->get_byte_size_sval (m_mgr);
  tree num_bytes_tree = maybe_get_integer_cst_tree (num_bytes_sval);
  /* Bail out if 0 bytes are accessed.  */
  if (num_bytes_tree && zerop (num_bytes_tree))
    return;

  /* Get the capacity of the buffer.  */
  const svalue *capacity = get_capacity (base_reg);
  tree cst_capacity_tree = maybe_get_integer_cst_tree (capacity);

  /* The constant offset from a pointer is represented internally as a sizetype
     but should be interpreted as a signed value here.  The statement below
     converts the offset from bits to bytes and then to a signed integer with
     the same precision the sizetype has on the target system.

     For example, this is needed for out-of-bounds-3.c test1 to pass when
     compiled with a 64-bit gcc build targeting 32-bit systems.  */
  byte_offset_t offset;
  if (!reg_offset.symbolic_p ())
    offset = wi::sext (reg_offset.get_bit_offset () >> LOG2_BITS_PER_UNIT,
		       TYPE_PRECISION (size_type_node));

  /* If either the offset or the number of bytes accessed are symbolic,
     we have to reason about symbolic values.  */
  if (reg_offset.symbolic_p () || !num_bytes_tree)
    {
      const svalue* byte_offset_sval;
      if (!reg_offset.symbolic_p ())
	{
	  tree offset_tree = wide_int_to_tree (integer_type_node, offset);
	  byte_offset_sval
	    = m_mgr->get_or_create_constant_svalue (offset_tree);
	}
      else
	byte_offset_sval = reg_offset.get_symbolic_byte_offset ();
      check_symbolic_bounds (base_reg, byte_offset_sval, num_bytes_sval,
			     capacity, dir, ctxt);
      return;
    }

  /* Otherwise continue to check with concrete values.  */
  byte_range out (0, 0);
  /* NUM_BYTES_TREE should always be interpreted as unsigned.  */
  byte_offset_t num_bytes_unsigned = wi::to_offset (num_bytes_tree);
  byte_range read_bytes (offset, num_bytes_unsigned);
  /* If read_bytes has a subset < 0, we do have an underflow.  */
  if (read_bytes.falls_short_of_p (0, &out))
    {
      tree diag_arg = get_representative_tree (base_reg);
      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  ctxt->warn (make_unique<buffer_underread> (reg, diag_arg, out));
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<buffer_underflow> (reg, diag_arg, out));
	  break;
	}
    }

  /* For accesses past the end, we do need a concrete capacity.  No need to
     do a symbolic check here because the inequality check does not reason
     whether constants are greater than symbolic values.  */
  if (!cst_capacity_tree)
    return;

  byte_range buffer (0, wi::to_offset (cst_capacity_tree));
  /* If READ_BYTES exceeds BUFFER, we do have an overflow.  */
  if (read_bytes.exceeds_p (buffer, &out))
    {
      tree byte_bound = wide_int_to_tree (size_type_node,
					  buffer.get_next_byte_offset ());
      tree diag_arg = get_representative_tree (base_reg);

      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  ctxt->warn (make_unique<buffer_overread> (reg, diag_arg,
						    out, byte_bound));
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<buffer_overflow> (reg, diag_arg,
						    out, byte_bound));
	  break;
	}
    }
}
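
/* Sketch of analyzed C code that the concrete bounds-checking above
   would catch (an illustration, not part of GCC; names are made up):

     int32_t arr[10];      // capacity of the base region: 40 bytes
     arr[10] = 42;         // bytes 40-43 exceed the 40-byte buffer:
			   //   buffer_overflow
			   //   ("stack-based buffer overflow")
     int32_t x = arr[-1];  // bytes -4 to -1 fall short of byte 0:
			   //   buffer_underread ("buffer underread")
*/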
/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);
}
/* Return true if CD is known to be a call to a function with
   __attribute__((const)).  */

static bool
const_fn_p (const call_details &cd)
{
  tree fndecl = cd.get_fndecl_for_call ();
  if (!fndecl)
    return false;
  gcc_assert (DECL_P (fndecl));
  return TREE_READONLY (fndecl);
}
/* If this CD is known to be a call to a function with
   __attribute__((const)), attempt to get a const_fn_result_svalue
   based on the arguments, or return NULL otherwise.  */

static const svalue *
maybe_get_const_fn_result (const call_details &cd)
{
  if (!const_fn_p (cd))
    return NULL;

  unsigned num_args = cd.num_args ();
  if (num_args > const_fn_result_svalue::MAX_INPUTS)
    /* Too many arguments.  */
    return NULL;

  auto_vec<const svalue *> inputs (num_args);
  for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
    {
      const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
      if (!arg_sval->can_have_associated_state_p ())
	return NULL;
      inputs.quick_push (arg_sval);
    }

  region_model_manager *mgr = cd.get_manager ();
  const svalue *sval
    = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
						 cd.get_fndecl_for_call (),
						 inputs);
  return sval;
}
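
/* Sketch of why a const_fn_result_svalue is useful (an illustration,
   not part of GCC; names are made up):

     extern int lookup (int) __attribute__((const));
     int a = lookup (42);
     int b = lookup (42);

   With the handling above, both calls yield the same svalue, keyed on
   the fndecl and the argument svalues, so the analyzer can treat
   a == b.  Without the attribute, each call would instead get a
   distinct conjured_svalue (see on_call_pre below).  */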
/* Update this model for an outcome of a call that returns the specific
   integer constant RETVAL.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
					 int retval,
					 bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}
/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}
/* Update this model for an outcome of a call that returns non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}
/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
			    region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
	{
	  return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
	  /* TODO: we might want to also capture the constraint
	     when recording the diagnostic, or note that we're using
	     the upper bound.  */
	}
  return NULL;
}
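
/* Sketch of the Linux-kernel idiom described above (an illustration,
   not part of GCC; names are made up):

     n = min_t(unsigned long, len, sizeof(buf));
     copy_to_user(dst, buf, n);

   Here "n" becomes a binop_svalue (MIN_EXPR, len, sizeof(buf)), and
   the code above simplifies its upper bound to the constant
   sizeof(buf).  */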
/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
				     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
	= maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}
/* Get any known_function for FNDECL, or NULL if there is none.  */

const known_function *
region_model::get_known_function (tree fndecl) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_by_fndecl (fndecl);
}
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).

   Write true to *OUT_TERMINATE_PATH if this execution path should be
   terminated (e.g. the function call terminates the process).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
			   bool *out_terminate_path)
{
  call_details cd (call, this, ctxt);

  bool unknown_side_effects = false;

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false;

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      const svalue *sval = maybe_get_const_fn_result (cd);
      if (!sval)
	{
	  /* For the common case of functions without __attribute__((const)),
	     use a conjured value, and purge any prior state involving that
	     value (in case this is in a loop).  */
	  sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
						       lhs_region,
						       conjured_purge (this,
								       ctxt));
	}
      set_value (lhs_region, sval, ctxt);
    }

  if (gimple_call_internal_p (call))
    {
      switch (gimple_call_internal_fn (call))
	{
	default:
	  break;
	case IFN_BUILTIN_EXPECT:
	  impl_call_builtin_expect (cd);
	  return false;
	case IFN_UBSAN_BOUNDS:
	  return false;
	case IFN_VA_ARG:
	  impl_call_va_arg (cd);
	  return false;
	}
    }

  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      /* The various impl_call_* member functions are implemented
	 in region-model-impl-calls.cc.
	 Having them split out into separate functions makes it easier
	 to put breakpoints on the handling of specific functions.  */
      int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);

      if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
	  && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
	switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
	  {
	  default:
	    if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
	      unknown_side_effects = true;
	    break;
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	    impl_call_alloca (cd);
	    return false;
	  case BUILT_IN_CALLOC:
	    impl_call_calloc (cd);
	    return false;
	  case BUILT_IN_EXPECT:
	  case BUILT_IN_EXPECT_WITH_PROBABILITY:
	    impl_call_builtin_expect (cd);
	    return false;
	  case BUILT_IN_FREE:
	    /* Handle in "on_call_post".  */
	    break;
	  case BUILT_IN_MALLOC:
	    impl_call_malloc (cd);
	    return false;
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMCPY_CHK:
	    impl_call_memcpy (cd);
	    return false;
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    impl_call_memset (cd);
	    return false;
	  case BUILT_IN_REALLOC:
	    return false;
	  case BUILT_IN_STRCHR:
	    /* Handle in "on_call_post".  */
	    return false;
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRCPY_CHK:
	    impl_call_strcpy (cd);
	    return false;
	  case BUILT_IN_STRLEN:
	    impl_call_strlen (cd);
	    return false;

	  case BUILT_IN_STACK_SAVE:
	  case BUILT_IN_STACK_RESTORE:
	    return false;

	  /* Stdio builtins.  */
	  case BUILT_IN_FPRINTF:
	  case BUILT_IN_FPRINTF_UNLOCKED:
	  case BUILT_IN_PUTC:
	  case BUILT_IN_PUTC_UNLOCKED:
	  case BUILT_IN_FPUTC:
	  case BUILT_IN_FPUTC_UNLOCKED:
	  case BUILT_IN_FPUTS:
	  case BUILT_IN_FPUTS_UNLOCKED:
	  case BUILT_IN_FWRITE:
	  case BUILT_IN_FWRITE_UNLOCKED:
	  case BUILT_IN_PRINTF:
	  case BUILT_IN_PRINTF_UNLOCKED:
	  case BUILT_IN_PUTCHAR:
	  case BUILT_IN_PUTCHAR_UNLOCKED:
	  case BUILT_IN_PUTS:
	  case BUILT_IN_PUTS_UNLOCKED:
	  case BUILT_IN_VFPRINTF:
	  case BUILT_IN_VPRINTF:
	    /* These stdio builtins have external effects that are out
	       of scope for the analyzer: we only want to model the effects
	       on the return value.  */
	    break;

	  case BUILT_IN_VA_START:
	    impl_call_va_start (cd);
	    return false;
	  case BUILT_IN_VA_COPY:
	    impl_call_va_copy (cd);
	    return false;
	  }
      else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
	{
	  impl_call_malloc (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
	{
	  impl_call_calloc (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
	{
	  impl_call_alloca (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
	{
	  impl_call_realloc (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "__errno_location", call, 0))
	{
	  impl_call_errno_location (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "error"))
	{
	  if (impl_call_error (cd, 3, out_terminate_path))
	    return false;
	  else
	    unknown_side_effects = true;
	}
      else if (is_named_call_p (callee_fndecl, "error_at_line"))
	{
	  if (impl_call_error (cd, 5, out_terminate_path))
	    return false;
	  else
	    unknown_side_effects = true;
	}
      else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
	       || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
	{
	  impl_call_fgets (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "fread", call, 4))
	{
	  impl_call_fread (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
	{
	  /* No side-effects (tracking stream state is out-of-scope
	     for the analyzer).  */
	}
      else if (is_named_call_p (callee_fndecl, "memset", call, 3)
	       && POINTER_TYPE_P (cd.get_arg_type (0)))
	{
	  impl_call_memset (cd);
	  return false;
	}
      else if (is_pipe_call_p (callee_fndecl, "pipe", call, 1)
	       || is_pipe_call_p (callee_fndecl, "pipe2", call, 2))
	{
	  /* Handle in "on_call_post"; bail now so that fd array
	     is left untouched so that we can detect use-of-uninit
	     for the case where the call fails.  */
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "putenv", call, 1)
	       && POINTER_TYPE_P (cd.get_arg_type (0)))
	{
	  impl_call_putenv (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
	       && POINTER_TYPE_P (cd.get_arg_type (0)))
	{
	  /* Handle in "on_call_post".  */
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
	       && POINTER_TYPE_P (cd.get_arg_type (0)))
	{
	  impl_call_strlen (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
	{
	  impl_call_operator_new (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
	{
	  impl_call_operator_new (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
	       || is_named_call_p (callee_fndecl, "operator delete", call, 2)
	       || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
	{
	  /* Handle in "on_call_post".  */
	}
      else if (const known_function *kf = get_known_function (callee_fndecl))
	{
	  kf->impl_call_pre (cd);
	  return false;
	}
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
	       && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
	       && !fndecl_built_in_p (callee_fndecl))
	unknown_side_effects = true;
    }
  else
    unknown_side_effects = true;

  return unknown_side_effects;
}
2390 /* Update this model for the CALL stmt, using CTXT to report any
2391 diagnostics - the second half.
2393 Updates to the region_model that should be made *after* sm-states
2394 are updated are done here; other updates to the region_model are done
2395 in region_model::on_call_pre.
2397 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call to conservatively update this model for the unknown effects of the call. */
2401 region_model::on_call_post (const gcall *call,
2402 bool unknown_side_effects,
2403 region_model_context *ctxt)
2405 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
2407 call_details cd (call, this, ctxt);
2408 if (is_named_call_p (callee_fndecl, "free", call, 1))
2410 impl_call_free (cd);
2413 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
2414 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
2415 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
2417 impl_call_operator_delete (cd);
2420 else if (is_pipe_call_p (callee_fndecl, "pipe", call, 1)
2421 || is_pipe_call_p (callee_fndecl, "pipe2", call, 2))
2423 impl_call_pipe (cd);
2426 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
2427 && POINTER_TYPE_P (cd.get_arg_type (0)))
2429 impl_call_strchr (cd);
2432 /* Was this fndecl referenced by
2433 __attribute__((malloc(FOO)))? */
2434 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
2436 impl_deallocation_call (cd);
2439 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
2440 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
2441 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
2445 case BUILT_IN_REALLOC:
2446 impl_call_realloc (cd);
2449 case BUILT_IN_STRCHR:
2450 impl_call_strchr (cd);
2453 case BUILT_IN_VA_END:
2454 impl_call_va_end (cd);
2459 if (unknown_side_effects)
2460 handle_unrecognized_call (call, ctxt);
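/* Illustrative sketch of the pre/post split (hypothetical user code):

     void *p = malloc (1);
     free (p);

   "malloc" is modeled in on_call_pre via impl_call_malloc, whereas
   "free" is modeled here in on_call_post, after the malloc state
   machine has processed the call, matching the ordering described
   in the comment above.  */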
2463 /* Purge state involving SVAL from this region_model, using CTXT
2464 (if non-NULL) to purge other state in a program_state.
2466 For example, if we're at the def-stmt of an SSA name, then we need to
2467 purge any state for svalues that involve that SSA name. This avoids
2468 false positives in loops, since a symbolic value referring to the
2469 SSA name will be referring to the previous value of that SSA name.
2472 while ((e = hashmap_iter_next(&iter))) {
2473 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
2474 free (e_strbuf->value);
2476 at the def-stmt of e_8:
2477 e_8 = hashmap_iter_next (&iter);
2478 we should purge the "freed" state of:
2479 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
2480 which is the "e_strbuf->value" value from the previous iteration,
2481 or we will erroneously report a double-free - the "e_8" within it
2482 refers to the previous value. */
2485 region_model::purge_state_involving (const svalue *sval,
2486 region_model_context *ctxt)
2488 if (!sval->can_have_associated_state_p ())
2490 m_store.purge_state_involving (sval, m_mgr);
2491 m_constraints->purge_state_involving (sval);
2492 m_dynamic_extents.purge_state_involving (sval);
2494 ctxt->purge_state_involving (sval);
2497 /* A pending_note subclass for adding a note about an
2498 __attribute__((access, ...)) to a diagnostic. */
2500 class reason_attr_access : public pending_note_subclass<reason_attr_access>
2503 reason_attr_access (tree callee_fndecl, const attr_access &access)
2504 : m_callee_fndecl (callee_fndecl),
2505 m_ptr_argno (access.ptrarg),
2506 m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
2510 const char *get_kind () const final override { return "reason_attr_access"; }
2512 void emit () const final override
2514 inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
2515 "parameter %i of %qD marked with attribute %qs",
2516 m_ptr_argno + 1, m_callee_fndecl, m_access_str);
2519 bool operator== (const reason_attr_access &other) const
2521 return (m_callee_fndecl == other.m_callee_fndecl
2522 && m_ptr_argno == other.m_ptr_argno
2523 && !strcmp (m_access_str, other.m_access_str));
2527 tree m_callee_fndecl;
2528 unsigned m_ptr_argno;
2529 const char *m_access_str;
2532 /* Check CALL, a call to the external function CALLEE_FNDECL, based on
2533 any __attribute__ ((access, ...)) on the latter, complaining to
2534 CTXT about any issues.
2536 Currently we merely call check_region_for_write on any regions
2537 pointed to by arguments marked with a "write_only" or "read_write" attribute. */
2542 check_external_function_for_access_attr (const gcall *call,
2544 region_model_context *ctxt) const
2547 gcc_assert (callee_fndecl);
2550 tree fntype = TREE_TYPE (callee_fndecl);
2554 if (!TYPE_ATTRIBUTES (fntype))
2557 /* Initialize a map of attribute access specifications for arguments
2558 to the function call. */
2560 init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
2564 for (tree iter = TYPE_ARG_TYPES (fntype); iter;
2565 iter = TREE_CHAIN (iter), ++argno)
2567 const attr_access* access = rdwr_idx.get (argno);
2571 /* Ignore any duplicate entry in the map for the size argument. */
2572 if (access->ptrarg != argno)
2575 if (access->mode == access_write_only
2576 || access->mode == access_read_write)
2578 /* Subclass of decorated_region_model_context that
2579 adds a note about the attr access to any saved diagnostics. */
2580 class annotating_ctxt : public note_adding_context
2583 annotating_ctxt (tree callee_fndecl,
2584 const attr_access &access,
2585 region_model_context *ctxt)
2586 : note_adding_context (ctxt),
2587 m_callee_fndecl (callee_fndecl),
2591 std::unique_ptr<pending_note> make_note () final override
2593 return make_unique<reason_attr_access>
2594 (m_callee_fndecl, m_access);
2597 tree m_callee_fndecl;
2598 const attr_access &m_access;
2601 /* Use this ctxt below so that any diagnostics get the
2602 note added to them. */
2603 annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
2605 tree ptr_tree = gimple_call_arg (call, access->ptrarg);
2606 const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
2607 const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
2608 check_region_for_write (reg, &my_ctxt);
2609 /* We don't use the size arg for now. */
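/* Illustrative sketch (hypothetical declaration, not from this file):

     void fill (char *buf, size_t n)
       __attribute__ ((access (write_only, 1, 2)));

   For a call to "fill", the loop above finds the access specification
   for pointer argument 1, and any diagnostic from the resulting
   check_region_for_write gains a note of the form:
     "parameter 1 of 'fill' marked with attribute
      'access (write_only, 1, 2)'".  */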
2614 /* Handle a call CALL to a function with unknown behavior.
2616 Traverse the regions in this model, determining what regions are
2617 reachable from pointer arguments to CALL and from global variables, recursively.
2620 Set all reachable regions to new unknown values and purge sm-state
2621 from their values, and from values that point to them. */
2624 region_model::handle_unrecognized_call (const gcall *call,
2625 region_model_context *ctxt)
2627 tree fndecl = get_fndecl_for_call (call, ctxt);
2630 check_external_function_for_access_attr (call, fndecl, ctxt);
2632 reachable_regions reachable_regs (this);
2634 /* Determine the reachable regions and their mutability. */
2636 /* Add globals and regions that already escaped in previous
2638 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2641 /* Params that are pointers. */
2642 tree iter_param_types = NULL_TREE;
2644 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2645 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
2647 /* Track expected param type, where available. */
2648 tree param_type = NULL_TREE;
2649 if (iter_param_types)
2651 param_type = TREE_VALUE (iter_param_types);
2652 gcc_assert (param_type);
2653 iter_param_types = TREE_CHAIN (iter_param_types);
2656 tree parm = gimple_call_arg (call, arg_idx);
2657 const svalue *parm_sval = get_rvalue (parm, ctxt);
2658 reachable_regs.handle_parm (parm_sval, param_type);
2662 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
2664 /* Purge sm-state for the svalues that were reachable,
2665 both in non-mutable and mutable form. */
2666 for (svalue_set::iterator iter
2667 = reachable_regs.begin_reachable_svals ();
2668 iter != reachable_regs.end_reachable_svals (); ++iter)
2670 const svalue *sval = (*iter);
2672 ctxt->on_unknown_change (sval, false);
2674 for (svalue_set::iterator iter
2675 = reachable_regs.begin_mutable_svals ();
2676 iter != reachable_regs.end_mutable_svals (); ++iter)
2678 const svalue *sval = (*iter);
2680 ctxt->on_unknown_change (sval, true);
2682 uncertainty->on_mutable_sval_at_unknown_call (sval);
2685 /* Mark any clusters that have escaped. */
2686 reachable_regs.mark_escaped_clusters (ctxt);
2688 /* Update bindings for all clusters that have escaped, whether above,
2690 m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
2691 conjured_purge (this, ctxt));
2693 /* Purge dynamic extents from any regions that have escaped mutably:
2694 realloc could have been called on them. */
2695 for (hash_set<const region *>::iterator
2696 iter = reachable_regs.begin_mutable_base_regs ();
2697 iter != reachable_regs.end_mutable_base_regs ();
2700 const region *base_reg = (*iter);
2701 unset_dynamic_extents (base_reg);
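/* Illustrative sketch (hypothetical user code):

     int g = 1;
     extern void unknown_fn (int *);
     ...
     int local = 2;
     unknown_fn (&local);

   G is reachable as a global, and LOCAL's address escapes via the
   argument, so after the call both lose their known bindings: their
   clusters are conservatively rebound via on_unknown_fncall, sm-state
   on the old values is purged, and dynamic extents of mutably-escaped
   base regions are unset.  */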
2705 /* Traverse the regions in this model, determining what regions are
2706 reachable from the store and populating *OUT.
2708 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
2709 for reachability (for handling return values from functions when
2710 analyzing return of the only function on the stack).
2712 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
2713 within it as being maybe-bound as additional "roots" for reachability.
2715 Find svalues that haven't leaked. */
2718 region_model::get_reachable_svalues (svalue_set *out,
2719 const svalue *extra_sval,
2720 const uncertainty_t *uncertainty)
2722 reachable_regions reachable_regs (this);
2724 /* Add globals and regions that already escaped in previous
2726 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2730 reachable_regs.handle_sval (extra_sval);
2733 for (uncertainty_t::iterator iter
2734 = uncertainty->begin_maybe_bound_svals ();
2735 iter != uncertainty->end_maybe_bound_svals (); ++iter)
2736 reachable_regs.handle_sval (*iter);
2738 /* Get regions for locals that have explicitly bound values. */
2739 for (store::cluster_map_t::iterator iter = m_store.begin ();
2740 iter != m_store.end (); ++iter)
2742 const region *base_reg = (*iter).first;
2743 if (const region *parent = base_reg->get_parent_region ())
2744 if (parent->get_kind () == RK_FRAME)
2745 reachable_regs.add (base_reg, false);
2748 /* Populate *OUT based on the values that were reachable. */
2749 for (svalue_set::iterator iter
2750 = reachable_regs.begin_reachable_svals ();
2751 iter != reachable_regs.end_reachable_svals (); ++iter)
2755 /* Update this model for the RETURN_STMT, using CTXT to report any diagnostics. */
2759 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
2761 tree callee = get_current_function ()->decl;
2762 tree lhs = DECL_RESULT (callee);
2763 tree rhs = gimple_return_retval (return_stmt);
2767 const svalue *sval = get_rvalue (rhs, ctxt);
2768 const region *ret_reg = get_lvalue (lhs, ctxt);
2769 set_value (ret_reg, sval, ctxt);
2773 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
2774 ENODE, using CTXT to report any diagnostics.
2776 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
2777 0), as opposed to any second return due to longjmp/siglongjmp. */
2780 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
2781 region_model_context *ctxt)
2783 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
2784 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
2787 /* Create a setjmp_svalue for this call and store it in BUF_REG's region. */
2791 setjmp_record r (enode, call);
2793 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
2794 set_value (buf_reg, sval, ctxt);
2797 /* Direct calls to setjmp return 0. */
2798 if (tree lhs = gimple_call_lhs (call))
2800 const svalue *new_sval
2801 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
2802 const region *lhs_reg = get_lvalue (lhs, ctxt);
2803 set_value (lhs_reg, new_sval, ctxt);
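/* Illustrative sketch (hypothetical user code):

     jmp_buf env;
     ...
     if (setjmp (env) == 0)
       first_time ();   <- this path: the direct call; LHS bound to 0
     else
       second_time ();  <- reached only via on_longjmp, below

   where the setjmp_svalue stored into ENV records ENODE and CALL so
   that a later longjmp can rewind to this program point.  */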
2807 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
2808 to a "setjmp" at SETJMP_CALL where the final stack depth should be
2809 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
2810 done, and should be done by the caller. */
2813 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
2814 int setjmp_stack_depth, region_model_context *ctxt)
2816 /* Evaluate the val, using the frame of the "longjmp". */
2817 tree fake_retval = gimple_call_arg (longjmp_call, 1);
2818 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
2820 /* Pop any frames until we reach the stack depth of the function where
2821 setjmp was called. */
2822 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
2823 while (get_stack_depth () > setjmp_stack_depth)
2824 pop_frame (NULL, NULL, ctxt);
2826 gcc_assert (get_stack_depth () == setjmp_stack_depth);
2828 /* Assign to LHS of "setjmp" in new_state. */
2829 if (tree lhs = gimple_call_lhs (setjmp_call))
2831 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
2832 const svalue *zero_sval
2833 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
2834 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
2835 /* If we have 0, use 1. */
2836 if (eq_zero.is_true ())
2838 const svalue *one_sval
2839 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
2840 fake_retval_sval = one_sval;
2844 /* Otherwise note that the value is nonzero. */
2845 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
2848 /* Decorate the return value from setjmp as being unmergeable,
2849 so that we don't attempt to merge states with it as zero
2850 with states in which it's nonzero, leading to a clean distinction
2851 in the exploded_graph between the first return and the second
2853 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
2855 const region *lhs_reg = get_lvalue (lhs, ctxt);
2856 set_value (lhs_reg, fake_retval_sval, ctxt);
2860 /* Update this region_model for a phi stmt of the form
2861 LHS = PHI <...RHS...>.
2862 where RHS is for the appropriate edge.
2863 Get state from OLD_STATE so that all of the phi stmts for a basic block
2864 are effectively handled simultaneously. */
2867 region_model::handle_phi (const gphi *phi,
2869 const region_model &old_state,
2870 region_model_context *ctxt)
2872 /* For now, don't bother tracking the .MEM SSA names. */
2873 if (tree var = SSA_NAME_VAR (lhs))
2874 if (TREE_CODE (var) == VAR_DECL)
2875 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2878 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2879 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
2881 set_value (dst_reg, src_sval, ctxt);
2884 ctxt->on_phi (phi, rhs);
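/* Illustrative sketch, for gimple such as (hypothetical SSA names):

     # x_5 = PHI <x_3(2), 7(3)>

   When following the edge from basic block 3, RHS is the constant 7,
   so X_5 is bound to 7; reading the operand from OLD_STATE ensures
   that the other phis of the block still observe the pre-phi values.  */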
2887 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
2889 Get the id of the region for PV within this region_model,
2890 emitting any diagnostics to CTXT. */
2893 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
2895 tree expr = pv.m_tree;
2899 switch (TREE_CODE (expr))
2902 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2903 dump_location_t ());
2907 tree array = TREE_OPERAND (expr, 0);
2908 tree index = TREE_OPERAND (expr, 1);
2910 const region *array_reg = get_lvalue (array, ctxt);
2911 const svalue *index_sval = get_rvalue (index, ctxt);
2912 return m_mgr->get_element_region (array_reg,
2913 TREE_TYPE (TREE_TYPE (array)),
2920 tree inner_expr = TREE_OPERAND (expr, 0);
2921 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2922 tree num_bits = TREE_OPERAND (expr, 1);
2923 tree first_bit_offset = TREE_OPERAND (expr, 2);
2924 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2925 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2926 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2927 TREE_INT_CST_LOW (num_bits));
2928 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2934 tree ptr = TREE_OPERAND (expr, 0);
2935 tree offset = TREE_OPERAND (expr, 1);
2936 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2937 const svalue *offset_sval = get_rvalue (offset, ctxt);
2938 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2939 return m_mgr->get_offset_region (star_ptr,
2946 return m_mgr->get_region_for_fndecl (expr);
2949 return m_mgr->get_region_for_label (expr);
2952 /* Handle globals. */
2953 if (is_global_var (expr))
2954 return m_mgr->get_region_for_global (expr);
2962 gcc_assert (TREE_CODE (expr) == SSA_NAME
2963 || TREE_CODE (expr) == PARM_DECL
2964 || TREE_CODE (expr) == VAR_DECL
2965 || TREE_CODE (expr) == RESULT_DECL);
2967 int stack_index = pv.m_stack_depth;
2968 const frame_region *frame = get_frame_at_index (stack_index);
2970 return frame->get_region_for_local (m_mgr, expr, ctxt);
2976 tree obj = TREE_OPERAND (expr, 0);
2977 tree field = TREE_OPERAND (expr, 1);
2978 const region *obj_reg = get_lvalue (obj, ctxt);
2979 return m_mgr->get_field_region (obj_reg, field);
2984 return m_mgr->get_region_for_string (expr);
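/* Illustrative sketch: an lvalue such as "arr[i].field" (hypothetical)
   decomposes recursively: the COMPONENT_REF case above requests a
   field_region within the region for "arr[i]", which the ARRAY_REF
   case in turn builds as an element_region within the region for
   "arr", keyed by the svalue for "i".  */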
2988 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2991 assert_compat_types (tree src_type, tree dst_type)
2993 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2996 if (!(useless_type_conversion_p (src_type, dst_type)))
2997 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
3002 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
3005 compat_types_p (tree src_type, tree dst_type)
3007 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
3008 if (!(useless_type_conversion_p (src_type, dst_type)))
3013 /* Get the region for PV within this region_model,
3014 emitting any diagnostics to CTXT. */
3017 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
3019 if (pv.m_tree == NULL_TREE)
3022 const region *result_reg = get_lvalue_1 (pv, ctxt);
3023 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
3027 /* Get the region for EXPR within this region_model (assuming the most
3028 recent stack frame if it's a local). */
3031 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
3033 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3036 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
3038 Get the value of PV within this region_model,
3039 emitting any diagnostics to CTXT. */
3042 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
3044 gcc_assert (pv.m_tree);
3046 switch (TREE_CODE (pv.m_tree))
3049 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
3054 tree expr = pv.m_tree;
3055 tree op0 = TREE_OPERAND (expr, 0);
3056 const region *expr_reg = get_lvalue (op0, ctxt);
3057 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
3063 tree expr = pv.m_tree;
3064 tree op0 = TREE_OPERAND (expr, 0);
3065 const region *reg = get_lvalue (op0, ctxt);
3066 tree num_bits = TREE_OPERAND (expr, 1);
3067 tree first_bit_offset = TREE_OPERAND (expr, 2);
3068 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
3069 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
3070 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
3071 TREE_INT_CST_LOW (num_bits));
3072 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
3081 const region *reg = get_lvalue (pv, ctxt);
3082 return get_store_value (reg, ctxt);
3087 case VIEW_CONVERT_EXPR:
3089 tree expr = pv.m_tree;
3090 tree arg = TREE_OPERAND (expr, 0);
3091 const svalue *arg_sval = get_rvalue (arg, ctxt);
3092 const svalue *sval_unaryop
3093 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
3095 return sval_unaryop;
3103 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
3105 case POINTER_PLUS_EXPR:
3107 tree expr = pv.m_tree;
3108 tree ptr = TREE_OPERAND (expr, 0);
3109 tree offset = TREE_OPERAND (expr, 1);
3110 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
3111 const svalue *offset_sval = get_rvalue (offset, ctxt);
3112 const svalue *sval_binop
3113 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
3114 ptr_sval, offset_sval);
3122 tree expr = pv.m_tree;
3123 tree arg0 = TREE_OPERAND (expr, 0);
3124 tree arg1 = TREE_OPERAND (expr, 1);
3125 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
3126 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
3127 const svalue *sval_binop
3128 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
3129 arg0_sval, arg1_sval);
3136 const region *ref_reg = get_lvalue (pv, ctxt);
3137 return get_store_value (ref_reg, ctxt);
3141 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
3142 return get_rvalue (expr, ctxt);
3147 /* Get the value of PV within this region_model,
3148 emitting any diagnostics to CTXT. */
3151 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
3153 if (pv.m_tree == NULL_TREE)
3156 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
3158 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
3160 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
3165 /* Get the value of EXPR within this region_model (assuming the most
3166 recent stack frame if it's a local). */
3169 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
3171 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3174 /* Return true if this model is on a path with "main" as the entrypoint
3175 (as opposed to one in which we're merely analyzing a subset of the
3176 path through the code). */
3179 region_model::called_from_main_p () const
3181 if (!m_current_frame)
3183 /* Determine if the oldest stack frame in this model is for "main". */
3184 const frame_region *frame0 = get_frame_at_index (0);
3185 gcc_assert (frame0);
3186 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
3189 /* Subroutine of region_model::get_store_value for when REG is (or is within)
3190 a global variable that hasn't been touched since the start of this path
3191 (or was implicitly touched due to a call to an unknown function). */
3194 region_model::get_initial_value_for_global (const region *reg) const
3196 /* Get the decl that REG is for (or is within). */
3197 const decl_region *base_reg
3198 = reg->get_base_region ()->dyn_cast_decl_region ();
3199 gcc_assert (base_reg);
3200 tree decl = base_reg->get_decl ();
3202 /* Special-case: to avoid having to explicitly update all previously
3203 untracked globals when calling an unknown fn, they implicitly have
3204 an unknown value if an unknown call has occurred, unless this is
3205 static to-this-TU and hasn't escaped. Globals that have escaped
3206 are explicitly tracked, so we shouldn't hit this case for them. */
3207 if (m_store.called_unknown_fn_p ()
3208 && TREE_PUBLIC (decl)
3209 && !TREE_READONLY (decl))
3210 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
3212 /* If we are on a path from the entrypoint from "main" and we have a
3213 global decl defined in this TU that hasn't been touched yet, then
3214 the initial value of REG can be taken from the initialization value of the decl. */
3216 if (called_from_main_p () || TREE_READONLY (decl))
3218 /* Attempt to get the initializer value for base_reg. */
3219 if (const svalue *base_reg_init
3220 = base_reg->get_svalue_for_initializer (m_mgr))
3222 if (reg == base_reg)
3223 return base_reg_init;
3226 /* Get the value for REG within base_reg_init. */
3227 binding_cluster c (base_reg);
3228 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
3230 = c.get_any_binding (m_mgr->get_store_manager (), reg);
3233 if (reg->get_type ())
3234 sval = m_mgr->get_or_create_cast (reg->get_type (),
3242 /* Otherwise, return INIT_VAL(REG). */
3243 return m_mgr->get_or_create_initial_value (reg);
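/* Illustrative sketch (hypothetical globals, loosely following the
   special-cases above):

     static int counter;  <- TU-local, hence not invalidated above
     int published;       <- TREE_PUBLIC and writable

   On a path from "main" with no prior unknown call, reading COUNTER
   yields its (implicitly zero) initializer; after a call to an
   unknown function, reading PUBLISHED yields an unknown_svalue.  */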
3246 /* Get a value for REG, looking it up in the store, or otherwise falling
3247 back to "initial" or "unknown" values.
3248 Use CTXT to report any warnings associated with reading from REG. */
3251 region_model::get_store_value (const region *reg,
3252 region_model_context *ctxt) const
3254 check_region_for_read (reg, ctxt);
3256 /* Special-case: handle var_decls in the constant pool. */
3257 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
3258 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
3262 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
3265 if (reg->get_type ())
3266 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
3270 /* Special-case: read at a constant index within a STRING_CST. */
3271 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
3272 if (tree byte_offset_cst
3273 = offset_reg->get_byte_offset ()->maybe_get_constant ())
3274 if (const string_region *str_reg
3275 = reg->get_parent_region ()->dyn_cast_string_region ())
3277 tree string_cst = str_reg->get_string_cst ();
3278 if (const svalue *char_sval
3279 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3281 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
3284 /* Special-case: read the initial char of a STRING_CST. */
3285 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
3286 if (const string_region *str_reg
3287 = cast_reg->get_original_region ()->dyn_cast_string_region ())
3289 tree string_cst = str_reg->get_string_cst ();
3290 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
3291 if (const svalue *char_sval
3292 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3294 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
3297 /* Otherwise we implicitly have the initial value of the region
3298 (if the cluster had been touched, binding_cluster::get_any_binding
3299 would have returned UNKNOWN, and we would already have returned that above). */
3302 /* Handle globals. */
3303 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
3305 return get_initial_value_for_global (reg);
3307 return m_mgr->get_or_create_initial_value (reg);
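/* Illustrative sketch: a read of "foo"[1] (hypothetical) hits the
   constant-index-within-STRING_CST special-case above and yields the
   constant 'o', rather than falling back to an initial_value.  */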
3310 /* Return false if REG does not exist, true if it may do.
3311 This is for detecting regions within the stack that don't exist anymore
3312 after frames are popped. */
3315 region_model::region_exists_p (const region *reg) const
3317 /* If within a stack frame, check that the stack frame is live. */
3318 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
3320 /* Check that the current frame is the enclosing frame, or is called by it. */
3322 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
3323 iter_frame = iter_frame->get_calling_frame ())
3324 if (iter_frame == enclosing_frame)
3332 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
3333 potentially generating warnings via CTXT.
3334 PTR_SVAL must be of pointer type.
3335 PTR_TREE if non-NULL can be used when emitting diagnostics. */
3338 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
3339 region_model_context *ctxt) const
3341 gcc_assert (ptr_sval);
3342 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
3344 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
3345 as a constraint. This suppresses false positives from
3346 -Wanalyzer-null-dereference for the case where we later have an
3347 if (PTR_SVAL) that would occur if we considered the false branch
3348 and transitioned the malloc state machine from start->null. */
3349 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
3350 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
3351 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
3353 switch (ptr_sval->get_kind ())
3360 const region_svalue *region_sval
3361 = as_a <const region_svalue *> (ptr_sval);
3362 return region_sval->get_pointee ();
3367 const binop_svalue *binop_sval
3368 = as_a <const binop_svalue *> (ptr_sval);
3369 switch (binop_sval->get_op ())
3371 case POINTER_PLUS_EXPR:
3373 /* If we have a symbolic value expressing pointer arithmetic,
3374 try to convert it to a suitable region. */
3375 const region *parent_region
3376 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
3377 const svalue *offset = binop_sval->get_arg1 ();
3378 tree type = TREE_TYPE (ptr_sval->get_type ());
3379 return m_mgr->get_offset_region (parent_region, type, offset);
3391 tree ptr = get_representative_tree (ptr_sval);
3392 /* If we can't get a representative tree for PTR_SVAL
3393 (e.g. if it hasn't been bound into the store), then
3394 fall back on PTR_TREE, if non-NULL. */
3399 const poisoned_svalue *poisoned_sval
3400 = as_a <const poisoned_svalue *> (ptr_sval);
3401 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
3402 ctxt->warn (make_unique<poisoned_value_diagnostic>
3403 (ptr, pkind, NULL));
3410 return m_mgr->get_symbolic_region (ptr_sval);
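/* For instance, dereferencing the svalue for "p + i" (a
   POINTER_PLUS_EXPR binop_svalue) yields an offset_region within
   whatever P points to, per the case above, whereas a pointer about
   which nothing is known falls through to a symbolic_region.  */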
3413 /* Attempt to get BITS within any value of REG, as TYPE.
3414 In particular, extract values from compound_svalues for the case
3415 where there's a concrete binding at BITS.
3416 Return an unknown svalue if we can't handle the given case.
3417 Use CTXT to report any warnings associated with reading from REG. */
3420 region_model::get_rvalue_for_bits (tree type,
3422 const bit_range &bits,
3423 region_model_context *ctxt) const
3425 const svalue *sval = get_store_value (reg, ctxt);
3426 return m_mgr->get_or_create_bits_within (type, bits, sval);
3429 /* A subclass of pending_diagnostic for complaining about writes to
3430 constant regions of memory. */
3432 class write_to_const_diagnostic
3433 : public pending_diagnostic_subclass<write_to_const_diagnostic>
3436 write_to_const_diagnostic (const region *reg, tree decl)
3437 : m_reg (reg), m_decl (decl)
3440 const char *get_kind () const final override
3442 return "write_to_const_diagnostic";
3445 bool operator== (const write_to_const_diagnostic &other) const
3447 return (m_reg == other.m_reg
3448 && m_decl == other.m_decl);
3451 int get_controlling_option () const final override
3453 return OPT_Wanalyzer_write_to_const;
3456 bool emit (rich_location *rich_loc) final override
3458 auto_diagnostic_group d;
3460 switch (m_reg->get_kind ())
3463 warned = warning_at (rich_loc, get_controlling_option (),
3464 "write to %<const%> object %qE", m_decl);
3467 warned = warning_at (rich_loc, get_controlling_option (),
3468 "write to function %qE", m_decl);
3471 warned = warning_at (rich_loc, get_controlling_option (),
3472 "write to label %qE", m_decl);
3476 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
3480 label_text describe_final_event (const evdesc::final_event &ev) final override
3482 switch (m_reg->get_kind ())
3485 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
3487 return ev.formatted_print ("write to function %qE here", m_decl);
3489 return ev.formatted_print ("write to label %qE here", m_decl);
3494 const region *m_reg;
3498 /* A subclass of pending_diagnostic for complaining about writes to
3501 class write_to_string_literal_diagnostic
3502 : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
3505 write_to_string_literal_diagnostic (const region *reg)
3509 const char *get_kind () const final override
3511 return "write_to_string_literal_diagnostic";
3514 bool operator== (const write_to_string_literal_diagnostic &other) const
3516 return m_reg == other.m_reg;
3519 int get_controlling_option () const final override
3521 return OPT_Wanalyzer_write_to_string_literal;
3524 bool emit (rich_location *rich_loc) final override
3526 return warning_at (rich_loc, get_controlling_option (),
3527 "write to string literal");
3528 /* Ideally we would show the location of the STRING_CST as well,
3529 but it is not available at this point. */
3532 label_text describe_final_event (const evdesc::final_event &ev) final override
3534 return ev.formatted_print ("write to string literal here");
3538 const region *m_reg;
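/* Illustrative sketches of code (hypothetical) covered by these
   diagnostics:

     char *s = "str";
     s[0] = 'S';        <- -Wanalyzer-write-to-string-literal

     const int c = 0;   <- at file scope
     ...
     *(int *)&c = 1;    <- -Wanalyzer-write-to-const  */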
3541 /* Use CTXT to warn if DEST_REG is a region that shouldn't be written to. */
3544 region_model::check_for_writable_region (const region* dest_reg,
3545 region_model_context *ctxt) const
3547 /* Fail gracefully if CTXT is NULL. */
3551 const region *base_reg = dest_reg->get_base_region ();
3552 switch (base_reg->get_kind ())
3558 const function_region *func_reg = as_a <const function_region *> (base_reg);
3559 tree fndecl = func_reg->get_fndecl ();
3560 ctxt->warn (make_unique<write_to_const_diagnostic>
3561 (func_reg, fndecl));
3566 const label_region *label_reg = as_a <const label_region *> (base_reg);
3567 tree label = label_reg->get_label ();
3568 ctxt->warn (make_unique<write_to_const_diagnostic>
3569 (label_reg, label));
3574 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
3575 tree decl = decl_reg->get_decl ();
3576 /* Warn about writes to const globals.
3577 Don't warn for writes to const locals, and params in particular,
3578 since we would warn in push_frame when setting them up (e.g. the
3579 "this" param is "T* const"). */
3580 if (TREE_READONLY (decl)
3581 && is_global_var (decl))
3582 ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
3586 ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
3591 /* Get the capacity of REG in bytes. */
3594 region_model::get_capacity (const region *reg) const
3596 switch (reg->get_kind ())
3602 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3603 tree decl = decl_reg->get_decl ();
3604 if (TREE_CODE (decl) == SSA_NAME)
3606 tree type = TREE_TYPE (decl);
3607 tree size = TYPE_SIZE (type);
3608 return get_rvalue (size, NULL);
3612 tree size = decl_init_size (decl, false);
3614 return get_rvalue (size, NULL);
3619 /* Look through sized regions to get at the capacity
3620 of the underlying regions. */
3621 return get_capacity (reg->get_parent_region ());
3624 if (const svalue *recorded = get_dynamic_extents (reg))
3627 return m_mgr->get_or_create_unknown_svalue (sizetype);
3630 /* Return the string size, including the 0-terminator, if SVAL is a
3631 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
3634 region_model::get_string_size (const svalue *sval) const
3636 tree cst = sval->maybe_get_constant ();
3637 if (!cst || TREE_CODE (cst) != STRING_CST)
3638 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3640 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
3641 return m_mgr->get_or_create_constant_svalue (out);
3644 /* Return the string size, including the 0-terminator, if REG is a
3645 string_region. Otherwise, return an unknown_svalue. */
3648 region_model::get_string_size (const region *reg) const
3650 const string_region *str_reg = dyn_cast <const string_region *> (reg);
3652 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3654 tree cst = str_reg->get_string_cst ();
3655 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
3656 return m_mgr->get_or_create_constant_svalue (out);
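/* E.g. for the string_region for "foo", TREE_STRING_LENGTH is 4 (the
   three chars plus the terminating NUL), so this yields the
   constant_svalue 4 of type size_t.  */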
3659 /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
3660 using DIR to determine if this access is a read or write. */
3663 region_model::check_region_access (const region *reg,
3664 enum access_direction dir,
3665 region_model_context *ctxt) const
3667 /* Fail gracefully if CTXT is NULL. */
3671 check_region_for_taint (reg, dir, ctxt);
3672 check_region_bounds (reg, dir, ctxt);
3679 /* Currently a no-op. */
3682 check_for_writable_region (reg, ctxt);
3687 /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
3690 region_model::check_region_for_write (const region *dest_reg,
3691 region_model_context *ctxt) const
3693 check_region_access (dest_reg, DIR_WRITE, ctxt);
3696 /* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
3699 region_model::check_region_for_read (const region *src_reg,
3700 region_model_context *ctxt) const
3702 check_region_access (src_reg, DIR_READ, ctxt);
3705 /* Concrete subclass for casts of pointers that lead to trailing bytes. */
3707 class dubious_allocation_size
3708 : public pending_diagnostic_subclass<dubious_allocation_size>
3711 dubious_allocation_size (const region *lhs, const region *rhs)
3712 : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE)
3715 dubious_allocation_size (const region *lhs, const region *rhs,
3717 : m_lhs (lhs), m_rhs (rhs), m_expr (expr)
3720 const char *get_kind () const final override
3722 return "dubious_allocation_size";
3725 bool operator== (const dubious_allocation_size &other) const
3727 return m_lhs == other.m_lhs && m_rhs == other.m_rhs
3728 && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
3731 int get_controlling_option () const final override
3733 return OPT_Wanalyzer_allocation_size;
3736 bool emit (rich_location *rich_loc) final override
3738 diagnostic_metadata m;
3741 return warning_meta (rich_loc, m, get_controlling_option (),
3742 "allocated buffer size is not a multiple"
3743 " of the pointee's size");
3747 describe_region_creation_event (const evdesc::region_creation &ev) final
3750 m_allocation_event = &ev;
3753 if (TREE_CODE (m_expr) == INTEGER_CST)
3754 return ev.formatted_print ("allocated %E bytes here", m_expr);
3756 return ev.formatted_print ("allocated %qE bytes here", m_expr);
3759 return ev.formatted_print ("allocated here");
3762 label_text describe_final_event (const evdesc::final_event &ev) final
3765 tree pointee_type = TREE_TYPE (m_lhs->get_type ());
3766 if (m_allocation_event)
3767 /* Fallback: typically we should always
3768 have seen an m_allocation_event before this. */
3769 return ev.formatted_print ("assigned to %qT here;"
3770 " %<sizeof (%T)%> is %qE",
3771 m_lhs->get_type (), pointee_type,
3772 size_in_bytes (pointee_type));
3776 if (TREE_CODE (m_expr) == INTEGER_CST)
3777 return ev.formatted_print ("allocated %E bytes and assigned to"
3778 " %qT here; %<sizeof (%T)%> is %qE",
3779 m_expr, m_lhs->get_type (), pointee_type,
3780 size_in_bytes (pointee_type));
3782 return ev.formatted_print ("allocated %qE bytes and assigned to"
3783 " %qT here; %<sizeof (%T)%> is %qE",
3784 m_expr, m_lhs->get_type (), pointee_type,
3785 size_in_bytes (pointee_type));
3788 return ev.formatted_print ("allocated and assigned to %qT here;"
3789 " %<sizeof (%T)%> is %qE",
3790 m_lhs->get_type (), pointee_type,
3791 size_in_bytes (pointee_type));
3794 void mark_interesting_stuff (interesting_t *interest) final override
3796 interest->add_region_creation (m_rhs);
3800 const region *m_lhs;
3801 const region *m_rhs;
3803 const evdesc::region_creation *m_allocation_event;
3806 /* Return true if the constant allocation size CST is compatible with the size POINTEE_SIZE_TREE of the pointee type, i.e. if it is not a dubious allocation size. */
3809 capacity_compatible_with_type (tree cst, tree pointee_size_tree,
3812 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
3813 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
3815 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
3816 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
3819 return alloc_size == 0 || alloc_size >= pointee_size;
3820 return alloc_size % pointee_size == 0;
3824 capacity_compatible_with_type (tree cst, tree pointee_size_tree)
3826 return capacity_compatible_with_type (cst, pointee_size_tree, false);
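/* Illustrative sketch (hypothetical user code):

     int32_t *p = (int32_t *) malloc (10);  <- 10 % 4 != 0: dubious
     int32_t *q = (int32_t *) malloc (12);  <- 12 == 3 * 4: compatible

   For structs (the IS_STRUCT variant above), the capacity merely has
   to be zero or at least the size of the struct.  */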
3829 /* Checks whether SVAL could be a multiple of SIZE_CST.
3831 It works by visiting all svalues inside SVAL until it reaches
3832 atomic nodes. From those, it goes back up again and adds each
3833 node that might be a multiple of SIZE_CST to the RESULT_SET. */
3835 class size_visitor : public visitor
3838 size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
3839 : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
3841 m_root_sval->accept (this);
3846 return result_set.contains (m_root_sval);
3849 void visit_constant_svalue (const constant_svalue *sval) final override
3851 check_constant (sval->get_constant (), sval);
3854 void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
3857 result_set.add (sval);
3860 void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
3863 result_set.add (sval);
3866 void visit_unaryop_svalue (const unaryop_svalue *sval) final override
3868 const svalue *arg = sval->get_arg ();
3869 if (result_set.contains (arg))
3870 result_set.add (sval);
3873 void visit_binop_svalue (const binop_svalue *sval) final override
3875 const svalue *arg0 = sval->get_arg0 ();
3876 const svalue *arg1 = sval->get_arg1 ();
3878 if (sval->get_op () == MULT_EXPR)
3880 if (result_set.contains (arg0) || result_set.contains (arg1))
3881 result_set.add (sval);
3885 if (result_set.contains (arg0) && result_set.contains (arg1))
3886 result_set.add (sval);
3890 void visit_repeated_svalue (const repeated_svalue *sval) final override
3892 sval->get_inner_svalue ()->accept (this);
3893 if (result_set.contains (sval->get_inner_svalue ()))
3894 result_set.add (sval);
3897 void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
3899 sval->get_arg ()->accept (this);
3900 if (result_set.contains (sval->get_arg ()))
3901 result_set.add (sval);
3904 void visit_widening_svalue (const widening_svalue *sval) final override
3906 const svalue *base = sval->get_base_svalue ();
3907 const svalue *iter = sval->get_iter_svalue ();
3909 if (result_set.contains (base) && result_set.contains (iter))
3910 result_set.add (sval);
3913 void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
3916 equiv_class_id id (-1);
3917 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3919 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3920 check_constant (cst, sval);
3922 result_set.add (sval);
3926 void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
3929 result_set.add (sval);
3932 void visit_const_fn_result_svalue (const const_fn_result_svalue
3933 *sval ATTRIBUTE_UNUSED) final override
3935 result_set.add (sval);
3939 void check_constant (tree cst, const svalue *sval)
3941 switch (TREE_CODE (cst))
3944 /* Assume all unhandled operands are compatible. */
3945 result_set.add (sval);
3948 if (capacity_compatible_with_type (cst, m_size_cst))
3949 result_set.add (sval);
3955 const svalue *m_root_sval;
3956 constraint_manager *m_cm;
3957 svalue_set result_set; /* Used as a mapping of svalue*->bool. */
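/* Illustrative sketch, per the bottom-up traversal described in the
   class comment: with SIZE_CST 4 and a capacity svalue of the form
   (n * 4), check_constant adds the constant 4 to the result set (4 is
   compatible with 4); the MULT_EXPR binop is then added because one
   of its operands is in the set, so get_result () reports that the
   capacity could be a multiple of 4 and no warning is issued.  */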
3960 /* Return true if a struct or union either uses the inheritance pattern,
3961 where the first field is a base struct, or the flexible array member
3962 pattern, where the last field is an array without a specified size. */
3965 struct_or_union_with_inheritance_p (tree struc)
3967 tree iter = TYPE_FIELDS (struc);
3968 if (iter == NULL_TREE)
3970 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3974 while (iter != NULL_TREE)
3977 iter = DECL_CHAIN (iter);
3980 if (last_field != NULL_TREE
3981 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
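/* Illustrative sketch (hypothetical types):

     struct base { int x; };
     struct derived { struct base b; int y; };  <- first field a record
     struct flex { size_t len; char data[]; };  <- trailing unsized array

   Both DERIVED and FLEX match, so the allocation-size check bails out
   for pointers to such types.  */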
3987 /* Return true if the lhs and rhs of an assignment have different types. */
3990 is_any_cast_p (const gimple *stmt)
3992 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
3993 return gimple_assign_cast_p (assign)
3994 || !pending_diagnostic::same_tree_p (
3995 TREE_TYPE (gimple_assign_lhs (assign)),
3996 TREE_TYPE (gimple_assign_rhs1 (assign)));
3997 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
3999 tree lhs = gimple_call_lhs (call);
4000 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
4001 TREE_TYPE (gimple_call_lhs (call)),
4002 gimple_call_return_type (call));
4008 /* On pointer assignments, check whether the buffer size of
4009 RHS_SVAL is compatible with the type of the LHS_REG.
4010 Use a non-null CTXT to report allocation size warnings. */
4013 region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
4014 region_model_context *ctxt) const
4016 if (!ctxt || ctxt->get_stmt () == NULL)
4018 /* Only report warnings on assignments that actually change the type. */
4019 if (!is_any_cast_p (ctxt->get_stmt ()))
4022 const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
4026 tree pointer_type = lhs_reg->get_type ();
4027 if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
4030 tree pointee_type = TREE_TYPE (pointer_type);
4031 /* Make sure that the type on the left-hand side actually has a size. */
4032 if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
4033 || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
4036 /* Bail out early on pointers to structs where we can
4037 not deduce whether the buffer size is compatible. */
4038 bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
4039 if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
4042 tree pointee_size_tree = size_in_bytes (pointee_type);
4043 /* We give up if the type size is not known at compile-time or the
4044 type size is always compatible regardless of the buffer size. */
4045 if (TREE_CODE (pointee_size_tree) != INTEGER_CST
4046 || integer_zerop (pointee_size_tree)
4047 || integer_onep (pointee_size_tree))
4050 const region *rhs_reg = reg_sval->get_pointee ();
4051 const svalue *capacity = get_capacity (rhs_reg);
4052 switch (capacity->get_kind ())
4054 case svalue_kind::SK_CONSTANT:
4056 const constant_svalue *cst_cap_sval
4057 = as_a <const constant_svalue *> (capacity);
4058 tree cst_cap = cst_cap_sval->get_constant ();
4059 if (TREE_CODE (cst_cap) == INTEGER_CST
4060 && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
4062 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
4070 size_visitor v (pointee_size_tree, capacity, m_constraints);
4071 if (!v.get_result ())
4073 tree expr = get_representative_tree (capacity);
4074 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
4084 /* Set the value of the region given by LHS_REG to the value given by RHS_SVAL.
4086 Use CTXT to report any warnings associated with writing to LHS_REG. */
4089 region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
4090 region_model_context *ctxt)
4092 gcc_assert (lhs_reg);
4093 gcc_assert (rhs_sval);
4095 check_region_size (lhs_reg, rhs_sval, ctxt);
4097 check_region_for_write (lhs_reg, ctxt);
4099 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
4100 ctxt ? ctxt->get_uncertainty () : NULL);
4103 /* Set the value of the region given by LHS to the value given by RHS. */
4106 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
4108 const region *lhs_reg = get_lvalue (lhs, ctxt);
4109 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4110 gcc_assert (lhs_reg);
4111 gcc_assert (rhs_sval);
4112 set_value (lhs_reg, rhs_sval, ctxt);
4115 /* Remove all bindings overlapping REG within the store. */
4118 region_model::clobber_region (const region *reg)
4120 m_store.clobber_region (m_mgr->get_store_manager(), reg);
4123 /* Remove any bindings for REG within the store. */
4126 region_model::purge_region (const region *reg)
4128 m_store.purge_region (m_mgr->get_store_manager(), reg);
4131 /* Fill REG with SVAL. */
4134 region_model::fill_region (const region *reg, const svalue *sval)
4136 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
4139 /* Zero-fill REG. */
4142 region_model::zero_fill_region (const region *reg)
4144 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
4147 /* Mark REG as having unknown content. */
4150 region_model::mark_region_as_unknown (const region *reg,
4151 uncertainty_t *uncertainty)
4153 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
4157 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within this model. */
4161 region_model::eval_condition (const svalue *lhs,
4163 const svalue *rhs) const
4165 /* For now, make no attempt to capture constraints on floating-point values. */
4167 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
4168 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
4169 return tristate::unknown ();
4171 tristate ts = eval_condition_without_cm (lhs, op, rhs);
4175 /* Otherwise, try constraints. */
4176 return m_constraints->eval_condition (lhs, op, rhs);
4179 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
4180 this model, without resorting to the constraint_manager.
4182 This is exposed so that impl_region_model_context::on_state_leak can
4183 check for equality part-way through region_model::purge_unused_svalues
4184 without risking creating new ECs. */
4187 region_model::eval_condition_without_cm (const svalue *lhs,
4189 const svalue *rhs) const
4194 /* See what we know based on the values. */
4196 /* For now, make no attempt to capture constraints on floating-point values. */
4198 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
4199 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
4200 return tristate::unknown ();
4202 /* Unwrap any unmergeable values. */
4203 lhs = lhs->unwrap_any_unmergeable ();
4204 rhs = rhs->unwrap_any_unmergeable ();
4208 /* If we have the same svalue, then we have equality
4209 (apart from NaN-handling).
4210 TODO: should this definitely be the case for poisoned values? */
4211 /* Poisoned and unknown values are "unknowable". */
4212 if (lhs->get_kind () == SK_POISONED
4213 || lhs->get_kind () == SK_UNKNOWN)
4214 return tristate::TS_UNKNOWN;
4221 return tristate::TS_TRUE;
4226 return tristate::TS_FALSE;
4229 /* For other ops, use the logic below. */
4234 /* If we have a pair of region_svalues, compare them. */
4235 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4236 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4238 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
4239 if (res.is_known ())
4241 /* Otherwise, only known through constraints. */
4244 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
4246 /* If we have a pair of constants, compare them. */
4247 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4248 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
4251 /* When we have one constant, put it on the RHS. */
4252 std::swap (lhs, rhs);
4253 op = swap_tree_comparison (op);
4256 gcc_assert (lhs->get_kind () != SK_CONSTANT);
4258 /* Handle comparison against zero. */
4259 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4260 if (zerop (cst_rhs->get_constant ()))
4262 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
4264 /* A region_svalue is a non-NULL pointer, except in certain
4265 special cases (see the comment for region::non_null_p). */
4266 const region *pointee = ptr->get_pointee ();
4267 if (pointee->non_null_p ())
4277 return tristate::TS_FALSE;
4282 return tristate::TS_TRUE;
4286 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
4288 /* Treat offsets from a non-NULL pointer as being non-NULL. This
4289 isn't strictly true, in that eventually ptr++ will wrap
4290 around and be NULL, but it won't occur in practice and thus
4291 can be used to suppress effectively false positives that we
4292 shouldn't warn for. */
4293 if (binop->get_op () == POINTER_PLUS_EXPR)
4296 = eval_condition_without_cm (binop->get_arg0 (),
4298 if (lhs_ts.is_known ())
4304 /* Handle rejection of equality for comparisons of the initial values of
4305 "external" values (such as params) with the address of locals. */
4306 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
4307 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4309 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
4310 if (res.is_known ())
4313 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
4314 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4316 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
4317 if (res.is_known ())
4321 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
4322 if (tree rhs_cst = rhs->maybe_get_constant ())
4324 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
4325 if (res.is_known ())
4329 /* Handle comparisons between two svalues with more than one operand. */
4330 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
4338 /* TODO: binops can be equal even if they are not structurally
4339 equal in case of commutative operators. */
4340 tristate res = structural_equality (lhs, rhs);
4347 tristate res = structural_equality (lhs, rhs);
4354 tristate res = structural_equality (lhs, rhs);
4357 res = symbolic_greater_than (binop, rhs);
4364 tristate res = symbolic_greater_than (binop, rhs);
4372 return tristate::TS_UNKNOWN;
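/* For instance, comparing the address of a local against zero is
   decided here: a region_svalue pointing within a frame is non-NULL
   per the zero-comparison handling above, so "&local != 0" is
   TS_TRUE.  By contrast, equality of two distinct conjured svalues
   stays TS_UNKNOWN here and is left to the constraint_manager.  */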
4375 /* Subroutine of region_model::eval_condition_without_cm, for rejecting
4376 equality of INIT_VAL(PARM) with &LOCAL. */
4379 region_model::compare_initial_and_pointer (const initial_svalue *init,
4380 const region_svalue *ptr) const
4382 const region *pointee = ptr->get_pointee ();
4384 /* If we have a pointer to something within a stack frame, it can't be the
4385 initial value of a param. */
4386 if (pointee->maybe_get_frame_region ())
4387 if (init->initial_value_of_param_p ())
4388 return tristate::TS_FALSE;
4390 return tristate::TS_UNKNOWN;
4393 /* Return true if SVAL is definitely positive. */
4396 is_positive_svalue (const svalue *sval)
4398 if (tree cst = sval->maybe_get_constant ())
4399 return !zerop (cst) && get_range_pos_neg (cst) == 1;
4400 tree type = sval->get_type ();
4403 /* Consider a binary operation size_t + int. The analyzer wraps the int in
4404 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
4405 the result can be smaller than the first operand. Thus, we have to check
4406 whether the argument of the unaryop_svalue is also positive. */
4407 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
4408 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
4409 && is_positive_svalue (un_op->get_arg ());
4410 return TYPE_UNSIGNED (type);
4413 /* Return true if A is definitely larger than B.
4415 Limitation: does not account for integer overflows and does not try to
4416 return false, so it cannot be used negated. */
4419 region_model::symbolic_greater_than (const binop_svalue *bin_a,
4420 const svalue *b) const
4422 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
4424 /* Eliminate the right-hand side of both svalues. */
4425 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4426 if (bin_a->get_op () == bin_b->get_op ()
4427 && eval_condition_without_cm (bin_a->get_arg1 (),
4429 bin_b->get_arg1 ()).is_true ()
4430 && eval_condition_without_cm (bin_a->get_arg0 (),
4432 bin_b->get_arg0 ()).is_true ())
4433 return tristate (tristate::TS_TRUE);
4435 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
4436 if (is_positive_svalue (bin_a->get_arg1 ())
4437 && eval_condition_without_cm (bin_a->get_arg0 (),
4438 GE_EXPR, b).is_true ())
4439 return tristate (tristate::TS_TRUE);
4441 return tristate::unknown ();
4444 /* Return true if A and B are equal structurally.
4446 Structural equality means that A and B are equal if the svalues A and B have
4447 the same nodes at the same positions in the tree and the leaves are equal.
4448 Equality for conjured_svalues and initial_svalues is determined by comparing
4449 the pointers while constants are compared by value. That behavior is useful
4450 to check for binop_svalues that evaluate to the same concrete value but
4451 might use one operand with a different type but the same constant value.
4454 binop_svalue (mult_expr,
4455 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4456 constant_svalue (‘size_t’, 4))
4458 binop_svalue (mult_expr,
4459 initial_svalue (‘size_t’, decl_region (..., 'some_var'),
4460 constant_svalue (‘sizetype’, 4))
4461 are structurally equal. A concrete C code example where this occurs can
4462 be found in test7 of out-of-bounds-5.c. */
4465 region_model::structural_equality (const svalue *a, const svalue *b) const
  /* If A and B are referentially equal, they are also structurally equal.  */
  if (a == b)
    return tristate (tristate::TS_TRUE);
  switch (a->get_kind ())
    {
    default:
      return tristate::unknown ();
    /* SK_CONJURED and SK_INITIAL are already handled
       by the referential equality above.  */
    case SK_CONSTANT:
      {
	tree a_cst = a->maybe_get_constant ();
	tree b_cst = b->maybe_get_constant ();
	if (a_cst && b_cst)
	  return tristate (tree_int_cst_equal (a_cst, b_cst));
      }
      return tristate (tristate::TS_FALSE);
    case SK_UNARYOP:
      {
	const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
	if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
	  return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
							    un_b->get_type ())
			   && un_a->get_op () == un_b->get_op ()
			   && structural_equality (un_a->get_arg (),
						   un_b->get_arg ()));
      }
      return tristate (tristate::TS_FALSE);
    case SK_BINOP:
      {
	const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
	if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	  return tristate (bin_a->get_op () == bin_b->get_op ()
			   && structural_equality (bin_a->get_arg0 (),
						   bin_b->get_arg0 ())
			   && structural_equality (bin_a->get_arg1 (),
						   bin_b->get_arg1 ()));
      }
      return tristate (tristate::TS_FALSE);
    }
}
/* Handle various constraints of the form:
     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
     OP : (NE_EXPR | EQ_EXPR)
     RHS: zero
   and (with a cast):
     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : (NE_EXPR | EQ_EXPR)
     RHS: zero
   by adding constraints for INNER_LHS INNER_OP INNER_RHS.

   Return true if this function can fully handle the constraint; if
   so, add the implied constraint(s) and write true to *OUT if they
   are consistent with existing constraints, or write false to *OUT
   if they contradict existing constraints.

   Return false for cases that this function doesn't know how to handle.

   For example, if we're checking a stored conditional, we'll have
   something like:
     LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
     OP : NE_EXPR
     RHS: zero
   which this function can turn into an add_constraint of:
     (&HEAP_ALLOCATED_REGION(8) != (int *)0B)

   Similarly, optimized && and || conditionals lead to e.g.
     if (p && q)
   becoming gimple like this:
     _1 = p_6 == 0B;
     _2 = q_8 == 0B;
     _3 = _1 | _2;
   On the "_3 is false" branch we can have constraints of the form:
     ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
      | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
     == 0
   which implies that both _1 and _2 are false,
   which this function can turn into a pair of add_constraints of
     (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
   and
     (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */
4552 region_model::add_constraints_from_binop (const svalue *outer_lhs,
4553 enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;
4586 /* ...and "(inner_lhs OP inner_rhs) == 0"
4587 then (inner_lhs OP inner_rhs) must have the same
4588 logical value as LHS. */
4590 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
4591 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
4599 /* ...and "(inner_lhs & inner_rhs) != 0"
4600 then both inner_lhs and inner_rhs must be true. */
4601 const svalue *false_sval
4602 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4603 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
4604 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
4605 *out = sat1 && sat2;
4613 /* ...and "(inner_lhs | inner_rhs) == 0"
4614 i.e. "(inner_lhs | inner_rhs)" is false
4615 then both inner_lhs and inner_rhs must be false. */
4616 const svalue *false_sval
4617 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4618 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
4619 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
4620 *out = sat1 && sat2;
4627 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
4628 If it is consistent with existing constraints, add it, and return true.
4629 Return false if it contradicts existing constraints.
4630 Use CTXT for reporting any diagnostics associated with the accesses. */
4633 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4634 region_model_context *ctxt)
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;
4641 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
4642 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4644 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
4647 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
4648 If it is consistent with existing constraints, add it, and return true.
4649 Return false if it contradicts existing constraints.
4650 Use CTXT for reporting any diagnostics associated with the accesses. */
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);
  /* If we have "&REGION == NULL", then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
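/* For instance (an illustrative sketch): after
     void *p = malloc (n);
   the heap-allocated region has a dynamic extent of N bytes; adding the
   constraint "p == NULL" on the failure path drops that extent, since no
   allocation is assumed to have taken place there.  */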
4694 /* As above, but when returning false, if OUT is non-NULL, write a
4695 new rejected_constraint to *OUT. */
4698 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4699 region_model_context *ctxt,
4700 rejected_constraint **out)
  bool sat = add_constraint (lhs, op, rhs, ctxt);
  if (!sat && out)
    *out = new rejected_op_constraint (*this, lhs, op, rhs);
  return sat;
}
4708 /* Determine what is known about the condition "LHS OP RHS" within
4710 Use CTXT for reporting any diagnostics associated with the accesses. */
region_model::eval_condition (tree lhs,
			      enum tree_code op,
			      tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to model constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return tristate::unknown ();
4723 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
/* Implementation of region_model::get_representative_path_var.

   Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.  */
4732 region_model::get_representative_path_var_1 (const svalue *sval,
4733 svalue_set *visited) const
4737 /* Prevent infinite recursion. */
4738 if (visited->contains (sval))
4739 return path_var (NULL_TREE, 0);
4740 visited->add (sval);
4742 /* Handle casts by recursion into get_representative_path_var. */
4743 if (const svalue *cast_sval = sval->maybe_undo_cast ())
4745 path_var result = get_representative_path_var (cast_sval, visited);
4746 tree orig_type = sval->get_type ();
4747 /* If necessary, wrap the result in a cast. */
4748 if (result.m_tree && orig_type)
	result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
      return result;
4753 auto_vec<path_var> pvs;
4754 m_store.get_representative_path_vars (this, visited, sval, &pvs);
4756 if (tree cst = sval->maybe_get_constant ())
4757 pvs.safe_push (path_var (cst, 0));
4759 /* Handle string literals and various other pointers. */
4760 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4762 const region *reg = ptr_sval->get_pointee ();
4763 if (path_var pv = get_representative_path_var (reg, visited))
	return path_var (build1 (ADDR_EXPR,
				 sval->get_type (),
				 pv.m_tree),
			 pv.m_stack_depth);
4770 /* If we have a sub_svalue, look for ways to represent the parent. */
4771 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
4773 const svalue *parent_sval = sub_sval->get_parent ();
4774 const region *subreg = sub_sval->get_subregion ();
4775 if (path_var parent_pv
4776 = get_representative_path_var (parent_sval, visited))
4777 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
  /* Handle binops.  */
  if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
    if (path_var lhs_pv
	= get_representative_path_var (binop_sval->get_arg0 (), visited))
      if (path_var rhs_pv
	  = get_representative_path_var (binop_sval->get_arg1 (), visited))
	return path_var (build2 (binop_sval->get_op (),
				 sval->get_type (),
				 lhs_pv.m_tree, rhs_pv.m_tree),
			 lhs_pv.m_stack_depth);
4797 if (pvs.length () < 1)
4798 return path_var (NULL_TREE, 0);
  pvs.qsort (readability_comparator);

  return pvs[0];
}
4804 /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.
4808 This function defers to get_representative_path_var_1 to do the work;
4809 it adds verification that get_representative_path_var_1 returned a tree
4810 of the correct type. */
path_var
region_model::get_representative_path_var (const svalue *sval,
					   svalue_set *visited) const
{
  if (!sval)
    return path_var (NULL_TREE, 0);
4819 tree orig_type = sval->get_type ();
4821 path_var result = get_representative_path_var_1 (sval, visited);
4823 /* Verify that the result has the same type as SVAL, if any. */
4824 if (result.m_tree && orig_type)
    gcc_assert (TREE_TYPE (result.m_tree) == orig_type);

  return result;
}
4830 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
4832 Strip off any top-level cast, to avoid messages like
4833 double-free of '(void *)ptr'
4834 from analyzer diagnostics. */
tree
region_model::get_representative_tree (const svalue *sval) const
{
  svalue_set visited;
  tree expr = get_representative_path_var (sval, &visited).m_tree;
4842 /* Strip off any top-level cast. */
4843 if (expr && TREE_CODE (expr) == NOP_EXPR)
4844 expr = TREE_OPERAND (expr, 0);
4846 return fixup_tree_for_diagnostic (expr);
tree
region_model::get_representative_tree (const region *reg) const
{
  svalue_set visited;
  tree expr = get_representative_path_var (reg, &visited).m_tree;
4855 /* Strip off any top-level cast. */
4856 if (expr && TREE_CODE (expr) == NOP_EXPR)
4857 expr = TREE_OPERAND (expr, 0);
4859 return fixup_tree_for_diagnostic (expr);
4862 /* Implementation of region_model::get_representative_path_var.
   Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
4867 wrapping a COMPONENT_REF.
4868 Use VISITED to prevent infinite mutual recursion with the overload for
4872 region_model::get_representative_path_var_1 (const region *reg,
4873 svalue_set *visited) const
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_STACK:
    case RK_HEAP:
    case RK_THREAD_LOCALS:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);
    case RK_FUNCTION:
      {
	const function_region *function_reg
4892 = as_a <const function_region *> (reg);
4893 return path_var (function_reg->get_fndecl (), 0);
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (reg);
4898 return path_var (label_reg->get_label (), 0);
    case RK_SYMBOLIC:
      {
	const symbolic_region *symbolic_reg
4904 = as_a <const symbolic_region *> (reg);
4905 const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
4919 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
    case RK_FIELD:
      {
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
    case RK_ELEMENT:
      {
	const element_region *element_reg
4939 = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
4951 NULL_TREE, NULL_TREE),
4952 parent_pv.m_stack_depth);
    case RK_OFFSET:
      {
	const offset_region *offset_reg
4958 = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited);
4966 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
4967 return path_var (NULL_TREE, 0);
	tree addr_parent = build1 (ADDR_EXPR,
				   build_pointer_type (reg->get_type ()),
				   parent_pv.m_tree);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 addr_parent, offset_pv.m_tree),
4974 parent_pv.m_stack_depth);
    case RK_SIZED:
      return path_var (NULL_TREE, 0);

    case RK_CAST:
      {
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }
    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
4994 /* No good way to express heap-allocated/alloca regions as trees. */
4995 return path_var (NULL_TREE, 0);
    case RK_STRING:
      {
	const string_region *string_reg = as_a <const string_region *> (reg);
5000 return path_var (string_reg->get_string_cst (), 0);
5005 return path_var (NULL_TREE, 0);
5009 /* Attempt to return a path_var that represents REG, or return
5011 For example, a region for a field of a local would be a path_var
5012 wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.
5016 This function defers to get_representative_path_var_1 to do the work;
5017 it adds verification that get_representative_path_var_1 returned a tree
5018 of the correct type. */
5021 region_model::get_representative_path_var (const region *reg,
5022 svalue_set *visited) const
5024 path_var result = get_representative_path_var_1 (reg, visited);
5026 /* Verify that the result has the same type as REG, if any. */
5027 if (result.m_tree && reg->get_type ())
    gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());

  return result;
}
5033 /* Update this model for any phis in SNODE, assuming we came from
5034 LAST_CFG_SUPEREDGE. */
5037 region_model::update_for_phis (const supernode *snode,
5038 const cfg_superedge *last_cfg_superedge,
5039 region_model_context *ctxt)
5041 gcc_assert (last_cfg_superedge);
5043 /* Copy this state and pass it to handle_phi so that all of the phi stmts
5044 are effectively handled simultaneously. */
5045 const region_model old_state (*this);
5047 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
5048 !gsi_end_p (gpi); gsi_next (&gpi))
5050 gphi *phi = gpi.phi ();
5052 tree src = last_cfg_superedge->get_phi_arg (phi);
5053 tree lhs = gimple_phi_result (phi);
5055 /* Update next_state based on phi and old_state. */
5056 handle_phi (phi, lhs, src, old_state, ctxt);
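/* Simultaneous handling matters for mutually-referencing phis, e.g. in a
   loop header that swaps a pair of values (an illustrative sketch):
     x_1 = PHI <0(2), y_2(4)>
     y_2 = PHI <1(2), x_1(4)>
   Along the back edge, evaluating both phis against the copied OLD_STATE
   ensures that y_2 receives the value x_1 had on entry to the iteration,
   not the value just assigned to x_1 above.  */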
/* Attempt to update this model for taking EDGE (where the last statement
   was LAST_STMT), returning true if the edge can be taken, false
   otherwise.
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to *OUT.

   For CFG superedges where LAST_STMT is a conditional or a switch
   statement, attempt to add the relevant conditions for EDGE to this
   model, returning true if they are feasible, or false if they are
   impossible.

   For call superedges, push frame information and store arguments
   into parameters.

   For return superedges, pop frame information and store return
   values into any lhs.
5077 Rejection of call/return superedges happens elsewhere, in
5078 program_point::on_edge (i.e. based on program point, rather
5079 than program state). */
5082 region_model::maybe_update_for_edge (const superedge &edge,
5083 const gimple *last_stmt,
5084 region_model_context *ctxt,
5085 rejected_constraint **out)
5087 /* Handle frame updates for interprocedural edges. */
  switch (edge.m_kind)
    {
    default:
      break;
5093 case SUPEREDGE_CALL:
5095 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
5096 update_for_call_superedge (*call_edge, ctxt);
5100 case SUPEREDGE_RETURN:
5102 const return_superedge *return_edge
5103 = as_a <const return_superedge *> (&edge);
5104 update_for_return_superedge (*return_edge, ctxt);
5108 case SUPEREDGE_INTRAPROCEDURAL_CALL:
5109 /* This is a no-op for call summaries; we should already
5110 have handled the effect of the call summary at the call stmt. */
      break;
    }

  if (last_stmt == NULL)
    return true;
5117 /* Apply any constraints for conditionals/switch statements. */
5119 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
5121 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
5122 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
5125 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
5127 const switch_cfg_superedge *switch_sedge
5128 = as_a <const switch_cfg_superedge *> (&edge);
      return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
					    ctxt, out);
5133 /* Apply any constraints due to an exception being thrown. */
5134 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
5135 if (cfg_sedge->get_flags () & EDGE_EH)
      return apply_constraints_for_exception (last_stmt, ctxt, out);

  return true;
}
5141 /* Push a new frame_region on to the stack region.
5142 Populate the frame_region with child regions for the function call's
   parameters, using values from the arguments at the callsite in the
   caller's frame.  */
void
region_model::update_for_gcall (const gcall *call_stmt,
				region_model_context *ctxt,
				function *callee)
{
5151 /* Build a vec of argument svalues, using the current top
5152 frame for resolving tree expressions. */
5153 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
5155 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
5157 tree arg = gimple_call_arg (call_stmt, i);
5158 arg_svals.quick_push (get_rvalue (arg, ctxt));
  if (!callee)
    {
      /* Get the function * from the gcall.  */
      tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
      callee = DECL_STRUCT_FUNCTION (fn_decl);
    }

  push_frame (callee, &arg_svals, ctxt);
}
5171 /* Pop the top-most frame_region from the stack, and copy the return
5172 region's values (if any) into the region for the lvalue of the LHS of
5173 the call (if any). */
5176 region_model::update_for_return_gcall (const gcall *call_stmt,
5177 region_model_context *ctxt)
5179 /* Get the lvalue for the result of the call, passing it to pop_frame,
     so that pop_frame can determine the region with respect to the
     *caller* frame.  */
  tree lhs = gimple_call_lhs (call_stmt);
5183 pop_frame (lhs, NULL, ctxt);
/* Extract calling information from the superedge and update the model for the
   call.  */

void
region_model::update_for_call_superedge (const call_superedge &call_edge,
5191 region_model_context *ctxt)
5193 const gcall *call_stmt = call_edge.get_call_stmt ();
5194 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
5197 /* Extract calling information from the return superedge and update the model
   for the returning call.  */

void
region_model::update_for_return_superedge (const return_superedge &return_edge,
5202 region_model_context *ctxt)
5204 const gcall *call_stmt = return_edge.get_call_stmt ();
5205 update_for_return_gcall (call_stmt, ctxt);
/* Attempt to use R to replay SUMMARY into this object.
5209 Return true if it is possible. */
5212 region_model::replay_call_summary (call_summary_replay &r,
5213 const region_model &summary)
5215 gcc_assert (summary.get_stack_depth () == 1);
5217 m_store.replay_call_summary (r, summary.m_store);
  if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
    return false;
5222 for (auto kv : summary.m_dynamic_extents)
5224 const region *summary_reg = kv.first;
      const region *caller_reg = r.convert_region_from_summary (summary_reg);
      if (!caller_reg)
	continue;
      const svalue *summary_sval = kv.second;
      const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
      if (!caller_sval)
	continue;
      m_dynamic_extents.put (caller_reg, caller_sval);
    }

  return true;
}
5238 /* Given a true or false edge guarded by conditional statement COND_STMT,
5239 determine appropriate constraints for the edge to be taken.
5241 If they are feasible, add the constraints and return true.
5243 Return false if the constraints contradict existing knowledge
5244 (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to *OUT.  */
5249 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
5250 const gcond *cond_stmt,
5251 region_model_context *ctxt,
5252 rejected_constraint **out)
5254 ::edge cfg_edge = sedge.get_cfg_edge ();
5255 gcc_assert (cfg_edge != NULL);
5256 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
5258 enum tree_code op = gimple_cond_code (cond_stmt);
5259 tree lhs = gimple_cond_lhs (cond_stmt);
5260 tree rhs = gimple_cond_rhs (cond_stmt);
5261 if (cfg_edge->flags & EDGE_FALSE_VALUE)
5262 op = invert_tree_comparison (op, false /* honor_nans */);
5263 return add_constraint (lhs, op, rhs, ctxt, out);
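/* For example (an illustrative sketch): for
     if (i_2 > 9)
   the EDGE_TRUE_VALUE edge adds the constraint "i_2 > 9", whereas the
   EDGE_FALSE_VALUE edge inverts the comparison and adds "i_2 <= 9";
   add_constraint then returns false if the new constraint contradicts
   what is already known about i_2.  */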
5266 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
5267 for the edge to be taken.
5269 If they are feasible, add the constraints and return true.
5271 Return false if the constraints contradict existing knowledge
5272 (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to *OUT.  */
5277 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
5278 const gswitch *switch_stmt,
5279 region_model_context *ctxt,
5280 rejected_constraint **out)
5282 bounded_ranges_manager *ranges_mgr = get_range_manager ();
5283 const bounded_ranges *all_cases_ranges
5284 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
5285 tree index = gimple_switch_index (switch_stmt);
5286 const svalue *index_sval = get_rvalue (index, ctxt);
  bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
  if (!sat && out)
    *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
  if (sat && ctxt && !all_cases_ranges->empty_p ())
    ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
  return sat;
}
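/* For example (an illustrative sketch): for
     switch (i_2) { case 1 ... 5: ...; default: ...; }
   the superedge for "case 1 ... 5" restricts i_2 to the bounded range
   [1, 5], whereas the default edge restricts i_2 to the complementary
   ranges [INT_MIN, 0] and [6, INT_MAX].  */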
5295 /* Apply any constraints due to an exception being thrown at LAST_STMT.
5297 If they are feasible, add the constraints and return true.
5299 Return false if the constraints contradict existing knowledge
5300 (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to *OUT.  */
5305 region_model::apply_constraints_for_exception (const gimple *last_stmt,
5306 region_model_context *ctxt,
5307 rejected_constraint **out)
5309 gcc_assert (last_stmt);
5310 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
5311 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
5312 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
5313 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
5315 /* We have an exception thrown from operator new.
5316 Add a constraint that the result was NULL, to avoid a false
	   leak report due to the result being lost when following
	   the EH edge.  */
	if (tree lhs = gimple_call_lhs (call))
5320 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
5326 /* For use with push_frame when handling a top-level call within the analysis.
5327 PARAM has a defined but unknown initial value.
5328 Anything it points to has escaped, since the calling context "knows"
   the pointer, and thus calls to unknown functions could read/write into
   the region.  */
5333 region_model::on_top_level_param (tree param,
5334 region_model_context *ctxt)
5336 if (POINTER_TYPE_P (TREE_TYPE (param)))
5338 const region *param_reg = get_lvalue (param, ctxt);
5339 const svalue *init_ptr_sval
5340 = m_mgr->get_or_create_initial_value (param_reg);
5341 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
5342 m_store.mark_as_escaped (pointee_reg);
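/* For instance (an illustrative sketch): when analysis starts at
     void foo (int *p);
   the pointee of INIT_VAL(p) is marked as escaped, since a call to an
   unknown function within foo could legitimately read or write through
   the caller-supplied pointer.  */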
/* Update this region_model to reflect pushing a frame onto the stack
   for a call to FUN.

   If ARG_SVALS is non-NULL, use it to populate the parameters
   in the new frame.
   Otherwise, the params have their initial_svalues.
5353 Return the frame_region for the new frame. */
const frame_region *
region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
			  region_model_context *ctxt)
{
  m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
  if (arg_svals)
    {
      /* Arguments supplied from a caller frame.  */
      tree fndecl = fun->decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm), ++idx)
	{
	  /* If there's a mismatching declaration, the call stmt might
	     not have enough args.  Handle this case by leaving the
	     rest of the params as uninitialized.  */
	  if (idx >= arg_svals->length ())
	    break;
	  tree parm_lval = iter_parm;
	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
	    parm_lval = parm_default_ssa;
	  const region *parm_reg = get_lvalue (parm_lval, ctxt);
	  const svalue *arg_sval = (*arg_svals)[idx];
	  set_value (parm_reg, arg_sval, ctxt);
	}

      /* Handle any variadic args.  */
      unsigned va_arg_idx = 0;
      for (; idx < arg_svals->length (); idx++, va_arg_idx++)
	{
	  const svalue *arg_sval = (*arg_svals)[idx];
	  const region *var_arg_reg
	    = m_mgr->get_var_arg_region (m_current_frame,
					 va_arg_idx);
	  set_value (var_arg_reg, arg_sval, ctxt);
	}
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
	 have defined but unknown initial values.
	 Anything they point to has escaped.  */
      tree fndecl = fun->decl;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm))
	{
	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
	    on_top_level_param (parm_default_ssa, ctxt);
	  else
	    on_top_level_param (iter_parm, ctxt);
	}
    }

  return m_current_frame;
}
5411 /* Get the function of the top-most frame in this region_model's stack.
5412 There must be such a frame. */
5415 region_model::get_current_function () const
5417 const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  return frame->get_function ();
}
5422 /* Pop the topmost frame_region from this region_model's stack;
5424 If RESULT_LVALUE is non-null, copy any return value from the frame
5425 into the corresponding region (evaluated with respect to the *caller*
5426 frame, rather than the called frame).
   If OUT_RESULT is non-null, copy any return value from the frame
   into *OUT_RESULT.

   Purge the frame region and all its descendent regions.
5431 Convert any pointers that point into such regions into
5432 POISON_KIND_POPPED_STACK svalues. */
void
region_model::pop_frame (tree result_lvalue,
			 const svalue **out_result,
			 region_model_context *ctxt)
{
  gcc_assert (m_current_frame);

  /* Evaluate the result, within the callee frame.  */
  const frame_region *frame_reg = m_current_frame;
  tree fndecl = m_current_frame->get_function ()->decl;
  tree result = DECL_RESULT (fndecl);
  const svalue *retval = NULL;
  if (result && TREE_TYPE (result) != void_type_node)
    {
      retval = get_rvalue (result, ctxt);
      if (out_result)
	*out_result = retval;
    }

  /* Pop the frame.  */
  m_current_frame = m_current_frame->get_calling_frame ();

  if (result_lvalue && retval)
    {
      /* Compute result_dst_reg using RESULT_LVALUE *after* popping
	 the frame, but before poisoning pointers into the old frame.  */
      const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
      set_value (result_dst_reg, retval, ctxt);
    }
  unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
}
5467 /* Get the number of frames in this region_model's stack. */
5470 region_model::get_stack_depth () const
5472 const frame_region *frame = get_current_frame ();
  if (frame)
    return frame->get_stack_depth ();
  else
    return 0;
}
5479 /* Get the frame_region with the given index within the stack.
5480 The frame_region must exist. */
const frame_region *
region_model::get_frame_at_index (int index) const
{
  const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  gcc_assert (index >= 0);
  gcc_assert (index <= frame->get_index ());
  while (index != frame->get_index ())
    {
      frame = frame->get_calling_frame ();
      gcc_assert (frame);
    }
  return frame;
}
5497 /* Unbind svalues for any regions in REG and below.
5498 Find any pointers to such regions; convert them to
5499 poisoned values of kind PKIND.
5500 Also purge any dynamic extents. */
5503 region_model::unbind_region_and_descendents (const region *reg,
5504 enum poison_kind pkind)
5506 /* Gather a set of base regions to be unbound. */
5507 hash_set<const region *> base_regs;
5508 for (store::cluster_map_t::iterator iter = m_store.begin ();
5509 iter != m_store.end (); ++iter)
5511 const region *iter_base_reg = (*iter).first;
5512 if (iter_base_reg->descendent_of_p (reg))
5513 base_regs.add (iter_base_reg);
5515 for (hash_set<const region *>::iterator iter = base_regs.begin ();
5516 iter != base_regs.end (); ++iter)
5517 m_store.purge_cluster (*iter);
5519 /* Find any pointers to REG or its descendents; convert to poisoned. */
5520 poison_any_pointers_to_descendents (reg, pkind);
5522 /* Purge dynamic extents of any base regions in REG and below
5523 (e.g. VLAs and alloca stack regions). */
5524 for (auto iter : m_dynamic_extents)
5526 const region *iter_reg = iter.first;
5527 if (iter_reg->descendent_of_p (reg))
5528 unset_dynamic_extents (iter_reg);
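/* For instance (an illustrative sketch): when the frame of
     int *broken (void) { int local; return &local; }
   is popped, the cluster for "local" is purged, and the returned
   "&local" becomes a poisoned svalue of kind POISON_KIND_POPPED_STACK,
   allowing later uses of the dangling pointer to be diagnosed.  */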
5532 /* Implementation of BindingVisitor.
   Update the bound svalues for regions below REG to use poisoned
   values.  */
5536 struct bad_pointer_finder
5538 bad_pointer_finder (const region *reg, enum poison_kind pkind,
5539 region_model_manager *mgr)
  : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
  {}
5543 void on_binding (const binding_key *, const svalue *&sval)
5545 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
5547 const region *ptr_dst = ptr_sval->get_pointee ();
5548 /* Poison ptrs to descendents of REG, but not to REG itself,
5549 otherwise double-free detection doesn't work (since sm-state
5550 for "free" is stored on the original ptr svalue). */
5551 if (ptr_dst->descendent_of_p (m_reg)
5552 && ptr_dst != m_reg)
	    sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
							 sval->get_type ());
	    ++m_count;
5561 const region *m_reg;
5562 enum poison_kind m_pkind;
  region_model_manager *const m_mgr;
  int m_count;
5567 /* Find any pointers to REG or its descendents; convert them to
5568 poisoned values of kind PKIND.
5569 Return the number of pointers that were poisoned. */
5572 region_model::poison_any_pointers_to_descendents (const region *reg,
5573 enum poison_kind pkind)
5575 bad_pointer_finder bv (reg, pkind, m_mgr);
  m_store.for_each_binding (bv);
  return bv.m_count;
}
5580 /* Attempt to merge THIS with OTHER_MODEL, writing the result
5581 to OUT_MODEL. Use POINT to distinguish values created as a
5582 result of merging. */
5585 region_model::can_merge_with_p (const region_model &other_model,
5586 const program_point &point,
5587 region_model *out_model,
5588 const extrinsic_state *ext_state,
5589 const program_state *state_a,
5590 const program_state *state_b) const
5592 gcc_assert (out_model);
5593 gcc_assert (m_mgr == other_model.m_mgr);
5594 gcc_assert (m_mgr == out_model->m_mgr);
  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;
5600 model_merger m (this, &other_model, point, out_model,
5601 ext_state, state_a, state_b);
  if (!store::can_merge_p (&m_store, &other_model.m_store,
			   &out_model->m_store, m_mgr->get_store_manager (),
			   &m))
    return false;

  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
					   &out_model->m_dynamic_extents))
    return false;
5612 /* Merge constraints. */
5613 constraint_manager::merge (*m_constraints,
5614 *other_model.m_constraints,
			     out_model->m_constraints);

  return true;
}
/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
   otherwise.  */

tree
region_model::get_fndecl_for_call (const gcall *call,
				   region_model_context *ctxt)
{
  tree fn_ptr = gimple_call_fn (call);
  if (fn_ptr == NULL_TREE)
    return NULL_TREE;
  const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
  if (const region_svalue *fn_ptr_ptr
      = fn_ptr_sval->dyn_cast_region_svalue ())
    {
      const region *reg = fn_ptr_ptr->get_pointee ();
      if (const function_region *fn_reg = reg->dyn_cast_function_region ())
	{
	  tree fn_decl = fn_reg->get_fndecl ();
	  cgraph_node *node = cgraph_node::get (fn_decl);
	  if (!node)
	    return NULL_TREE;
	  const cgraph_node *ultimate_node = node->ultimate_alias_target ();
	  if (ultimate_node)
	    return ultimate_node->decl;
	}
    }

  return NULL_TREE;
}
5650 /* Would be much simpler to use a lambda here, if it were supported. */
5652 struct append_regions_cb_data
5654 const region_model *model;
  auto_vec<const decl_region *> *out;
};
5658 /* Populate *OUT with all decl_regions in the current
5659 frame that have clusters within the store. */
void
region_model::
get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
{
  append_regions_cb_data data;
  data.model = this;
  data.out = out;
  m_store.for_each_cluster (append_regions_cb, &data);
}
5671 /* Implementation detail of get_regions_for_current_frame. */
5674 region_model::append_regions_cb (const region *base_reg,
5675 append_regions_cb_data *cb_data)
  if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
    return;
5679 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
5680 cb_data->out->safe_push (decl_reg);
5684 /* Abstract class for diagnostics related to the use of
5685 floating-point arithmetic where precision is needed. */
class imprecise_floating_point_arithmetic : public pending_diagnostic
{
public:
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_imprecise_fp_arithmetic;
  }
};
5696 /* Concrete diagnostic to complain about uses of floating-point arithmetic
5697 in the size argument of malloc etc. */
class float_as_size_arg : public imprecise_floating_point_arithmetic
{
public:
  float_as_size_arg (tree arg) : m_arg (arg)
  {}
5705 const char *get_kind () const final override
5707 return "float_as_size_arg_diagnostic";
5710 bool subclass_equal_p (const pending_diagnostic &other) const final override
5712 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned = warning_meta (rich_loc, m, get_controlling_option (),
				"use of floating-point arithmetic here might"
				" yield unexpected results");
    if (warned)
      inform (rich_loc->get_loc (), "only use operands of an integer type"
	      " inside the size argument");
    return warned;
  }
  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    if (m_arg)
      return ev.formatted_print ("operand %qE is of type %qT",
				 m_arg, TREE_TYPE (m_arg));
    return ev.formatted_print ("at least one operand of the size argument is"
			       " of a floating-point type");
  }

private:
  tree m_arg;
};
5741 /* Visitor to find uses of floating-point variables/constants in an svalue. */
class contains_floating_point_visitor : public visitor
{
public:
  contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
  {
    root_sval->accept (this);
  }
  const svalue *get_svalue_to_report ()
  {
    return m_result;
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    /* At the point the analyzer runs, constant integer operands in a
       floating-point expression are already implicitly converted to
       floating-points.  Thus, we prefer to report non-constants, so that
       the diagnostic always reports a floating-point operand.  */
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type) && !m_result)
      m_result = sval;
  }

  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

private:
  /* Non-null if at least one floating-point operand was found.  */
  const svalue *m_result;
};
5786 /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
5789 region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
5790 region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  contains_floating_point_visitor v (size_in_bytes);
5795 if (const svalue *float_sval = v.get_svalue_to_report ())
5797 tree diag_arg = get_representative_tree (float_sval);
5798 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
5802 /* Return a new region describing a heap-allocated block of memory.
5803 Use CTXT to complain about tainted sizes. */
5806 region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
5807 region_model_context *ctxt)
5809 const region *reg = m_mgr->create_region_for_heap_alloc ();
5810 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}
5815 /* Return a new region describing a block of memory allocated within the
5817 Use CTXT to complain about tainted sizes. */
5820 region_model::create_region_for_alloca (const svalue *size_in_bytes,
5821 region_model_context *ctxt)
5823 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
5824 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}
5829 /* Record that the size of REG is SIZE_IN_BYTES.
5830 Use CTXT to complain about tainted sizes. */
5833 region_model::set_dynamic_extents (const region *reg,
5834 const svalue *size_in_bytes,
5835 region_model_context *ctxt)
5837 assert_compat_types (size_in_bytes->get_type (), size_type_node);
  if (ctxt)
    {
      check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
				    ctxt);
      check_dynamic_size_for_floats (size_in_bytes, ctxt);
    }
5844 m_dynamic_extents.put (reg, size_in_bytes);
/* Get the recorded size of REG in bytes, or NULL if no dynamic size was
   recorded for it.  */

const svalue *
region_model::get_dynamic_extents (const region *reg) const
{
  if (const svalue * const *slot = m_dynamic_extents.get (reg))
    return *slot;
  return NULL;
}
5858 /* Unset any recorded dynamic size of REG. */
5861 region_model::unset_dynamic_extents (const region *reg)
5863 m_dynamic_extents.remove (reg);
/* Information about the layout of a RECORD_TYPE, capturing it as a vector
   of items, where each item is either a field or padding.  */

class record_layout
{
public:
  /* An item within a record; either a field, or padding after a field.  */
  struct item
  {
    item (const bit_range &br,
	  tree field,
	  bool is_padding)
    : m_bit_range (br),
      m_field (field),
      m_is_padding (is_padding)
    {}

    bit_offset_t get_start_bit_offset () const
    {
      return m_bit_range.get_start_bit_offset ();
    }
    bit_offset_t get_next_bit_offset () const
    {
      return m_bit_range.get_next_bit_offset ();
    }

    bool contains_p (bit_offset_t offset) const
    {
      return m_bit_range.contains_p (offset);
    }

    void dump_to_pp (pretty_printer *pp) const
    {
      if (m_is_padding)
	pp_printf (pp, "padding after %qD", m_field);
      else
	pp_printf (pp, "%qD", m_field);
      pp_string (pp, ", ");
      m_bit_range.dump_to_pp (pp);
    }

    bit_range m_bit_range;
    tree m_field;
    bool m_is_padding;
  };
  record_layout (tree record_type)
  {
    gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);

    for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
	 iter = DECL_CHAIN (iter))
      {
	if (TREE_CODE (iter) == FIELD_DECL)
	  {
	    int iter_field_offset = int_bit_position (iter);
	    bit_size_t size_in_bits;
	    if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
	      size_in_bits = 0;

	    maybe_pad_to (iter_field_offset);

	    /* Add field.  */
	    m_items.safe_push (item (bit_range (iter_field_offset,
						size_in_bits),
				     iter, false));
	  }
      }

    /* Add any trailing padding.  */
    bit_size_t size_in_bits;
    if (int_size_in_bits (record_type, &size_in_bits))
      maybe_pad_to (size_in_bits);
  }
  void dump_to_pp (pretty_printer *pp) const
  {
    unsigned i;
    item *it;
    FOR_EACH_VEC_ELT (m_items, i, it)
      {
	it->dump_to_pp (pp);
	pp_newline (pp);
      }
  }
  DEBUG_FUNCTION void dump () const
  {
    pretty_printer pp;
    pp_format_decoder (&pp) = default_tree_printer;
    pp.buffer->stream = stderr;
    dump_to_pp (&pp);
    pp_flush (&pp);
  }
  const record_layout::item *get_item_at (bit_offset_t offset) const
  {
    unsigned i;
    item *it;
    FOR_EACH_VEC_ELT (m_items, i, it)
      if (it->contains_p (offset))
	return it;
    return NULL;
  }
private:
  /* Subroutine of ctor.  Add padding item to NEXT_OFFSET if necessary.  */
  void maybe_pad_to (bit_offset_t next_offset)
  {
    if (m_items.length () > 0)
      {
	const item &last_item = m_items[m_items.length () - 1];
	bit_offset_t offset_after_last_item
	  = last_item.get_next_bit_offset ();
	if (next_offset > offset_after_last_item)
	  {
	    bit_size_t padding_size
	      = next_offset - offset_after_last_item;
	    m_items.safe_push (item (bit_range (offset_after_last_item,
						padding_size),
				     last_item.m_field, true));
	  }
      }
  }

  auto_vec<item> m_items;
};
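/* For example (an illustrative sketch): on a typical LP64 target,
     struct s { char c; int i; };
   is captured as three items: field "c" at bits 0-7, padding after "c"
   at bits 8-31, and field "i" at bits 32-63.  */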
5997 /* A subclass of pending_diagnostic for complaining about uninitialized data
5998 being copied across a trust boundary to an untrusted output
5999 (e.g. copy_to_user infoleaks in the Linux kernel). */
6001 class exposure_through_uninit_copy
  : public pending_diagnostic_subclass<exposure_through_uninit_copy>
{
public:
  exposure_through_uninit_copy (const region *src_region,
6006 const region *dest_region,
6007 const svalue *copied_sval)
6008 : m_src_region (src_region),
6009 m_dest_region (dest_region),
    m_copied_sval (copied_sval)
  {
    gcc_assert (m_copied_sval->get_kind () == SK_POISONED
		|| m_copied_sval->get_kind () == SK_COMPOUND);
  }
6016 const char *get_kind () const final override
6018 return "exposure_through_uninit_copy";
6021 bool operator== (const exposure_through_uninit_copy &other) const
6023 return (m_src_region == other.m_src_region
6024 && m_dest_region == other.m_dest_region
6025 && m_copied_sval == other.m_copied_sval);
6028 int get_controlling_option () const final override
6030 return OPT_Wanalyzer_exposure_through_uninit_copy;
  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor.  */
    m.add_cwe (200);
    enum memory_space mem_space = get_src_memory_space ();
    bool warned;
    switch (mem_space)
      {
      default:
	warned = warning_meta
	  (rich_loc, m, get_controlling_option (),
	   "potential exposure of sensitive information"
	   " by copying uninitialized data across trust boundary");
	break;
      case MEMSPACE_STACK:
	warned = warning_meta
	  (rich_loc, m, get_controlling_option (),
	   "potential exposure of sensitive information"
	   " by copying uninitialized data from stack across trust boundary");
	break;
      case MEMSPACE_HEAP:
	warned = warning_meta
	  (rich_loc, m, get_controlling_option (),
	   "potential exposure of sensitive information"
	   " by copying uninitialized data from heap across trust boundary");
	break;
      }
    if (warned)
      {
	location_t loc = rich_loc->get_loc ();
	inform_number_of_uninit_bits (loc);
	complain_about_uninit_ranges (loc);

	if (mem_space == MEMSPACE_STACK)
	  maybe_emit_fixit_hint ();
      }
    return warned;
  }
  label_text describe_final_event (const evdesc::final_event &) final override
  {
    enum memory_space mem_space = get_src_memory_space ();
    switch (mem_space)
      {
      default:
	return label_text::borrow ("uninitialized data copied here");
      case MEMSPACE_STACK:
	return label_text::borrow ("uninitialized data copied from stack here");
      case MEMSPACE_HEAP:
	return label_text::borrow ("uninitialized data copied from heap here");
      }
  }
  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }
6096 enum memory_space get_src_memory_space () const
6098 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
  bit_size_t calc_num_uninit_bits () const
  {
    switch (m_copied_sval->get_kind ())
      {
      default:
	gcc_unreachable ();
	break;
      case SK_POISONED:
	{
	  const poisoned_svalue *poisoned_sval
	    = as_a <const poisoned_svalue *> (m_copied_sval);
	  gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);

	  /* Give up if we don't have type information.  */
	  if (m_copied_sval->get_type () == NULL_TREE)
	    return 0;

	  bit_size_t size_in_bits;
	  if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
	    return size_in_bits;

	  /* Give up if we can't get the size of the type.  */
	  return 0;
	}
      case SK_COMPOUND:
	{
	  const compound_svalue *compound_sval
	    = as_a <const compound_svalue *> (m_copied_sval);
	  bit_size_t result = 0;
	  /* Find keys for uninit svals.  */
	  for (auto iter : *compound_sval)
	    {
	      const svalue *sval = iter.second;
	      if (const poisoned_svalue *psval
		  = sval->dyn_cast_poisoned_svalue ())
		if (psval->get_poison_kind () == POISON_KIND_UNINIT)
		  {
		    const binding_key *key = iter.first;
		    const concrete_binding *ckey
		      = key->dyn_cast_concrete_binding ();
		    gcc_assert (ckey);
		    result += ckey->get_size_in_bits ();
		  }
	    }
	  return result;
	}
      }
  }
  void inform_number_of_uninit_bits (location_t loc) const
  {
    bit_size_t num_uninit_bits = calc_num_uninit_bits ();
    if (num_uninit_bits <= 0)
      return;
    if (num_uninit_bits % BITS_PER_UNIT == 0)
      {
	/* Express in bytes.  */
	byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
	if (num_uninit_bytes == 1)
	  inform (loc, "1 byte is uninitialized");
	else
	  inform (loc,
		  "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
      }
    else
      {
	/* Express in bits.  */
	if (num_uninit_bits == 1)
	  inform (loc, "1 bit is uninitialized");
	else
	  inform (loc,
		  "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
      }
  }
  void complain_about_uninit_ranges (location_t loc) const
  {
    if (const compound_svalue *compound_sval
	= m_copied_sval->dyn_cast_compound_svalue ())
      {
	/* Find keys for uninit svals.  */
	auto_vec<const concrete_binding *> uninit_keys;
	for (auto iter : *compound_sval)
	  {
	    const svalue *sval = iter.second;
	    if (const poisoned_svalue *psval
		= sval->dyn_cast_poisoned_svalue ())
	      if (psval->get_poison_kind () == POISON_KIND_UNINIT)
		{
		  const binding_key *key = iter.first;
		  const concrete_binding *ckey
		    = key->dyn_cast_concrete_binding ();
		  gcc_assert (ckey);
		  uninit_keys.safe_push (ckey);
		}
	  }
	/* Complain about them in sorted order.  */
	uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);

	std::unique_ptr<record_layout> layout;

	tree type = m_copied_sval->get_type ();
	if (type && TREE_CODE (type) == RECORD_TYPE)
	  {
	    // (std::make_unique is C++14)
	    layout = std::unique_ptr<record_layout> (new record_layout (type));
	  }

	unsigned i;
	const concrete_binding *ckey;
	FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
	  {
	    bit_offset_t start_bit = ckey->get_start_bit_offset ();
	    bit_offset_t next_bit = ckey->get_next_bit_offset ();
	    complain_about_uninit_range (loc, start_bit, next_bit,
					 layout.get ());
	  }
      }
  }
  void complain_about_uninit_range (location_t loc,
				    bit_offset_t start_bit,
				    bit_offset_t next_bit,
				    const record_layout *layout) const
  {
    if (layout)
      {
	while (start_bit < next_bit)
	  {
	    if (const record_layout::item *item
		= layout->get_item_at (start_bit))
	      {
		gcc_assert (start_bit >= item->get_start_bit_offset ());
		gcc_assert (start_bit < item->get_next_bit_offset ());
		if (item->get_start_bit_offset () == start_bit
		    && item->get_next_bit_offset () <= next_bit)
		  complain_about_fully_uninit_item (*item);
		else
		  complain_about_partially_uninit_item (*item);
		start_bit = item->get_next_bit_offset ();
	      }
	    else
	      break;
	  }
      }
    else
      {
	if (start_bit >= next_bit)
	  return;

	if (start_bit % 8 == 0 && next_bit % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_offset_t start_byte = start_bit / 8;
	    byte_offset_t last_byte = (next_bit / 8) - 1;
	    if (last_byte == start_byte)
	      inform (loc,
		      "byte %wu is uninitialized",
		      start_byte.to_uhwi ());
	    else
	      inform (loc,
		      "bytes %wu - %wu are uninitialized",
		      start_byte.to_uhwi (),
		      last_byte.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    bit_offset_t last_bit = next_bit - 1;
	    if (last_bit == start_bit)
	      inform (loc,
		      "bit %wu is uninitialized",
		      start_bit.to_uhwi ());
	    else
	      inform (loc,
		      "bits %wu - %wu are uninitialized",
		      start_bit.to_uhwi (),
		      last_bit.to_uhwi ());
	  }
      }
  }
  void complain_about_fully_uninit_item (const record_layout::item &item)
  {
    tree field = item.m_field;
    bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
    if (item.m_is_padding)
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 byte)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 bit)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
    else
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 byte)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 bit)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
  }
  void complain_about_partially_uninit_item (const record_layout::item &item)
  {
    tree field = item.m_field;
    if (item.m_is_padding)
      inform (DECL_SOURCE_LOCATION (field),
	      "padding after field %qD is partially uninitialized",
	      field);
    else
      inform (DECL_SOURCE_LOCATION (field),
	      "field %qD is partially uninitialized",
	      field);
    /* TODO: ideally we'd describe what parts are uninitialized.  */
  }
6362 void maybe_emit_fixit_hint () const
6364 if (tree decl = m_src_region->maybe_get_decl ())
6366 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
6367 hint_richloc.add_fixit_insert_after (" = {0}");
6368 inform (&hint_richloc,
6369 "suggest forcing zero-initialization by"
6370 " providing a %<{0}%> initializer");
6375 const region *m_src_region;
6376 const region *m_dest_region;
  const svalue *m_copied_sval;
};
6380 /* Return true if any part of SVAL is uninitialized. */
static bool
contains_uninit_p (const svalue *sval)
{
  struct uninit_finder : public visitor
  {
  public:
    uninit_finder () : m_found_uninit (false) {}
    void visit_poisoned_svalue (const poisoned_svalue *sval)
    {
      if (sval->get_poison_kind () == POISON_KIND_UNINIT)
	m_found_uninit = true;
    }
    bool m_found_uninit;
  };

  uninit_finder v;
  sval->accept (&v);

  return v.m_found_uninit;
}
6403 /* Function for use by plugins when simulating writing data through a
6404 pointer to an "untrusted" region DST_REG (and thus crossing a security
6405 boundary), such as copying data to user space in an OS kernel.
6407 Check that COPIED_SVAL is fully initialized. If not, complain about
6408 an infoleak to CTXT.
6410 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
6411 as to where COPIED_SVAL came from. */
void
region_model::maybe_complain_about_infoleak (const region *dst_reg,
					     const svalue *copied_sval,
					     const region *src_reg,
					     region_model_context *ctxt)
{
  /* Check for exposure.  */
  if (contains_uninit_p (copied_sval))
    ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
							   dst_reg,
							   copied_sval));
}
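/* For instance (an illustrative sketch of plugin usage; the function
   names are hypothetical): a plugin modeling a kernel-to-user copy
     copy_to_user (u_dst, &k_src, len);
   could pass the userspace destination region as DST_REG, the svalue
   read from &k_src as COPIED_SVAL, and the kernel-side source region as
   SRC_REG; any uninitialized bits within COPIED_SVAL are then reported
   as a potential infoleak.  */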
6426 /* Set errno to a positive symbolic int, as if some error has occurred. */
void
region_model::set_errno (const call_details &cd)
{
  const region *errno_reg = m_mgr->get_errno_region ();
  conjured_purge p (this, cd.get_ctxt ());
  const svalue *new_errno_sval
    = m_mgr->get_or_create_conjured_svalue (integer_type_node,
					    cd.get_call_stmt (),
					    errno_reg, p);
  const svalue *zero
    = m_mgr->get_or_create_int_cst (integer_type_node, 0);
  add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
  set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
}
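/* For instance (an illustrative sketch): a failing-call path in a known
   function handler can call set_errno so that caller code such as
     if (errno == ENOENT) ...
   sees a conjured positive int rather than whatever value errno held
   before the call.  */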
6443 /* class noop_region_model_context : public region_model_context. */
6446 noop_region_model_context::add_note (std::unique_ptr<pending_note>)
6451 noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
6456 noop_region_model_context::terminate_path ()
6460 /* struct model_merger. */
6462 /* Dump a multiline representation of this merger to PP. */
6465 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
6467 pp_string (pp, "model A:");
6469 m_model_a->dump_to_pp (pp, simple, true);
6472 pp_string (pp, "model B:");
6474 m_model_b->dump_to_pp (pp, simple, true);
6477 pp_string (pp, "merged model:");
6479 m_merged_model->dump_to_pp (pp, simple, true);
6483 /* Dump a multiline representation of this merger to FILE. */
6486 model_merger::dump (FILE *fp, bool simple) const
6489 pp_format_decoder (&pp) = default_tree_printer;
6490 pp_show_color (&pp) = pp_show_color (global_dc->printer);
6491 pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}
6496 /* Dump a multiline representation of this merger to stderr. */
6499 model_merger::dump (bool simple) const
6501 dump (stderr, simple);
6504 /* Return true if it's OK to merge SVAL with other svalues. */
model_merger::mergeable_svalue_p (const svalue *sval) const
{
  if (m_ext_state)
    {
      /* Reject merging svalues that have non-purgable sm-state,
	 to avoid falsely reporting memory leaks by merging them
	 with something else.  For example, given a local var "p",
	 reject the merger of a:
	   store_a mapping "p" to a malloc-ed ptr
	 with:
	   store_b mapping "p" to a NULL ptr.  */
      if (!m_state_a->can_purge_p (*m_ext_state, sval))
	return false;
      if (!m_state_b->can_purge_p (*m_ext_state, sval))
	return false;
    }
  return true;
}
6530 /* Dump RMODEL fully to stderr (i.e. without summarization). */
6533 debug (const region_model &rmodel)
6535 rmodel.dump (false);
6538 /* class rejected_op_constraint : public rejected_constraint. */
6541 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
6543 region_model m (m_model);
6544 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
6545 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
6546 lhs_sval->dump_to_pp (pp, true);
6547 pp_printf (pp, " %s ", op_symbol_code (m_op));
6548 rhs_sval->dump_to_pp (pp, true);
6551 /* class rejected_ranges_constraint : public rejected_constraint. */
6554 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
6556 region_model m (m_model);
6557 const svalue *sval = m.get_rvalue (m_expr, NULL);
6558 sval->dump_to_pp (pp, true);
6559 pp_string (pp, " in ");
6560 m_ranges->dump_to_pp (pp, true);
6565 /* engine's ctor. */
6567 engine::engine (const supergraph *sg, logger *logger)
6568 : m_sg (sg), m_mgr (logger)
6572 /* Dump the managed objects by class to LOGGER, and the per-class totals. */
6575 engine::log_stats (logger *logger) const
6577 m_mgr.log_stats (logger, true);
6584 namespace selftest {
6586 /* Build a constant tree of the given type from STR. */
6589 build_real_cst_from_string (tree type, const char *str)
6591 REAL_VALUE_TYPE real;
6592 real_from_string (&real, str);
6593 return build_real (type, real);
6596 /* Append various "interesting" constants to OUT (e.g. NaN). */
6599 append_interesting_constants (auto_vec<tree> *out)
6601 out->safe_push (build_int_cst (integer_type_node, 0));
6602 out->safe_push (build_int_cst (integer_type_node, 42));
6603 out->safe_push (build_int_cst (unsigned_type_node, 0));
6604 out->safe_push (build_int_cst (unsigned_type_node, 42));
6605 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
6606 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
6607 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
6608 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
6609 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
6610 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
6611 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
6612 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
6615 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
6616 if the underlying constants aren't comparable. */
6619 test_tree_cmp_on_constants ()
6621 auto_vec<tree> csts;
6622 append_interesting_constants (&csts);
6624 /* Try sorting every triple. */
6625 const unsigned num = csts.length ();
6626 for (unsigned i = 0; i < num; i++)
6627 for (unsigned j = 0; j < num; j++)
6628 for (unsigned k = 0; k < num; k++)
6630 auto_vec<tree> v (3);
6631 v.quick_push (csts[i]);
6632 v.quick_push (csts[j]);
6633 v.quick_push (csts[k]);
6638 /* Implementation detail of the ASSERT_CONDITION_* macros. */
6641 assert_condition (const location &loc,
6642 region_model &model,
6643 const svalue *lhs, tree_code op, const svalue *rhs,
6646 tristate actual = model.eval_condition (lhs, op, rhs);
6647 ASSERT_EQ_AT (loc, actual, expected);
6650 /* Implementation detail of the ASSERT_CONDITION_* macros. */
6653 assert_condition (const location &loc,
6654 region_model &model,
6655 tree lhs, tree_code op, tree rhs,
6656 tristate expected)
6658 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
6659 ASSERT_EQ_AT (loc, actual, expected);
6662 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
6665 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
6667 auto_fix_quotes sentinel;
6668 pretty_printer pp;
6669 pp_format_decoder (&pp) = default_tree_printer;
6670 dump_tree (&pp, t);
6671 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6674 /* Assert that dump_tree (T) is EXPECTED. */
6676 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
6677 SELFTEST_BEGIN_STMT \
6678 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
6679 SELFTEST_END_STMT
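/* Usage sketch (illustrative only, not one of the original tests):

     ASSERT_DUMP_TREE_EQ (build_int_cst (integer_type_node, 42), "42");

   since dump_tree pretty-prints an INTEGER_CST as its decimal value.  */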
6681 /* Implementation detail of ASSERT_DUMP_EQ. */
6684 assert_dump_eq (const location &loc,
6685 const region_model &model,
6686 bool summarize,
6687 const char *expected)
6689 auto_fix_quotes sentinel;
6690 pretty_printer pp;
6691 pp_format_decoder (&pp) = default_tree_printer;
6693 model.dump_to_pp (&pp, summarize, true);
6694 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6697 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
6699 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
6700 SELFTEST_BEGIN_STMT \
6701 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
6702 SELFTEST_END_STMT
6704 /* Smoketest for region_model::dump_to_pp. */
6706 static void
6707 test_dump ()
6709 region_model_manager mgr;
6710 region_model model (&mgr);
6712 ASSERT_DUMP_EQ (model, false,
6713 "stack depth: 0\n"
6714 "m_called_unknown_fn: FALSE\n"
6715 "constraint_manager:\n"
6716 "  equiv classes:\n"
6717 "  constraints:\n");
6718 ASSERT_DUMP_EQ (model, true,
6719 "stack depth: 0\n"
6720 "m_called_unknown_fn: FALSE\n"
6721 "constraint_manager:\n"
6722 "  equiv classes:\n"
6723 "  constraints:\n");
6726 /* Helper function for selftests. Create a struct or union type named NAME,
6727 with the fields given by the FIELD_DECLS in FIELDS.
6728 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
6729 create a UNION_TYPE. */
6732 make_test_compound_type (const char *name, bool is_struct,
6733 const auto_vec<tree> *fields)
6735 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
6736 TYPE_NAME (t) = get_identifier (name);
6739 tree fieldlist = NULL;
6740 int i;
6741 tree field;
6742 FOR_EACH_VEC_ELT (*fields, i, field)
6744 gcc_assert (TREE_CODE (field) == FIELD_DECL);
6745 DECL_CONTEXT (field) = t;
6746 fieldlist = chainon (field, fieldlist);
6748 fieldlist = nreverse (fieldlist);
6749 TYPE_FIELDS (t) = fieldlist;
6751 layout_type (t);
6752 return t;
6755 /* Selftest fixture for creating the type "struct coord {int x; int y; };". */
6757 struct coord_test
6758 {
6759 coord_test ()
6760 {
6761 auto_vec<tree> fields;
6762 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6763 get_identifier ("x"), integer_type_node);
6764 fields.safe_push (m_x_field);
6765 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6766 get_identifier ("y"), integer_type_node);
6767 fields.safe_push (m_y_field);
6768 m_coord_type = make_test_compound_type ("coord", true, &fields);
6769 }
6771 tree m_x_field;
6772 tree m_y_field;
6773 tree m_coord_type;
6774 };
6776 /* Verify usage of a struct. */
6778 static void
6779 test_struct ()
6781 coord_test ct;
6783 tree c = build_global_decl ("c", ct.m_coord_type);
6784 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6785 c, ct.m_x_field, NULL_TREE);
6786 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6787 c, ct.m_y_field, NULL_TREE);
6789 tree int_17 = build_int_cst (integer_type_node, 17);
6790 tree int_m3 = build_int_cst (integer_type_node, -3);
6792 region_model_manager mgr;
6793 region_model model (&mgr);
6794 model.set_value (c_x, int_17, NULL);
6795 model.set_value (c_y, int_m3, NULL);
6797 /* Verify get_offset for "c.x". */
6799 const region *c_x_reg = model.get_lvalue (c_x, NULL);
6800 region_offset offset = c_x_reg->get_offset (&mgr);
6801 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6802 ASSERT_EQ (offset.get_bit_offset (), 0);
6805 /* Verify get_offset for "c.y". */
6807 const region *c_y_reg = model.get_lvalue (c_y, NULL);
6808 region_offset offset = c_y_reg->get_offset (&mgr);
6809 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6810 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
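  /* (On the common targets where "int" is 32 bits wide, INT_TYPE_SIZE is
     32: "c.y" starts 32 bits into the struct, immediately after "c.x".)  */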
6814 /* Verify usage of an array element. */
6819 tree tlen = size_int (10);
6820 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6822 tree a = build_global_decl ("a", arr_type);
6824 region_model_manager mgr;
6825 region_model model (&mgr);
6826 tree int_0 = build_int_cst (integer_type_node, 0);
6827 tree a_0 = build4 (ARRAY_REF, char_type_node,
6828 a, int_0, NULL_TREE, NULL_TREE);
6829 tree char_A = build_int_cst (char_type_node, 'A');
6830 model.set_value (a_0, char_A, NULL);
6833 /* Verify that region_model::get_representative_tree works as expected. */
6836 test_get_representative_tree ()
6838 region_model_manager mgr;
6840 /* STRING_CST. */
6842 tree string_cst = build_string (4, "foo");
6843 region_model m (&mgr);
6844 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
6845 tree rep = m.get_representative_tree (str_sval);
6846 ASSERT_EQ (rep, string_cst);
6849 /* String literal. */
6851 tree string_cst_ptr = build_string_literal (4, "foo");
6852 region_model m (&mgr);
6853 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
6854 tree rep = m.get_representative_tree (str_sval);
6855 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
6858 /* Value of an element within an array. */
6860 tree tlen = size_int (10);
6861 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6862 tree a = build_global_decl ("a", arr_type);
6863 placeholder_svalue test_sval (char_type_node, "test value");
6865 /* Value of a[3]. */
6867 test_region_model_context ctxt;
6868 region_model model (&mgr);
6869 tree int_3 = build_int_cst (integer_type_node, 3);
6870 tree a_3 = build4 (ARRAY_REF, char_type_node,
6871 a, int_3, NULL_TREE, NULL_TREE);
6872 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
6873 model.set_value (a_3_reg, &test_sval, &ctxt);
6874 tree rep = model.get_representative_tree (&test_sval);
6875 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6878 /* Value of a[0]. */
6880 test_region_model_context ctxt;
6881 region_model model (&mgr);
6882 tree idx = build_int_cst (integer_type_node, 0);
6883 tree a_0 = build4 (ARRAY_REF, char_type_node,
6884 a, idx, NULL_TREE, NULL_TREE);
6885 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6886 model.set_value (a_0_reg, &test_sval, &ctxt);
6887 tree rep = model.get_representative_tree (&test_sval);
6888 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6892 /* Value of a field within a struct. */
6894 coord_test ct;
6896 tree c = build_global_decl ("c", ct.m_coord_type);
6897 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6898 c, ct.m_x_field, NULL_TREE);
6899 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6900 c, ct.m_y_field, NULL_TREE);
6902 test_region_model_context ctxt;
6904 /* Value of initial field. */
6906 region_model m (&mgr);
6907 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6908 placeholder_svalue test_sval_x (integer_type_node, "test x val");
6909 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6910 tree rep = m.get_representative_tree (&test_sval_x);
6911 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6914 /* Value of non-initial field. */
6916 region_model m (&mgr);
6917 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6918 placeholder_svalue test_sval_y (integer_type_node, "test y val");
6919 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6920 tree rep = m.get_representative_tree (&test_sval_y);
6921 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6926 /* Verify that calling region_model::get_rvalue repeatedly on the same
6927 tree constant retrieves the same svalue *. */
6930 test_unique_constants ()
6932 tree int_0 = build_int_cst (integer_type_node, 0);
6933 tree int_42 = build_int_cst (integer_type_node, 42);
6935 test_region_model_context ctxt;
6936 region_model_manager mgr;
6937 region_model model (&mgr);
6938 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
6939 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
6940 model.get_rvalue (int_42, &ctxt));
6941 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
6942 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
6944 /* A "(const int)42" will be a different tree from "(int)42"... */
6945 tree const_int_type_node
6946 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
6947 tree const_int_42 = build_int_cst (const_int_type_node, 42);
6948 ASSERT_NE (int_42, const_int_42);
6949 /* It should have a different const_svalue. */
6950 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
6951 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
6952 ASSERT_NE (int_42_sval, const_int_42_sval);
6953 /* But they should compare as equal. */
6954 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
6955 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
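/* A sketch of the interning guarantee the test above relies on
   (illustrative only, not part of the original tests):

     const svalue *a = model.get_rvalue (int_42, &ctxt);
     const svalue *b = model.get_rvalue (int_42, &ctxt);
     gcc_assert (a == b);  // same pointer, not merely equal values

   svalues are interned per region_model_manager, so pointer equality can
   stand in for value identity within a single manager.  */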
6958 /* Verify that each type gets its own singleton unknown_svalue within a
6959 region_model_manager, and that NULL_TREE gets its own singleton. */
6962 test_unique_unknowns ()
6964 region_model_manager mgr;
6965 const svalue *unknown_int
6966 = mgr.get_or_create_unknown_svalue (integer_type_node);
6967 /* Repeated calls with the same type should get the same "unknown"
6968 svalue. */
6969 const svalue *unknown_int_2
6970 = mgr.get_or_create_unknown_svalue (integer_type_node);
6971 ASSERT_EQ (unknown_int, unknown_int_2);
6973 /* Different types (or the NULL type) should have different
6974 unknown svalues. */
6975 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
6976 ASSERT_NE (unknown_NULL_type, unknown_int);
6978 /* Repeated calls with NULL for the type should get the same "unknown"
6979 svalue. */
6980 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
6981 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
6984 /* Verify that initial_svalues are handled as expected. */
6987 test_initial_svalue_folding ()
6989 region_model_manager mgr;
6990 tree x = build_global_decl ("x", integer_type_node);
6991 tree y = build_global_decl ("y", integer_type_node);
6993 test_region_model_context ctxt;
6994 region_model model (&mgr);
6995 const svalue *x_init = model.get_rvalue (x, &ctxt);
6996 const svalue *y_init = model.get_rvalue (y, &ctxt);
6997 ASSERT_NE (x_init, y_init);
6998 const region *x_reg = model.get_lvalue (x, &ctxt);
6999 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
7003 /* Verify that unary ops are folded as expected. */
7006 test_unaryop_svalue_folding ()
7008 region_model_manager mgr;
7009 tree x = build_global_decl ("x", integer_type_node);
7010 tree y = build_global_decl ("y", integer_type_node);
7012 test_region_model_context ctxt;
7013 region_model model (&mgr);
7014 const svalue *x_init = model.get_rvalue (x, &ctxt);
7015 const svalue *y_init = model.get_rvalue (y, &ctxt);
7016 const region *x_reg = model.get_lvalue (x, &ctxt);
7017 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
7019 /* "(int)x" -> "x". */
7020 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
7022 /* "(void *)x" -> something other than "x". */
7023 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
7025 /* "!(x == y)" -> "x != y". */
7026 ASSERT_EQ (mgr.get_or_create_unaryop
7027 (boolean_type_node, TRUTH_NOT_EXPR,
7028 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
7029 x_init, y_init)),
7030 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
7031 x_init, y_init));
7032 /* "!(x > y)" -> "x <= y". */
7033 ASSERT_EQ (mgr.get_or_create_unaryop
7034 (boolean_type_node, TRUTH_NOT_EXPR,
7035 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
7036 x_init, y_init)),
7037 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
7038 x_init, y_init));
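  /* (Note that folding "!(x > y)" to "x <= y" is only valid here because
     the operands are integers; for floating-point operands a NaN makes
     both "x > y" and "x <= y" false, so that rewrite would be wrong.)  */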
7041 /* Verify that binops on constant svalues are folded. */
7044 test_binop_svalue_folding ()
7046 #define NUM_CSTS 10
7047 tree cst_int[NUM_CSTS];
7048 region_model_manager mgr;
7049 const svalue *cst_sval[NUM_CSTS];
7050 for (int i = 0; i < NUM_CSTS; i++)
7052 cst_int[i] = build_int_cst (integer_type_node, i);
7053 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
7054 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
7055 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
7058 for (int i = 0; i < NUM_CSTS; i++)
7059 for (int j = 0; j < NUM_CSTS; j++)
7060 {
7061 if (i != j)
7062 ASSERT_NE (cst_sval[i], cst_sval[j]);
7063 if (i + j < NUM_CSTS)
7064 {
7065 const svalue *sum
7066 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7067 cst_sval[i], cst_sval[j]);
7068 ASSERT_EQ (sum, cst_sval[i + j]);
7069 }
7070 if (i >= j)
7071 {
7072 const svalue *difference
7073 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
7074 cst_sval[i], cst_sval[j]);
7075 ASSERT_EQ (difference, cst_sval[i - j]);
7076 }
7077 if (i * j < NUM_CSTS)
7078 {
7079 const svalue *product
7080 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7081 cst_sval[i], cst_sval[j]);
7082 ASSERT_EQ (product, cst_sval[i * j]);
7084 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
7085 cst_sval[i], cst_sval[j]);
7086 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
7087 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
7088 cst_sval[i], cst_sval[j]);
7089 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
7093 tree x = build_global_decl ("x", integer_type_node);
7095 test_region_model_context ctxt;
7096 region_model model (&mgr);
7097 const svalue *x_init = model.get_rvalue (x, &ctxt);
7099 /* PLUS_EXPR folding. */
7100 const svalue *x_init_plus_zero
7101 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7102 x_init, cst_sval[0]);
7103 ASSERT_EQ (x_init_plus_zero, x_init);
7104 const svalue *zero_plus_x_init
7105 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7106 cst_sval[0], x_init);
7107 ASSERT_EQ (zero_plus_x_init, x_init);
7109 /* MULT_EXPR folding. */
7110 const svalue *x_init_times_zero
7111 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7112 x_init, cst_sval[0]);
7113 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
7114 const svalue *zero_times_x_init
7115 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7116 cst_sval[0], x_init);
7117 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
7119 const svalue *x_init_times_one
7120 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7121 x_init, cst_sval[1]);
7122 ASSERT_EQ (x_init_times_one, x_init);
7123 const svalue *one_times_x_init
7124 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7125 cst_sval[1], x_init);
7126 ASSERT_EQ (one_times_x_init, x_init);
7129 // TODO: do we want to use the match-and-simplify DSL for this?
7131 /* Verify that binops put any constants on the RHS. */
7132 const svalue *four_times_x_init
7133 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7134 cst_sval[4], x_init);
7135 const svalue *x_init_times_four
7136 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7137 x_init, cst_sval[4]);
7138 ASSERT_EQ (four_times_x_init, x_init_times_four);
7139 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
7140 ASSERT_EQ (binop->get_op (), MULT_EXPR);
7141 ASSERT_EQ (binop->get_arg0 (), x_init);
7142 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
7144 /* Verify that ((x + 1) + 1) == (x + 2). */
7145 const svalue *x_init_plus_one
7146 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7147 x_init, cst_sval[1]);
7148 const svalue *x_init_plus_two
7149 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7150 x_init, cst_sval[2]);
7151 const svalue *x_init_plus_one_plus_one
7152 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7153 x_init_plus_one, cst_sval[1]);
7154 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
7156 /* Verify various binops on booleans. */
7158 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
7159 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
7160 const svalue *sval_unknown
7161 = mgr.get_or_create_unknown_svalue (boolean_type_node);
7162 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
7163 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
7165 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7166 sval_true, sval_unknown),
7167 sval_true);
7168 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7169 sval_false, sval_unknown),
7170 sval_unknown);
7171 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7172 sval_false, &sval_placeholder),
7173 &sval_placeholder);
7175 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
7177 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7178 sval_false, sval_unknown),
7179 sval_false);
7180 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7181 sval_true, sval_unknown),
7182 sval_unknown);
7183 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7184 sval_true, &sval_placeholder),
7185 &sval_placeholder);
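  /* i.e. the folding uses the absorbing and identity elements of the
     boolean operations: TRUE absorbs OR and FALSE absorbs AND, no matter
     how unknown the other operand is, while the identity element (FALSE
     for OR, TRUE for AND) hands back the other operand unchanged, even a
     placeholder.  */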
7190 /* Verify that sub_svalues are folded as expected. */
7193 test_sub_svalue_folding ()
7195 coord_test ct;
7196 tree c = build_global_decl ("c", ct.m_coord_type);
7197 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7198 c, ct.m_x_field, NULL_TREE);
7200 region_model_manager mgr;
7201 region_model model (&mgr);
7202 test_region_model_context ctxt;
7203 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
7205 /* Verify that sub_svalue of "unknown" simply
7206 yields an unknown. */
7208 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
7209 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
7210 unknown, c_x_reg);
7211 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
7212 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
7215 /* Get BIT within VAL as a symbolic value within MGR. */
7217 static const svalue *
7218 get_bit (region_model_manager *mgr,
7219 bit_offset_t bit,
7220 unsigned HOST_WIDE_INT val)
7222 const svalue *inner_svalue
7223 = mgr->get_or_create_int_cst (unsigned_type_node, val);
7224 return mgr->get_or_create_bits_within (boolean_type_node,
7225 bit_range (bit, 1),
7226 inner_svalue);
7229 /* Verify that bits_within_svalues are folded as expected. */
7232 test_bits_within_svalue_folding ()
7234 region_model_manager mgr;
7236 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
7237 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
7240 const unsigned val = 0x0000;
7241 for (unsigned bit = 0; bit < 16; bit++)
7242 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7246 const unsigned val = 0x0001;
7247 ASSERT_EQ (get_bit (&mgr, 0, val), one);
7248 for (unsigned bit = 1; bit < 16; bit++)
7249 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7253 const unsigned val = 0x8000;
7254 for (unsigned bit = 0; bit < 15; bit++)
7255 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7256 ASSERT_EQ (get_bit (&mgr, 15, val), one);
7260 const unsigned val = 0xFFFF;
7261 for (unsigned bit = 0; bit < 16; bit++)
7262 ASSERT_EQ (get_bit (&mgr, bit, val), one);
7266 /* Test that region::descendent_of_p works as expected. */
7269 test_descendent_of_p ()
7271 region_model_manager mgr;
7272 const region *stack = mgr.get_stack_region ();
7273 const region *heap = mgr.get_heap_region ();
7274 const region *code = mgr.get_code_region ();
7275 const region *globals = mgr.get_globals_region ();
7277 /* descendent_of_p should return true when used on the region itself. */
7278 ASSERT_TRUE (stack->descendent_of_p (stack));
7279 ASSERT_FALSE (stack->descendent_of_p (heap));
7280 ASSERT_FALSE (stack->descendent_of_p (code));
7281 ASSERT_FALSE (stack->descendent_of_p (globals));
7283 tree x = build_global_decl ("x", integer_type_node);
7284 const region *x_reg = mgr.get_region_for_global (x);
7285 ASSERT_TRUE (x_reg->descendent_of_p (globals));
7287 /* A cast_region should be a descendent of the original region. */
7288 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
7289 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
7292 /* Verify that bit_range_region works as expected. */
7295 test_bit_range_regions ()
7297 tree x = build_global_decl ("x", integer_type_node);
7298 region_model_manager mgr;
7299 const region *x_reg = mgr.get_region_for_global (x);
7300 const region *byte0
7301 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
7302 const region *byte1
7303 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
7304 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
7305 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
7306 ASSERT_NE (byte0, byte1);
7309 /* Verify that simple assignments work as expected. */
7314 tree int_0 = build_int_cst (integer_type_node, 0);
7315 tree x = build_global_decl ("x", integer_type_node);
7316 tree y = build_global_decl ("y", integer_type_node);
7318 /* "x == 0", then use of y, then "y = 0;". */
7319 region_model_manager mgr;
7320 region_model model (&mgr);
7321 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
7322 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
7323 model.set_value (model.get_lvalue (y, NULL),
7324 model.get_rvalue (int_0, NULL),
7325 NULL);
7326 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
7327 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
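  /* Note that "y == x" follows transitively: both variables now compare
     equal to the constant 0, even though no constraint directly relating
     x and y was ever added.  */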
7330 /* Verify that compound assignments work as expected. */
7333 test_compound_assignment ()
7335 coord_test ct;
7337 tree c = build_global_decl ("c", ct.m_coord_type);
7338 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7339 c, ct.m_x_field, NULL_TREE);
7340 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7341 c, ct.m_y_field, NULL_TREE);
7342 tree d = build_global_decl ("d", ct.m_coord_type);
7343 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7344 d, ct.m_x_field, NULL_TREE);
7345 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7346 d, ct.m_y_field, NULL_TREE);
7348 tree int_17 = build_int_cst (integer_type_node, 17);
7349 tree int_m3 = build_int_cst (integer_type_node, -3);
7351 region_model_manager mgr;
7352 region_model model (&mgr);
7353 model.set_value (c_x, int_17, NULL);
7354 model.set_value (c_y, int_m3, NULL);
7356 /* Copy c to d. */
7357 const svalue *sval = model.get_rvalue (c, NULL);
7358 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
7360 /* Check that the fields have the same svalues. */
7361 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
7362 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
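  /* The whole-struct copy is modeled as a single binding of a compound
     svalue, so each field of "d" picks up the very svalue bound to the
     corresponding field of "c", rather than decaying to "unknown".  */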
7365 /* Verify the details of pushing and popping stack frames. */
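/* A sketch of the (hypothetical) C code being simulated below:

     int *p;   // globals
     int **q;

     int child_fn (int x, int y)
     {
       ... p = &x; q = &p; ...   // y is constrained to be != 5
     }

     int parent_fn (int a, int b)
     {
       ... call child_fn ...     // a is set to 42, b constrained to be < 10
     }

   After child_fn's frame is popped, "p" must become poisoned (it pointed
   into the dead frame), while "q" must survive intact, since it points at
   the global "p".  */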
7368 test_stack_frames ()
7370 tree int_42 = build_int_cst (integer_type_node, 42);
7371 tree int_10 = build_int_cst (integer_type_node, 10);
7372 tree int_5 = build_int_cst (integer_type_node, 5);
7373 tree int_0 = build_int_cst (integer_type_node, 0);
7375 auto_vec <tree> param_types;
7376 tree parent_fndecl = make_fndecl (integer_type_node,
7377 "parent_fn",
7378 param_types);
7379 allocate_struct_function (parent_fndecl, true);
7381 tree child_fndecl = make_fndecl (integer_type_node,
7382 "child_fn",
7383 param_types);
7384 allocate_struct_function (child_fndecl, true);
7386 /* "a" and "b" in the parent frame. */
7387 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7388 get_identifier ("a"),
7389 integer_type_node);
7390 DECL_CONTEXT (a) = parent_fndecl;
7391 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7392 get_identifier ("b"),
7393 integer_type_node);
7394 DECL_CONTEXT (b) = parent_fndecl;
7395 /* "x" and "y" in a child frame. */
7396 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7397 get_identifier ("x"),
7398 integer_type_node);
7399 DECL_CONTEXT (x) = child_fndecl;
7400 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7401 get_identifier ("y"),
7402 integer_type_node);
7403 DECL_CONTEXT (y) = child_fndecl;
7406 tree p = build_global_decl ("p", ptr_type_node);
7409 tree q = build_global_decl ("q", ptr_type_node);
7411 region_model_manager mgr;
7412 test_region_model_context ctxt;
7413 region_model model (&mgr);
7415 /* Push stack frame for "parent_fn". */
7416 const region *parent_frame_reg
7417 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
7418 NULL, &ctxt);
7419 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
7420 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
7421 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
7422 model.set_value (a_in_parent_reg,
7423 model.get_rvalue (int_42, &ctxt),
7424 &ctxt);
7425 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
7427 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
7428 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
7429 tristate (tristate::TS_TRUE));
7431 /* Push stack frame for "child_fn". */
7432 const region *child_frame_reg
7433 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
7434 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
7435 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
7436 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
7437 model.set_value (x_in_child_reg,
7438 model.get_rvalue (int_0, &ctxt),
7439 &ctxt);
7440 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
7442 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
7443 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
7444 tristate (tristate::TS_TRUE));
7446 /* Point a global pointer at a local in the child frame: p = &x. */
7447 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
7448 model.set_value (p_in_globals_reg,
7449 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
7450 &ctxt);
7451 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
7453 /* Point another global pointer at p: q = &p. */
7454 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
7455 model.set_value (q_in_globals_reg,
7456 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
7457 &ctxt);
7459 /* Test region::descendent_of_p. */
7460 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
7461 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
7462 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
7464 /* Pop the "child_fn" frame from the stack. */
7465 model.pop_frame (NULL, NULL, &ctxt);
7466 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
7467 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
7469 /* Verify that p (which was pointing at the local "x" in the popped
7470 frame) has been poisoned. */
7471 const svalue *new_p_sval = model.get_rvalue (p, NULL);
7472 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
7473 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
7474 POISON_KIND_POPPED_STACK);
7476 /* Verify that q still points to p, in spite of the region
7477 for "x" having been popped. */
7478 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
7479 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
7480 ASSERT_EQ (new_q_sval->maybe_get_region (),
7481 model.get_lvalue (p, &ctxt));
7483 /* Verify that top of stack has been updated. */
7484 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
7486 /* Verify locals in parent frame. */
7487 /* Verify "a" still has its value. */
7488 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
7489 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
7490 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
7491 int_42);
7492 /* Verify "b" still has its constraint. */
7493 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
7494 tristate (tristate::TS_TRUE));
7497 /* Verify that get_representative_path_var works as expected, that
7498 we can map from regions to parms and back within a recursive call
7499 stack. */
7501 static void
7502 test_get_representative_path_var ()
7504 auto_vec <tree> param_types;
7505 tree fndecl = make_fndecl (integer_type_node,
7506 "factorial",
7507 param_types);
7508 allocate_struct_function (fndecl, true);
7511 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7512 get_identifier ("n"),
7513 integer_type_node);
7514 DECL_CONTEXT (n) = fndecl;
7516 region_model_manager mgr;
7517 test_region_model_context ctxt;
7518 region_model model (&mgr);
7520 /* Push 5 stack frames for "factorial", each with a param. */
7521 auto_vec<const region *> parm_regs;
7522 auto_vec<const svalue *> parm_svals;
7523 for (int depth = 0; depth < 5; depth++)
7525 const region *frame_n_reg
7526 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
7527 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
7528 parm_regs.safe_push (parm_n_reg);
7530 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
7531 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
7532 parm_svals.safe_push (sval_n);
7535 /* Verify that we can recognize that the regions are the parms,
7536 at every depth. */
7537 for (int depth = 0; depth < 5; depth++)
7539 svalue_set visited;
7541 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
7542 &visited),
7543 path_var (n, depth + 1));
7545 /* ...and that we can lookup lvalues for locals for all frames,
7546 not just the top. */
7547 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
7548 parm_regs[depth]);
7549 /* ...and that we can locate the svalues. */
7551 svalue_set visited;
7552 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
7553 &visited),
7554 path_var (n, depth + 1));
7559 /* Ensure that region_model::operator== works as expected. */
7564 tree int_42 = build_int_cst (integer_type_node, 42);
7565 tree int_17 = build_int_cst (integer_type_node, 17);
7567 /* Verify that "empty" region_model instances are equal to each other. */
7568 region_model_manager mgr;
7569 region_model model0 (&mgr);
7570 region_model model1 (&mgr);
7571 ASSERT_EQ (model0, model1);
7573 /* Verify that setting state in model1 makes the models non-equal. */
7574 tree x = build_global_decl ("x", integer_type_node);
7575 model0.set_value (x, int_42, NULL);
7576 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
7577 ASSERT_NE (model0, model1);
7579 /* Verify the copy-ctor. */
7580 region_model model2 (model0);
7581 ASSERT_EQ (model0, model2);
7582 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
7583 ASSERT_NE (model1, model2);
7585 /* Verify that models obtained from copy-ctor are independently editable
7586 w/o affecting the original model. */
7587 model2.set_value (x, int_17, NULL);
7588 ASSERT_NE (model0, model2);
7589 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
7590 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
7593 /* Verify that region models for
7594 x = 42; y = 113;
7595 and
7596 y = 113; x = 42;
7597 are equal after canonicalization. */
7599 static void
7600 test_canonicalization_2 ()
7602 tree int_42 = build_int_cst (integer_type_node, 42);
7603 tree int_113 = build_int_cst (integer_type_node, 113);
7604 tree x = build_global_decl ("x", integer_type_node);
7605 tree y = build_global_decl ("y", integer_type_node);
7607 region_model_manager mgr;
7608 region_model model0 (&mgr);
7609 model0.set_value (model0.get_lvalue (x, NULL),
7610 model0.get_rvalue (int_42, NULL),
7611 NULL);
7612 model0.set_value (model0.get_lvalue (y, NULL),
7613 model0.get_rvalue (int_113, NULL),
7614 NULL);
7616 region_model model1 (&mgr);
7617 model1.set_value (model1.get_lvalue (y, NULL),
7618 model1.get_rvalue (int_113, NULL),
7619 NULL);
7620 model1.set_value (model1.get_lvalue (x, NULL),
7621 model1.get_rvalue (int_42, NULL),
7622 NULL);
7624 ASSERT_EQ (model0, model1);
7627 /* Verify that constraints for
7628 x > 3 && y > 42
7629 and
7630 y > 42 && x > 3
7631 are equal after canonicalization. */
7634 test_canonicalization_3 ()
7636 tree int_3 = build_int_cst (integer_type_node, 3);
7637 tree int_42 = build_int_cst (integer_type_node, 42);
7638 tree x = build_global_decl ("x", integer_type_node);
7639 tree y = build_global_decl ("y", integer_type_node);
7641 region_model_manager mgr;
7642 region_model model0 (&mgr);
7643 model0.add_constraint (x, GT_EXPR, int_3, NULL);
7644 model0.add_constraint (y, GT_EXPR, int_42, NULL);
7646 region_model model1 (&mgr);
7647 model1.add_constraint (y, GT_EXPR, int_42, NULL);
7648 model1.add_constraint (x, GT_EXPR, int_3, NULL);
7650 model0.canonicalize ();
7651 model1.canonicalize ();
7652 ASSERT_EQ (model0, model1);
7655 /* Verify that we can canonicalize a model containing NaN and other real
7656 constants. */
7658 static void
7659 test_canonicalization_4 ()
7661 auto_vec<tree> csts;
7662 append_interesting_constants (&csts);
7664 region_model_manager mgr;
7665 region_model model (&mgr);
7667 for (tree cst : csts)
7668 model.get_rvalue (cst, NULL);
7670 model.canonicalize ();
7673 /* Assert that if we have two region_model instances
7674 with values VAL_A and VAL_B for EXPR that they are
7675 mergeable. Write the merged model to *OUT_MERGED_MODEL,
7676 and the merged svalue ptr to *OUT_MERGED_SVALUE.
7677 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
7678 for that region_model. */
7681 assert_region_models_merge (tree expr, tree val_a, tree val_b,
7682 region_model *out_merged_model,
7683 const svalue **out_merged_svalue)
7685 region_model_manager *mgr = out_merged_model->get_manager ();
7686 program_point point (program_point::origin (*mgr));
7687 test_region_model_context ctxt;
7688 region_model model0 (mgr);
7689 region_model model1 (mgr);
7690 if (val_a)
7691 model0.set_value (model0.get_lvalue (expr, &ctxt),
7692 model0.get_rvalue (val_a, &ctxt),
7693 &ctxt);
7694 if (val_b)
7695 model1.set_value (model1.get_lvalue (expr, &ctxt),
7696 model1.get_rvalue (val_b, &ctxt),
7697 &ctxt);
7699 /* They should be mergeable. */
7700 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
7701 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
7704 /* Verify that we can merge region_model instances. */
7707 test_state_merging ()
7709 tree int_42 = build_int_cst (integer_type_node, 42);
7710 tree int_113 = build_int_cst (integer_type_node, 113);
7711 tree x = build_global_decl ("x", integer_type_node);
7712 tree y = build_global_decl ("y", integer_type_node);
7713 tree z = build_global_decl ("z", integer_type_node);
7714 tree p = build_global_decl ("p", ptr_type_node);
7716 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
7717 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
7719 auto_vec <tree> param_types;
7720 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
7721 allocate_struct_function (test_fndecl, true);
7724 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7725 get_identifier ("a"),
7726 integer_type_node);
7727 DECL_CONTEXT (a) = test_fndecl;
7728 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
7730 /* Param "q", a pointer. */
7731 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7732 get_identifier ("q"),
7733 ptr_type_node);
7734 DECL_CONTEXT (q) = test_fndecl;
7736 region_model_manager mgr;
7737 program_point point (program_point::origin (mgr));
7740 region_model model0 (&mgr);
7741 region_model model1 (&mgr);
7742 region_model merged (&mgr);
7743 /* Verify empty models can be merged. */
7744 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7745 ASSERT_EQ (model0, merged);
7748 /* Verify that we can merge two contradictory constraints on the
7749 value for a global. */
7750 /* TODO: verify that the merged model doesn't have a value for
7751 the global. */
7753 region_model model0 (&mgr);
7754 region_model model1 (&mgr);
7755 region_model merged (&mgr);
7756 test_region_model_context ctxt;
7757 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7758 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7759 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7760 ASSERT_NE (model0, merged);
7761 ASSERT_NE (model1, merged);
7764 /* Verify handling of a PARM_DECL. */
7766 test_region_model_context ctxt;
7767 region_model model0 (&mgr);
7768 region_model model1 (&mgr);
7769 ASSERT_EQ (model0.get_stack_depth (), 0);
7770 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7771 ASSERT_EQ (model0.get_stack_depth (), 1);
7772 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7774 placeholder_svalue test_sval (integer_type_node, "test sval");
7775 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7776 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7777 ASSERT_EQ (model0, model1);
7779 /* They should be mergeable, and the result should be the same. */
7780 region_model merged (&mgr);
7781 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7782 ASSERT_EQ (model0, merged);
7783 /* In particular, "a" should have the placeholder value. */
7784 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
7787 /* Verify handling of a global. */
7789 test_region_model_context ctxt;
7790 region_model model0 (&mgr);
7791 region_model model1 (&mgr);
7793 placeholder_svalue test_sval (integer_type_node, "test sval");
7794 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7795 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7796 ASSERT_EQ (model0, model1);
7798 /* They should be mergeable, and the result should be the same. */
7799 region_model merged (&mgr);
7800 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7801 ASSERT_EQ (model0, merged);
7802 /* In particular, "x" should have the placeholder value. */
7803 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
7806 /* Use global-handling to verify various combinations of values. */
7808 /* Two equal constant values. */
7810 region_model merged (&mgr);
7811 const svalue *merged_x_sval;
7812 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
7814 /* In particular, there should be a constant value for "x". */
7815 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
7816 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
7817 int_42);
7820 /* Two non-equal constant values. */
7822 region_model merged (&mgr);
7823 const svalue *merged_x_sval;
7824 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
7826 /* In particular, there should be a "widening" value for "x". */
7827 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
7830 /* Initial and constant. */
7832 region_model merged (&mgr);
7833 const svalue *merged_x_sval;
7834 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
7836 /* In particular, there should be an unknown value for "x". */
7837 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7840 /* Constant and initial. */
7842 region_model merged (&mgr);
7843 const svalue *merged_x_sval;
7844 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
7846 /* In particular, there should be an unknown value for "x". */
7847 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7850 /* Unknown and constant. */
7853 /* Pointers: NULL and NULL. */
7856 /* Pointers: NULL and non-NULL. */
7859 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7861 region_model model0 (&mgr);
7862 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7863 model0.set_value (model0.get_lvalue (p, NULL),
7864 model0.get_rvalue (addr_of_a, NULL), NULL);
7866 region_model model1 (model0);
7867 ASSERT_EQ (model0, model1);
7869 /* They should be mergeable, and the result should be the same. */
7870 region_model merged (&mgr);
7871 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7872 ASSERT_EQ (model0, merged);
7875 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7877 region_model merged (&mgr);
7878 /* p == &y in both input models. */
7879 const svalue *merged_p_sval;
7880 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
7881 &merged_p_sval);
7883 /* We should get p == &y in the merged model. */
7884 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
7885 const region_svalue *merged_p_ptr
7886 = merged_p_sval->dyn_cast_region_svalue ();
7887 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
7888 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
7891 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7893 region_model merged (&mgr);
7894 /* x == &y vs x == &z in the input models; these are actually casts
7895 of the ptrs to "int". */
7896 const svalue *merged_x_sval;
7898 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
7899 &merged_x_sval);
7901 /* We should get x == unknown in the merged model. */
7902 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7905 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
7907 test_region_model_context ctxt;
7908 region_model model0 (&mgr);
7909 tree size = build_int_cst (size_type_node, 1024);
7910 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
7911 const region *new_reg
7912 = model0.create_region_for_heap_alloc (size_sval, &ctxt);
7913 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
7914 model0.set_value (model0.get_lvalue (p, &ctxt),
7915 ptr_sval, &ctxt);
7917 region_model model1 (model0);
7919 ASSERT_EQ (model0, model1);
7921 region_model merged (&mgr);
7922 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7924 /* The merged model ought to be identical. */
7925 ASSERT_EQ (model0, merged);
7928 /* Two regions sharing the same placeholder svalue should continue sharing
7929 it after self-merger. */
7931 test_region_model_context ctxt;
7932 region_model model0 (&mgr);
7933 placeholder_svalue placeholder_sval (integer_type_node, "test");
7934 model0.set_value (model0.get_lvalue (x, &ctxt),
7935 &placeholder_sval, &ctxt);
7936 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
7937 region_model model1 (model0);
7939 /* They should be mergeable, and the result should be the same. */
7940 region_model merged (&mgr);
7941 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7942 ASSERT_EQ (model0, merged);
7944 /* In particular, we should have x == y. */
7945 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
7946 tristate (tristate::TS_TRUE));
7950 region_model model0 (&mgr);
7951 region_model model1 (&mgr);
7952 test_region_model_context ctxt;
7953 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7954 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7955 region_model merged (&mgr);
7956 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7960 region_model model0 (&mgr);
7961 region_model model1 (&mgr);
7962 test_region_model_context ctxt;
7963 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7964 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7965 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
7966 region_model merged (&mgr);
7967 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7970 // TODO: what can't we merge? need at least one such test
7972 /* TODO: various things
7975 - every combination, but in particular
7981 test_region_model_context ctxt;
7982 region_model model0 (&mgr);
7984 const region *x_reg = model0.get_lvalue (x, &ctxt);
7985 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
7986 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
7988 region_model model1 (model0);
7989 ASSERT_EQ (model1, model0);
7991 /* They should be mergeable, and the result should be the same. */
7992 region_model merged (&mgr);
7993 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7996 /* Verify that we can merge a model in which a local in an older stack
7997 frame points to a local in a more recent stack frame. */
7999 region_model model0 (&mgr);
8000 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8001 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
8003 /* Push a second frame. */
8004 const region *reg_2nd_frame
8005 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8007 /* Have a pointer in the older frame point to a local in the
8008 more recent frame. */
8009 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
8010 model0.set_value (q_in_first_frame, sval_ptr, NULL);
8012 /* Verify that it's pointing at the newer frame. */
8013 const region *reg_pointee = sval_ptr->maybe_get_region ();
8014 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
8016 model0.canonicalize ();
8018 region_model model1 (model0);
8019 ASSERT_EQ (model0, model1);
8021 /* They should be mergeable, and the result should be the same
8022 (after canonicalization, at least). */
8023 region_model merged (&mgr);
8024 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8025 merged.canonicalize ();
8026 ASSERT_EQ (model0, merged);
8029 /* Verify that we can merge a model in which a local points to a global. */
8031 region_model model0 (&mgr);
8032 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8033 model0.set_value (model0.get_lvalue (q, NULL),
8034 model0.get_rvalue (addr_of_y, NULL), NULL);
8036 region_model model1 (model0);
8037 ASSERT_EQ (model0, model1);
8039 /* They should be mergeable, and the result should be the same
8040 (after canonicalization, at least). */
8041 region_model merged (&mgr);
8042 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8043 ASSERT_EQ (model0, merged);
8047 /* Verify that constraints are correctly merged when merging region_model
8048 instances. */
8050 static void
8051 test_constraint_merging ()
8053 tree int_0 = build_int_cst (integer_type_node, 0);
8054 tree int_5 = build_int_cst (integer_type_node, 5);
8055 tree x = build_global_decl ("x", integer_type_node);
8056 tree y = build_global_decl ("y", integer_type_node);
8057 tree z = build_global_decl ("z", integer_type_node);
8058 tree n = build_global_decl ("n", integer_type_node);
8060 region_model_manager mgr;
8061 test_region_model_context ctxt;
8063 /* model0: 0 <= (x == y) < n. */
8064 region_model model0 (&mgr);
8065 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
8066 model0.add_constraint (x, GE_EXPR, int_0, NULL);
8067 model0.add_constraint (x, LT_EXPR, n, NULL);
8069 /* model1: z != 5 && (0 <= x < n). */
8070 region_model model1 (&mgr);
8071 model1.add_constraint (z, NE_EXPR, int_5, NULL);
8072 model1.add_constraint (x, GE_EXPR, int_0, NULL);
8073 model1.add_constraint (x, LT_EXPR, n, NULL);
8075 /* They should be mergeable; the merged constraints should
8076 be: (0 <= x < n). */
8077 program_point point (program_point::origin (mgr));
8078 region_model merged (&mgr);
8079 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8081 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
8082 tristate (tristate::TS_TRUE));
8083 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
8084 tristate (tristate::TS_TRUE));
8086 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
8087 tristate (tristate::TS_UNKNOWN));
8088 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
8089 tristate (tristate::TS_UNKNOWN));
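  /* i.e. the merged state keeps only the intersection of the two
     constraint sets: "0 <= x < n" survives because both inputs imply it,
     while the one-sided facts "z != 5" and "x == y" are dropped.  */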
8092 /* Verify that widening_svalue::eval_condition_without_cm works as
8093 expected. */
8095 static void
8096 test_widening_constraints ()
8098 region_model_manager mgr;
8099 function_point point (program_point::origin (mgr).get_function_point ());
8100 tree int_0 = build_int_cst (integer_type_node, 0);
8101 tree int_m1 = build_int_cst (integer_type_node, -1);
8102 tree int_1 = build_int_cst (integer_type_node, 1);
8103 tree int_256 = build_int_cst (integer_type_node, 256);
8104 test_region_model_context ctxt;
8105 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
8106 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
8107 const svalue *w_zero_then_one_sval
8108 = mgr.get_or_create_widening_svalue (integer_type_node, point,
8109 int_0_sval, int_1_sval);
8110 const widening_svalue *w_zero_then_one
8111 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
8112 ASSERT_EQ (w_zero_then_one->get_direction (),
8113 widening_svalue::DIR_ASCENDING);
8114 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
8115 tristate::TS_FALSE);
8116 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
8117 tristate::TS_FALSE);
8118 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
8119 tristate::TS_UNKNOWN);
8120 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
8121 tristate::TS_UNKNOWN);
8123 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
8124 tristate::TS_FALSE);
8125 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
8126 tristate::TS_UNKNOWN);
8127 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
8128 tristate::TS_UNKNOWN);
8129 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
8130 tristate::TS_UNKNOWN);
8132 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
8133 tristate::TS_TRUE);
8134 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
8135 tristate::TS_UNKNOWN);
8136 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
8137 tristate::TS_UNKNOWN);
8138 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
8139 tristate::TS_UNKNOWN);
8141 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
8142 tristate::TS_TRUE);
8143 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
8144 tristate::TS_TRUE);
8145 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
8146 tristate::TS_UNKNOWN);
8147 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
8148 tristate::TS_UNKNOWN);
8150 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
8151 tristate::TS_FALSE);
8152 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
8153 tristate::TS_UNKNOWN);
8154 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
8155 tristate::TS_UNKNOWN);
8156 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
8157 tristate::TS_UNKNOWN);
8159 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
8160 tristate::TS_TRUE);
8161 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
8162 tristate::TS_UNKNOWN);
8163 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
8164 tristate::TS_UNKNOWN);
8165 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
8166 tristate::TS_UNKNOWN);
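  /* In summary: a DIR_ASCENDING widened value is known to be at least its
     starting value (0 here), so conditions placing it strictly below that
     start fold to a definite answer, while anything at or above the start
     stays unknown, since the eventual upper bound is not modeled.  */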
8169 /* Verify merging constraints for states simulating successive iterations
8170 of a loop.
8171 Simulate:
8172 for (i = 0; i < 256; i++)
8173 [...body...]
8174 i.e. this gimple:
8175 ;; basic block 4, loop depth 1
8176 ;;  preds: 3
8177 ;;  succs: 4
8179 i_11 = PHI <i_15(2), i_23(3)>
8180 ...body...
8181 i_23 = i_11 + 1;
8182 if (i_23 <= 255)
8183 goto <bb 4>;
8184 else
8185 goto [AFTER LOOP]
8189 and thus these ops (and resultant states):
8190 i_11 = PHI()
8191 {i_11: 0}
8192 add_constraint (i_11 <= 255) [for the true edge]
8193 {i_11: 0} [constraint was a no-op]
8194 i_23 = i_11 + 1;
8195 {i_23: 1}
8196 i_11 = PHI()
8197 {i_11: WIDENED (at phi, 0, 1)}
8198 add_constraint (i_11 <= 255) [for the true edge]
8199 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
8200 i_23 = i_11 + 1;
8201 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
8202 i_11 = PHI(); merge with state at phi above
8203 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
8204 [changing meaning of "WIDENED" here]
8205 if (i_11 <= 255)
8206 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
8207 F: {i_11: 256}
8208 */
8210 static void
8211 test_iteration_1 ()
8213 region_model_manager mgr;
8214 program_point point (program_point::origin (mgr));
8216 tree int_0 = build_int_cst (integer_type_node, 0);
8217 tree int_1 = build_int_cst (integer_type_node, 1);
8218 tree int_256 = build_int_cst (integer_type_node, 256);
8219 tree int_257 = build_int_cst (integer_type_node, 257);
8220 tree i = build_global_decl ("i", integer_type_node);
8222 test_region_model_context ctxt;
8225 region_model model0 (&mgr);
8226 model0.set_value (i, int_0, &ctxt);
8229 region_model model1 (&mgr);
8230 model1.set_value (i, int_1, &ctxt);
8232 /* Should merge "i" to a widened value. */
8233 region_model model2 (&mgr);
8234 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
8235 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
8236 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
8237 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
8238 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
8240 /* Add constraint: i < 256 */
8241 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
8242 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
8243 tristate (tristate::TS_TRUE));
8244 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
8245 tristate (tristate::TS_TRUE));
8247 /* Try merging with the initial state. */
8248 region_model model3 (&mgr);
8249 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
8250 /* Merging the merged value with the initial value should be idempotent,
8251 so that the analysis converges. */
8252 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
8253 /* Merger of 0 and a widening value with constraint < CST
8254 should retain the constraint, even though it was implicit
8255 for one of the inputs. */
8256 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
8257 tristate (tristate::TS_TRUE));
8258 /* ...and we should have equality: the analysis should have converged. */
8259 ASSERT_EQ (model3, model2);
8261 /* "i_23 = i_11 + 1;" */
8262 region_model model4 (model3);
8263 ASSERT_EQ (model4, model2);
8264 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
8265 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
8266 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
8268 /* Try merging with the "i: 1" state. */
8269 region_model model5 (&mgr);
8270 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
8271 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
8272 ASSERT_EQ (model5, model4);
8274 /* "i_11 = PHI();" merge with state at phi above.
8275 For i, we should have a merger of WIDENING with WIDENING + 1,
8276 and this should be WIDENING again. */
8277 region_model model6 (&mgr);
8278 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
8279 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
8280 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
8282 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
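  /* Why 257 rather than 256: the merged value also covers the incremented
     "WIDENED + 1" from the loop body, and "i < 256" before the increment
     only guarantees "i + 1 < 257" after it.  */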
8285 /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
8286 all cast pointers to that region are also known to be non-NULL. */
8289 test_malloc_constraints ()
8291 region_model_manager mgr;
8292 region_model model (&mgr);
8293 tree p = build_global_decl ("p", ptr_type_node);
8294 tree char_star = build_pointer_type (char_type_node);
8295 tree q = build_global_decl ("q", char_star);
8296 tree null_ptr = build_int_cst (ptr_type_node, 0);
8298 const svalue *size_in_bytes
8299 = mgr.get_or_create_unknown_svalue (size_type_node);
8300 const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
8301 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
8302 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
8303 model.set_value (q, p, NULL);
8305 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
8306 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
8307 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
8308 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
8310 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
8312 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
8313 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
8314 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
8315 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
8318 /* Smoketest of getting and setting the value of a variable. */
8324 tree i = build_global_decl ("i", integer_type_node);
8326 tree int_17 = build_int_cst (integer_type_node, 17);
8327 tree int_m3 = build_int_cst (integer_type_node, -3);
8329 region_model_manager mgr;
8330 region_model model (&mgr);
8332 const region *i_reg = model.get_lvalue (i, NULL);
8333 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
8335 /* Reading "i" should give a symbolic "initial value". */
8336 const svalue *sval_init = model.get_rvalue (i, NULL);
8337 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
8338 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
8339 /* ..and doing it again should give the same "initial value". */
8340 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
8343 model.set_value (i, int_17, NULL);
8344 ASSERT_EQ (model.get_rvalue (i, NULL),
8345 model.get_rvalue (int_17, NULL));
8348 model.set_value (i, int_m3, NULL);
8349 ASSERT_EQ (model.get_rvalue (i, NULL),
8350 model.get_rvalue (int_m3, NULL));
8352 /* Verify get_offset for "i". */
8354 region_offset offset = i_reg->get_offset (&mgr);
8355 ASSERT_EQ (offset.get_base_region (), i_reg);
8356 ASSERT_EQ (offset.get_bit_offset (), 0);
8363 /* "int arr[10];" */
8364 tree tlen = size_int (10);
8365 tree arr_type
8366 = build_array_type (integer_type_node, build_index_type (tlen));
8367 tree arr = build_global_decl ("arr", arr_type);
8370 tree i = build_global_decl ("i", integer_type_node);
8372 tree int_0 = build_int_cst (integer_type_node, 0);
8373 tree int_1 = build_int_cst (integer_type_node, 1);
8375 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
8376 arr, int_0, NULL_TREE, NULL_TREE);
8377 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
8378 arr, int_1, NULL_TREE, NULL_TREE);
8379 tree arr_i = build4 (ARRAY_REF, integer_type_node,
8380 arr, i, NULL_TREE, NULL_TREE);
8382 tree int_17 = build_int_cst (integer_type_node, 17);
8383 tree int_42 = build_int_cst (integer_type_node, 42);
8384 tree int_m3 = build_int_cst (integer_type_node, -3);
8386 region_model_manager mgr;
8387 region_model model (&mgr);
8388 /* "arr[0] = 17;". */
8389 model.set_value (arr_0, int_17, NULL);
8390 /* "arr[1] = -3;". */
8391 model.set_value (arr_1, int_m3, NULL);
8393 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
8394 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
8396 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
8397 model.set_value (arr_1, int_42, NULL);
8398 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
8400 /* Verify get_offset for "arr[0]". */
8402 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
8403 region_offset offset = arr_0_reg->get_offset (&mgr);
8404 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
8405 ASSERT_EQ (offset.get_bit_offset (), 0);
8408 /* Verify get_offset for "arr[1]". */
8410 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
8411 region_offset offset = arr_1_reg->get_offset (&mgr);
8412 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
8413 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
8416 /* Verify get_offset for "arr[i]". */
8418 const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
8419 region_offset offset = arr_i_reg->get_offset (&mgr);
8420 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
8421 ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
8424 /* "arr[i] = i;" - this should remove the earlier bindings. */
8425 model.set_value (arr_i, i, NULL);
8426 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
8427 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
8429 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
8430 model.set_value (arr_0, int_17, NULL);
8431 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
8432 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
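
/* The array test above is roughly analogous to this C (an illustrative
   sketch):

     int arr[10];
     int i;
     arr[0] = 17;
     arr[1] = -3;
     arr[1] = 42;   // overwrites the -3 binding
     arr[i] = i;    // symbolic index: invalidates the concrete bindings
     arr[0] = 17;   // concrete store: invalidates the arr[i] binding
*/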

/* Smoketest of dereferencing a pointer via MEM_REF.  */
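
/* For context: MEM_REF is the gimple-level dereference;
   build2 (MEM_REF, TYPE, PTR, OFFSET) denotes *(TYPE *)(PTR + OFFSET),
   so with a zero offset the tree built below is simply "*p".  */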

static void
test_mem_ref ()
{
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;".  */
  model.set_value (x, int_17, NULL);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int)_3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int)_8;
       __analyzer_eval (_9);  */
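
/* Note on the constant above: the byte offset 12 is 3 * sizeof (int)
   on targets where int is 4 bytes (assumed here), so "a + 12" is the
   address of a[3].  */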

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works.  */
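
/* A rough C-level picture of what the test below models (an
   illustrative sketch; the size stays symbolic, since "n" is unknown):

     int *p;
     int n;
     p = malloc (n * 4);
     // get_capacity on the new heap region should report the symbolic
     // size "n * 4" rather than an unknown value
*/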

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works.  */
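
/* Roughly analogous C (an illustrative sketch):

     int *p;  // global
     void test_fn (void)
     {
       p = alloca (n * 4);   // region lives in test_fn's stack frame
     }
     // once the frame is popped, reads of "p" yield a poisoned value,
     // modeling a dangling pointer to the defunct alloca region
*/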

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works.  */
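
/* involves_p asks whether one symbolic value is (transitively) built
   in terms of another.  E.g. the initial value of "*p" involves the
   initial value of "p" (the pointer must be followed to reach it), but
   not vice versa, and neither involves the unrelated "q".  */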

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */