/* Classes for modeling the state of memory.
   Copyright (C) 2019-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "options.h"
#include "stringpool.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "analyzer/call-summary.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"
#include "is-a.h"
#include "gcc-rich-location.h"

#if ENABLE_ANALYZER

namespace ana {
/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}
/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
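
/* For example (an illustrative sketch; actual call sites are elsewhere
   in the analyzer), a caller emits the surrounding text directly and
   then uses:
     pp_string (pp, "invalid cast to ");
     print_quoted_type (pp, type);
   rather than the single call:
     pp_printf (pp, "invalid cast to %qT", type);
   which would recurse into pp_printf via dump_generic_node.  */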
/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}
/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
        return false;
      if (sval != *other_slot)
        return false;
    }

  return true;
}
/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
                                 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (i > 0)
        pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}
/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}
/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection, rather than
   rejecting differences.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
                                       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
        if (iter_sval == *other_slot)
          out->put (iter_reg, iter_sval);
    }
  return true;
}
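
/* For example (with illustrative names), merging
     this:  {reg_a: sval_x, reg_b: sval_y}
   with
     OTHER: {reg_a: sval_x, reg_b: sval_z}
   writes just {reg_a: sval_x} to OUT: mappings on which both maps agree
   survive, and the conflicting mapping for reg_b is dropped.  */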
/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
        to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}
/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}
/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}
/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}
/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}
/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}
/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
                          bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
        pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
                      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}
/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}
/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}
/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}
/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}
/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
                             const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
            && m_pkind == other.m_pkind
            && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
        return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
        return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        {
          diagnostic_metadata m;
          m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use of uninitialized value %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_FREED:
        {
          diagnostic_metadata m;
          m.add_cwe (416); /* "CWE-416: Use After Free".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use after %<free%> of %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_POPPED_STACK:
        {
          /* TODO: which CWE?  */
          return warning_at
            (rich_loc, get_controlling_option (),
             "dereferencing pointer %qE to within stale stack frame",
             m_expr);
        }
        break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return ev.formatted_print ("use of uninitialized value %qE here",
                                   m_expr);
      case POISON_KIND_FREED:
        return ev.formatted_print ("use after %<free%> of %qE here",
                                   m_expr);
      case POISON_KIND_POPPED_STACK:
        return ev.formatted_print
          ("dereferencing pointer %qE to within stale stack frame",
           m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
};
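
/* As a minimal illustrative example, POISON_KIND_UNINIT covers C code
   such as:

     int f (void)
     {
       int i;
       return i;  <-- "use of uninitialized value 'i'" (CWE-457)
     }

   whereas POISON_KIND_FREED covers reads through a pointer after it has
   been freed (CWE-416), and POISON_KIND_POPPED_STACK covers dereferencing
   a pointer into a stack frame that has already been popped.  */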
/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
                               m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};
/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
                                   int operand_precision,
                                   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && m_operand_precision == other.m_operand_precision
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by count (%qE) >= precision of type (%qi)",
                       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};
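
/* Illustrative examples of the two shift diagnostics above, assuming a
   32-bit int:

     int f (int x) { return x << -1; }  <-- "shift by negative count (-1)"
     int g (int x) { return x << 32; }  <-- "shift by count (32) >=
                                             precision of type (32)"

   both following "INT34-C" from the SEI CERT C Coding Standard, as noted
   where the checks are made in region_model::get_gassign_result.  */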
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
                                  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
        /* e.g. "_1 = a_10(D) + 12;" */
        tree ptr = rhs1;
        tree offset = gimple_assign_rhs2 (assign);

        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
           is an integer of type sizetype".  */
        offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        ptr_sval, offset_sval);
        return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
        /* e.g. "_1 = p_2(D) - q_3(D);".  */
        tree rhs2 = gimple_assign_rhs2 (assign);
        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        // TODO: perhaps fold to zero if they're known to be equal?

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    /* Assignments of the form
         set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        /* Unary ops.  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        const svalue *sval_unaryop
          = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
        return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (TREE_TYPE (lhs) == boolean_type_node)
          {
            /* Consider constraints between svalues.  */
            tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
            if (t.is_known ())
              return m_mgr->get_or_create_constant_svalue
                (t.is_true () ? boolean_true_node : boolean_false_node);
          }

        /* Otherwise, generate a symbolic binary op.  */
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
        /* Binary ops.  */
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
          {
            /* "INT34-C. Do not shift an expression by a negative number of bits
               or by greater than or equal to the number of bits that exist in
               the operand."  */
            if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
              if (TREE_CODE (rhs2_cst) == INTEGER_CST)
                {
                  if (tree_int_cst_sgn (rhs2_cst) < 0)
                    ctxt->warn
                      (make_unique<shift_count_negative_diagnostic>
                         (assign, rhs2_cst));
                  else if (compare_tree_int (rhs2_cst,
                                             TYPE_PRECISION (TREE_TYPE (rhs1)))
                           >= 0)
                    ctxt->warn
                      (make_unique<shift_count_overflow_diagnostic>
                         (assign,
                          int (TYPE_PRECISION (TREE_TYPE (rhs1))),
                          rhs2_cst));
                }
          }

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */
static bool
within_short_circuited_stmt_p (const region_model *model,
                               const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
        && gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
        return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
        return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;"

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of a IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}
/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
                                tree expr,
                                region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
         to initialize.  */
      if (pkind == POISON_KIND_UNINIT
          && sval->get_type ()
          && is_empty_type (sval->get_type ()))
        return sval;

      if (pkind == POISON_KIND_UNINIT)
        if (const gimple *curr_stmt = ctxt->get_stmt ())
          if (const gassign *assign_stmt
                = dyn_cast <const gassign *> (curr_stmt))
            {
              /* Special case to avoid certain false positives.  */
              if (within_short_circuited_stmt_p (this, assign_stmt))
                return sval;

              /* Special case to avoid false positive on
                 -ftrivial-auto-var-init=.  */
              if (due_to_ifn_deferred_init_p (assign_stmt))
                return sval;
            }

      /* If we have an SSA name for a temporary, we don't want to print
         '<unknown>'.
         Poisoned values are shared by type, and so we can't reconstruct
         the tree other than via the def stmts, using
         fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      const region *src_region = NULL;
      if (pkind == POISON_KIND_UNINIT)
        src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
                                                              pkind,
                                                              src_region)))
        {
          /* We only want to report use of a poisoned value at the first
             place it gets used; return an unknown value to avoid generating
             a chain of followup warnings.  */
          sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
        }

      return sval;
    }

  return sval;
}
/* Attempt to get a region for describing EXPR, the source of region of
   a poisoned_svalue for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
        expr = decl;
      else
        return NULL;
    }
  return get_lvalue (expr, NULL);
}
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
        if (0)
          sorry_at (assign->location, "unhandled assignment op: %qs",
                    get_tree_code_name (op));
        const svalue *unknown_sval
          = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
        set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
        if (TREE_CLOBBER_P (rhs1))
          {
            /* e.g. "x ={v} {CLOBBER};"  */
            clobber_region (lhs_reg);
          }
        else
          {
            /* Any CONSTRUCTOR that survives to this point is either
               just a zero-init of everything, or a vector.  */
            if (!CONSTRUCTOR_NO_CLEARING (rhs1))
              zero_fill_region (lhs_reg);
            unsigned ix;
            tree index;
            tree val;
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
              {
                gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
                if (!index)
                  index = build_int_cst (integer_type_node, ix);
                gcc_assert (TREE_CODE (index) == INTEGER_CST);
                const svalue *index_sval
                  = m_mgr->get_or_create_constant_svalue (index);
                gcc_assert (index_sval);
                const region *sub_reg
                  = m_mgr->get_element_region (lhs_reg,
                                               TREE_TYPE (val),
                                               index_sval);
                const svalue *val_sval = get_rvalue (val, ctxt);
                set_value (sub_reg, val_sval, ctxt);
              }
          }
      }
      break;

    case STRING_CST:
      {
        /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
                           ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
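
/* As an illustrative sketch of the cases above, GIMPLE such as:
     x = {};              <-- CONSTRUCTOR: zero-fills the region for "x"
     v = { 1, 2, 3, 4 };  <-- vector CONSTRUCTOR: per-element set_value
     s = "ABCD";          <-- STRING_CST: binds the string constant
   each updates the binding of the lhs region in the store, rather than
   going through get_gassign_result.  */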
/* A pending_diagnostic subclass for implementing "__analyzer_dump_path".  */

class dump_path_diagnostic
  : public pending_diagnostic_subclass<dump_path_diagnostic>
{
public:
  int get_controlling_option () const final override
  {
    return 0;
  }

  bool emit (rich_location *richloc) final override
  {
    inform (richloc, "path");
    return true;
  }

  const char *get_kind () const final override { return "dump_path_diagnostic"; }

  bool operator== (const dump_path_diagnostic &) const
  {
    return true;
  }
};
/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_TERMINATE_PATH if the path should be terminated.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
                           bool *out_terminate_path,
                           bool *out_unknown_side_effects,
                           region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
        const gassign *assign = as_a <const gassign *> (stmt);
        on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
        const gasm *asm_stmt = as_a <const gasm *> (stmt);
        on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
        /* Track whether we have a gcall to a function that's not recognized by
           anything, for which we don't have a function body, or for which we
           don't know the fndecl.  */
        const gcall *call = as_a <const gcall *> (stmt);

        /* Debugging/test support.  */
        if (is_special_named_call_p (call, "__analyzer_describe", 2))
          impl_call_analyzer_describe (call, ctxt);
        else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
          impl_call_analyzer_dump_capacity (call, ctxt);
        else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
          impl_call_analyzer_dump_escaped (call);
        else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
          {
            /* Handle the builtin "__analyzer_dump_path" by queuing a
               diagnostic at this exploded_node.  */
            ctxt->warn (make_unique<dump_path_diagnostic> ());
          }
        else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
                                          0))
          {
            /* Handle the builtin "__analyzer_dump_region_model" by dumping
               the region model's state to stderr.  */
            dump (false);
          }
        else if (is_special_named_call_p (call, "__analyzer_eval", 1))
          impl_call_analyzer_eval (call, ctxt);
        else if (is_special_named_call_p (call, "__analyzer_break", 0))
          {
            /* Handle the builtin "__analyzer_break" by triggering a
               breakpoint.  */
            /* TODO: is there a good cross-platform way to do this?  */
            raise (SIGINT);
          }
        else if (is_special_named_call_p (call,
                                          "__analyzer_dump_exploded_nodes",
                                          1))
          {
            /* This is handled elsewhere.  */
          }
        else if (is_special_named_call_p (call, "__analyzer_get_unknown_ptr",
                                          0))
          {
            call_details cd (call, this, ctxt);
            impl_call_analyzer_get_unknown_ptr (cd);
          }
        else
          *out_unknown_side_effects = on_call_pre (call, ctxt,
                                                   out_terminate_path);
      }
      break;

    case GIMPLE_RETURN:
      {
        const greturn *return_ = as_a <const greturn *> (stmt);
        on_return (return_, ctxt);
      }
      break;
    }
}
/* Abstract base class for all out-of-bounds warnings with concrete values.  */

class out_of_bounds : public pending_diagnostic_subclass<out_of_bounds>
{
public:
  out_of_bounds (const region *reg, tree diag_arg,
                 byte_range out_of_bounds_range)
  : m_reg (reg), m_diag_arg (diag_arg),
    m_out_of_bounds_range (out_of_bounds_range)
  {}

  const char *get_kind () const final override
  {
    return "out_of_bounds_diagnostic";
  }

  bool operator== (const out_of_bounds &other) const
  {
    return m_reg == other.m_reg
           && m_out_of_bounds_range == other.m_out_of_bounds_range
           && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg);
  }

protected:
  const region *m_reg;
  tree m_diag_arg;
  byte_range m_out_of_bounds_range;
};
/* Abstract subclass to complain about out-of-bounds
   past the end of the buffer.  */

class past_the_end : public out_of_bounds
{
public:
  past_the_end (const region *reg, tree diag_arg, byte_range range,
                tree byte_bound)
  : out_of_bounds (reg, diag_arg, range), m_byte_bound (byte_bound)
  {}

  bool operator== (const past_the_end &other) const
  {
    return out_of_bounds::operator== (other)
           && pending_diagnostic::same_tree_p (m_byte_bound,
                                               other.m_byte_bound);
  }

  label_text
  describe_region_creation_event (const evdesc::region_creation &ev) final
  override
  {
    if (m_byte_bound && TREE_CODE (m_byte_bound) == INTEGER_CST)
      return ev.formatted_print ("capacity is %E bytes", m_byte_bound);

    return label_text ();
  }

protected:
  tree m_byte_bound;
};
/* Concrete subclass to complain about buffer overflows.  */

class buffer_overflow : public past_the_end
{
public:
  buffer_overflow (const region *reg, tree diag_arg,
                   byte_range range, tree byte_bound)
  : past_the_end (reg, diag_arg, range, byte_bound)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned;
    switch (m_reg->get_memory_space ())
      {
      default:
        m.add_cwe (787);
        warned = warning_meta (rich_loc, m, get_controlling_option (),
                               "buffer overflow");
        break;
      case MEMSPACE_STACK:
        m.add_cwe (121);
        warned = warning_meta (rich_loc, m, get_controlling_option (),
                               "stack-based buffer overflow");
        break;
      case MEMSPACE_HEAP:
        m.add_cwe (122);
        warned = warning_meta (rich_loc, m, get_controlling_option (),
                               "heap-based buffer overflow");
        break;
      }

    if (warned)
      {
        char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
        print_dec (m_out_of_bounds_range.m_size_in_bytes,
                   num_bytes_past_buf, UNSIGNED);
        if (m_diag_arg)
          inform (rich_loc->get_loc (), "write is %s bytes past the end"
                                        " of %qE", num_bytes_past_buf,
                  m_diag_arg);
        else
          inform (rich_loc->get_loc (), "write is %s bytes past the end"
                                        " of the region",
                  num_bytes_past_buf);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
                                     " ends at byte %E", start_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds write at byte %s but region"
                                   " ends at byte %E", start_buf,
                                   m_byte_bound);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write from byte %s till"
                                     " byte %s but %qE ends at byte %E",
                                     start_buf, end_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds write from byte %s till"
                                   " byte %s but region ends at byte %E",
                                   start_buf, end_buf, m_byte_bound);
      }
  }
};
/* Concrete subclass to complain about buffer overreads.  */

class buffer_overread : public past_the_end
{
public:
  buffer_overread (const region *reg, tree diag_arg,
                   byte_range range, tree byte_bound)
  : past_the_end (reg, diag_arg, range, byte_bound)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (126);
    bool warned = warning_meta (rich_loc, m, get_controlling_option (),
                                "buffer overread");

    if (warned)
      {
        char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
        print_dec (m_out_of_bounds_range.m_size_in_bytes,
                   num_bytes_past_buf, UNSIGNED);
        if (m_diag_arg)
          inform (rich_loc->get_loc (), "read is %s bytes past the end"
                                        " of %qE", num_bytes_past_buf,
                  m_diag_arg);
        else
          inform (rich_loc->get_loc (), "read is %s bytes past the end"
                                        " of the region",
                  num_bytes_past_buf);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
                                     " ends at byte %E", start_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds read at byte %s but region"
                                   " ends at byte %E", start_buf,
                                   m_byte_bound);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read from byte %s till"
                                     " byte %s but %qE ends at byte %E",
                                     start_buf, end_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds read from byte %s till"
                                   " byte %s but region ends at byte %E",
                                   start_buf, end_buf, m_byte_bound);
      }
  }
};
/* Concrete subclass to complain about buffer underflows.  */

class buffer_underflow : public out_of_bounds
{
public:
  buffer_underflow (const region *reg, tree diag_arg, byte_range range)
  : out_of_bounds (reg, diag_arg, range)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (124);
    return warning_meta (rich_loc, m, get_controlling_option (),
                         "buffer underflow");
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
                                     " starts at byte 0", start_buf,
                                     m_diag_arg);
        return ev.formatted_print ("out-of-bounds write at byte %s but region"
                                   " starts at byte 0", start_buf);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write from byte %s till"
                                     " byte %s but %qE starts at byte 0",
                                     start_buf, end_buf, m_diag_arg);
        return ev.formatted_print ("out-of-bounds write from byte %s till"
                                   " byte %s but region starts at byte 0",
                                   start_buf, end_buf);
      }
  }
};
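
/* A minimal illustrative example of a write that buffer_underflow
   would complain about (assuming a 4-byte int):

     int buf[10];
     buf[-1] = 42;  <-- "buffer underflow" (CWE-124): a write of bytes
                        -4 till -1, before the start of "buf"

   buffer_underread (below) is the analogous case for reads.  */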
/* Concrete subclass to complain about buffer underreads.  */

class buffer_underread : public out_of_bounds
{
public:
  buffer_underread (const region *reg, tree diag_arg, byte_range range)
  : out_of_bounds (reg, diag_arg, range)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (127);
    return warning_meta (rich_loc, m, get_controlling_option (),
                         "buffer underread");
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
                                     " starts at byte 0", start_buf,
                                     m_diag_arg);
        return ev.formatted_print ("out-of-bounds read at byte %s but region"
                                   " starts at byte 0", start_buf);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read from byte %s till"
                                     " byte %s but %qE starts at byte 0",
                                     start_buf, end_buf, m_diag_arg);
        return ev.formatted_print ("out-of-bounds read from byte %s till"
                                   " byte %s but region starts at byte 0",
                                   start_buf, end_buf);
      }
  }
};
/* Abstract class to complain about out-of-bounds read/writes where
   the values are symbolic.  */

class symbolic_past_the_end
  : public pending_diagnostic_subclass<symbolic_past_the_end>
{
public:
  symbolic_past_the_end (const region *reg, tree diag_arg, tree offset,
                         tree num_bytes, tree capacity)
  : m_reg (reg), m_diag_arg (diag_arg), m_offset (offset),
    m_num_bytes (num_bytes), m_capacity (capacity)
  {}

  const char *get_kind () const final override
  {
    return "symbolic_past_the_end";
  }

  bool operator== (const symbolic_past_the_end &other) const
  {
    return m_reg == other.m_reg
           && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg)
           && pending_diagnostic::same_tree_p (m_offset, other.m_offset)
           && pending_diagnostic::same_tree_p (m_num_bytes, other.m_num_bytes)
           && pending_diagnostic::same_tree_p (m_capacity, other.m_capacity);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg);
  }

  label_text
  describe_region_creation_event (const evdesc::region_creation &ev) final
  override
  {
    if (m_capacity)
      return ev.formatted_print ("capacity is %qE bytes", m_capacity);

    return label_text ();
  }

  label_text
  describe_final_event (const evdesc::final_event &ev) final override
  {
    const char *byte_str;
    if (pending_diagnostic::same_tree_p (m_num_bytes, integer_one_node))
      byte_str = "byte";
    else
      byte_str = "bytes";

    if (m_offset)
      {
        if (m_num_bytes && TREE_CODE (m_num_bytes) == INTEGER_CST)
          {
            if (m_diag_arg)
              return ev.formatted_print ("%s of %E %s at offset %qE"
                                         " exceeds %qE", m_dir_str,
                                         m_num_bytes, byte_str,
                                         m_offset, m_diag_arg);
            else
              return ev.formatted_print ("%s of %E %s at offset %qE"
                                         " exceeds the buffer", m_dir_str,
                                         m_num_bytes, byte_str, m_offset);
          }
        else if (m_num_bytes)
          {
            if (m_diag_arg)
              return ev.formatted_print ("%s of %qE %s at offset %qE"
                                         " exceeds %qE", m_dir_str,
                                         m_num_bytes, byte_str,
                                         m_offset, m_diag_arg);
            else
              return ev.formatted_print ("%s of %qE %s at offset %qE"
                                         " exceeds the buffer", m_dir_str,
                                         m_num_bytes, byte_str, m_offset);
          }
        else
          {
            if (m_diag_arg)
              return ev.formatted_print ("%s at offset %qE exceeds %qE",
                                         m_dir_str, m_offset, m_diag_arg);
            else
              return ev.formatted_print ("%s at offset %qE exceeds the"
                                         " buffer", m_dir_str, m_offset);
          }
      }
    if (m_diag_arg)
      return ev.formatted_print ("out-of-bounds %s on %qE",
                                 m_dir_str, m_diag_arg);
    return ev.formatted_print ("out-of-bounds %s", m_dir_str);
  }

protected:
  const region *m_reg;
  tree m_diag_arg;
  tree m_offset;
  tree m_num_bytes;
  tree m_capacity;
  const char *m_dir_str;
};
/* Concrete subclass to complain about overflows with symbolic values.  */

class symbolic_buffer_overflow : public symbolic_past_the_end
{
public:
  symbolic_buffer_overflow (const region *reg, tree diag_arg, tree offset,
                            tree num_bytes, tree capacity)
  : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
  {
    m_dir_str = "write";
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    switch (m_reg->get_memory_space ())
      {
      default:
        return warning_meta (rich_loc, m, get_controlling_option (),
                             "buffer overflow");
      case MEMSPACE_STACK:
        return warning_meta (rich_loc, m, get_controlling_option (),
                             "stack-based buffer overflow");
      case MEMSPACE_HEAP:
        return warning_meta (rich_loc, m, get_controlling_option (),
                             "heap-based buffer overflow");
      }
  }
};
/* Concrete subclass to complain about overreads with symbolic values.  */

class symbolic_buffer_overread : public symbolic_past_the_end
{
public:
  symbolic_buffer_overread (const region *reg, tree diag_arg, tree offset,
                            tree num_bytes, tree capacity)
  : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
  {
    m_dir_str = "read";
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    return warning_meta (rich_loc, m, get_controlling_option (),
                         "buffer overread");
  }
};
/* Check whether an access is past the end of the BASE_REG.  */

void
region_model::check_symbolic_bounds (const region *base_reg,
                                     const svalue *sym_byte_offset,
                                     const svalue *num_bytes_sval,
                                     const svalue *capacity,
                                     enum access_direction dir,
                                     region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  const svalue *next_byte
    = m_mgr->get_or_create_binop (num_bytes_sval->get_type (), PLUS_EXPR,
                                  sym_byte_offset, num_bytes_sval);

  if (eval_condition_without_cm (next_byte, GT_EXPR, capacity).is_true ())
    {
      tree diag_arg = get_representative_tree (base_reg);
      tree offset_tree = get_representative_tree (sym_byte_offset);
      tree num_bytes_tree = get_representative_tree (num_bytes_sval);
      tree capacity_tree = get_representative_tree (capacity);
      switch (dir)
        {
        default:
          gcc_unreachable ();
          break;
        case DIR_READ:
          ctxt->warn (make_unique<symbolic_buffer_overread> (base_reg,
                                                             diag_arg,
                                                             offset_tree,
                                                             num_bytes_tree,
                                                             capacity_tree));
          break;
        case DIR_WRITE:
          ctxt->warn (make_unique<symbolic_buffer_overflow> (base_reg,
                                                             diag_arg,
                                                             offset_tree,
                                                             num_bytes_tree,
                                                             capacity_tree));
          break;
        }
    }
}
/* If SVAL's constant (if any) is an INTEGER_CST, return it;
   otherwise return NULL_TREE.  */

static tree
maybe_get_integer_cst_tree (const svalue *sval)
{
  tree cst_tree = sval->maybe_get_constant ();
  if (cst_tree && TREE_CODE (cst_tree) == INTEGER_CST)
    return cst_tree;

  return NULL_TREE;
}
/* May complain when the access on REG is out-of-bounds.  */

void
region_model::check_region_bounds (const region *reg,
                                   enum access_direction dir,
                                   region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  /* Get the offset.  */
  region_offset reg_offset = reg->get_offset (m_mgr);
  const region *base_reg = reg_offset.get_base_region ();

  /* Bail out on symbolic regions.
     (e.g. because the analyzer did not see previous offsets on the latter,
     it might think that a negative access is before the buffer).  */
  if (base_reg->symbolic_p ())
    return;

  /* Find out how many bytes were accessed.  */
  const svalue *num_bytes_sval = reg->get_byte_size_sval (m_mgr);
  tree num_bytes_tree = maybe_get_integer_cst_tree (num_bytes_sval);
  /* Bail out if 0 bytes are accessed.  */
  if (num_bytes_tree && zerop (num_bytes_tree))
    return;

  /* Get the capacity of the buffer.  */
  const svalue *capacity = get_capacity (base_reg);
  tree cst_capacity_tree = maybe_get_integer_cst_tree (capacity);

  /* The constant offset from a pointer is represented internally as a sizetype
     but should be interpreted as a signed value here.  The statement below
     converts the offset from bits to bytes and then to a signed integer with
     the same precision the sizetype has on the target system.

     For example, this is needed for out-of-bounds-3.c test1 to pass when
     compiled with a 64-bit gcc build targeting 32-bit systems.  */
  byte_offset_t offset;
  if (!reg_offset.symbolic_p ())
    offset = wi::sext (reg_offset.get_bit_offset () >> LOG2_BITS_PER_UNIT,
                       TYPE_PRECISION (size_type_node));

  /* If either the offset or the number of bytes accessed are symbolic,
     we have to reason about symbolic values.  */
  if (reg_offset.symbolic_p () || !num_bytes_tree)
    {
      const svalue* byte_offset_sval;
      if (!reg_offset.symbolic_p ())
        {
          tree offset_tree = wide_int_to_tree (integer_type_node, offset);
          byte_offset_sval
            = m_mgr->get_or_create_constant_svalue (offset_tree);
        }
      else
        byte_offset_sval = reg_offset.get_symbolic_byte_offset ();
      check_symbolic_bounds (base_reg, byte_offset_sval, num_bytes_sval,
                             capacity, dir, ctxt);
      return;
    }

  /* Otherwise continue to check with concrete values.  */
  byte_range out (0, 0);
  /* NUM_BYTES_TREE should always be interpreted as unsigned.  */
  byte_offset_t num_bytes_unsigned = wi::to_offset (num_bytes_tree);
  byte_range read_bytes (offset, num_bytes_unsigned);
  /* If read_bytes has a subset < 0, we do have an underflow.  */
  if (read_bytes.falls_short_of_p (0, &out))
    {
      tree diag_arg = get_representative_tree (base_reg);
      switch (dir)
        {
        default:
          gcc_unreachable ();
          break;
        case DIR_READ:
          ctxt->warn (make_unique<buffer_underread> (reg, diag_arg, out));
          break;
        case DIR_WRITE:
          ctxt->warn (make_unique<buffer_underflow> (reg, diag_arg, out));
          break;
        }
    }

  /* For accesses past the end, we do need a concrete capacity.  No need to
     do a symbolic check here because the inequality check does not reason
     whether constants are greater than symbolic values.  */
  if (!cst_capacity_tree)
    return;

  byte_range buffer (0, wi::to_offset (cst_capacity_tree));
  /* If READ_BYTES exceeds BUFFER, we do have an overflow.  */
  if (read_bytes.exceeds_p (buffer, &out))
    {
      tree byte_bound = wide_int_to_tree (size_type_node,
                                          buffer.get_next_byte_offset ());
      tree diag_arg = get_representative_tree (base_reg);

      switch (dir)
        {
        default:
          gcc_unreachable ();
          break;
        case DIR_READ:
          ctxt->warn (make_unique<buffer_overread> (reg, diag_arg,
                                                    out, byte_bound));
          break;
        case DIR_WRITE:
          ctxt->warn (make_unique<buffer_overflow> (reg, diag_arg,
                                                    out, byte_bound));
          break;
        }
    }
}
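
/* An illustrative sketch of the concrete case above:

     int32_t buf[10];
     buf[10] = 1;

   Here the accessed region has concrete offset 40 and size 4, and the
   base region's capacity is the constant 40, so READ_BYTES [40, 43]
   exceeds BUFFER [0, 39] and a buffer_overflow ("stack-based buffer
   overflow") is reported.  With a symbolic index or size, the check is
   instead delegated to check_symbolic_bounds.  */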
/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);
}
/* Return true if CD is known to be a call to a function with
   __attribute__((const)).  */

static bool
const_fn_p (const call_details &cd)
{
  tree fndecl = cd.get_fndecl_for_call ();
  if (!fndecl)
    return false;
  gcc_assert (DECL_P (fndecl));
  return TREE_READONLY (fndecl);
}
/* If this CD is known to be a call to a function with
   __attribute__((const)), attempt to get a const_fn_result_svalue
   based on the arguments, or return NULL otherwise.  */

static const svalue *
maybe_get_const_fn_result (const call_details &cd)
{
  if (!const_fn_p (cd))
    return NULL;

  unsigned num_args = cd.num_args ();
  if (num_args > const_fn_result_svalue::MAX_INPUTS)
    /* Too many arguments.  */
    return NULL;

  auto_vec<const svalue *> inputs (num_args);
  for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
    {
      const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
      if (!arg_sval->can_have_associated_state_p ())
        return NULL;
      inputs.quick_push (arg_sval);
    }

  region_model_manager *mgr = cd.get_manager ();
  const svalue *sval
    = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
                                                 cd.get_fndecl_for_call (),
                                                 inputs);
  return sval;
}
/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
                                         int retval,
                                         bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}
/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
                                      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}
/* Update this model for an outcome of a call that returns non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}
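
/* For example (a sketch of intended usage, not a specific call site),
   code modeling an API that returns zero on failure and non-zero on
   success might bifurcate state and apply:

     failure:  model->update_for_zero_return (cd, true);
     success:  model->update_for_nonzero_return (cd);

   with UNMERGEABLE set on the zero return so that the state-merger
   code keeps the two outcomes apart.  */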
/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
                            region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
        {
          return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
          /* TODO: we might want to also capture the constraint
             when recording the diagnostic, or note that we're using
             the upper bound.  */
        }
  return NULL;
}
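
/* For instance (illustrative), given the svalue for:
     (size_t) MIN_EXPR (n_5, 128)
   the cast is undone, the MIN_EXPR is found to have the constant 128
   as its second argument, and that constant (cast back to size_t) is
   returned as a usable upper bound on the size.  */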
/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
                                     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
        = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}
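
/* E.g. when simulating something like the Linux kernel idiom:
     copy_to_user (dst, src, min_t (unsigned long, len, sizeof (*src)));
   (an illustrative sketch), the MIN_EXPR yields the constant bound
   sizeof (*src) via maybe_simplify_upper_bound; failing that, the
   capacity of SRC_REG's base region is tried, and NULL is returned if
   no constant bound can be found.  */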
/* Get any known_function for FNDECL, or NULL if there is none.  */

const known_function *
region_model::get_known_function (tree fndecl) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_by_fndecl (fndecl);
}
2105 /* Update this model for the CALL stmt, using CTXT to report any
2106 diagnostics - the first half.
2108 Updates to the region_model that should be made *before* sm-states
2109 are updated are done here; other updates to the region_model are done
2110 in region_model::on_call_post.
2112 Return true if the function call has unknown side effects (it wasn't
2113 recognized and we don't have a body for it, or are unable to tell which
2116 Write true to *OUT_TERMINATE_PATH if this execution path should be
2117 terminated (e.g. the function call terminates the process). */
2120 region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
2121 bool *out_terminate_path)
2123 call_details cd (call, this, ctxt);
2125 bool unknown_side_effects = false;
2127 /* Special-case for IFN_DEFERRED_INIT.
2128 We want to report uninitialized variables with -fanalyzer (treating
2129 -ftrivial-auto-var-init= as purely a mitigation feature).
2130 Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
2131 lhs of the call, so that it is still uninitialized from the point of
2132 view of the analyzer. */
2133 if (gimple_call_internal_p (call)
2134 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
2137 /* Get svalues for all of the arguments at the callsite, to ensure that we
2138 complain about any uninitialized arguments. This might lead to
2139 duplicates if any of the handling below also looks up the svalues,
2140 but the deduplication code should deal with that. */
2142 check_call_args (cd);
2144 /* Some of the cases below update the lhs of the call based on the
2145 return value, but not all. Provide a default value, which may
2146 get overwritten below. */
2147 if (tree lhs = gimple_call_lhs (call))
2149 const region *lhs_region = get_lvalue (lhs, ctxt);
2150 const svalue *sval = maybe_get_const_fn_result (cd);
2153 /* For the common case of functions without __attribute__((const)),
2154 use a conjured value, and purge any prior state involving that
2155 value (in case this is in a loop). */
2156 sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
2158 conjured_purge (this,
2161 set_value (lhs_region, sval, ctxt);
2164 if (gimple_call_internal_p (call))
2166 switch (gimple_call_internal_fn (call))
2170 case IFN_BUILTIN_EXPECT:
2171 impl_call_builtin_expect (cd);
2173 case IFN_UBSAN_BOUNDS:
2176 impl_call_va_arg (cd);
2181 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
2183 /* The various impl_call_* member functions are implemented
2184 in region-model-impl-calls.cc.
2185 Having them split out into separate functions makes it easier
2186 to put breakpoints on the handling of specific functions. */
2187 int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
2189 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
2190 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
2191 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
2194 if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
2195 unknown_side_effects = true;
2197 case BUILT_IN_ALLOCA:
2198 case BUILT_IN_ALLOCA_WITH_ALIGN:
2199 impl_call_alloca (cd);
2201 case BUILT_IN_CALLOC:
2202 impl_call_calloc (cd);
2204 case BUILT_IN_EXPECT:
2205 case BUILT_IN_EXPECT_WITH_PROBABILITY:
2206 impl_call_builtin_expect (cd);
2209 /* Handle in "on_call_post". */
2211 case BUILT_IN_MALLOC:
2212 impl_call_malloc (cd);
2214 case BUILT_IN_MEMCPY:
2215 case BUILT_IN_MEMCPY_CHK:
2216 impl_call_memcpy (cd);
2218 case BUILT_IN_MEMSET:
2219 case BUILT_IN_MEMSET_CHK:
2220 impl_call_memset (cd);
2223 case BUILT_IN_REALLOC:
2225 case BUILT_IN_STRCHR:
2226 /* Handle in "on_call_post". */
2228 case BUILT_IN_STRCPY:
2229 case BUILT_IN_STRCPY_CHK:
2230 impl_call_strcpy (cd);
2232 case BUILT_IN_STRLEN:
2233 impl_call_strlen (cd);
2236 case BUILT_IN_STACK_SAVE:
2237 case BUILT_IN_STACK_RESTORE:
2240 /* Stdio builtins. */
2241 case BUILT_IN_FPRINTF:
2242 case BUILT_IN_FPRINTF_UNLOCKED:
2244 case BUILT_IN_PUTC_UNLOCKED:
2245 case BUILT_IN_FPUTC:
2246 case BUILT_IN_FPUTC_UNLOCKED:
2247 case BUILT_IN_FPUTS:
2248 case BUILT_IN_FPUTS_UNLOCKED:
2249 case BUILT_IN_FWRITE:
2250 case BUILT_IN_FWRITE_UNLOCKED:
2251 case BUILT_IN_PRINTF:
2252 case BUILT_IN_PRINTF_UNLOCKED:
2253 case BUILT_IN_PUTCHAR:
2254 case BUILT_IN_PUTCHAR_UNLOCKED:
2256 case BUILT_IN_PUTS_UNLOCKED:
2257 case BUILT_IN_VFPRINTF:
2258 case BUILT_IN_VPRINTF:
2259 /* These stdio builtins have external effects that are out
2260 of scope for the analyzer: we only want to model the effects
2261 on the return value. */
2264 case BUILT_IN_VA_START:
2265 impl_call_va_start (cd);
2267 case BUILT_IN_VA_COPY:
2268 impl_call_va_copy (cd);
2271 else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
2273 impl_call_malloc (cd);
2276 else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
2278 impl_call_calloc (cd);
2281 else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
2283 impl_call_alloca (cd);
2286 else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
2288 impl_call_realloc (cd);
2291 else if (is_named_call_p (callee_fndecl, "error"))
2293 if (impl_call_error (cd, 3, out_terminate_path))
2296 unknown_side_effects = true;
2298 else if (is_named_call_p (callee_fndecl, "error_at_line"))
2300 if (impl_call_error (cd, 5, out_terminate_path))
2303 unknown_side_effects = true;
2305 else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
2306 || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
2308 impl_call_fgets (cd);
2311 else if (is_named_call_p (callee_fndecl, "fread", call, 4))
2313 impl_call_fread (cd);
2316 else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
2318 /* No side-effects (tracking stream state is out-of-scope
2319 for the analyzer). */
2321 else if (is_named_call_p (callee_fndecl, "memset", call, 3)
2322 && POINTER_TYPE_P (cd.get_arg_type (0)))
2324 impl_call_memset (cd);
2327 else if (is_pipe_call_p (callee_fndecl, "pipe", call, 1)
2328 || is_pipe_call_p (callee_fndecl, "pipe2", call, 2))
2330 /* Handle in "on_call_post"; bail now so that fd array
2331 is left untouched so that we can detect use-of-uninit
2332 for the case where the call fails. */
2335 else if (is_named_call_p (callee_fndecl, "putenv", call, 1)
2336 && POINTER_TYPE_P (cd.get_arg_type (0)))
2338 impl_call_putenv (cd);
2341 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
2342 && POINTER_TYPE_P (cd.get_arg_type (0)))
2344 /* Handle in "on_call_post". */
2347 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
2348 && POINTER_TYPE_P (cd.get_arg_type (0)))
2350 impl_call_strlen (cd);
2353 else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
2355 impl_call_operator_new (cd);
2358 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
2360 impl_call_operator_new (cd);
2363 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
2364 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
2365 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
2367 /* Handle in "on_call_post". */
2369 else if (const known_function *kf = get_known_function (callee_fndecl))
2371 kf->impl_call_pre (cd);
2374 else if (!fndecl_has_gimple_body_p (callee_fndecl)
2375 && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
2376 && !fndecl_built_in_p (callee_fndecl))
2377 unknown_side_effects = true;
2378 }
2379 else
2380 unknown_side_effects = true;
2382 return unknown_side_effects;
2385 /* Update this model for the CALL stmt, using CTXT to report any
2386 diagnostics - the second half.
2388 Updates to the region_model that should be made *after* sm-states
2389 are updated are done here; other updates to the region_model are done
2390 in region_model::on_call_pre.
2392 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
2393 to update the state accordingly. */
2395 void
2396 region_model::on_call_post (const gcall *call,
2397 bool unknown_side_effects,
2398 region_model_context *ctxt)
2400 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
2402 call_details cd (call, this, ctxt);
2403 if (is_named_call_p (callee_fndecl, "free", call, 1))
2405 impl_call_free (cd);
2406 return;
2408 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
2409 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
2410 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
2412 impl_call_operator_delete (cd);
2413 return;
2415 else if (is_pipe_call_p (callee_fndecl, "pipe", call, 1)
2416 || is_pipe_call_p (callee_fndecl, "pipe2", call, 2))
2418 impl_call_pipe (cd);
2419 return;
2421 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
2422 && POINTER_TYPE_P (cd.get_arg_type (0)))
2424 impl_call_strchr (cd);
2425 return;
2427 /* Was this fndecl referenced by
2428 __attribute__((malloc(FOO)))? */
2429 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
2431 impl_deallocation_call (cd);
2432 return;
2434 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
2435 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
2436 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
2437 {
2438 default:
2439 break;
2440 case BUILT_IN_REALLOC:
2441 impl_call_realloc (cd);
2442 break;
2444 case BUILT_IN_STRCHR:
2445 impl_call_strchr (cd);
2446 break;
2448 case BUILT_IN_VA_END:
2449 impl_call_va_end (cd);
2450 break;
2451 }
2452 }
2454 if (unknown_side_effects)
2455 handle_unrecognized_call (call, ctxt);
2456 }
2458 /* Purge state involving SVAL from this region_model, using CTXT
2459 (if non-NULL) to purge other state in a program_state.
2461 For example, if we're at the def-stmt of an SSA name, then we need to
2462 purge any state for svalues that involve that SSA name. This avoids
2463 false positives in loops, since a symbolic value referring to the
2464 SSA name will be referring to the previous value of that SSA name.
2466 For example, in:
2467 while ((e = hashmap_iter_next(&iter))) {
2468 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
2469 free (e_strbuf->value);
2470 }
2471 at the def-stmt of e_8:
2472 e_8 = hashmap_iter_next (&iter);
2473 we should purge the "freed" state of:
2474 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
2475 which is the "e_strbuf->value" value from the previous iteration,
2476 or we will erroneously report a double-free - the "e_8" within it
2477 refers to the previous value. */
2479 void
2480 region_model::purge_state_involving (const svalue *sval,
2481 region_model_context *ctxt)
2482 {
2483 if (!sval->can_have_associated_state_p ())
2484 return;
2485 m_store.purge_state_involving (sval, m_mgr);
2486 m_constraints->purge_state_involving (sval);
2487 m_dynamic_extents.purge_state_involving (sval);
2488 if (ctxt)
2489 ctxt->purge_state_involving (sval);
2492 /* A pending_note subclass for adding a note about an
2493 __attribute__((access, ...)) to a diagnostic. */
2495 class reason_attr_access : public pending_note_subclass<reason_attr_access>
2498 reason_attr_access (tree callee_fndecl, const attr_access &access)
2499 : m_callee_fndecl (callee_fndecl),
2500 m_ptr_argno (access.ptrarg),
2501 m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
2505 const char *get_kind () const final override { return "reason_attr_access"; }
2507 void emit () const final override
2509 inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
2510 "parameter %i of %qD marked with attribute %qs",
2511 m_ptr_argno + 1, m_callee_fndecl, m_access_str);
2514 bool operator== (const reason_attr_access &other) const
2516 return (m_callee_fndecl == other.m_callee_fndecl
2517 && m_ptr_argno == other.m_ptr_argno
2518 && !strcmp (m_access_str, other.m_access_str));
2522 tree m_callee_fndecl;
2523 unsigned m_ptr_argno;
2524 const char *m_access_str;
2527 /* Check CALL, a call to external function CALLEE_FNDECL, based on
2528 any __attribute__ ((access, ...)) on the latter, complaining to
2529 CTXT about any issues.
2531 Currently we merely call check_region_for_write on any regions
2532 pointed to by arguments marked with a "write_only" or "read_write"
2533 attribute. */
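/* E.g. a sketch of a declaration this check applies to (hypothetical
   user code):
     __attribute__ ((access (write_only, 1, 2)))
     void fill_buffer (char *buf, size_t sz);
   For a call "fill_buffer (p, n)" we call check_region_for_write on
   the region pointed to by the first argument. */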
2535 void
2536 region_model::
2537 check_external_function_for_access_attr (const gcall *call,
2538 tree callee_fndecl,
2539 region_model_context *ctxt) const
2542 gcc_assert (callee_fndecl);
2545 tree fntype = TREE_TYPE (callee_fndecl);
2546 if (!fntype)
2547 return;
2549 if (!TYPE_ATTRIBUTES (fntype))
2550 return;
2552 /* Initialize a map of attribute access specifications for arguments
2553 to the function call. */
2554 rdwr_map rdwr_idx;
2555 init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
2557 unsigned argno = 0;
2559 for (tree iter = TYPE_ARG_TYPES (fntype); iter;
2560 iter = TREE_CHAIN (iter), ++argno)
2562 const attr_access* access = rdwr_idx.get (argno);
2563 if (!access)
2564 continue;
2566 /* Ignore any duplicate entry in the map for the size argument. */
2567 if (access->ptrarg != argno)
2568 continue;
2570 if (access->mode == access_write_only
2571 || access->mode == access_read_write)
2573 /* Subclass of decorated_region_model_context that
2574 adds a note about the attr access to any saved diagnostics. */
2575 class annotating_ctxt : public note_adding_context
2578 annotating_ctxt (tree callee_fndecl,
2579 const attr_access &access,
2580 region_model_context *ctxt)
2581 : note_adding_context (ctxt),
2582 m_callee_fndecl (callee_fndecl),
2583 m_access (access)
2584 {
2585 }
2586 std::unique_ptr<pending_note> make_note () final override
2588 return make_unique<reason_attr_access>
2589 (m_callee_fndecl, m_access);
2590 }
2591 private:
2592 tree m_callee_fndecl;
2593 const attr_access &m_access;
2594 };
2596 /* Use this ctxt below so that any diagnostics get the
2597 note added to them. */
2598 annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
2600 tree ptr_tree = gimple_call_arg (call, access->ptrarg);
2601 const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
2602 const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
2603 check_region_for_write (reg, &my_ctxt);
2604 /* We don't use the size arg for now. */
2609 /* Handle a call CALL to a function with unknown behavior.
2611 Traverse the regions in this model, determining what regions are
2612 reachable from pointer arguments to CALL and from global variables,
2613 recursively.
2615 Set all reachable regions to new unknown values and purge sm-state
2616 from their values, and from values that point to them. */
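/* E.g. a sketch (hypothetical user code):
     int x = 42;
     extern void unknown_fn (int *);
     unknown_fn (&x);
   x is reachable from the call's argument, so x's binding must become
   a new unknown value: unknown_fn may have written to it, and may have
   stashed the pointer, so the region also escapes. */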
2618 void
2619 region_model::handle_unrecognized_call (const gcall *call,
2620 region_model_context *ctxt)
2621 {
2622 tree fndecl = get_fndecl_for_call (call, ctxt);
2624 if (fndecl)
2625 check_external_function_for_access_attr (call, fndecl, ctxt);
2627 reachable_regions reachable_regs (this);
2629 /* Determine the reachable regions and their mutability. */
2631 /* Add globals and regions that already escaped in previous
2632 unknown calls. */
2633 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2634 &reachable_regs);
2636 /* Params that are pointers. */
2637 tree iter_param_types = NULL_TREE;
2638 if (fndecl)
2639 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2640 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
2642 /* Track expected param type, where available. */
2643 tree param_type = NULL_TREE;
2644 if (iter_param_types)
2646 param_type = TREE_VALUE (iter_param_types);
2647 gcc_assert (param_type);
2648 iter_param_types = TREE_CHAIN (iter_param_types);
2651 tree parm = gimple_call_arg (call, arg_idx);
2652 const svalue *parm_sval = get_rvalue (parm, ctxt);
2653 reachable_regs.handle_parm (parm_sval, param_type);
2657 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
2659 /* Purge sm-state for the svalues that were reachable,
2660 both in non-mutable and mutable form. */
2661 for (svalue_set::iterator iter
2662 = reachable_regs.begin_reachable_svals ();
2663 iter != reachable_regs.end_reachable_svals (); ++iter)
2665 const svalue *sval = (*iter);
2666 if (ctxt)
2667 ctxt->on_unknown_change (sval, false);
2669 for (svalue_set::iterator iter
2670 = reachable_regs.begin_mutable_svals ();
2671 iter != reachable_regs.end_mutable_svals (); ++iter)
2673 const svalue *sval = (*iter);
2674 if (ctxt)
2675 ctxt->on_unknown_change (sval, true);
2676 if (uncertainty)
2677 uncertainty->on_mutable_sval_at_unknown_call (sval);
2680 /* Mark any clusters that have escaped. */
2681 reachable_regs.mark_escaped_clusters (ctxt);
2683 /* Update bindings for all clusters that have escaped, whether above,
2684 or previously. */
2685 m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
2686 conjured_purge (this, ctxt));
2688 /* Purge dynamic extents from any regions that have escaped mutably:
2689 realloc could have been called on them. */
2690 for (hash_set<const region *>::iterator
2691 iter = reachable_regs.begin_mutable_base_regs ();
2692 iter != reachable_regs.end_mutable_base_regs ();
2693 ++iter)
2695 const region *base_reg = (*iter);
2696 unset_dynamic_extents (base_reg);
2700 /* Traverse the regions in this model, determining what regions are
2701 reachable from the store and populating *OUT.
2703 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
2704 for reachability (for handling return values from functions when
2705 analyzing return of the only function on the stack).
2707 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
2708 within it as being maybe-bound as additional "roots" for reachability.
2710 Find svalues that haven't leaked. */
2713 region_model::get_reachable_svalues (svalue_set *out,
2714 const svalue *extra_sval,
2715 const uncertainty_t *uncertainty)
2717 reachable_regions reachable_regs (this);
2719 /* Add globals and regions that already escaped in previous
2720 unknown calls. */
2721 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2722 &reachable_regs);
2724 if (extra_sval)
2725 reachable_regs.handle_sval (extra_sval);
2727 if (uncertainty)
2728 for (uncertainty_t::iterator iter
2729 = uncertainty->begin_maybe_bound_svals ();
2730 iter != uncertainty->end_maybe_bound_svals (); ++iter)
2731 reachable_regs.handle_sval (*iter);
2733 /* Get regions for locals that have explicitly bound values. */
2734 for (store::cluster_map_t::iterator iter = m_store.begin ();
2735 iter != m_store.end (); ++iter)
2737 const region *base_reg = (*iter).first;
2738 if (const region *parent = base_reg->get_parent_region ())
2739 if (parent->get_kind () == RK_FRAME)
2740 reachable_regs.add (base_reg, false);
2743 /* Populate *OUT based on the values that were reachable. */
2744 for (svalue_set::iterator iter
2745 = reachable_regs.begin_reachable_svals ();
2746 iter != reachable_regs.end_reachable_svals (); ++iter)
2747 out->add (*iter);
2750 /* Update this model for the RETURN_STMT, using CTXT to report any
2751 diagnostics. */
2754 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
2756 tree callee = get_current_function ()->decl;
2757 tree lhs = DECL_RESULT (callee);
2758 tree rhs = gimple_return_retval (return_stmt);
2760 if (lhs && rhs)
2761 {
2762 const svalue *sval = get_rvalue (rhs, ctxt);
2763 const region *ret_reg = get_lvalue (lhs, ctxt);
2764 set_value (ret_reg, sval, ctxt);
2765 }
2768 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
2769 ENODE, using CTXT to report any diagnostics.
2771 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
2772 0), as opposed to any second return due to longjmp/siglongjmp. */
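/* E.g. a sketch (hypothetical user code):
     jmp_buf env;
     if (setjmp (env) == 0)   <-- this direct invocation, modeled here
       do_work ();
     else
       recover ();            <-- second return via longjmp, modeled in
                                  on_longjmp */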
2775 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
2776 region_model_context *ctxt)
2778 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
2779 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
2780 ctxt);
2782 /* Create a setjmp_svalue for this call and store it in BUF_REG's
2783 region. */
2786 setjmp_record r (enode, call);
2787 const svalue *sval
2788 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
2789 set_value (buf_reg, sval, ctxt);
2792 /* Direct calls to setjmp return 0. */
2793 if (tree lhs = gimple_call_lhs (call))
2795 const svalue *new_sval
2796 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
2797 const region *lhs_reg = get_lvalue (lhs, ctxt);
2798 set_value (lhs_reg, new_sval, ctxt);
2802 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
2803 to a "setjmp" at SETJMP_CALL where the final stack depth should be
2804 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
2805 done, and should be done by the caller. */
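/* E.g. a sketch (hypothetical user code):
     if (setjmp (env) == 0)   <-- SETJMP_CALL
       deep_fn ();            <-- eventually calls longjmp (env, 0)
   Rewinding pops deep_fn's frame (and any deeper frames), and since
   the "val" passed to longjmp is 0, setjmp appears to return 1, per
   C99 7.13.2.1. */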
2808 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
2809 int setjmp_stack_depth, region_model_context *ctxt)
2811 /* Evaluate the val, using the frame of the "longjmp". */
2812 tree fake_retval = gimple_call_arg (longjmp_call, 1);
2813 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
2815 /* Pop any frames until we reach the stack depth of the function where
2816 setjmp was called. */
2817 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
2818 while (get_stack_depth () > setjmp_stack_depth)
2819 pop_frame (NULL, NULL, ctxt);
2821 gcc_assert (get_stack_depth () == setjmp_stack_depth);
2823 /* Assign to LHS of "setjmp" in new_state. */
2824 if (tree lhs = gimple_call_lhs (setjmp_call))
2826 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
2827 const svalue *zero_sval
2828 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
2829 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
2830 /* If we have 0, use 1. */
2831 if (eq_zero.is_true ())
2832 {
2833 const svalue *one_sval
2834 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
2835 fake_retval_sval = one_sval;
2836 }
2837 else
2839 /* Otherwise note that the value is nonzero. */
2840 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
2843 /* Decorate the return value from setjmp as being unmergeable,
2844 so that we don't attempt to merge states with it as zero
2845 with states in which it's nonzero, leading to a clean distinction
2846 in the exploded_graph between the first return and the second
2847 return. */
2848 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
2850 const region *lhs_reg = get_lvalue (lhs, ctxt);
2851 set_value (lhs_reg, fake_retval_sval, ctxt);
2855 /* Update this region_model for a phi stmt of the form
2856 LHS = PHI <...RHS...>.
2857 where RHS is for the appropriate edge.
2858 Get state from OLD_STATE so that all of the phi stmts for a basic block
2859 are effectively handled simultaneously. */
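/* E.g. a sketch: if a block has the two phis
     a_3 = PHI <b_2(5)>
     b_4 = PHI <a_1(5)>
   then evaluating both rhs values against OLD_STATE (rather than a
   partially-updated state) gives the simultaneous-assignment semantics
   that phi nodes require. */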
2861 void
2862 region_model::handle_phi (const gphi *phi,
2863 tree lhs, tree rhs,
2864 const region_model &old_state,
2865 region_model_context *ctxt)
2867 /* For now, don't bother tracking the .MEM SSA names. */
2868 if (tree var = SSA_NAME_VAR (lhs))
2869 if (TREE_CODE (var) == VAR_DECL)
2870 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2871 return;
2873 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2874 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
2876 set_value (dst_reg, src_sval, ctxt);
2878 if (ctxt)
2879 ctxt->on_phi (phi, rhs);
2882 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
2884 Get the id of the region for PV within this region_model,
2885 emitting any diagnostics to CTXT. */
2887 const region *
2888 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
2889 {
2890 tree expr = pv.m_tree;
2891 gcc_assert (expr);
2894 switch (TREE_CODE (expr))
2895 {
2896 default:
2897 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2898 dump_location_t ());
2900 case ARRAY_REF:
2901 {
2902 tree array = TREE_OPERAND (expr, 0);
2903 tree index = TREE_OPERAND (expr, 1);
2905 const region *array_reg = get_lvalue (array, ctxt);
2906 const svalue *index_sval = get_rvalue (index, ctxt);
2907 return m_mgr->get_element_region (array_reg,
2908 TREE_TYPE (TREE_TYPE (array)),
2909 index_sval);
2910 }
2912 case BIT_FIELD_REF:
2913 {
2915 tree inner_expr = TREE_OPERAND (expr, 0);
2916 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2917 tree num_bits = TREE_OPERAND (expr, 1);
2918 tree first_bit_offset = TREE_OPERAND (expr, 2);
2919 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2920 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2921 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2922 TREE_INT_CST_LOW (num_bits));
2923 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2924 }
2926 case MEM_REF:
2927 {
2929 tree ptr = TREE_OPERAND (expr, 0);
2930 tree offset = TREE_OPERAND (expr, 1);
2931 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2932 const svalue *offset_sval = get_rvalue (offset, ctxt);
2933 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2934 return m_mgr->get_offset_region (star_ptr,
2935 TREE_TYPE (expr),
2936 offset_sval);
2937 }
2940 case FUNCTION_DECL:
2941 return m_mgr->get_region_for_fndecl (expr);
2943 case LABEL_DECL:
2944 return m_mgr->get_region_for_label (expr);
2946 case VAR_DECL:
2947 /* Handle globals. */
2948 if (is_global_var (expr))
2949 return m_mgr->get_region_for_global (expr);
2951 /* Fall through. */
2953 case SSA_NAME:
2954 case PARM_DECL:
2955 case RESULT_DECL:
2956 {
2957 gcc_assert (TREE_CODE (expr) == SSA_NAME
2958 || TREE_CODE (expr) == PARM_DECL
2959 || TREE_CODE (expr) == VAR_DECL
2960 || TREE_CODE (expr) == RESULT_DECL);
2962 int stack_index = pv.m_stack_depth;
2963 const frame_region *frame = get_frame_at_index (stack_index);
2964 gcc_assert (frame);
2965 return frame->get_region_for_local (m_mgr, expr, ctxt);
2966 }
2968 case COMPONENT_REF:
2969 {
2971 tree obj = TREE_OPERAND (expr, 0);
2972 tree field = TREE_OPERAND (expr, 1);
2973 const region *obj_reg = get_lvalue (obj, ctxt);
2974 return m_mgr->get_field_region (obj_reg, field);
2975 }
2977 case STRING_CST:
2979 return m_mgr->get_region_for_string (expr);
2980 }
2981 }
2983 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2985 static void
2986 assert_compat_types (tree src_type, tree dst_type)
2987 {
2988 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2991 if (!(useless_type_conversion_p (src_type, dst_type)))
2992 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2997 /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2999 bool
3000 compat_types_p (tree src_type, tree dst_type)
3001 {
3002 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
3003 if (!(useless_type_conversion_p (src_type, dst_type)))
3004 return false;
3005 return true;
3006 }
3008 /* Get the region for PV within this region_model,
3009 emitting any diagnostics to CTXT. */
3012 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
3014 if (pv.m_tree == NULL_TREE)
3015 return NULL;
3017 const region *result_reg = get_lvalue_1 (pv, ctxt);
3018 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
3019 return result_reg;
3022 /* Get the region for EXPR within this region_model (assuming the most
3023 recent stack frame if it's a local). */
3026 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
3028 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3031 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
3033 Get the value of PV within this region_model,
3034 emitting any diagnostics to CTXT. */
3036 const svalue *
3037 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
3038 {
3039 gcc_assert (pv.m_tree);
3041 switch (TREE_CODE (pv.m_tree))
3042 {
3043 default:
3044 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
3046 case ADDR_EXPR:
3047 {
3048 /* "&EXPR". */
3049 tree expr = pv.m_tree;
3050 tree op0 = TREE_OPERAND (expr, 0);
3051 const region *expr_reg = get_lvalue (op0, ctxt);
3052 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
3053 }
3055 case BIT_FIELD_REF:
3056 {
3058 tree expr = pv.m_tree;
3059 tree op0 = TREE_OPERAND (expr, 0);
3060 const region *reg = get_lvalue (op0, ctxt);
3061 tree num_bits = TREE_OPERAND (expr, 1);
3062 tree first_bit_offset = TREE_OPERAND (expr, 2);
3063 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
3064 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
3065 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
3066 TREE_INT_CST_LOW (num_bits));
3067 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
3068 }
3070 case SSA_NAME:
3071 case VAR_DECL:
3072 case PARM_DECL:
3073 case RESULT_DECL:
3074 case ARRAY_REF:
3075 {
3076 const region *reg = get_lvalue (pv, ctxt);
3077 return get_store_value (reg, ctxt);
3078 }
3080 case REALPART_EXPR:
3081 case IMAGPART_EXPR:
3082 case VIEW_CONVERT_EXPR:
3083 {
3084 tree expr = pv.m_tree;
3085 tree arg = TREE_OPERAND (expr, 0);
3086 const svalue *arg_sval = get_rvalue (arg, ctxt);
3087 const svalue *sval_unaryop
3088 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
3089 arg_sval);
3090 return sval_unaryop;
3091 }
3093 case INTEGER_CST:
3094 case REAL_CST:
3095 case COMPLEX_CST:
3096 case VECTOR_CST:
3097 case STRING_CST:
3098 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
3100 case POINTER_PLUS_EXPR:
3101 {
3102 tree expr = pv.m_tree;
3103 tree ptr = TREE_OPERAND (expr, 0);
3104 tree offset = TREE_OPERAND (expr, 1);
3105 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
3106 const svalue *offset_sval = get_rvalue (offset, ctxt);
3107 const svalue *sval_binop
3108 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
3109 ptr_sval, offset_sval);
3110 return sval_binop;
3111 }
3113 /* Binary ops. */
3114 case PLUS_EXPR:
3115 case MULT_EXPR:
3116 {
3117 tree expr = pv.m_tree;
3118 tree arg0 = TREE_OPERAND (expr, 0);
3119 tree arg1 = TREE_OPERAND (expr, 1);
3120 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
3121 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
3122 const svalue *sval_binop
3123 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
3124 arg0_sval, arg1_sval);
3125 return sval_binop;
3126 }
3128 case COMPONENT_REF:
3129 case MEM_REF:
3130 {
3131 const region *ref_reg = get_lvalue (pv, ctxt);
3132 return get_store_value (ref_reg, ctxt);
3133 }
3134 case OBJ_TYPE_REF:
3135 {
3136 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
3137 return get_rvalue (expr, ctxt);
3138 }
3139 }
3140 }
3142 /* Get the value of PV within this region_model,
3143 emitting any diagnostics to CTXT. */
3146 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
3148 if (pv.m_tree == NULL_TREE)
3149 return NULL;
3151 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
3153 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
3155 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
3157 return result_sval;
3160 /* Get the value of EXPR within this region_model (assuming the most
3161 recent stack frame if it's a local). */
3164 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
3166 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3169 /* Return true if this model is on a path with "main" as the entrypoint
3170 (as opposed to one in which we're merely analyzing a subset of the
3171 path through the code). */
3173 bool
3174 region_model::called_from_main_p () const
3175 {
3176 if (!m_current_frame)
3177 return false;
3178 /* Determine if the oldest stack frame in this model is for "main". */
3179 const frame_region *frame0 = get_frame_at_index (0);
3180 gcc_assert (frame0);
3181 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
3184 /* Subroutine of region_model::get_store_value for when REG is (or is within)
3185 a global variable that hasn't been touched since the start of this path
3186 (or was implicitly touched due to a call to an unknown function). */
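/* E.g. a sketch (hypothetical user code):
     static int s = 42;    <-- static to this TU
     int g;                <-- public, non-const
     int main (void) { return s; }
   On a path from "main", reading s yields its initializer 42; but after
   a call to an unknown function, a global like g instead reads as an
   unknown value. */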
3189 region_model::get_initial_value_for_global (const region *reg) const
3191 /* Get the decl that REG is for (or is within). */
3192 const decl_region *base_reg
3193 = reg->get_base_region ()->dyn_cast_decl_region ();
3194 gcc_assert (base_reg);
3195 tree decl = base_reg->get_decl ();
3197 /* Special-case: to avoid having to explicitly update all previously
3198 untracked globals when calling an unknown fn, they implicitly have
3199 an unknown value if an unknown call has occurred, unless this is
3200 static to-this-TU and hasn't escaped. Globals that have escaped
3201 are explicitly tracked, so we shouldn't hit this case for them. */
3202 if (m_store.called_unknown_fn_p ()
3203 && TREE_PUBLIC (decl)
3204 && !TREE_READONLY (decl))
3205 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
3207 /* If we are on a path from the entrypoint from "main" and we have a
3208 global decl defined in this TU that hasn't been touched yet, then
3209 the initial value of REG can be taken from the initialization value
3210 of the decl. */
3211 if (called_from_main_p () || TREE_READONLY (decl))
3213 /* Attempt to get the initializer value for base_reg. */
3214 if (const svalue *base_reg_init
3215 = base_reg->get_svalue_for_initializer (m_mgr))
3217 if (reg == base_reg)
3218 return base_reg_init;
3219 else
3220 {
3221 /* Get the value for REG within base_reg_init. */
3222 binding_cluster c (base_reg);
3223 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
3224 const svalue *sval
3225 = c.get_any_binding (m_mgr->get_store_manager (), reg);
3226 if (sval)
3228 if (reg->get_type ())
3229 sval = m_mgr->get_or_create_cast (reg->get_type (),
3230 sval);
3231 return sval;
3237 /* Otherwise, return INIT_VAL(REG). */
3238 return m_mgr->get_or_create_initial_value (reg);
3241 /* Get a value for REG, looking it up in the store, or otherwise falling
3242 back to "initial" or "unknown" values.
3243 Use CTXT to report any warnings associated with reading from REG. */
3246 region_model::get_store_value (const region *reg,
3247 region_model_context *ctxt) const
3249 check_region_for_read (reg, ctxt);
3251 /* Special-case: handle var_decls in the constant pool. */
3252 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
3253 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
3254 return sval;
3256 const svalue *sval
3257 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
3258 if (sval)
3259 {
3260 if (reg->get_type ())
3261 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
3262 return sval;
3263 }
3265 /* Special-case: read at a constant index within a STRING_CST. */
3266 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
3267 if (tree byte_offset_cst
3268 = offset_reg->get_byte_offset ()->maybe_get_constant ())
3269 if (const string_region *str_reg
3270 = reg->get_parent_region ()->dyn_cast_string_region ())
3272 tree string_cst = str_reg->get_string_cst ();
3273 if (const svalue *char_sval
3274 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3275 byte_offset_cst))
3276 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
3279 /* Special-case: read the initial char of a STRING_CST. */
3280 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
3281 if (const string_region *str_reg
3282 = cast_reg->get_original_region ()->dyn_cast_string_region ())
3284 tree string_cst = str_reg->get_string_cst ();
3285 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
3286 if (const svalue *char_sval
3287 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3288 byte_offset_cst))
3289 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
3292 /* Otherwise we implicitly have the initial value of the region
3293 (if the cluster had been touched, binding_cluster::get_any_binding
3294 would have returned UNKNOWN, and we would already have returned
3295 that above). */
3297 /* Handle globals. */
3298 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
3299 == RK_GLOBALS)
3300 return get_initial_value_for_global (reg);
3302 return m_mgr->get_or_create_initial_value (reg);
3305 /* Return false if REG does not exist, true if it may do.
3306 This is for detecting regions within the stack that don't exist anymore
3307 after frames are popped. */
3310 region_model::region_exists_p (const region *reg) const
3312 /* If within a stack frame, check that the stack frame is live. */
3313 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
3315 /* Check that the current frame is the enclosing frame, or is called
3316 by it. */
3317 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
3318 iter_frame = iter_frame->get_calling_frame ())
3319 if (iter_frame == enclosing_frame)
3320 return true;
3321 return false;
3322 }
3324 return true;
3325 }
3327 /* Get a region for referencing PTR_SVAL, creating a region if need be, and
3328 potentially generating warnings via CTXT.
3329 PTR_SVAL must be of pointer type.
3330 PTR_TREE if non-NULL can be used when emitting diagnostics. */
3333 region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
3334 region_model_context *ctxt) const
3336 gcc_assert (ptr_sval);
3337 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
3339 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
3340 as a constraint. This suppresses false positives from
3341 -Wanalyzer-null-dereference for the case where we later have an
3342 if (PTR_SVAL) that would occur if we considered the false branch
3343 and transitioned the malloc state machine from start->null. */
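/* E.g. a sketch of the pattern this constraint avoids mis-diagnosing
   (hypothetical user code):
     *p = 42;   <-- dereference: from here on, assume p != NULL
     if (p)     <-- without the constraint, the false branch would be
       ...          explored, leading to a false positive
*/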
3344 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
3345 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
3346 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
3348 switch (ptr_sval->get_kind ())
3349 {
3350 default:
3351 break;
3353 case SK_REGION:
3354 {
3355 const region_svalue *region_sval
3356 = as_a <const region_svalue *> (ptr_sval);
3357 return region_sval->get_pointee ();
3358 }
3360 case SK_BINOP:
3361 {
3362 const binop_svalue *binop_sval
3363 = as_a <const binop_svalue *> (ptr_sval);
3364 switch (binop_sval->get_op ())
3365 {
3366 case POINTER_PLUS_EXPR:
3367 {
3368 /* If we have a symbolic value expressing pointer arithmetic,
3369 try to convert it to a suitable region. */
3370 const region *parent_region
3371 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
3372 const svalue *offset = binop_sval->get_arg1 ();
3373 tree type = TREE_TYPE (ptr_sval->get_type ());
3374 return m_mgr->get_offset_region (parent_region, type, offset);
3375 }
3376 default:
3377 break;
3378 }
3379 }
3380 break;
3381 case SK_POISONED:
3382 {
3383 if (ctxt)
3386 tree ptr = get_representative_tree (ptr_sval);
3387 /* If we can't get a representative tree for PTR_SVAL
3388 (e.g. if it hasn't been bound into the store), then
3389 fall back on PTR_TREE, if non-NULL. */
3390 if (!ptr)
3391 ptr = ptr_tree;
3392 if (ptr)
3393 {
3394 const poisoned_svalue *poisoned_sval
3395 = as_a <const poisoned_svalue *> (ptr_sval);
3396 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
3397 ctxt->warn (make_unique<poisoned_value_diagnostic>
3398 (ptr, pkind, NULL));
3405 return m_mgr->get_symbolic_region (ptr_sval);
3408 /* Attempt to get BITS within any value of REG, as TYPE.
3409 In particular, extract values from compound_svalues for the case
3410 where there's a concrete binding at BITS.
3411 Return an unknown svalue if we can't handle the given case.
3412 Use CTXT to report any warnings associated with reading from REG. */
3414 const svalue *
3415 region_model::get_rvalue_for_bits (tree type,
3416 const region *reg,
3417 const bit_range &bits,
3418 region_model_context *ctxt) const
3420 const svalue *sval = get_store_value (reg, ctxt);
3421 return m_mgr->get_or_create_bits_within (type, bits, sval);
3424 /* A subclass of pending_diagnostic for complaining about writes to
3425 constant regions of memory. */
3427 class write_to_const_diagnostic
3428 : public pending_diagnostic_subclass<write_to_const_diagnostic>
3431 write_to_const_diagnostic (const region *reg, tree decl)
3432 : m_reg (reg), m_decl (decl)
3435 const char *get_kind () const final override
3437 return "write_to_const_diagnostic";
3440 bool operator== (const write_to_const_diagnostic &other) const
3442 return (m_reg == other.m_reg
3443 && m_decl == other.m_decl);
3446 int get_controlling_option () const final override
3448 return OPT_Wanalyzer_write_to_const;
3451 bool emit (rich_location *rich_loc) final override
3453 auto_diagnostic_group d;
3455 switch (m_reg->get_kind ())
3458 warned = warning_at (rich_loc, get_controlling_option (),
3459 "write to %<const%> object %qE", m_decl);
3462 warned = warning_at (rich_loc, get_controlling_option (),
3463 "write to function %qE", m_decl);
3466 warned = warning_at (rich_loc, get_controlling_option (),
3467 "write to label %qE", m_decl);
3471 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
3475 label_text describe_final_event (const evdesc::final_event &ev) final override
3477 switch (m_reg->get_kind ())
3480 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
3482 return ev.formatted_print ("write to function %qE here", m_decl);
3484 return ev.formatted_print ("write to label %qE here", m_decl);
3489 const region *m_reg;
3493 /* A subclass of pending_diagnostic for complaining about writes to
3496 class write_to_string_literal_diagnostic
3497 : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
3500 write_to_string_literal_diagnostic (const region *reg)
3504 const char *get_kind () const final override
3506 return "write_to_string_literal_diagnostic";
3509 bool operator== (const write_to_string_literal_diagnostic &other) const
3511 return m_reg == other.m_reg;
3514 int get_controlling_option () const final override
3516 return OPT_Wanalyzer_write_to_string_literal;
3519 bool emit (rich_location *rich_loc) final override
3521 return warning_at (rich_loc, get_controlling_option (),
3522 "write to string literal");
3523 /* Ideally we would show the location of the STRING_CST as well,
3524 but it is not available at this point. */
3527 label_text describe_final_event (const evdesc::final_event &ev) final override
3529 return ev.formatted_print ("write to string literal here");
3533 const region *m_reg;
3536 /* Use CTXT to warn if DEST_REG is a region that shouldn't be written to. */
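/* E.g. a sketch of what the check below flags (hypothetical user code):
     const int g = 42;
     void f (void) { *(int *)&g = 17; }  <-- -Wanalyzer-write-to-const
   Writes to string literals, functions and labels are likewise
   rejected. */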
3539 region_model::check_for_writable_region (const region* dest_reg,
3540 region_model_context *ctxt) const
3542 /* Fail gracefully if CTXT is NULL. */
3543 if (!ctxt)
3544 return;
3546 const region *base_reg = dest_reg->get_base_region ();
3547 switch (base_reg->get_kind ())
3548 {
3549 default:
3550 break;
3551 case RK_FUNCTION:
3552 {
3553 const function_region *func_reg = as_a <const function_region *> (base_reg);
3554 tree fndecl = func_reg->get_fndecl ();
3555 ctxt->warn (make_unique<write_to_const_diagnostic>
3556 (func_reg, fndecl));
3557 }
3558 break;
3559 case RK_LABEL:
3560 {
3561 const label_region *label_reg = as_a <const label_region *> (base_reg);
3562 tree label = label_reg->get_label ();
3563 ctxt->warn (make_unique<write_to_const_diagnostic>
3564 (label_reg, label));
3565 }
3566 break;
3567 case RK_DECL:
3568 {
3569 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
3570 tree decl = decl_reg->get_decl ();
3571 /* Warn about writes to const globals.
3572 Don't warn for writes to const locals, and params in particular,
3573 since we would warn in push_frame when setting them up (e.g. the
3574 "this" param is "T* const"). */
3575 if (TREE_READONLY (decl)
3576 && is_global_var (decl))
3577 ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
3578 }
3579 break;
3580 case RK_STRING:
3581 ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
3582 break;
3583 }
3584 }
3586 /* Get the capacity of REG in bytes. */
3589 region_model::get_capacity (const region *reg) const
3591 switch (reg->get_kind ())
3592 {
3593 default:
3594 break;
3595 case RK_DECL:
3596 {
3597 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3598 tree decl = decl_reg->get_decl ();
3599 if (TREE_CODE (decl) == SSA_NAME)
3600 {
3601 tree type = TREE_TYPE (decl);
3602 tree size = TYPE_SIZE (type);
3603 return get_rvalue (size, NULL);
3604 }
3605 else
3606 {
3607 tree size = decl_init_size (decl, false);
3608 if (size)
3609 return get_rvalue (size, NULL);
3610 }
3611 }
3612 break;
3613 case RK_SIZED:
3614 /* Look through sized regions to get at the capacity
3615 of the underlying regions. */
3616 return get_capacity (reg->get_parent_region ());
3617 }
3619 if (const svalue *recorded = get_dynamic_extents (reg))
3620 return recorded;
3622 return m_mgr->get_or_create_unknown_svalue (sizetype);
3625 /* Return the string size, including the 0-terminator, if SVAL is a
3626 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
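/* E.g. for the constant_svalue wrapping the STRING_CST "foo",
   TREE_STRING_LENGTH is 4 (three chars plus the 0-terminator), so the
   result is a constant_svalue of 4. */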
3629 region_model::get_string_size (const svalue *sval) const
3631 tree cst = sval->maybe_get_constant ();
3632 if (!cst || TREE_CODE (cst) != STRING_CST)
3633 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3635 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
3636 return m_mgr->get_or_create_constant_svalue (out);
3639 /* Return the string size, including the 0-terminator, if REG is a
3640 string_region. Otherwise, return an unknown_svalue. */
3643 region_model::get_string_size (const region *reg) const
3645 const string_region *str_reg = dyn_cast <const string_region *> (reg);
3646 if (!str_reg)
3647 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3649 tree cst = str_reg->get_string_cst ();
3650 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
3651 return m_mgr->get_or_create_constant_svalue (out);
3654 /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
3655 using DIR to determine if this access is a read or write. */
3658 region_model::check_region_access (const region *reg,
3659 enum access_direction dir,
3660 region_model_context *ctxt) const
3662 /* Fail gracefully if CTXT is NULL. */
3663 if (!ctxt)
3664 return;
3666 check_region_for_taint (reg, dir, ctxt);
3667 check_region_bounds (reg, dir, ctxt);
3669 switch (dir)
3670 {
3671 default:
3672 gcc_unreachable ();
3673 case DIR_READ:
3674 /* Currently a no-op. */
3675 break;
3676 case DIR_WRITE:
3677 check_for_writable_region (reg, ctxt);
3678 break;
3679 }
3680 }
3682 /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
3685 region_model::check_region_for_write (const region *dest_reg,
3686 region_model_context *ctxt) const
3688 check_region_access (dest_reg, DIR_WRITE, ctxt);
3691 /* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
3694 region_model::check_region_for_read (const region *src_reg,
3695 region_model_context *ctxt) const
3697 check_region_access (src_reg, DIR_READ, ctxt);
3700 /* Concrete subclass for casts of pointers that lead to trailing bytes. */
3702 class dubious_allocation_size
3703 : public pending_diagnostic_subclass<dubious_allocation_size>
3706 dubious_allocation_size (const region *lhs, const region *rhs)
3707 : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE)
3710 dubious_allocation_size (const region *lhs, const region *rhs,
3711 tree expr)
3712 : m_lhs (lhs), m_rhs (rhs), m_expr (expr)
3715 const char *get_kind () const final override
3717 return "dubious_allocation_size";
3720 bool operator== (const dubious_allocation_size &other) const
3722 return m_lhs == other.m_lhs && m_rhs == other.m_rhs
3723 && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
3726 int get_controlling_option () const final override
3728 return OPT_Wanalyzer_allocation_size;
3731 bool emit (rich_location *rich_loc) final override
3733 diagnostic_metadata m;
3734 m.add_cwe (131);
3736 return warning_meta (rich_loc, m, get_controlling_option (),
3737 "allocated buffer size is not a multiple"
3738 " of the pointee's size");
3741 label_text
3742 describe_region_creation_event (const evdesc::region_creation &ev) final
3743 override
3745 m_allocation_event = &ev;
3748 if (TREE_CODE (m_expr) == INTEGER_CST)
3749 return ev.formatted_print ("allocated %E bytes here", m_expr);
3750 else
3751 return ev.formatted_print ("allocated %qE bytes here", m_expr);
3754 return ev.formatted_print ("allocated here");
3757 label_text describe_final_event (const evdesc::final_event &ev) final
3760 tree pointee_type = TREE_TYPE (m_lhs->get_type ());
3761 if (m_allocation_event)
3762 /* The allocation size was already described by the
3763 region_creation event, so only summarize the assignment here. */
3764 return ev.formatted_print ("assigned to %qT here;"
3765 " %<sizeof (%T)%> is %qE",
3766 m_lhs->get_type (), pointee_type,
3767 size_in_bytes (pointee_type));
3769 if (m_expr)
3770 {
3771 if (TREE_CODE (m_expr) == INTEGER_CST)
3772 return ev.formatted_print ("allocated %E bytes and assigned to"
3773 " %qT here; %<sizeof (%T)%> is %qE",
3774 m_expr, m_lhs->get_type (), pointee_type,
3775 size_in_bytes (pointee_type));
3776 else
3777 return ev.formatted_print ("allocated %qE bytes and assigned to"
3778 " %qT here; %<sizeof (%T)%> is %qE",
3779 m_expr, m_lhs->get_type (), pointee_type,
3780 size_in_bytes (pointee_type));
3781 }
3783 return ev.formatted_print ("allocated and assigned to %qT here;"
3784 " %<sizeof (%T)%> is %qE",
3785 m_lhs->get_type (), pointee_type,
3786 size_in_bytes (pointee_type));
3789 void mark_interesting_stuff (interesting_t *interest) final override
3791 interest->add_region_creation (m_rhs);
3792 }
3794 private:
3795 const region *m_lhs;
3796 const region *m_rhs;
3797 const tree m_expr;
3798 const evdesc::region_creation *m_allocation_event;
3801 /* Return true if an allocation of CST bytes is compatible with a pointee
3802 of the size given by POINTEE_SIZE_TREE (i.e. not a dubious size). */
3803 static bool
3804 capacity_compatible_with_type (tree cst, tree pointee_size_tree,
3807 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
3808 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
3810 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
3811 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
3813 if (is_struct)
3814 return alloc_size == 0 || alloc_size >= pointee_size;
3815 return alloc_size % pointee_size == 0;
3816 }
3818 static bool
3819 capacity_compatible_with_type (tree cst, tree pointee_size_tree)
3820 {
3821 return capacity_compatible_with_type (cst, pointee_size_tree, false);
3822 }
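/* E.g., loosely, with INTEGER_CST arguments for a 4-byte pointee such
   as int32_t: a cst of 8 is compatible (8 % 4 == 0), whereas a cst of
   7 is not, so a hypothetical "int32_t *p = malloc (7);" gets flagged
   by the caller below. */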
3824 /* Checks whether SVAL could be a multiple of SIZE_CST.
3826 It works by visiting all svalues inside SVAL until it reaches
3827 atomic nodes. From those, it goes back up again and adds each
3828 node that might be a multiple of SIZE_CST to the RESULT_SET. */
3830 class size_visitor : public visitor
3833 size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
3834 : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
3836 m_root_sval->accept (this);
3841 return result_set.contains (m_root_sval);
3844 void visit_constant_svalue (const constant_svalue *sval) final override
3846 check_constant (sval->get_constant (), sval);
3849 void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
3852 result_set.add (sval);
3855 void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
3858 result_set.add (sval);
3861 void visit_unaryop_svalue (const unaryop_svalue *sval) final override
3863 const svalue *arg = sval->get_arg ();
3864 if (result_set.contains (arg))
3865 result_set.add (sval);
3868 void visit_binop_svalue (const binop_svalue *sval) final override
3870 const svalue *arg0 = sval->get_arg0 ();
3871 const svalue *arg1 = sval->get_arg1 ();
3873 if (sval->get_op () == MULT_EXPR)
3874 {
3875 if (result_set.contains (arg0) || result_set.contains (arg1))
3876 result_set.add (sval);
3877 }
3878 else
3879 {
3880 if (result_set.contains (arg0) && result_set.contains (arg1))
3881 result_set.add (sval);
3882 }
3885 void visit_repeated_svalue (const repeated_svalue *sval) final override
3887 sval->get_inner_svalue ()->accept (this);
3888 if (result_set.contains (sval->get_inner_svalue ()))
3889 result_set.add (sval);
3892 void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
3894 sval->get_arg ()->accept (this);
3895 if (result_set.contains (sval->get_arg ()))
3896 result_set.add (sval);
3899 void visit_widening_svalue (const widening_svalue *sval) final override
3901 const svalue *base = sval->get_base_svalue ();
3902 const svalue *iter = sval->get_iter_svalue ();
3904 if (result_set.contains (base) && result_set.contains (iter))
3905 result_set.add (sval);
3908 void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
3911 equiv_class_id id (-1);
3912 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3914 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3915 check_constant (cst, sval);
3916 else
3917 result_set.add (sval);
3921 void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
3924 result_set.add (sval);
3927 void visit_const_fn_result_svalue (const const_fn_result_svalue
3928 *sval ATTRIBUTE_UNUSED) final override
3930 result_set.add (sval);
3934 void check_constant (tree cst, const svalue *sval)
3936 switch (TREE_CODE (cst))
3937 {
3938 default:
3939 /* Assume all unhandled operands are compatible. */
3940 result_set.add (sval);
3941 break;
3942 case INTEGER_CST:
3943 if (capacity_compatible_with_type (cst, m_size_cst))
3944 result_set.add (sval);
3945 break;
3946 }
3949 tree m_size_cst;
3950 const svalue *m_root_sval;
3951 constraint_manager *m_cm;
3952 svalue_set result_set; /* Used as a mapping of svalue*->bool. */
3955 /* Return true if a struct or union either uses the inheritance pattern,
3956 where the first field is a base struct, or the flexible array member
3957 pattern, where the last field is an array without a specified size. */
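/* E.g. sketches of the two patterns (hypothetical user code):
     struct derived { struct base b; int extra; };  <-- "inheritance"
     struct str { size_t len; char data[]; };       <-- flexible array member
   For both, allocating more than sizeof (struct ...) bytes is
   intentional, so the allocation-size check should stay quiet. */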
3960 struct_or_union_with_inheritance_p (tree struc)
3962 tree iter = TYPE_FIELDS (struc);
3963 if (iter == NULL_TREE)
3964 return false;
3965 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3966 return true;
3968 tree last_field = NULL_TREE;
3969 while (iter != NULL_TREE)
3970 {
3971 last_field = iter;
3972 iter = DECL_CHAIN (iter);
3973 }
3975 if (last_field != NULL_TREE
3976 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3977 return true;
3979 return false;
3982 /* Return true if the lhs and rhs of an assignment have different types. */
3985 is_any_cast_p (const gimple *stmt)
3987 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
3988 return gimple_assign_cast_p (assign)
3989 || !pending_diagnostic::same_tree_p (
3990 TREE_TYPE (gimple_assign_lhs (assign)),
3991 TREE_TYPE (gimple_assign_rhs1 (assign)));
3992 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
3994 tree lhs = gimple_call_lhs (call);
3995 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
3996 TREE_TYPE (gimple_call_lhs (call)),
3997 gimple_call_return_type (call));
3998 }
4000 return false;
4001 }
4003 /* On pointer assignments, check whether the buffer size of
4004 RHS_SVAL is compatible with the type of the LHS_REG.
4005 Use a non-null CTXT to report allocation size warnings. */
4008 region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
4009 region_model_context *ctxt) const
4011 if (!ctxt || ctxt->get_stmt () == NULL)
4012 return;
4013 /* Only report warnings on assignments that actually change the type. */
4014 if (!is_any_cast_p (ctxt->get_stmt ()))
4015 return;
4017 const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
4018 if (!reg_sval)
4019 return;
4021 tree pointer_type = lhs_reg->get_type ();
4022 if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
4023 return;
4025 tree pointee_type = TREE_TYPE (pointer_type);
4026 /* Make sure that the type on the left-hand side actually has a size. */
4027 if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
4028 || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
4029 return;
4031 /* Bail out early on pointers to structs where we cannot
4032 deduce whether the buffer size is compatible. */
4033 bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
4034 if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
4035 return;
4037 tree pointee_size_tree = size_in_bytes (pointee_type);
4038 /* We give up if the type size is not known at compile-time or the
4039 type size is always compatible regardless of the buffer size. */
4040 if (TREE_CODE (pointee_size_tree) != INTEGER_CST
4041 || integer_zerop (pointee_size_tree)
4042 || integer_onep (pointee_size_tree))
4043 return;
4045 const region *rhs_reg = reg_sval->get_pointee ();
4046 const svalue *capacity = get_capacity (rhs_reg);
4047 switch (capacity->get_kind ())
4048 {
4049 case svalue_kind::SK_CONSTANT:
4050 {
4051 const constant_svalue *cst_cap_sval
4052 = as_a <const constant_svalue *> (capacity);
4053 tree cst_cap = cst_cap_sval->get_constant ();
4054 if (TREE_CODE (cst_cap) == INTEGER_CST
4055 && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
4056 is_struct))
4057 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
4058 cst_cap));
4059 }
4060 break;
4061 default:
4062 {
4063 if (!is_struct)
4064 {
4065 size_visitor v (pointee_size_tree, capacity, m_constraints);
4066 if (!v.get_result ())
4067 {
4068 tree expr = get_representative_tree (capacity);
4069 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
4070 rhs_reg,
4071 expr));
4072 }
4073 }
4074 break;
4075 }
4076 }
4077 }
4079 /* Set the value of the region given by LHS_REG to the value given
4080 by RHS_SVAL.
4081 Use CTXT to report any warnings associated with writing to LHS_REG. */
4084 region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
4085 region_model_context *ctxt)
4087 gcc_assert (lhs_reg);
4088 gcc_assert (rhs_sval);
4090 check_region_size (lhs_reg, rhs_sval, ctxt);
4092 check_region_for_write (lhs_reg, ctxt);
4094 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
4095 ctxt ? ctxt->get_uncertainty () : NULL);
4098 /* Set the value of the region given by LHS to the value given by RHS. */
4101 region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
4103 const region *lhs_reg = get_lvalue (lhs, ctxt);
4104 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4105 gcc_assert (lhs_reg);
4106 gcc_assert (rhs_sval);
4107 set_value (lhs_reg, rhs_sval, ctxt);
4110 /* Remove all bindings overlapping REG within the store. */
4113 region_model::clobber_region (const region *reg)
4115 m_store.clobber_region (m_mgr->get_store_manager(), reg);
4118 /* Remove any bindings for REG within the store. */
4121 region_model::purge_region (const region *reg)
4123 m_store.purge_region (m_mgr->get_store_manager(), reg);
4126 /* Fill REG with SVAL. */
4129 region_model::fill_region (const region *reg, const svalue *sval)
4131 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
4134 /* Zero-fill REG. */
4137 region_model::zero_fill_region (const region *reg)
4139 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
4142 /* Mark REG as having unknown content. */
4145 region_model::mark_region_as_unknown (const region *reg,
4146 uncertainty_t *uncertainty)
4148 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
4152 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
4153 this model. */
4155 tristate
4156 region_model::eval_condition (const svalue *lhs,
4157 enum tree_code op,
4158 const svalue *rhs) const
4160 /* For now, make no attempt to capture constraints on floating-point
4161 values. */
4162 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
4163 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
4164 return tristate::unknown ();
4166 tristate ts = eval_condition_without_cm (lhs, op, rhs);
4167 if (ts.is_known ())
4168 return ts;
4170 /* Otherwise, try constraints. */
4171 return m_constraints->eval_condition (lhs, op, rhs);
4174 /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
4175 this model, without resorting to the constraint_manager.
4177 This is exposed so that impl_region_model_context::on_state_leak can
4178 check for equality part-way through region_model::purge_unused_svalues
4179 without risking creating new ECs. */
4181 tristate
4182 region_model::eval_condition_without_cm (const svalue *lhs,
4183 enum tree_code op,
4184 const svalue *rhs) const
4189 /* See what we know based on the values. */
4191 /* For now, make no attempt to capture constraints on floating-point
4192 values. */
4193 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
4194 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
4195 return tristate::unknown ();
4197 /* Unwrap any unmergeable values. */
4198 lhs = lhs->unwrap_any_unmergeable ();
4199 rhs = rhs->unwrap_any_unmergeable ();
4201 if (lhs == rhs)
4202 {
4203 /* If we have the same svalue, then we have equality
4204 (apart from NaN-handling).
4205 TODO: should this definitely be the case for poisoned values? */
4206 /* Poisoned and unknown values are "unknowable". */
4207 if (lhs->get_kind () == SK_POISONED
4208 || lhs->get_kind () == SK_UNKNOWN)
4209 return tristate::TS_UNKNOWN;
4211 switch (op)
4212 {
4213 case EQ_EXPR:
4214 case GE_EXPR:
4215 case LE_EXPR:
4216 return tristate::TS_TRUE;
4218 case NE_EXPR:
4219 case GT_EXPR:
4220 case LT_EXPR:
4221 return tristate::TS_FALSE;
4223 default:
4224 /* For other ops, use the logic below. */
4225 break;
4226 }
4227 }
4229 /* If we have a pair of region_svalues, compare them. */
4230 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4231 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4233 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
4234 if (res.is_known ())
4235 return res;
4236 /* Otherwise, only known through constraints. */
4239 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
4241 /* If we have a pair of constants, compare them. */
4242 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4243 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
4246 /* When we have one constant, put it on the RHS. */
4247 std::swap (lhs, rhs);
4248 op = swap_tree_comparison (op);
4251 gcc_assert (lhs->get_kind () != SK_CONSTANT);
4253 /* Handle comparison against zero. */
4254 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4255 if (zerop (cst_rhs->get_constant ()))
4257 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
4259 /* A region_svalue is a non-NULL pointer, except in certain
4260 special cases (see the comment for region::non_null_p). */
4261 const region *pointee = ptr->get_pointee ();
4262 if (pointee->non_null_p ())
4263 {
4264 switch (op)
4265 {
4266 default:
4267 gcc_unreachable ();
4269 case EQ_EXPR:
4270 case LE_EXPR:
4271 case LT_EXPR:
4272 return tristate::TS_FALSE;
4274 case NE_EXPR:
4275 case GE_EXPR:
4276 case GT_EXPR:
4277 return tristate::TS_TRUE;
4278 }
4279 }
4280 }
4281 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
4283 /* Treat offsets from a non-NULL pointer as being non-NULL. This
4284 isn't strictly true, in that eventually ptr++ will wrap
4285 around and be NULL, but it won't occur in practise and thus
4286 can be used to suppress effectively false positives that we
4287 shouldn't warn for. */
4288 if (binop->get_op () == POINTER_PLUS_EXPR)
4290 tristate lhs_ts
4291 = eval_condition_without_cm (binop->get_arg0 (),
4292 op, rhs);
4293 if (lhs_ts.is_known ())
4294 return lhs_ts;
4299 /* Handle rejection of equality for comparisons of the initial values of
4300 "external" values (such as params) with the address of locals. */
4301 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
4302 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4304 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
4305 if (res.is_known ())
4306 return res;
4308 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
4309 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4311 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
4312 if (res.is_known ())
4313 return res;
4316 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
4317 if (tree rhs_cst = rhs->maybe_get_constant ())
4319 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
4320 if (res.is_known ())
4321 return res;
4324 /* Handle comparisons between two svalues with more than one operand. */
4325 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
4326 {
4327 switch (op)
4328 {
4329 default:
4330 break;
4331 case EQ_EXPR:
4332 {
4333 /* TODO: binops can be equal even if they are not structurally
4334 equal in case of commutative operators. */
4335 tristate res = structural_equality (lhs, rhs);
4336 if (res.is_true ())
4337 return res;
4338 }
4339 break;
4340 case LE_EXPR:
4341 {
4342 tristate res = structural_equality (lhs, rhs);
4343 if (res.is_true ())
4344 return res;
4345 }
4346 break;
4347 case GE_EXPR:
4348 {
4349 tristate res = structural_equality (lhs, rhs);
4350 if (res.is_true ())
4351 return res;
4352 res = symbolic_greater_than (binop, rhs);
4353 if (res.is_true ())
4354 return res;
4355 }
4356 break;
4357 case GT_EXPR:
4358 {
4359 tristate res = symbolic_greater_than (binop, rhs);
4360 if (res.is_true ())
4361 return res;
4362 }
4363 break;
4364 }
4365 }
4367 return tristate::TS_UNKNOWN;
4370 /* Subroutine of region_model::eval_condition_without_cm, for rejecting
4371 equality of INIT_VAL(PARM) with &LOCAL. */
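/* E.g. a sketch (hypothetical user code):
     void f (int *p)      <-- INIT_VAL(p)
     {
       int local;
       if (p == &local)   <-- provably false at function entry
         ...
     }
*/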
4374 region_model::compare_initial_and_pointer (const initial_svalue *init,
4375 const region_svalue *ptr) const
4377 const region *pointee = ptr->get_pointee ();
4379 /* If we have a pointer to something within a stack frame, it can't be the
4380 initial value of a param. */
4381 if (pointee->maybe_get_frame_region ())
4382 if (init->initial_value_of_param_p ())
4383 return tristate::TS_FALSE;
4385 return tristate::TS_UNKNOWN;
4388 /* Return true if SVAL is definitely positive. */
4391 is_positive_svalue (const svalue *sval)
4393 if (tree cst = sval->maybe_get_constant ())
4394 return !zerop (cst) && get_range_pos_neg (cst) == 1;
4395 tree type = sval->get_type ();
4396 if (!type)
4397 return false;
4398 /* Consider a binary operation size_t + int. The analyzer wraps the int in
4399 an unaryop_svalue, converting it to a size_t, but if the int is negative,
4400 the dynamic result is smaller than the first operand. Thus, we have to
4401 check whether the argument of the unaryop_svalue is also positive. */
4402 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
4403 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
4404 && is_positive_svalue (un_op->get_arg ());
4405 return TYPE_UNSIGNED (type);
4408 /* Return true if A is definitely larger than B.
4410 Limitation: does not account for integer overflows and does not try to
4411 return false, so it cannot be used negated. */
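/* E.g. a sketch: with BIN_A = (x + 4) and B = x, the helper strips the
   "+ 4" because 4 is provably positive and x >= x holds, so A > B is
   TS_TRUE; but it never returns a definite "false", and wraparound is
   deliberately ignored. */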
4414 region_model::symbolic_greater_than (const binop_svalue *bin_a,
4415 const svalue *b) const
4417 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
4419 /* Eliminate the right-hand side of both svalues. */
4420 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4421 if (bin_a->get_op () == bin_b->get_op ()
4422 && eval_condition_without_cm (bin_a->get_arg1 (),
4423 GE_EXPR,
4424 bin_b->get_arg1 ()).is_true ()
4425 && eval_condition_without_cm (bin_a->get_arg0 (),
4426 GE_EXPR,
4427 bin_b->get_arg0 ()).is_true ())
4428 return tristate (tristate::TS_TRUE);
4430 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
4431 if (is_positive_svalue (bin_a->get_arg1 ())
4432 && eval_condition_without_cm (bin_a->get_arg0 (),
4433 GE_EXPR, b).is_true ())
4434 return tristate (tristate::TS_TRUE);
4436 return tristate::unknown ();
4439 /* Return true if A and B are equal structurally.
4441 Structural equality means that A and B are equal if the svalues A and B have
4442 the same nodes at the same positions in the tree and the leaves are equal.
4443 Equality for conjured_svalues and initial_svalues is determined by comparing
4444 the pointers, while constants are compared by value. That behavior is useful
4445 to check for binop_svalues that evaluate to the same concrete value but
4446 might use one operand with a different type but the same constant value.
4448 For example,
4449 binop_svalue (mult_expr,
4450 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4451 constant_svalue (‘size_t’, 4))
4452 and
4453 binop_svalue (mult_expr,
4454 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4455 constant_svalue (‘sizetype’, 4))
4456 are structurally equal. A concrete C code example where this occurs can
4457 be found in test7 of out-of-bounds-5.c. */
4460 region_model::structural_equality (const svalue *a, const svalue *b) const
4462 /* If A and B are referentially equal, they are also structurally equal. */
4464 return tristate (tristate::TS_TRUE);
4466 switch (a->get_kind ())
4469 return tristate::unknown ();
4470 /* SK_CONJURED and SK_INITIAL are already handled
4471 by the referential equality above. */
4474 tree a_cst = a->maybe_get_constant ();
4475 tree b_cst = b->maybe_get_constant ();
4477 return tristate (tree_int_cst_equal (a_cst, b_cst));
4479 return tristate (tristate::TS_FALSE);
4482 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
4483 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
4484 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
4486 && un_a->get_op () == un_b->get_op ()
4487 && structural_equality (un_a->get_arg (),
4490 return tristate (tristate::TS_FALSE);
4493 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
4494 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4495 return tristate (bin_a->get_op () == bin_b->get_op ()
4496 && structural_equality (bin_a->get_arg0 (),
4498 && structural_equality (bin_a->get_arg1 (),
4499 bin_b->get_arg1 ()));
4501 return tristate (tristate::TS_FALSE);
4505 /* Handle various constraints of the form:
4506 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
4510 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
4513 by adding constraints for INNER_LHS INNER_OP INNER_RHS.
4515 Return true if this function can fully handle the constraint; if
4516 so, add the implied constraint(s) and write true to *OUT if they
4517 are consistent with existing constraints, or write false to *OUT
4518 if they contradict existing constraints.
4520 Return false for cases that this function doesn't know how to handle.
4522 For example, if we're checking a stored conditional, we'll have
4524 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
4527 which this function can turn into an add_constraint of:
4528 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
4530 Similarly, optimized && and || conditionals lead to e.g.
4532 becoming gimple like this:
4536 On the "_3 is false" branch we can have constraints of the form:
4537 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4538 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
4540 which implies that both _1 and _2 are false,
4541 which this function can turn into a pair of add_constraints of
4542 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4544 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
4547 region_model::add_constraints_from_binop (const svalue *outer_lhs,
4548 enum tree_code outer_op,
4549 const svalue *outer_rhs,
4551 region_model_context *ctxt)
4553 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
4555 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
4558 if (!outer_rhs->all_zeroes_p ())
4561 const svalue *inner_lhs = binop_sval->get_arg0 ();
4562 enum tree_code inner_op = binop_sval->get_op ();
4563 const svalue *inner_rhs = binop_sval->get_arg1 ();
4565 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
4569 - "OUTER_LHS != false" (i.e. OUTER is true), or
4570 - "OUTER_LHS == false" (i.e. OUTER is false). */
4571 bool is_true = outer_op == NE_EXPR;
4581 /* ...and "(inner_lhs OP inner_rhs) == 0"
4582 then (inner_lhs OP inner_rhs) must have the same
4583 logical value as LHS. */
4585 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
4586 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
4594 /* ...and "(inner_lhs & inner_rhs) != 0"
4595 then both inner_lhs and inner_rhs must be true. */
4596 const svalue *false_sval
4597 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4598 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
4599 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
4600 *out = sat1 && sat2;
4608 /* ...and "(inner_lhs | inner_rhs) == 0"
4609 i.e. "(inner_lhs | inner_rhs)" is false
4610 then both inner_lhs and inner_rhs must be false. */
4611 const svalue *false_sval
4612 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4613 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
4614 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
4615 *out = sat1 && sat2;
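/* A hedged C-level sketch of the BIT_AND_EXPR case above:

     void *p = malloc (8);
     void *q = malloc (10);
     if (p && q)
       ...

   gcc optimizes the condition into gimple along the lines of:

     _1 = p_5 != 0B;
     _2 = q_6 != 0B;
     _3 = _1 & _2;
     if (_3 != 0) ...

   so on the "_3 is true" edge, "(_1 & _2) != 0" lets us add both
   "p != NULL" and "q != NULL".  */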
4622 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
4623 If it is consistent with existing constraints, add it, and return true.
4624 Return false if it contradicts existing constraints.
4625 Use CTXT for reporting any diagnostics associated with the accesses. */
4628 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4629 region_model_context *ctxt)
4631 /* For now, make no attempt to capture constraints on floating-point
4633 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4636 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
4637 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4639 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
4642 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
4643 If it is consistent with existing constraints, add it, and return true.
4644 Return false if it contradicts existing constraints.
4645 Use CTXT for reporting any diagnostics associated with the accesses. */
4648 region_model::add_constraint (const svalue *lhs,
4651 region_model_context *ctxt)
4653 tristate t_cond = eval_condition (lhs, op, rhs);
4655 /* If we already have the condition, do nothing. */
4656 if (t_cond.is_true ())
4659 /* Reject a constraint that would contradict existing knowledge, as
4661 if (t_cond.is_false ())
4665 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
4668 /* Attempt to store the constraint. */
4669 if (!m_constraints->add_constraint (lhs, op, rhs))
4672 /* Notify the context, if any. This exists so that the state machines
4673 in a program_state can be notified about the condition, and so can
4674 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
4675 when synthesizing constraints as above. */
4677 ctxt->on_condition (lhs, op, rhs);
4679 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
4680 the case where REGION is heap-allocated and thus could be NULL). */
4681 if (tree rhs_cst = rhs->maybe_get_constant ())
4682 if (op == EQ_EXPR && zerop (rhs_cst))
4683 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
4684 unset_dynamic_extents (region_sval->get_pointee ());
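/* E.g. (a sketch): given

     void *p = malloc (n);
     if (p == NULL)
       ...

   adding "p == NULL" on the true edge drops the size binding of the
   heap-allocated region, since no usable allocation exists on that
   path.  */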
4689 /* As above, but when returning false, if OUT is non-NULL, write a
4690 new rejected_constraint to *OUT. */
4693 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4694 region_model_context *ctxt,
4695 rejected_constraint **out)
4697 bool sat = add_constraint (lhs, op, rhs, ctxt);
4699 *out = new rejected_op_constraint (*this, lhs, op, rhs);
4703 /* Determine what is known about the condition "LHS OP RHS" within
4705 Use CTXT for reporting any diagnostics associated with the accesses. */
4708 region_model::eval_condition (tree lhs,
4711 region_model_context *ctxt)
4713 /* For now, make no attempt to model constraints on floating-point
4715 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4716 return tristate::unknown ();
4718 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
4721 /* Implementation of region_model::get_representative_path_var.
4722 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
4723 Use VISITED to prevent infinite mutual recursion with the overload for
4727 region_model::get_representative_path_var_1 (const svalue *sval,
4728 svalue_set *visited) const
4732 /* Prevent infinite recursion. */
4733 if (visited->contains (sval))
4734 return path_var (NULL_TREE, 0);
4735 visited->add (sval);
4737 /* Handle casts by recursion into get_representative_path_var. */
4738 if (const svalue *cast_sval = sval->maybe_undo_cast ())
4740 path_var result = get_representative_path_var (cast_sval, visited);
4741 tree orig_type = sval->get_type ();
4742 /* If necessary, wrap the result in a cast. */
4743 if (result.m_tree && orig_type)
4744 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
4748 auto_vec<path_var> pvs;
4749 m_store.get_representative_path_vars (this, visited, sval, &pvs);
4751 if (tree cst = sval->maybe_get_constant ())
4752 pvs.safe_push (path_var (cst, 0));
4754 /* Handle string literals and various other pointers. */
4755 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4757 const region *reg = ptr_sval->get_pointee ();
4758 if (path_var pv = get_representative_path_var (reg, visited))
4759 return path_var (build1 (ADDR_EXPR,
4765 /* If we have a sub_svalue, look for ways to represent the parent. */
4766 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
4768 const svalue *parent_sval = sub_sval->get_parent ();
4769 const region *subreg = sub_sval->get_subregion ();
4770 if (path_var parent_pv
4771 = get_representative_path_var (parent_sval, visited))
4772 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
4773 return path_var (build3 (COMPONENT_REF,
4776 field_reg->get_field (),
4778 parent_pv.m_stack_depth);
4781 /* Handle binops. */
4782 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
4784 = get_representative_path_var (binop_sval->get_arg0 (), visited))
4786 = get_representative_path_var (binop_sval->get_arg1 (), visited))
4787 return path_var (build2 (binop_sval->get_op (),
4789 lhs_pv.m_tree, rhs_pv.m_tree),
4790 lhs_pv.m_stack_depth);
4792 if (pvs.length () < 1)
4793 return path_var (NULL_TREE, 0);
4795 pvs.qsort (readability_comparator);
4799 /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
4800 Use VISITED to prevent infinite mutual recursion with the overload for
4803 This function defers to get_representative_path_var_1 to do the work;
4804 it adds verification that get_representative_path_var_1 returned a tree
4805 of the correct type. */
4808 region_model::get_representative_path_var (const svalue *sval,
4809 svalue_set *visited) const
4812 return path_var (NULL_TREE, 0);
4814 tree orig_type = sval->get_type ();
4816 path_var result = get_representative_path_var_1 (sval, visited);
4818 /* Verify that the result has the same type as SVAL, if any. */
4819 if (result.m_tree && orig_type)
4820 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
4825 /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
4827 Strip off any top-level cast, to avoid messages like
4828 double-free of '(void *)ptr'
4829 from analyzer diagnostics. */
4832 region_model::get_representative_tree (const svalue *sval) const
4835 tree expr = get_representative_path_var (sval, &visited).m_tree;
4837 /* Strip off any top-level cast. */
4838 if (expr && TREE_CODE (expr) == NOP_EXPR)
4839 expr = TREE_OPERAND (expr, 0);
4841 return fixup_tree_for_diagnostic (expr);
4845 region_model::get_representative_tree (const region *reg) const
4848 tree expr = get_representative_path_var (reg, &visited).m_tree;
4850 /* Strip off any top-level cast. */
4851 if (expr && TREE_CODE (expr) == NOP_EXPR)
4852 expr = TREE_OPERAND (expr, 0);
4854 return fixup_tree_for_diagnostic (expr);
4857 /* Implementation of region_model::get_representative_path_var.
4859 Attempt to return a path_var that represents REG, or return
4861 For example, a region for a field of a local would be a path_var
4862 wrapping a COMPONENT_REF.
4863 Use VISITED to prevent infinite mutual recursion with the overload for
4867 region_model::get_representative_path_var_1 (const region *reg,
4868 svalue_set *visited) const
4870 switch (reg->get_kind ())
4881 /* Regions that represent memory spaces are not expressible as trees. */
4882 return path_var (NULL_TREE, 0);
4886 const function_region *function_reg
4887 = as_a <const function_region *> (reg);
4888 return path_var (function_reg->get_fndecl (), 0);
4892 const label_region *label_reg = as_a <const label_region *> (reg);
4893 return path_var (label_reg->get_label (), 0);
4898 const symbolic_region *symbolic_reg
4899 = as_a <const symbolic_region *> (reg);
4900 const svalue *pointer = symbolic_reg->get_pointer ();
4901 path_var pointer_pv = get_representative_path_var (pointer, visited);
4903 return path_var (NULL_TREE, 0);
4904 tree offset = build_int_cst (pointer->get_type (), 0);
4905 return path_var (build2 (MEM_REF,
4909 pointer_pv.m_stack_depth);
4913 const decl_region *decl_reg = as_a <const decl_region *> (reg);
4914 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
4918 const field_region *field_reg = as_a <const field_region *> (reg);
4920 = get_representative_path_var (reg->get_parent_region (), visited);
4922 return path_var (NULL_TREE, 0);
4923 return path_var (build3 (COMPONENT_REF,
4926 field_reg->get_field (),
4928 parent_pv.m_stack_depth);
4933 const element_region *element_reg
4934 = as_a <const element_region *> (reg);
4936 = get_representative_path_var (reg->get_parent_region (), visited);
4938 return path_var (NULL_TREE, 0);
4940 = get_representative_path_var (element_reg->get_index (), visited);
4942 return path_var (NULL_TREE, 0);
4943 return path_var (build4 (ARRAY_REF,
4945 parent_pv.m_tree, index_pv.m_tree,
4946 NULL_TREE, NULL_TREE),
4947 parent_pv.m_stack_depth);
4952 const offset_region *offset_reg
4953 = as_a <const offset_region *> (reg);
4955 = get_representative_path_var (reg->get_parent_region (), visited);
4957 return path_var (NULL_TREE, 0);
4959 = get_representative_path_var (offset_reg->get_byte_offset (),
4961 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
4962 return path_var (NULL_TREE, 0);
4963 tree addr_parent = build1 (ADDR_EXPR,
4964 build_pointer_type (reg->get_type ()),
4966 return path_var (build2 (MEM_REF,
4968 addr_parent, offset_pv.m_tree),
4969 parent_pv.m_stack_depth);
4973 return path_var (NULL_TREE, 0);
4978 = get_representative_path_var (reg->get_parent_region (), visited);
4980 return path_var (NULL_TREE, 0);
4981 return path_var (build1 (NOP_EXPR,
4984 parent_pv.m_stack_depth);
4987 case RK_HEAP_ALLOCATED:
4989 /* No good way to express heap-allocated/alloca regions as trees. */
4990 return path_var (NULL_TREE, 0);
4994 const string_region *string_reg = as_a <const string_region *> (reg);
4995 return path_var (string_reg->get_string_cst (), 0);
5000 return path_var (NULL_TREE, 0);
5004 /* Attempt to return a path_var that represents REG, or return
5006 For example, a region for a field of a local would be a path_var
5007 wrapping a COMPONENT_REF.
5008 Use VISITED to prevent infinite mutual recursion with the overload for
5011 This function defers to get_representative_path_var_1 to do the work;
5012 it adds verification that get_representative_path_var_1 returned a tree
5013 of the correct type. */
5016 region_model::get_representative_path_var (const region *reg,
5017 svalue_set *visited) const
5019 path_var result = get_representative_path_var_1 (reg, visited);
5021 /* Verify that the result has the same type as REG, if any. */
5022 if (result.m_tree && reg->get_type ())
5023 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
5028 /* Update this model for any phis in SNODE, assuming we came from
5029 LAST_CFG_SUPEREDGE. */
5032 region_model::update_for_phis (const supernode *snode,
5033 const cfg_superedge *last_cfg_superedge,
5034 region_model_context *ctxt)
5036 gcc_assert (last_cfg_superedge);
5038 /* Copy this state and pass it to handle_phi so that all of the phi stmts
5039 are effectively handled simultaneously. */
5040 const region_model old_state (*this);
5042 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
5043 !gsi_end_p (gpi); gsi_next (&gpi))
5045 gphi *phi = gpi.phi ();
5047 tree src = last_cfg_superedge->get_phi_arg (phi);
5048 tree lhs = gimple_phi_result (phi);
5050 /* Update next_state based on phi and old_state. */
5051 handle_phi (phi, lhs, src, old_state, ctxt);
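/* A sketch of why the copy above matters: in a single-block loop that
   swaps two values, the phis can read each other's results:

     x_1 = PHI <x_0(entry), y_1(latch)>
     y_1 = PHI <y_0(entry), x_1(latch)>

   Evaluating each phi's source against OLD_STATE keeps the updates
   simultaneous, rather than letting the second phi observe the first
   phi's freshly-written value.  */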
5055 /* Attempt to update this model for taking EDGE (where the last statement
5056 was LAST_STMT), returning true if the edge can be taken, false
5058 When returning false, if OUT is non-NULL, write a new rejected_constraint
5061 For CFG superedges where LAST_STMT is a conditional or a switch
5062 statement, attempt to add the relevant conditions for EDGE to this
5063 model, returning true if they are feasible, or false if they are
5066 For call superedges, push frame information and store arguments
5069 For return superedges, pop frame information and store return
5070 values into any lhs.
5072 Rejection of call/return superedges happens elsewhere, in
5073 program_point::on_edge (i.e. based on program point, rather
5074 than program state). */
5077 region_model::maybe_update_for_edge (const superedge &edge,
5078 const gimple *last_stmt,
5079 region_model_context *ctxt,
5080 rejected_constraint **out)
5082 /* Handle frame updates for interprocedural edges. */
5083 switch (edge.m_kind)
5088 case SUPEREDGE_CALL:
5090 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
5091 update_for_call_superedge (*call_edge, ctxt);
5095 case SUPEREDGE_RETURN:
5097 const return_superedge *return_edge
5098 = as_a <const return_superedge *> (&edge);
5099 update_for_return_superedge (*return_edge, ctxt);
5103 case SUPEREDGE_INTRAPROCEDURAL_CALL:
5104 /* This is a no-op for call summaries; we should already
5105 have handled the effect of the call summary at the call stmt. */
5109 if (last_stmt == NULL)
5112 /* Apply any constraints for conditionals/switch statements. */
5114 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
5116 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
5117 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
5120 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
5122 const switch_cfg_superedge *switch_sedge
5123 = as_a <const switch_cfg_superedge *> (&edge);
5124 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
5128 /* Apply any constraints due to an exception being thrown. */
5129 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
5130 if (cfg_sedge->get_flags () & EDGE_EH)
5131 return apply_constraints_for_exception (last_stmt, ctxt, out);
5136 /* Push a new frame_region on to the stack region.
5137 Populate the frame_region with child regions for the function call's
5138 parameters, using values from the arguments at the callsite in the
5142 region_model::update_for_gcall (const gcall *call_stmt,
5143 region_model_context *ctxt,
5146 /* Build a vec of argument svalues, using the current top
5147 frame for resolving tree expressions. */
5148 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
5150 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
5152 tree arg = gimple_call_arg (call_stmt, i);
5153 arg_svals.quick_push (get_rvalue (arg, ctxt));
5158 /* Get the function * from the gcall. */
5159 tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
5160 callee = DECL_STRUCT_FUNCTION (fn_decl);
5163 push_frame (callee, &arg_svals, ctxt);
5166 /* Pop the top-most frame_region from the stack, and copy the return
5167 region's values (if any) into the region for the lvalue of the LHS of
5168 the call (if any). */
5171 region_model::update_for_return_gcall (const gcall *call_stmt,
5172 region_model_context *ctxt)
5174 /* Get the lvalue for the result of the call, passing it to pop_frame,
5175 so that pop_frame can determine the region with respect to the
5177 tree lhs = gimple_call_lhs (call_stmt);
5178 pop_frame (lhs, NULL, ctxt);
5181 /* Extract calling information from the superedge and update the model for the
5185 region_model::update_for_call_superedge (const call_superedge &call_edge,
5186 region_model_context *ctxt)
5188 const gcall *call_stmt = call_edge.get_call_stmt ();
5189 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
5192 /* Extract calling information from the return superedge and update the model
5193 for the returning call. */
5196 region_model::update_for_return_superedge (const return_superedge &return_edge,
5197 region_model_context *ctxt)
5199 const gcall *call_stmt = return_edge.get_call_stmt ();
5200 update_for_return_gcall (call_stmt, ctxt);
5203 /* Attempt to use R to replay SUMMARY into this object.
5204 Return true if it is possible. */
5207 region_model::replay_call_summary (call_summary_replay &r,
5208 const region_model &summary)
5210 gcc_assert (summary.get_stack_depth () == 1);
5212 m_store.replay_call_summary (r, summary.m_store);
5214 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
5217 for (auto kv : summary.m_dynamic_extents)
5219 const region *summary_reg = kv.first;
5220 const region *caller_reg = r.convert_region_from_summary (summary_reg);
5223 const svalue *summary_sval = kv.second;
5224 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
5227 m_dynamic_extents.put (caller_reg, caller_sval);
5233 /* Given a true or false edge guarded by conditional statement COND_STMT,
5234 determine appropriate constraints for the edge to be taken.
5236 If they are feasible, add the constraints and return true.
5238 Return false if the constraints contradict existing knowledge
5239 (and so the edge should not be taken).
5240 When returning false, if OUT is non-NULL, write a new rejected_constraint
5244 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
5245 const gcond *cond_stmt,
5246 region_model_context *ctxt,
5247 rejected_constraint **out)
5249 ::edge cfg_edge = sedge.get_cfg_edge ();
5250 gcc_assert (cfg_edge != NULL);
5251 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
5253 enum tree_code op = gimple_cond_code (cond_stmt);
5254 tree lhs = gimple_cond_lhs (cond_stmt);
5255 tree rhs = gimple_cond_rhs (cond_stmt);
5256 if (cfg_edge->flags & EDGE_FALSE_VALUE)
5257 op = invert_tree_comparison (op, false /* honor_nans */);
5258 return add_constraint (lhs, op, rhs, ctxt, out);
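/* For example (a sketch): for "if (x < 10)", the true edge adds the
   constraint "x < 10", while the false edge inverts LT_EXPR to
   GE_EXPR and adds "x >= 10".  */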
5261 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
5262 for the edge to be taken.
5264 If they are feasible, add the constraints and return true.
5266 Return false if the constraints contradict existing knowledge
5267 (and so the edge should not be taken).
5268 When returning false, if OUT is non-NULL, write a new rejected_constraint
5272 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
5273 const gswitch *switch_stmt,
5274 region_model_context *ctxt,
5275 rejected_constraint **out)
5277 bounded_ranges_manager *ranges_mgr = get_range_manager ();
5278 const bounded_ranges *all_cases_ranges
5279 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
5280 tree index = gimple_switch_index (switch_stmt);
5281 const svalue *index_sval = get_rvalue (index, ctxt);
5282 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
5284 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
5285 if (sat && ctxt && !all_cases_ranges->empty_p ())
5286 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
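/* Sketch: for

     switch (i) { case 1 ... 5: A; default: B; }

   the edge to A constrains "i" to [1, 5], while the default edge
   constrains "i" to the complementary ranges [INT_MIN, 0] and
   [6, INT_MAX].  */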
5290 /* Apply any constraints due to an exception being thrown at LAST_STMT.
5292 If they are feasible, add the constraints and return true.
5294 Return false if the constraints contradict existing knowledge
5295 (and so the edge should not be taken).
5296 When returning false, if OUT is non-NULL, write a new rejected_constraint
5300 region_model::apply_constraints_for_exception (const gimple *last_stmt,
5301 region_model_context *ctxt,
5302 rejected_constraint **out)
5304 gcc_assert (last_stmt);
5305 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
5306 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
5307 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
5308 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
5310 /* We have an exception thrown from operator new.
5311 Add a constraint that the result was NULL, to avoid a false
5312 leak report due to the result being lost when following
5314 if (tree lhs = gimple_call_lhs (call))
5315 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
5321 /* For use with push_frame when handling a top-level call within the analysis.
5322 PARAM has a defined but unknown initial value.
5323 Anything it points to has escaped, since the calling context "knows"
5324 the pointer, and thus calls to unknown functions could read/write into
5328 region_model::on_top_level_param (tree param,
5329 region_model_context *ctxt)
5331 if (POINTER_TYPE_P (TREE_TYPE (param)))
5333 const region *param_reg = get_lvalue (param, ctxt);
5334 const svalue *init_ptr_sval
5335 = m_mgr->get_or_create_initial_value (param_reg);
5336 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
5337 m_store.mark_as_escaped (pointee_reg);
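/* Sketch: when "void public_entry (int *p)" is analyzed as a
   top-level function (an assumed example), the region that "p"
   initially points to is marked as escaped: the caller already holds
   the pointer, so any call to an unknown function could read or
   write through it.  */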
5341 /* Update this region_model to reflect pushing a frame onto the stack
5344 If ARG_SVALS is non-NULL, use it to populate the parameters
5346 Otherwise, the params have their initial_svalues.
5348 Return the frame_region for the new frame. */
5351 region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
5352 region_model_context *ctxt)
5354 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
5357 /* Arguments supplied from a caller frame. */
5358 tree fndecl = fun->decl;
5360 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5361 iter_parm = DECL_CHAIN (iter_parm), ++idx)
5363 /* If there's a mismatching declaration, the call stmt might
5364 not have enough args. Handle this case by leaving the
5365 rest of the params as uninitialized. */
5366 if (idx >= arg_svals->length ())
5368 tree parm_lval = iter_parm;
5369 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
5370 parm_lval = parm_default_ssa;
5371 const region *parm_reg = get_lvalue (parm_lval, ctxt);
5372 const svalue *arg_sval = (*arg_svals)[idx];
5373 set_value (parm_reg, arg_sval, ctxt);
5376 /* Handle any variadic args. */
5377 unsigned va_arg_idx = 0;
5378 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
5380 const svalue *arg_sval = (*arg_svals)[idx];
5381 const region *var_arg_reg
5382 = m_mgr->get_var_arg_region (m_current_frame,
5384 set_value (var_arg_reg, arg_sval, ctxt);
5389 /* Otherwise we have a top-level call within the analysis. The params
5390 have defined but unknown initial values.
5391 Anything they point to has escaped. */
5392 tree fndecl = fun->decl;
5393 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5394 iter_parm = DECL_CHAIN (iter_parm))
5396 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
5397 on_top_level_param (parm_default_ssa, ctxt);
5399 on_top_level_param (iter_parm, ctxt);
5403 return m_current_frame;
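/* Sketch of the variadic handling above (assumed declaration): for a
   call "logmsg (fmt, 1, 2)" to "void logmsg (const char *, ...)",
   the named param gets the svalue for "fmt", and the svalues for 1
   and 2 are stored in var_arg_regions 0 and 1 of the new frame, to
   be retrieved later when modeling va_arg.  */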
5406 /* Get the function of the top-most frame in this region_model's stack.
5407 There must be such a frame. */
5410 region_model::get_current_function () const
5412 const frame_region *frame = get_current_frame ();
5414 return frame->get_function ();
5417 /* Pop the topmost frame_region from this region_model's stack;
5419 If RESULT_LVALUE is non-null, copy any return value from the frame
5420 into the corresponding region (evaluated with respect to the *caller*
5421 frame, rather than the called frame).
5422 If OUT_RESULT is non-null, copy any return value from the frame
5425 Purge the frame region and all its descendent regions.
5426 Convert any pointers that point into such regions into
5427 POISON_KIND_POPPED_STACK svalues. */
5430 region_model::pop_frame (tree result_lvalue,
5431 const svalue **out_result,
5432 region_model_context *ctxt)
5434 gcc_assert (m_current_frame);
5436 /* Evaluate the result, within the callee frame. */
5437 const frame_region *frame_reg = m_current_frame;
5438 tree fndecl = m_current_frame->get_function ()->decl;
5439 tree result = DECL_RESULT (fndecl);
5440 const svalue *retval = NULL;
5441 if (result && TREE_TYPE (result) != void_type_node)
5443 retval = get_rvalue (result, ctxt);
5445 *out_result = retval;
5448 /* Pop the frame. */
5449 m_current_frame = m_current_frame->get_calling_frame ();
5451 if (result_lvalue && retval)
5453 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
5454 the frame, but before poisoning pointers into the old frame. */
5455 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
5456 set_value (result_dst_reg, retval, ctxt);
5459 unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
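/* E.g. (a sketch): after

     int *p;
     void callee (void) { int x = 42; p = &x; }

   returns, popping callee's frame rewrites the value of "p" to a
   poisoned svalue of kind POISON_KIND_POPPED_STACK, so later
   dereferences can be reported as using a dangling pointer.  */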
5462 /* Get the number of frames in this region_model's stack. */
5465 region_model::get_stack_depth () const
5467 const frame_region *frame = get_current_frame ();
5469 return frame->get_stack_depth ();
5474 /* Get the frame_region with the given index within the stack.
5475 The frame_region must exist. */
5477 const frame_region *
5478 region_model::get_frame_at_index (int index) const
5480 const frame_region *frame = get_current_frame ();
5482 gcc_assert (index >= 0);
5483 gcc_assert (index <= frame->get_index ());
5484 while (index != frame->get_index ())
5486 frame = frame->get_calling_frame ();
5492 /* Unbind svalues for any regions in REG and below.
5493 Find any pointers to such regions; convert them to
5494 poisoned values of kind PKIND.
5495 Also purge any dynamic extents. */
5498 region_model::unbind_region_and_descendents (const region *reg,
5499 enum poison_kind pkind)
5501 /* Gather a set of base regions to be unbound. */
5502 hash_set<const region *> base_regs;
5503 for (store::cluster_map_t::iterator iter = m_store.begin ();
5504 iter != m_store.end (); ++iter)
5506 const region *iter_base_reg = (*iter).first;
5507 if (iter_base_reg->descendent_of_p (reg))
5508 base_regs.add (iter_base_reg);
5510 for (hash_set<const region *>::iterator iter = base_regs.begin ();
5511 iter != base_regs.end (); ++iter)
5512 m_store.purge_cluster (*iter);
5514 /* Find any pointers to REG or its descendents; convert to poisoned. */
5515 poison_any_pointers_to_descendents (reg, pkind);
5517 /* Purge dynamic extents of any base regions in REG and below
5518 (e.g. VLAs and alloca stack regions). */
5519 for (auto iter : m_dynamic_extents)
5521 const region *iter_reg = iter.first;
5522 if (iter_reg->descendent_of_p (reg))
5523 unset_dynamic_extents (iter_reg);
5527 /* Implementation of BindingVisitor.
5528 Update the bound svalues for regions below REG to use poisoned
5531 struct bad_pointer_finder
5533 bad_pointer_finder (const region *reg, enum poison_kind pkind,
5534 region_model_manager *mgr)
5535 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
5538 void on_binding (const binding_key *, const svalue *&sval)
5540 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
5542 const region *ptr_dst = ptr_sval->get_pointee ();
5543 /* Poison ptrs to descendents of REG, but not to REG itself,
5544 otherwise double-free detection doesn't work (since sm-state
5545 for "free" is stored on the original ptr svalue). */
5546 if (ptr_dst->descendent_of_p (m_reg)
5547 && ptr_dst != m_reg)
5549 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
5556 const region *m_reg;
5557 enum poison_kind m_pkind;
5558 region_model_manager *const m_mgr;
5562 /* Find any pointers to REG or its descendents; convert them to
5563 poisoned values of kind PKIND.
5564 Return the number of pointers that were poisoned. */
5567 region_model::poison_any_pointers_to_descendents (const region *reg,
5568 enum poison_kind pkind)
5570 bad_pointer_finder bv (reg, pkind, m_mgr);
5571 m_store.for_each_binding (bv);
5575 /* Attempt to merge THIS with OTHER_MODEL, writing the result
5576 to OUT_MODEL. Use POINT to distinguish values created as a
5577 result of merging. */
5580 region_model::can_merge_with_p (const region_model &other_model,
5581 const program_point &point,
5582 region_model *out_model,
5583 const extrinsic_state *ext_state,
5584 const program_state *state_a,
5585 const program_state *state_b) const
5587 gcc_assert (out_model);
5588 gcc_assert (m_mgr == other_model.m_mgr);
5589 gcc_assert (m_mgr == out_model->m_mgr);
5591 if (m_current_frame != other_model.m_current_frame)
5593 out_model->m_current_frame = m_current_frame;
5595 model_merger m (this, &other_model, point, out_model,
5596 ext_state, state_a, state_b);
5598 if (!store::can_merge_p (&m_store, &other_model.m_store,
5599 &out_model->m_store, m_mgr->get_store_manager (),
5603 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
5604 &out_model->m_dynamic_extents))
5607 /* Merge constraints. */
5608 constraint_manager::merge (*m_constraints,
5609 *other_model.m_constraints,
5610 out_model->m_constraints);
5615 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
5619 region_model::get_fndecl_for_call (const gcall *call,
5620 region_model_context *ctxt)
5622 tree fn_ptr = gimple_call_fn (call);
5623 if (fn_ptr == NULL_TREE)
5625 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
5626 if (const region_svalue *fn_ptr_ptr
5627 = fn_ptr_sval->dyn_cast_region_svalue ())
5629 const region *reg = fn_ptr_ptr->get_pointee ();
5630 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
5632 tree fn_decl = fn_reg->get_fndecl ();
5633 cgraph_node *node = cgraph_node::get (fn_decl);
5636 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
5638 return ultimate_node->decl;
5645 /* Would be much simpler to use a lambda here, if it were supported. */
5647 struct append_regions_cb_data
5649 const region_model *model;
5650 auto_vec<const decl_region *> *out;
5653 /* Populate *OUT with all decl_regions in the current
5654 frame that have clusters within the store. */
5658 get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
5660 append_regions_cb_data data;
5663 m_store.for_each_cluster (append_regions_cb, &data);
5666 /* Implementation detail of get_regions_for_current_frame. */
5669 region_model::append_regions_cb (const region *base_reg,
5670 append_regions_cb_data *cb_data)
5672 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
5674 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
5675 cb_data->out->safe_push (decl_reg);
5679 /* Abstract class for diagnostics related to the use of
5680 floating-point arithmetic where precision is needed. */
5682 class imprecise_floating_point_arithmetic : public pending_diagnostic
5685 int get_controlling_option () const final override
5687 return OPT_Wanalyzer_imprecise_fp_arithmetic;
5691 /* Concrete diagnostic to complain about uses of floating-point arithmetic
5692 in the size argument of malloc etc. */
5694 class float_as_size_arg : public imprecise_floating_point_arithmetic
5697 float_as_size_arg (tree arg) : m_arg (arg)
5700 const char *get_kind () const final override
5702 return "float_as_size_arg_diagnostic";
5705 bool subclass_equal_p (const pending_diagnostic &other) const final override
5707 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
5710 bool emit (rich_location *rich_loc) final override
5712 diagnostic_metadata m;
5713 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
5714 "use of floating-point arithmetic here might"
5715 " yield unexpected results");
5717 inform (rich_loc->get_loc (), "only use operands of an integer type"
5718 " inside the size argument");
5722 label_text describe_final_event (const evdesc::final_event &ev) final
5726 return ev.formatted_print ("operand %qE is of type %qT",
5727 m_arg, TREE_TYPE (m_arg));
5728 return ev.formatted_print ("at least one operand of the size argument is"
5729 " of a floating-point type");
5736 /* Visitor to find uses of floating-point variables/constants in an svalue. */
5738 class contains_floating_point_visitor : public visitor
5741 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
5743 root_sval->accept (this);
5746 const svalue *get_svalue_to_report ()
5751 void visit_constant_svalue (const constant_svalue *sval) final override
5753 /* At the point the analyzer runs, constant integer operands in a floating-
5754 point expression have already been implicitly converted to floating point.
5755 Thus, we prefer to report non-constants, so that the diagnostic
5756 always reports a floating-point operand. */
5757 tree type = sval->get_type ();
5758 if (type && FLOAT_TYPE_P (type) && !m_result)
5762 void visit_conjured_svalue (const conjured_svalue *sval) final override
5764 tree type = sval->get_type ();
5765 if (type && FLOAT_TYPE_P (type))
5769 void visit_initial_svalue (const initial_svalue *sval) final override
5771 tree type = sval->get_type ();
5772 if (type && FLOAT_TYPE_P (type))
5777 /* Non-null if at least one floating-point operand was found. */
5778 const svalue *m_result;
5781 /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
5784 region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
5785 region_model_context *ctxt) const
5789 contains_floating_point_visitor v (size_in_bytes);
5790 if (const svalue *float_sval = v.get_svalue_to_report ())
5792 tree diag_arg = get_representative_tree (float_sval);
5793 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
5797 /* Return a new region describing a heap-allocated block of memory.
5798 Use CTXT to complain about tainted sizes. */
5801 region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
5802 region_model_context *ctxt)
5804 const region *reg = m_mgr->create_region_for_heap_alloc ();
5805 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5806 set_dynamic_extents (reg, size_in_bytes, ctxt);
5810 /* Return a new region describing a block of memory allocated within the
5812 Use CTXT to complain about tainted sizes. */
5815 region_model::create_region_for_alloca (const svalue *size_in_bytes,
5816 region_model_context *ctxt)
5818 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
5819 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5820 set_dynamic_extents (reg, size_in_bytes, ctxt);
5824 /* Record that the size of REG is SIZE_IN_BYTES.
5825 Use CTXT to complain about tainted sizes. */
5828 region_model::set_dynamic_extents (const region *reg,
5829 const svalue *size_in_bytes,
5830 region_model_context *ctxt)
5832 assert_compat_types (size_in_bytes->get_type (), size_type_node);
5835 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
5837 check_dynamic_size_for_floats (size_in_bytes, ctxt);
5839 m_dynamic_extents.put (reg, size_in_bytes);
5842 /* Get the recorded size of REG in bytes, or NULL if no dynamic size was
5846 region_model::get_dynamic_extents (const region *reg) const
5848 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5853 /* Unset any recorded dynamic size of REG. */
5856 region_model::unset_dynamic_extents (const region *reg)
5858 m_dynamic_extents.remove (reg);
5861 /* Information about the layout of a RECORD_TYPE, capturing it as a vector
5862 of items, where each item is either a field or padding. */
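/* For example (a sketch, assuming a 32-bit int with 32-bit alignment):

     struct s { char c; int i; };

   would be captured as three items:
     "c"                bits 0-7
     padding after "c"  bits 8-31
     "i"                bits 32-63  */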
5867 /* An item within a record; either a field, or padding after a field. */
5871 item (const bit_range &br,
5876 m_is_padding (is_padding)
5880 bit_offset_t get_start_bit_offset () const
5882 return m_bit_range.get_start_bit_offset ();
5884 bit_offset_t get_next_bit_offset () const
5886 return m_bit_range.get_next_bit_offset ();
5889 bool contains_p (bit_offset_t offset) const
5891 return m_bit_range.contains_p (offset);
5894 void dump_to_pp (pretty_printer *pp) const
5897 pp_printf (pp, "padding after %qD", m_field);
5899 pp_printf (pp, "%qD", m_field);
5900 pp_string (pp, ", ");
5901 m_bit_range.dump_to_pp (pp);
5904 bit_range m_bit_range;
5909 record_layout (tree record_type)
5911 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
5913 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
5914 iter = DECL_CHAIN (iter))
5916 if (TREE_CODE (iter) == FIELD_DECL)
5918 int iter_field_offset = int_bit_position (iter);
5919 bit_size_t size_in_bits;
5920 if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
5923 maybe_pad_to (iter_field_offset);
5926 m_items.safe_push (item (bit_range (iter_field_offset,
5932 /* Add any trailing padding. */
5933 bit_size_t size_in_bits;
5934 if (int_size_in_bits (record_type, &size_in_bits))
5935 maybe_pad_to (size_in_bits);
5938 void dump_to_pp (pretty_printer *pp) const
5942 FOR_EACH_VEC_ELT (m_items, i, it)
5944 it->dump_to_pp (pp);
5949 DEBUG_FUNCTION void dump () const
5952 pp_format_decoder (&pp) = default_tree_printer;
5953 pp.buffer->stream = stderr;
5958 const record_layout::item *get_item_at (bit_offset_t offset) const
5962 FOR_EACH_VEC_ELT (m_items, i, it)
5963 if (it->contains_p (offset))
5969 /* Subroutine of ctor. Add padding item to NEXT_OFFSET if necessary. */
5971 void maybe_pad_to (bit_offset_t next_offset)
5973 if (m_items.length () > 0)
5975 const item &last_item = m_items[m_items.length () - 1];
5976 bit_offset_t offset_after_last_item
5977 = last_item.get_next_bit_offset ();
5978 if (next_offset > offset_after_last_item)
5980 bit_size_t padding_size
5981 = next_offset - offset_after_last_item;
5982 m_items.safe_push (item (bit_range (offset_after_last_item,
5984 last_item.m_field, true));
5989 auto_vec<item> m_items;
5992 /* A subclass of pending_diagnostic for complaining about uninitialized data
5993 being copied across a trust boundary to an untrusted output
5994 (e.g. copy_to_user infoleaks in the Linux kernel). */
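/* A hedged sketch of the kind of code this diagnoses (assumed,
   kernel-style API):

     struct reply r;
     r.status = 0;                      // r.data and padding untouched
     copy_to_user (dst, &r, sizeof (r));

   The uninitialized bytes could leak stale kernel stack contents to
   user space.  */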
5996 class exposure_through_uninit_copy
5997 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
6000 exposure_through_uninit_copy (const region *src_region,
6001 const region *dest_region,
6002 const svalue *copied_sval)
6003 : m_src_region (src_region),
6004 m_dest_region (dest_region),
6005 m_copied_sval (copied_sval)
6007 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
6008 || m_copied_sval->get_kind () == SK_COMPOUND);
6011 const char *get_kind () const final override
6013 return "exposure_through_uninit_copy";
6016 bool operator== (const exposure_through_uninit_copy &other) const
6018 return (m_src_region == other.m_src_region
6019 && m_dest_region == other.m_dest_region
6020 && m_copied_sval == other.m_copied_sval);
6023 int get_controlling_option () const final override
6025 return OPT_Wanalyzer_exposure_through_uninit_copy;
6028 bool emit (rich_location *rich_loc) final override
6030 diagnostic_metadata m;
6031 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
6033 enum memory_space mem_space = get_src_memory_space ();
6038 warned = warning_meta
6039 (rich_loc, m, get_controlling_option (),
6040 "potential exposure of sensitive information"
6041 " by copying uninitialized data across trust boundary");
6043 case MEMSPACE_STACK:
6044 warned = warning_meta
6045 (rich_loc, m, get_controlling_option (),
6046 "potential exposure of sensitive information"
6047 " by copying uninitialized data from stack across trust boundary");
6050 warned = warning_meta
6051 (rich_loc, m, get_controlling_option (),
6052 "potential exposure of sensitive information"
6053 " by copying uninitialized data from heap across trust boundary");
6058 location_t loc = rich_loc->get_loc ();
6059 inform_number_of_uninit_bits (loc);
6060 complain_about_uninit_ranges (loc);
6062 if (mem_space == MEMSPACE_STACK)
6063 maybe_emit_fixit_hint ();
6068 label_text describe_final_event (const evdesc::final_event &) final override
6070 enum memory_space mem_space = get_src_memory_space ();
6074 return label_text::borrow ("uninitialized data copied here");
6076 case MEMSPACE_STACK:
6077 return label_text::borrow ("uninitialized data copied from stack here");
6080 return label_text::borrow ("uninitialized data copied from heap here");
6084 void mark_interesting_stuff (interesting_t *interest) final override
6087 interest->add_region_creation (m_src_region);
6091 enum memory_space get_src_memory_space () const
6093 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
6096 bit_size_t calc_num_uninit_bits () const
6098 switch (m_copied_sval->get_kind ())
6105 const poisoned_svalue *poisoned_sval
6106 = as_a <const poisoned_svalue *> (m_copied_sval);
6107 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
6109 /* Give up if we don't have type information. */
6110 if (m_copied_sval->get_type () == NULL_TREE)
6113 bit_size_t size_in_bits;
6114 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
6115 return size_in_bits;
6117 /* Give up if we can't get the size of the type. */
6123 const compound_svalue *compound_sval
6124 = as_a <const compound_svalue *> (m_copied_sval);
6125 bit_size_t result = 0;
6126 /* Find keys for uninit svals. */
6127 for (auto iter : *compound_sval)
6129 const svalue *sval = iter.second;
6130 if (const poisoned_svalue *psval
6131 = sval->dyn_cast_poisoned_svalue ())
6132 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6134 const binding_key *key = iter.first;
6135 const concrete_binding *ckey
6136 = key->dyn_cast_concrete_binding ();
6138 result += ckey->get_size_in_bits ();
6146 void inform_number_of_uninit_bits (location_t loc) const
6148 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
6149 if (num_uninit_bits <= 0)
6151 if (num_uninit_bits % BITS_PER_UNIT == 0)
6153 /* Express in bytes. */
6154 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
6155 if (num_uninit_bytes == 1)
6156 inform (loc, "1 byte is uninitialized");
6159 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
6163 /* Express in bits. */
6164 if (num_uninit_bits == 1)
6165 inform (loc, "1 bit is uninitialized");
6168 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
6172 void complain_about_uninit_ranges (location_t loc) const
6174 if (const compound_svalue *compound_sval
6175 = m_copied_sval->dyn_cast_compound_svalue ())
6177 /* Find keys for uninit svals. */
6178 auto_vec<const concrete_binding *> uninit_keys;
6179 for (auto iter : *compound_sval)
6181 const svalue *sval = iter.second;
6182 if (const poisoned_svalue *psval
6183 = sval->dyn_cast_poisoned_svalue ())
6184 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6186 const binding_key *key = iter.first;
6187 const concrete_binding *ckey
6188 = key->dyn_cast_concrete_binding ();
6190 uninit_keys.safe_push (ckey);
6193 /* Complain about them in sorted order. */
6194 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
6196 std::unique_ptr<record_layout> layout;
6198 tree type = m_copied_sval->get_type ();
6199 if (type && TREE_CODE (type) == RECORD_TYPE)
6201 // (std::make_unique is C++14)
6202 layout = std::unique_ptr<record_layout> (new record_layout (type));
6209 const concrete_binding *ckey;
6210 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
6212 bit_offset_t start_bit = ckey->get_start_bit_offset ();
6213 bit_offset_t next_bit = ckey->get_next_bit_offset ();
6214 complain_about_uninit_range (loc, start_bit, next_bit,
6220 void complain_about_uninit_range (location_t loc,
6221 bit_offset_t start_bit,
6222 bit_offset_t next_bit,
6223 const record_layout *layout) const
6227 while (start_bit < next_bit)
6229 if (const record_layout::item *item
6230 = layout->get_item_at (start_bit))
6232 gcc_assert (start_bit >= item->get_start_bit_offset ());
6233 gcc_assert (start_bit < item->get_next_bit_offset ());
6234 if (item->get_start_bit_offset () == start_bit
6235 && item->get_next_bit_offset () <= next_bit)
6236 complain_about_fully_uninit_item (*item);
6238 complain_about_partially_uninit_item (*item);
6239 start_bit = item->get_next_bit_offset ();
6247 if (start_bit >= next_bit)
6250 if (start_bit % 8 == 0 && next_bit % 8 == 0)
6252 /* Express in bytes. */
6253 byte_offset_t start_byte = start_bit / 8;
6254 byte_offset_t last_byte = (next_bit / 8) - 1;
6255 if (last_byte == start_byte)
6257 "byte %wu is uninitialized",
6258 start_byte.to_uhwi ());
6261 "bytes %wu - %wu are uninitialized",
6262 start_byte.to_uhwi (),
6263 last_byte.to_uhwi ());
6267 /* Express in bits. */
6268 bit_offset_t last_bit = next_bit - 1;
6269 if (last_bit == start_bit)
6271 "bit %wu is uninitialized",
6272 start_bit.to_uhwi ());
6275 "bits %wu - %wu are uninitialized",
6276 start_bit.to_uhwi (),
6277 last_bit.to_uhwi ());
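/* Worked example (a sketch): an uninitialized range covering bits
   32..63 has start_bit == 32 and next_bit == 64; both are multiples
   of 8, so it is reported in bytes as
   "bytes 4 - 7 are uninitialized".  */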
6282 complain_about_fully_uninit_item (const record_layout::item &item)
6284 tree field = item.m_field;
6285 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
6286 if (item.m_is_padding)
6288 if (num_bits % 8 == 0)
6290 /* Express in bytes. */
6291 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6293 inform (DECL_SOURCE_LOCATION (field),
6294 "padding after field %qD is uninitialized (1 byte)",
6297 inform (DECL_SOURCE_LOCATION (field),
6298 "padding after field %qD is uninitialized (%wu bytes)",
6299 field, num_bytes.to_uhwi ());
6303 /* Express in bits. */
6305 inform (DECL_SOURCE_LOCATION (field),
6306 "padding after field %qD is uninitialized (1 bit)",
6309 inform (DECL_SOURCE_LOCATION (field),
6310 "padding after field %qD is uninitialized (%wu bits)",
6311 field, num_bits.to_uhwi ());
6316 if (num_bits % 8 == 0)
6318 /* Express in bytes. */
6319 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6321 inform (DECL_SOURCE_LOCATION (field),
6322 "field %qD is uninitialized (1 byte)", field);
6324 inform (DECL_SOURCE_LOCATION (field),
6325 "field %qD is uninitialized (%wu bytes)",
6326 field, num_bytes.to_uhwi ());
6330 /* Express in bits. */
6332 inform (DECL_SOURCE_LOCATION (field),
6333 "field %qD is uninitialized (1 bit)", field);
6335 inform (DECL_SOURCE_LOCATION (field),
6336 "field %qD is uninitialized (%wu bits)",
6337 field, num_bits.to_uhwi ());
6343 complain_about_partially_uninit_item (const record_layout::item &item)
6345 tree field = item.m_field;
6346 if (item.m_is_padding)
6347 inform (DECL_SOURCE_LOCATION (field),
6348 "padding after field %qD is partially uninitialized",
6351 inform (DECL_SOURCE_LOCATION (field),
6352 "field %qD is partially uninitialized",
6354 /* TODO: ideally we'd describe what parts are uninitialized. */
6357 void maybe_emit_fixit_hint () const
6359 if (tree decl = m_src_region->maybe_get_decl ())
6361 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
6362 hint_richloc.add_fixit_insert_after (" = {0}");
6363 inform (&hint_richloc,
6364 "suggest forcing zero-initialization by"
6365 " providing a %<{0}%> initializer");
6370 const region *m_src_region;
6371 const region *m_dest_region;
6372 const svalue *m_copied_sval;
6375 /* Return true if any part of SVAL is uninitialized. */
6378 contains_uninit_p (const svalue *sval)
6380 struct uninit_finder : public visitor
6383 uninit_finder () : m_found_uninit (false) {}
6384 void visit_poisoned_svalue (const poisoned_svalue *sval)
6386 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
6387 m_found_uninit = true;
6389 bool m_found_uninit;
6395 return v.m_found_uninit;
6398 /* Function for use by plugins when simulating writing data through a
6399 pointer to an "untrusted" region DST_REG (and thus crossing a security
6400 boundary), such as copying data to user space in an OS kernel.
6402 Check that COPIED_SVAL is fully initialized. If not, complain about
6403 an infoleak to CTXT.
6405 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
6406 as to where COPIED_SVAL came from. */
6409 region_model::maybe_complain_about_infoleak (const region *dst_reg,
6410 const svalue *copied_sval,
6411 const region *src_reg,
6412 region_model_context *ctxt)
6414 /* Check for exposure. */
6415 if (contains_uninit_p (copied_sval))
6416 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
6421 /* class noop_region_model_context : public region_model_context. */
6424 noop_region_model_context::add_note (std::unique_ptr<pending_note>)
6429 noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
6434 noop_region_model_context::terminate_path ()
6438 /* struct model_merger. */
6440 /* Dump a multiline representation of this merger to PP. */
6443 model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
6445 pp_string (pp, "model A:");
6447 m_model_a->dump_to_pp (pp, simple, true);
6450 pp_string (pp, "model B:");
6452 m_model_b->dump_to_pp (pp, simple, true);
6455 pp_string (pp, "merged model:");
6457 m_merged_model->dump_to_pp (pp, simple, true);
6461 /* Dump a multiline representation of this merger to FILE. */
6464 model_merger::dump (FILE *fp, bool simple) const
6467 pp_format_decoder (&pp) = default_tree_printer;
6468 pp_show_color (&pp) = pp_show_color (global_dc->printer);
6469 pp.buffer->stream = fp;
6470 dump_to_pp (&pp, simple);
6474 /* Dump a multiline representation of this merger to stderr. */
6477 model_merger::dump (bool simple) const
6479 dump (stderr, simple);
6482 /* Return true if it's OK to merge SVAL with other svalues. */
6485 model_merger::mergeable_svalue_p (const svalue *sval) const
6489 /* Reject merging svalues that have non-purgable sm-state,
6490 to avoid falsely reporting memory leaks by merging them
6491 with something else. For example, given a local var "p",
6492 reject the merger of a:
6493 store_a mapping "p" to a malloc-ed ptr
6495 store_b mapping "p" to a NULL ptr. */
6497 if (!m_state_a->can_purge_p (*m_ext_state, sval))
6500 if (!m_state_b->can_purge_p (*m_ext_state, sval))
6508 /* Dump RMODEL fully to stderr (i.e. without summarization). */
6511 debug (const region_model &rmodel)
6513 rmodel.dump (false);
6516 /* class rejected_op_constraint : public rejected_constraint. */
6519 rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
6521 region_model m (m_model);
6522 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
6523 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
6524 lhs_sval->dump_to_pp (pp, true);
6525 pp_printf (pp, " %s ", op_symbol_code (m_op));
6526 rhs_sval->dump_to_pp (pp, true);
6529 /* class rejected_ranges_constraint : public rejected_constraint. */
6532 rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
6534 region_model m (m_model);
6535 const svalue *sval = m.get_rvalue (m_expr, NULL);
6536 sval->dump_to_pp (pp, true);
6537 pp_string (pp, " in ");
6538 m_ranges->dump_to_pp (pp, true);
6543 /* engine's ctor. */
6545 engine::engine (const supergraph *sg, logger *logger)
6546 : m_sg (sg), m_mgr (logger)
6550 /* Dump the managed objects by class to LOGGER, and the per-class totals. */
6553 engine::log_stats (logger *logger) const
6555 m_mgr.log_stats (logger, true);
6562 namespace selftest {
6564 /* Build a constant tree of the given type from STR. */
6567 build_real_cst_from_string (tree type, const char *str)
6569 REAL_VALUE_TYPE real;
6570 real_from_string (&real, str);
6571 return build_real (type, real);
6574 /* Append various "interesting" constants to OUT (e.g. NaN). */
6577 append_interesting_constants (auto_vec<tree> *out)
6579 out->safe_push (build_int_cst (integer_type_node, 0));
6580 out->safe_push (build_int_cst (integer_type_node, 42));
6581 out->safe_push (build_int_cst (unsigned_type_node, 0));
6582 out->safe_push (build_int_cst (unsigned_type_node, 42));
6583 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
6584 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
6585 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
6586 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
6587 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
6588 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
6589 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
6590 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
6593 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
6594 if the underlying constants aren't comparable. */
6597 test_tree_cmp_on_constants ()
6599 auto_vec<tree> csts;
6600 append_interesting_constants (&csts);
6602 /* Try sorting every triple. */
6603 const unsigned num = csts.length ();
6604 for (unsigned i = 0; i < num; i++)
6605 for (unsigned j = 0; j < num; j++)
6606 for (unsigned k = 0; k < num; k++)
6608 auto_vec<tree> v (3);
6609 v.quick_push (csts[i]);
6610 v.quick_push (csts[j]);
6611 v.quick_push (csts[k]);
6616 /* Implementation detail of the ASSERT_CONDITION_* macros. */
6619 assert_condition (const location &loc,
6620 region_model &model,
6621 const svalue *lhs, tree_code op, const svalue *rhs,
6624 tristate actual = model.eval_condition (lhs, op, rhs);
6625 ASSERT_EQ_AT (loc, actual, expected);
6628 /* Implementation detail of the ASSERT_CONDITION_* macros. */
6631 assert_condition (const location &loc,
6632 region_model &model,
6633 tree lhs, tree_code op, tree rhs,
6636 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
6637 ASSERT_EQ_AT (loc, actual, expected);
6640 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
6643 assert_dump_tree_eq (const location &loc, tree t, const char *expected)
6645 auto_fix_quotes sentinel;
6647 pp_format_decoder (&pp) = default_tree_printer;
6649 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);

/* Assert that dump_tree (T) is EXPECTED.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT
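
/* Example usage (a sketch, assuming the usual dump of an INTEGER_CST):
     ASSERT_DUMP_TREE_EQ (build_int_cst (integer_type_node, 42), "42");  */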

/* Implementation detail of ASSERT_DUMP_EQ.  */

static void
assert_dump_eq (const location &loc,
		const region_model &model,
		bool summarize,
		const char *expected)
{
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT

/* Smoketest for region_model::dump_to_pp.  */

static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  ASSERT_DUMP_EQ (model, false,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
}

/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLS in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
			 const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  tree fieldlist = NULL;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  layout_type (t);
  return t;
}

/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  tree m_x_field;
  tree m_y_field;
  tree m_coord_type;
};

/* Verify usage of a struct.  */

static void
test_struct ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y".  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}

/* Verify usage of an array element.  */

static void
test_array_1 ()
{
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model_manager mgr;
  region_model model (&mgr);
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree a_0 = build4 (ARRAY_REF, char_type_node,
		     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  model.set_value (a_0, char_A, NULL);
}

/* Verify that region_model::get_representative_tree works as expected.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal.  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
			 a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
			 a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}

/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
	     model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}

/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton.  */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}

/* Verify that initial_svalues are handled as expected.  */

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
}

/* Verify that unary ops are folded as expected.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x".  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
				      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
				      x_init, y_init));
}

/* Verify that binops on constant svalues are folded.  */

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
	if (i != j)
	  ASSERT_NE (cst_sval[i], cst_sval[j]);
	if (i + j < NUM_CSTS)
	  {
	    const svalue *sum
	      = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (sum, cst_sval[i + j]);
	  }
	if (i - j >= 0)
	  {
	    const svalue *difference
	      = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (difference, cst_sval[i - j]);
	  }
	if (i * j < NUM_CSTS)
	  {
	    const svalue *product
	      = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (product, cst_sval[i * j]);
	  }
	const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
						    cst_sval[i], cst_sval[j]);
	ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval[0]);
	const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
						     cst_sval[i], cst_sval[j]);
	ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval[0]);
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding.  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS.  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);

  /* Verify various binops on booleans.  */
  {
    const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
    const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
    const svalue *sval_unknown
      = mgr.get_or_create_unknown_svalue (boolean_type_node);
    const placeholder_svalue sval_placeholder (boolean_type_node, "v");
    for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_true);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, &sval_placeholder),
		   &sval_placeholder);
      }
    for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_false);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, &sval_placeholder),
		   &sval_placeholder);
      }
  }
}

/* Verify that sub_svalues are folded as expected.  */

static void
test_sub_svalue_folding ()
{
  coord_test ct;
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);

  region_model_manager mgr;
  region_model model (&mgr);
  test_region_model_context ctxt;
  const region *c_x_reg = model.get_lvalue (c_x, &ctxt);

  /* Verify that sub_svalue of "unknown" simply
     yields an unknown.  */

  const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
  const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
						    unknown, c_x_reg);
  ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
  ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
}

/* Get BIT within VAL as a symbolic value within MGR.  */

static const svalue *
get_bit (region_model_manager *mgr,
	 bit_offset_t bit,
	 unsigned HOST_WIDE_INT val)
{
  const svalue *inner_svalue
    = mgr->get_or_create_int_cst (unsigned_type_node, val);
  return mgr->get_or_create_bits_within (boolean_type_node,
					 bit_range (bit, 1),
					 inner_svalue);
}
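
/* For example (a sketch), get_bit (&mgr, 3, 0x0008) requests bit 3 of the
   constant 0x0008, which the folding exercised below reduces to the
   boolean constant 1.  */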

/* Verify that bits_within_svalues are folded as expected.  */

static void
test_bits_within_svalue_folding ()
{
  region_model_manager mgr;

  const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
  const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);

  {
    const unsigned val = 0x0000;
    for (unsigned bit = 0; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
  }

  {
    const unsigned val = 0x0001;
    ASSERT_EQ (get_bit (&mgr, 0, val), one);
    for (unsigned bit = 1; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
  }

  {
    const unsigned val = 0x8000;
    for (unsigned bit = 0; bit < 15; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
    ASSERT_EQ (get_bit (&mgr, 15, val), one);
  }

  {
    const unsigned val = 0xFFFF;
    for (unsigned bit = 0; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), one);
  }
}

/* Test that region::descendent_of_p works as expected.  */

static void
test_descendent_of_p ()
{
  region_model_manager mgr;
  const region *stack = mgr.get_stack_region ();
  const region *heap = mgr.get_heap_region ();
  const region *code = mgr.get_code_region ();
  const region *globals = mgr.get_globals_region ();

  /* descendent_of_p should return true when used on the region itself.  */
  ASSERT_TRUE (stack->descendent_of_p (stack));
  ASSERT_FALSE (stack->descendent_of_p (heap));
  ASSERT_FALSE (stack->descendent_of_p (code));
  ASSERT_FALSE (stack->descendent_of_p (globals));

  tree x = build_global_decl ("x", integer_type_node);
  const region *x_reg = mgr.get_region_for_global (x);
  ASSERT_TRUE (x_reg->descendent_of_p (globals));

  /* A cast_region should be a descendent of the original region.  */
  const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
  ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
}

/* Verify that bit_range_region works as expected.  */

static void
test_bit_range_regions ()
{
  tree x = build_global_decl ("x", integer_type_node);
  region_model_manager mgr;
  const region *x_reg = mgr.get_region_for_global (x);
  const region *byte0
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
  const region *byte1
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
  ASSERT_TRUE (byte0->descendent_of_p (x_reg));
  ASSERT_TRUE (byte1->descendent_of_p (x_reg));
  ASSERT_NE (byte0, byte1);
}

/* Verify that simple assignments work as expected.  */

static void
test_assignment ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* "x == 0", then use of y, then "y = 0;".  */
  region_model_manager mgr;
  region_model model (&mgr);
  ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
  ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
  model.set_value (model.get_lvalue (y, NULL),
		   model.get_rvalue (int_0, NULL),
		   NULL);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
}

/* Verify that compound assignments work as expected.  */

static void
test_compound_assignment ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);
  tree d = build_global_decl ("d", ct.m_coord_type);
  tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     d, ct.m_x_field, NULL_TREE);
  tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     d, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Copy c to d.  */
  const svalue *sval = model.get_rvalue (c, NULL);
  model.set_value (model.get_lvalue (d, NULL), sval, NULL);

  /* Check that the fields have the same svalues.  */
  ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
  ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
}
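
/* The above is analogous to this code (a sketch of the scenario
   being modelled):
     struct coord c, d;
     c.x = 17;
     c.y = -3;
     d = c;   // d.x and d.y now have the same svalues as c.x and c.y
*/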

/* Verify the details of pushing and popping stack frames.  */

static void
test_stack_frames ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_10 = build_int_cst (integer_type_node, 10);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree int_0 = build_int_cst (integer_type_node, 0);

  auto_vec <tree> param_types;
  tree parent_fndecl = make_fndecl (integer_type_node,
				    "parent_fn",
				    param_types);
  allocate_struct_function (parent_fndecl, true);

  tree child_fndecl = make_fndecl (integer_type_node,
				   "child_fn",
				   param_types);
  allocate_struct_function (child_fndecl, true);

  /* "a" and "b" in the parent frame.  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = parent_fndecl;
  tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("b"),
		       integer_type_node);
  DECL_CONTEXT (b) = parent_fndecl;
  /* "x" and "y" in a child frame.  */
  tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("x"),
		       integer_type_node);
  DECL_CONTEXT (x) = child_fndecl;
  tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("y"),
		       integer_type_node);
  DECL_CONTEXT (y) = child_fndecl;

  /* "p" global.  */
  tree p = build_global_decl ("p", ptr_type_node);

  /* "q" global.  */
  tree q = build_global_decl ("q", ptr_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame for "parent_fn".  */
  const region *parent_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
			NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
  const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
  model.set_value (a_in_parent_reg,
		   model.get_rvalue (int_42, &ctxt),
		   &ctxt);
  ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);

  model.add_constraint (b, LT_EXPR, int_10, &ctxt);
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Push stack frame for "child_fn".  */
  const region *child_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), child_frame_reg);
  ASSERT_TRUE (model.region_exists_p (child_frame_reg));
  const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
  model.set_value (x_in_child_reg,
		   model.get_rvalue (int_0, &ctxt),
		   &ctxt);
  ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);

  model.add_constraint (y, NE_EXPR, int_5, &ctxt);
  ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Point a global pointer at a local in the child frame:  p = &x.  */
  const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
  model.set_value (p_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
		   &ctxt);
  ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);

  /* Point another global pointer at p: q = &p.  */
  const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
  model.set_value (q_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
		   &ctxt);

  /* Test region::descendent_of_p.  */
  ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
  ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
  ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));

  /* Pop the "child_fn" frame from the stack.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_FALSE (model.region_exists_p (child_frame_reg));
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));

  /* Verify that p (which was pointing at the local "x" in the popped
     frame) has been poisoned.  */
  const svalue *new_p_sval = model.get_rvalue (p, NULL);
  ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
  ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
	     POISON_KIND_POPPED_STACK);

  /* Verify that q still points to p, in spite of the region
     for "x" disappearing.  */
  const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
  ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
  ASSERT_EQ (new_q_sval->maybe_get_region (),
	     model.get_lvalue (p, &ctxt));

  /* Verify that top of stack has been updated.  */
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);

  /* Verify locals in parent frame.  */
  /* Verify "a" still has its value.  */
  const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
  ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
  ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
	     int_42);
  /* Verify "b" still has its constraint.  */
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));
}
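
/* The scenario above corresponds roughly to (a sketch):
     int *p, **q;                    // globals
     int child_fn (int x, int y)     // x == 0; constraint: y != 5
     {
       p = &x;                       // global pointing at a frame-local
       q = &p;
       ...                           // on return, *p dangles: p is poisoned
     }
     int parent_fn (int a, int b)    // a == 42; constraint: b < 10
     {
       ... child_fn (0, ...) ...     // a and b survive the call unchanged
     }
*/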

/* Verify that get_representative_path_var works as expected, that
   we can map from regions to parms and back within a recursive call
   stack.  */

static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "factorial",
			     param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n".  */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("n"),
		       integer_type_node);
  DECL_CONTEXT (n) = fndecl;

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param.  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
	= model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth.  */
  for (int depth = 0; depth < 5; depth++)
    {
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
	 not just the top.  */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
		 parm_regs[depth]);
      /* ...and that we can locate the svalues.  */
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
    }
}

/* Ensure that region_model::operator== works as expected.  */

static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

  /* Verify that "empty" region_model instances are equal to each other.  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model0 makes the models non-equal.  */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, NULL);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor.  */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model.  */
  model2.set_value (x, int_17, NULL);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
}

/* Verify that region models for
     x = 42; y = 113;
   and
     y = 113; x = 42;
   are equal.  */

static void
test_canonicalization_2 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.set_value (model0.get_lvalue (x, NULL),
		    model0.get_rvalue (int_42, NULL),
		    NULL);
  model0.set_value (model0.get_lvalue (y, NULL),
		    model0.get_rvalue (int_113, NULL),
		    NULL);

  region_model model1 (&mgr);
  model1.set_value (model1.get_lvalue (y, NULL),
		    model1.get_rvalue (int_113, NULL),
		    NULL);
  model1.set_value (model1.get_lvalue (x, NULL),
		    model1.get_rvalue (int_42, NULL),
		    NULL);

  ASSERT_EQ (model0, model1);
}

/* Verify that constraints for
     x > 3 && y > 42
   and
     y > 42 && x > 3
   are equal after canonicalization.  */

static void
test_canonicalization_3 ()
{
  tree int_3 = build_int_cst (integer_type_node, 3);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.add_constraint (x, GT_EXPR, int_3, NULL);
  model0.add_constraint (y, GT_EXPR, int_42, NULL);

  region_model model1 (&mgr);
  model1.add_constraint (y, GT_EXPR, int_42, NULL);
  model1.add_constraint (x, GT_EXPR, int_3, NULL);

  model0.canonicalize ();
  model1.canonicalize ();
  ASSERT_EQ (model0, model1);
}

/* Verify that we can canonicalize a model containing NaN and other real
   constants.  */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  for (tree cst : csts)
    model.get_rvalue (cst, NULL);

  model.canonicalize ();
}

/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR that they are
   mergeable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model.  */

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  region_model_manager *mgr = out_merged_model->get_manager ();
  program_point point (program_point::origin (*mgr));
  test_region_model_context ctxt;
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}

/* Verify that we can merge region_model instances.  */

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
		      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    const svalue *merged_x_sval;

    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
				&merged_x_sval);

    /* We should get x == unknown in the merged model.  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
	 - pairs of regions
   */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
		      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}

/* Verify that constraints are correctly merged when merging region_model
   instances.  */

static void
test_constraint_merging ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n.  */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n).  */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n).  */
  program_point point (program_point::origin (mgr));
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
	     tristate (tristate::TS_TRUE));

  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
}

/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */

static void
test_widening_constraints ()
{
  region_model_manager mgr;
  function_point point (program_point::origin (mgr).get_function_point ());
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}

/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [loop body];
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
     i_11 = PHI <i_15(2), i_23(3)>
     if (i_11 <= 255)
       goto <bb 3>;
     else
       goto [AFTER LOOP]

    <bb 3> :
     [LOOP BODY]
     i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
        [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
        T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
           with state at phi
        F: {i_11: 256}
 */
static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}
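
/* The above models code along these lines (a sketch):
     void *p = malloc (n);
     char *q = (char *)p;
     if (p != NULL)
       use (q);   // here q is also known to be non-NULL
   where "use" is hypothetical; the point is that constraining p
   also constrains the cast pointer q.  */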

/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i".  */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}
8341 /* "int arr[10];" */
8342 tree tlen = size_int (10);
8344 = build_array_type (integer_type_node, build_index_type (tlen));
8345 tree arr = build_global_decl ("arr", arr_type);
8348 tree i = build_global_decl ("i", integer_type_node);
8350 tree int_0 = build_int_cst (integer_type_node, 0);
8351 tree int_1 = build_int_cst (integer_type_node, 1);
8353 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
8354 arr, int_0, NULL_TREE, NULL_TREE);
8355 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
8356 arr, int_1, NULL_TREE, NULL_TREE);
8357 tree arr_i = build4 (ARRAY_REF, integer_type_node,
8358 arr, i, NULL_TREE, NULL_TREE);
8360 tree int_17 = build_int_cst (integer_type_node, 17);
8361 tree int_42 = build_int_cst (integer_type_node, 42);
8362 tree int_m3 = build_int_cst (integer_type_node, -3);
8364 region_model_manager mgr;
8365 region_model model (&mgr);
8366 /* "arr[0] = 17;". */
8367 model.set_value (arr_0, int_17, NULL);
8368 /* "arr[1] = -3;". */
8369 model.set_value (arr_1, int_m3, NULL);
8371 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
8372 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
8374 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
8375 model.set_value (arr_1, int_42, NULL);
8376 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
8378 /* Verify get_offset for "arr[0]". */
8380 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
8381 region_offset offset = arr_0_reg->get_offset (&mgr);
8382 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
8383 ASSERT_EQ (offset.get_bit_offset (), 0);
8386 /* Verify get_offset for "arr[1]". */
8388 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
8389 region_offset offset = arr_1_reg->get_offset (&mgr);
8390 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
8391 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
8394 /* Verify get_offset for "arr[i]". */
8396 const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
8397 region_offset offset = arr_i_reg->get_offset (&mgr);
8398 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
8399 ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
8402 /* "arr[i] = i;" - this should remove the earlier bindings. */
8403 model.set_value (arr_i, i, NULL);
8404 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
8405 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
8407 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
8408 model.set_value (arr_0, int_17, NULL);
8409 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
8410 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);

/* Smoketest of dereferencing a pointer via MEM_REF.  */

static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
    *p;
  */

  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;".  */
  model.set_value (x, int_17, NULL);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);
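
  /* Reading "*p" should follow the pointer value bound to "p"
     and yield the constant bound to "x".  */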
  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int)_3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int)_8;
       __analyzer_eval (_9);  */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
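  /* Writing through the MEM_REF should resolve to a region 12 bytes
     into *a; re-reading via the same expression should find that
     binding.  */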
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works.  */

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
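  /* The capacity of the heap-allocated region should be the symbolic
     byte count that was passed to the allocation.  */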
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works.  */

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);
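
  /* An alloca region lives within a stack frame, so the test needs a
     function whose frame can be pushed onto the model's stack.  */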
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works.  */

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
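
  /* init_star_p is built on top of p_init (the initial value of the
     region that p points to), so involves_p should look through the
     symbolic region and find it; the reverse should not hold.  */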
  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */