1 /* Alias analysis for trees.
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
32 #include "tree-pretty-print.h"
34 #include "fold-const.h"
36 #include "langhooks.h"
40 #include "ipa-reference.h"
42 /* Broad overview of how alias analysis on gimple works:
44 Statements clobbering or using memory are linked through the
45 virtual operand factored use-def chain. The virtual operand
46 is unique per function, its symbol is accessible via gimple_vop (cfun).
47 Virtual operands are used for efficiently walking memory statements
48 in the gimple IL and are useful for things like value-numbering as
49 a generation count for memory references.
51 SSA_NAME pointers may have associated points-to information
52 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
53 points-to information is (re-)computed by the TODO_rebuild_alias
54 pass manager todo. Points-to information is also used for more
55 precise tracking of call-clobbered and call-used variables and
56 related disambiguations.
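   For example (an illustrative sketch only, not a prescribed use of the
   API), a pass holding an SSA pointer P and a candidate decl D can ask

     struct ptr_info_def *pi = SSA_NAME_PTR_INFO (p);
     if (!pi || pt_solution_includes (&pi->pt, d))
       /* The dereference of P may touch D.  */ ;

   pt_solution_includes and related predicates form the low-level
   interface used throughout this file.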
58 This file contains functions for disambiguating memory references,
59 the so-called alias oracle, and tools for walking the gimple IL.
61 The main alias-oracle entry-points are
63 bool stmt_may_clobber_ref_p (gimple *, tree)
65 This function queries if a statement may invalidate (parts of)
66 the memory designated by the reference tree argument.
68 bool ref_maybe_used_by_stmt_p (gimple *, tree)
70 This function queries if a statement may need (parts of) the
71 memory designated by the reference tree argument.
73 There are variants of these functions that only handle the call
74 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75 Note that these do not disambiguate against a possible call lhs.
77 bool refs_may_alias_p (tree, tree)
79 This function tries to disambiguate two reference trees.
81 bool ptr_deref_may_alias_global_p (tree)
83 This function queries if dereferencing a pointer variable may alias global memory.
86 More low-level disambiguators are available and documented in
87 this file. Low-level disambiguators dealing with points-to
88 information are in tree-ssa-structalias.c. */
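/* Illustrative usage sketch (not part of the original sources): a pass
   that wants to know whether a statement STMT reads or overwrites the
   memory designated by a reference tree REF can ask

     ao_ref r;
     ao_ref_init (&r, ref);
     bool used = ref_maybe_used_by_stmt_p (stmt, &r);
     bool clobbered = stmt_may_clobber_ref_p_1 (stmt, &r);

   using the entry points defined below.  */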
91 /* Query statistics for the different low-level disambiguators.
92 A high-level query may trigger multiple of them. */
95 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
96 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
97 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
98 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
99 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
100 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
104 dump_alias_stats (FILE *s)
106 fprintf (s, "\nAlias oracle query stats:\n");
107 fprintf (s, " refs_may_alias_p: "
108 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
109 HOST_WIDE_INT_PRINT_DEC" queries\n",
110 alias_stats.refs_may_alias_p_no_alias,
111 alias_stats.refs_may_alias_p_no_alias
112 + alias_stats.refs_may_alias_p_may_alias);
113 fprintf (s, " ref_maybe_used_by_call_p: "
114 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
115 HOST_WIDE_INT_PRINT_DEC" queries\n",
116 alias_stats.ref_maybe_used_by_call_p_no_alias,
117 alias_stats.ref_maybe_used_by_call_p_no_alias
118 + alias_stats.ref_maybe_used_by_call_p_may_alias);
119 fprintf (s, " call_may_clobber_ref_p: "
120 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
121 HOST_WIDE_INT_PRINT_DEC" queries\n",
122 alias_stats.call_may_clobber_ref_p_no_alias,
123 alias_stats.call_may_clobber_ref_p_no_alias
124 + alias_stats.call_may_clobber_ref_p_may_alias);
125 dump_alias_stats_in_alias_c (s);
129 /* Return true if dereferencing PTR may alias a global variable. */
132 ptr_deref_may_alias_global_p (tree ptr)
134 struct ptr_info_def *pi;
136 /* If we end up with a pointer constant here, it may point to global memory. */
138 if (TREE_CODE (ptr) != SSA_NAME)
141 pi = SSA_NAME_PTR_INFO (ptr);
143 /* If we do not have points-to information for this variable, we have to assume it possibly points to global memory. */
148 /* ??? This does not use TBAA to prune globals ptr may not access. */
149 return pt_solution_includes_global (&pi->pt);
152 /* Return true if dereferencing PTR may alias DECL.
153 The caller is responsible for applying TBAA to see if PTR
154 may access DECL at all. */
157 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
159 struct ptr_info_def *pi;
161 /* Conversions are irrelevant for points-to information and
162 data-dependence analysis can feed us those. */
165 /* Anything we do not explicitly handle aliases. */
166 if ((TREE_CODE (ptr) != SSA_NAME
167 && TREE_CODE (ptr) != ADDR_EXPR
168 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
169 || !POINTER_TYPE_P (TREE_TYPE (ptr))
170 || (TREE_CODE (decl) != VAR_DECL
171 && TREE_CODE (decl) != PARM_DECL
172 && TREE_CODE (decl) != RESULT_DECL))
175 /* Disregard pointer offsetting. */
176 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
180 ptr = TREE_OPERAND (ptr, 0);
182 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
183 return ptr_deref_may_alias_decl_p (ptr, decl);
186 /* ADDR_EXPR pointers either just offset another pointer or directly
187 specify the pointed-to set. */
188 if (TREE_CODE (ptr) == ADDR_EXPR)
190 tree base = get_base_address (TREE_OPERAND (ptr, 0));
192 && (TREE_CODE (base) == MEM_REF
193 || TREE_CODE (base) == TARGET_MEM_REF))
194 ptr = TREE_OPERAND (base, 0);
197 return compare_base_decls (base, decl) != 0;
199 && CONSTANT_CLASS_P (base))
205 /* Non-aliased variables cannot be pointed to. */
206 if (!may_be_aliased (decl))
209 /* If we do not have useful points-to information for this pointer
210 we cannot disambiguate anything else. */
211 pi = SSA_NAME_PTR_INFO (ptr);
215 return pt_solution_includes (&pi->pt, decl);
218 /* Return true if dereferenced PTR1 and PTR2 may alias.
219 The caller is responsible for applying TBAA to see if accesses
220 through PTR1 and PTR2 may conflict at all. */
223 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
225 struct ptr_info_def *pi1, *pi2;
227 /* Conversions are irrelevant for points-to information and
228 data-dependence analysis can feed us those. */
232 /* Disregard pointer offsetting. */
233 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
237 ptr1 = TREE_OPERAND (ptr1, 0);
239 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
240 return ptr_derefs_may_alias_p (ptr1, ptr2);
242 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
246 ptr2 = TREE_OPERAND (ptr2, 0);
248 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
249 return ptr_derefs_may_alias_p (ptr1, ptr2);
252 /* ADDR_EXPR pointers either just offset another pointer or directly
253 specify the pointed-to set. */
254 if (TREE_CODE (ptr1) == ADDR_EXPR)
256 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
258 && (TREE_CODE (base) == MEM_REF
259 || TREE_CODE (base) == TARGET_MEM_REF))
260 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
263 return ptr_deref_may_alias_decl_p (ptr2, base);
267 if (TREE_CODE (ptr2) == ADDR_EXPR)
269 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
271 && (TREE_CODE (base) == MEM_REF
272 || TREE_CODE (base) == TARGET_MEM_REF))
273 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
276 return ptr_deref_may_alias_decl_p (ptr1, base);
281 /* From here we require SSA name pointers. Anything else aliases. */
282 if (TREE_CODE (ptr1) != SSA_NAME
283 || TREE_CODE (ptr2) != SSA_NAME
284 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
285 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
288 /* We may end up with two empty points-to solutions for two identical pointers.
289 In this case we still want to say both pointers alias, so shortcut that here. */
294 /* If we do not have useful points-to information for either pointer
295 we cannot disambiguate anything else. */
296 pi1 = SSA_NAME_PTR_INFO (ptr1);
297 pi2 = SSA_NAME_PTR_INFO (ptr2);
301 /* ??? This does not use TBAA to prune decls from the intersection
302 that not both pointers may access. */
303 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
306 /* Return true if dereferencing PTR may alias *REF.
307 The caller is responsible for applying TBAA to see if PTR
308 may access *REF at all. */
311 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
313 tree base = ao_ref_base (ref);
315 if (TREE_CODE (base) == MEM_REF
316 || TREE_CODE (base) == TARGET_MEM_REF)
317 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
318 else if (DECL_P (base))
319 return ptr_deref_may_alias_decl_p (ptr, base);
324 /* Returns whether reference REF to BASE may refer to global memory. */
327 ref_may_alias_global_p_1 (tree base)
330 return is_global_var (base);
331 else if (TREE_CODE (base) == MEM_REF
332 || TREE_CODE (base) == TARGET_MEM_REF)
333 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
338 ref_may_alias_global_p (ao_ref *ref)
340 tree base = ao_ref_base (ref);
341 return ref_may_alias_global_p_1 (base);
345 ref_may_alias_global_p (tree ref)
347 tree base = get_base_address (ref);
348 return ref_may_alias_global_p_1 (base);
351 /* Return true if STMT may clobber global memory. */
354 stmt_may_clobber_global_p (gimple *stmt)
358 if (!gimple_vdef (stmt))
361 /* ??? We can ask the oracle whether an artificial pointer
362 dereference with a pointer with points-to information covering
363 all global memory (what about non-address-taken memory?) may be
364 clobbered by this call. As there is at the moment no convenient
365 way of doing that without generating garbage, do some manual checking instead.
367 ??? We could make a NULL ao_ref argument to the various
368 predicates special, meaning any global memory. */
370 switch (gimple_code (stmt))
373 lhs = gimple_assign_lhs (stmt);
374 return (TREE_CODE (lhs) != SSA_NAME
375 && ref_may_alias_global_p (lhs));
384 /* Dump alias information on FILE. */
387 dump_alias_info (FILE *file)
391 = lang_hooks.decl_printable_name (current_function_decl, 2);
394 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
396 fprintf (file, "Aliased symbols\n\n");
398 FOR_EACH_LOCAL_DECL (cfun, i, var)
400 if (may_be_aliased (var))
401 dump_variable (file, var);
404 fprintf (file, "\nCall clobber information\n");
406 fprintf (file, "\nESCAPED");
407 dump_points_to_solution (file, &cfun->gimple_df->escaped);
409 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
411 for (i = 1; i < num_ssa_names; i++)
413 tree ptr = ssa_name (i);
414 struct ptr_info_def *pi;
417 || !POINTER_TYPE_P (TREE_TYPE (ptr))
418 || SSA_NAME_IN_FREE_LIST (ptr))
421 pi = SSA_NAME_PTR_INFO (ptr);
423 dump_points_to_info_for (file, ptr);
426 fprintf (file, "\n");
430 /* Dump alias information on stderr. */
433 debug_alias_info (void)
435 dump_alias_info (stderr);
439 /* Dump the points-to set *PT into FILE. */
442 dump_points_to_solution (FILE *file, struct pt_solution *pt)
445 fprintf (file, ", points-to anything");
448 fprintf (file, ", points-to non-local");
451 fprintf (file, ", points-to escaped");
454 fprintf (file, ", points-to unit escaped");
457 fprintf (file, ", points-to NULL");
461 fprintf (file, ", points-to vars: ");
462 dump_decl_set (file, pt->vars);
463 if (pt->vars_contains_nonlocal
464 && pt->vars_contains_escaped_heap)
465 fprintf (file, " (nonlocal, escaped heap)");
466 else if (pt->vars_contains_nonlocal
467 && pt->vars_contains_escaped)
468 fprintf (file, " (nonlocal, escaped)");
469 else if (pt->vars_contains_nonlocal)
470 fprintf (file, " (nonlocal)");
471 else if (pt->vars_contains_escaped_heap)
472 fprintf (file, " (escaped heap)");
473 else if (pt->vars_contains_escaped)
474 fprintf (file, " (escaped)");
479 /* Unified dump function for pt_solution. */
482 debug (pt_solution &ref)
484 dump_points_to_solution (stderr, &ref);
488 debug (pt_solution *ptr)
493 fprintf (stderr, "<nil>\n");
497 /* Dump points-to information for SSA_NAME PTR into FILE. */
500 dump_points_to_info_for (FILE *file, tree ptr)
502 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
504 print_generic_expr (file, ptr, dump_flags);
507 dump_points_to_solution (file, &pi->pt);
509 fprintf (file, ", points-to anything");
511 fprintf (file, "\n");
515 /* Dump points-to information for VAR into stderr. */
518 debug_points_to_info_for (tree var)
520 dump_points_to_info_for (stderr, var);
524 /* Initializes the alias-oracle reference representation *R from REF. */
527 ao_ref_init (ao_ref *r, tree ref)
534 r->ref_alias_set = -1;
535 r->base_alias_set = -1;
536 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
539 /* Returns the base object of the memory reference *REF. */
542 ao_ref_base (ao_ref *ref)
548 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
549 &ref->max_size, &reverse);
553 /* Returns the base object alias set of the memory reference *REF. */
556 ao_ref_base_alias_set (ao_ref *ref)
559 if (ref->base_alias_set != -1)
560 return ref->base_alias_set;
564 while (handled_component_p (base_ref))
565 base_ref = TREE_OPERAND (base_ref, 0);
566 ref->base_alias_set = get_alias_set (base_ref);
567 return ref->base_alias_set;
570 /* Returns the reference alias set of the memory reference *REF. */
573 ao_ref_alias_set (ao_ref *ref)
575 if (ref->ref_alias_set != -1)
576 return ref->ref_alias_set;
577 ref->ref_alias_set = get_alias_set (ref->ref);
578 return ref->ref_alias_set;
581 /* Init an alias-oracle reference representation from a gimple pointer
582 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
583 size is assumed to be unknown. The access is assumed to be only
584 to or after the pointer target, not before it. */
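/* For instance (a sketch of how the builtin handling later in this file
   uses this), the source operand of a call memcpy (dst, src, n) is
   modelled as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (call, 1),
                                    gimple_call_arg (call, 2));

   and then tested with refs_may_alias_p_1 (&dref, ref, false).  */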
587 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
589 HOST_WIDE_INT t, size_hwi, extra_offset = 0;
590 ref->ref = NULL_TREE;
591 if (TREE_CODE (ptr) == SSA_NAME)
593 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
594 if (gimple_assign_single_p (stmt)
595 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
596 ptr = gimple_assign_rhs1 (stmt);
597 else if (is_gimple_assign (stmt)
598 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
599 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
601 ptr = gimple_assign_rhs1 (stmt);
602 extra_offset = BITS_PER_UNIT
603 * int_cst_value (gimple_assign_rhs2 (stmt));
607 if (TREE_CODE (ptr) == ADDR_EXPR)
609 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
611 ref->offset = BITS_PER_UNIT * t;
616 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
621 ref->base = build2 (MEM_REF, char_type_node,
622 ptr, null_pointer_node);
625 ref->offset += extra_offset;
627 && tree_fits_shwi_p (size)
628 && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
629 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
631 ref->max_size = ref->size = -1;
632 ref->ref_alias_set = 0;
633 ref->base_alias_set = 0;
634 ref->volatile_p = false;
637 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
638 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot decide. */
642 same_type_for_tbaa (tree type1, tree type2)
644 type1 = TYPE_MAIN_VARIANT (type1);
645 type2 = TYPE_MAIN_VARIANT (type2);
647 /* If we would have to do structural comparison bail out. */
648 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
649 || TYPE_STRUCTURAL_EQUALITY_P (type2))
652 /* Compare the canonical types. */
653 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
656 /* ??? Array types are not properly unified in all cases as we have
657 spurious changes in the index types for example. Removing this
658 causes all sorts of problems with the Fortran frontend. */
659 if (TREE_CODE (type1) == ARRAY_TYPE
660 && TREE_CODE (type2) == ARRAY_TYPE)
663 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
664 object of one of its constrained subtypes, e.g. when a function with an
665 unconstrained parameter passed by reference is called on an object and
666 inlined. But, even in the case of a fixed size, type and subtypes are
667 not equivalent enough as to share the same TYPE_CANONICAL, since this
668 would mean that conversions between them are useless, whereas they are
669 not (e.g. type and subtypes can have different modes). So, in the end,
670 they are only guaranteed to have the same alias set. */
671 if (get_alias_set (type1) == get_alias_set (type2))
674 /* The types are known to be not equal. */
678 /* Determine if the two component references REF1 and REF2 which are
679 based on access types TYPE1 and TYPE2 and of which at least one is based
680 on an indirect reference may alias. REF2 is the only one that can
681 be a decl in which case REF2_IS_DECL is true.
682 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
683 are the respective alias sets. */
686 aliasing_component_refs_p (tree ref1,
687 alias_set_type ref1_alias_set,
688 alias_set_type base1_alias_set,
689 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
691 alias_set_type ref2_alias_set,
692 alias_set_type base2_alias_set,
693 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
696 /* If one reference is a component reference through pointers, try to find a
697 common base and apply offset based disambiguation. This handles
699 struct A { int i; int j; } *q;
700 struct B { struct A a; int k; } *p;
701 disambiguating q->i and p->a.j. */
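/* Spelling that example out (illustrative): q->i reads field i of a
   'struct A', while p->a.j reads field j of the 'struct A' embedded in
   *p.  Once the common type A is found on p's access path, both
   accesses are expressed as offsets within the same A, and because i
   and j occupy disjoint ranges there the references cannot overlap.  */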
707 /* Choose bases and base types to search for. */
709 while (handled_component_p (base1))
710 base1 = TREE_OPERAND (base1, 0);
711 type1 = TREE_TYPE (base1);
713 while (handled_component_p (base2))
714 base2 = TREE_OPERAND (base2, 0);
715 type2 = TREE_TYPE (base2);
717 /* Now search for type1 in the access path of ref2. This
718 would be a common base for doing offset-based disambiguation on. */
720 while (handled_component_p (*refp)
721 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
722 refp = &TREE_OPERAND (*refp, 0);
723 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
724 /* If we couldn't compare types we have to bail out. */
727 else if (same_p == 1)
729 HOST_WIDE_INT offadj, sztmp, msztmp;
731 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
733 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
735 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
737 /* If we didn't find a common base, try the other way around. */
739 while (handled_component_p (*refp)
740 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
741 refp = &TREE_OPERAND (*refp, 0);
742 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
743 /* If we couldn't compare types we have to bail out. */
746 else if (same_p == 1)
748 HOST_WIDE_INT offadj, sztmp, msztmp;
750 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
752 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
754 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
757 /* If we have two type access paths B1.path1 and B2.path2 they may
758 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
759 But we can still have a path that goes B1.path1...B2.path2 with
760 a part that we do not see. So we can only disambiguate now
761 if there is no B2 in the tail of path1 and no B1 in the tail of path2. */
763 if (base1_alias_set == ref2_alias_set
764 || alias_set_subset_of (base1_alias_set, ref2_alias_set))
766 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
768 return (base2_alias_set == ref1_alias_set
769 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
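/* Illustrative example for the subset tests above (not from the
   original sources):

     struct A { int i; };
     struct T { struct A a; };
     struct B { struct T t; };
     struct B *pb;  struct A *pa;

   pb->t and pa->i may overlap (pa can point to &pb->t.a) although
   neither access path mentions the other base type; the connecting
   component .a is a part we do not see.  Because the alias set of A is
   a subset of the alias set of T, the type actually accessed by the
   first reference, the tests above answer "may alias" here; we only
   disambiguate when neither base type can occur within what the other
   reference accesses.  */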
773 /* Return true if we can determine that component references REF1 and REF2,
774 that are within a common DECL, cannot overlap. */
777 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
779 auto_vec<tree, 16> component_refs1;
780 auto_vec<tree, 16> component_refs2;
782 /* Create the stack of handled components for REF1. */
783 while (handled_component_p (ref1))
785 component_refs1.safe_push (ref1);
786 ref1 = TREE_OPERAND (ref1, 0);
788 if (TREE_CODE (ref1) == MEM_REF)
790 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
792 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
795 /* Create the stack of handled components for REF2. */
796 while (handled_component_p (ref2))
798 component_refs2.safe_push (ref2);
799 ref2 = TREE_OPERAND (ref2, 0);
801 if (TREE_CODE (ref2) == MEM_REF)
803 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
805 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
808 /* Bases must be either same or uncomparable. */
809 gcc_checking_assert (ref1 == ref2
810 || (DECL_P (ref1) && DECL_P (ref2)
811 && compare_base_decls (ref1, ref2) != 0));
813 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
814 rank. This is sufficient because we start from the same DECL and you
815 cannot reference several fields at a time with COMPONENT_REFs (unlike
816 with ARRAY_RANGE_REFs for arrays) so you always need the same number
817 of them to access a sub-component, unless you're in a union, in which
818 case the return value will precisely be false. */
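/* Illustrative example (not from the original sources): for

     struct S { struct { int x; int y; } a; int b; } s;

   the stacks for s.a.x and s.b are popped starting at the rank nearest
   the DECL; there the two COMPONENT_REFs pick different fields (a
   vs. b) of the same RECORD_TYPE, so the accesses cannot overlap and
   we return true.  If the common context at some rank is a union we
   return false instead.  */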
823 if (component_refs1.is_empty ())
825 ref1 = component_refs1.pop ();
827 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
831 if (component_refs2.is_empty ())
833 ref2 = component_refs2.pop ();
835 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
837 /* Beware of BIT_FIELD_REF. */
838 if (TREE_CODE (ref1) != COMPONENT_REF
839 || TREE_CODE (ref2) != COMPONENT_REF)
842 tree field1 = TREE_OPERAND (ref1, 1);
843 tree field2 = TREE_OPERAND (ref2, 1);
845 /* ??? We cannot simply use the type of operand #0 of the refs here
846 as the Fortran compiler smuggles type punning into COMPONENT_REFs
847 for common blocks instead of using unions like everyone else. */
848 tree type1 = DECL_CONTEXT (field1);
849 tree type2 = DECL_CONTEXT (field2);
851 /* We cannot disambiguate fields in a union or qualified union. */
852 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
855 /* Different fields of the same record type cannot overlap.
856 ??? Bitfields can overlap at RTL level so punt on them. */
857 if (field1 != field2)
859 component_refs1.release ();
860 component_refs2.release ();
861 return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
866 component_refs1.release ();
867 component_refs2.release ();
871 /* qsort compare function to sort FIELD_DECLs by their
872 DECL_FIELD_CONTEXT TYPE_UID. */
875 ncr_compar (const void *field1_, const void *field2_)
877 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
878 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
879 unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
880 unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
883 else if (uid1 > uid2)
888 /* Return true if we can determine that the fields referenced cannot
889 overlap for any pair of objects. */
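/* Illustrative example (not from the original sources): given

     struct S { int f; int g; };

   an access path ending in ...->f cannot overlap one ending in ...->g,
   whatever the bases are, because f and g are distinct non-bitfield
   members of the same RECORD_TYPE.  That is the property exploited
   below.  */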
892 nonoverlapping_component_refs_p (const_tree x, const_tree y)
894 if (!flag_strict_aliasing
896 || TREE_CODE (x) != COMPONENT_REF
897 || TREE_CODE (y) != COMPONENT_REF)
900 auto_vec<const_tree, 16> fieldsx;
901 while (TREE_CODE (x) == COMPONENT_REF)
903 tree field = TREE_OPERAND (x, 1);
904 tree type = DECL_FIELD_CONTEXT (field);
905 if (TREE_CODE (type) == RECORD_TYPE)
906 fieldsx.safe_push (field);
907 x = TREE_OPERAND (x, 0);
909 if (fieldsx.length () == 0)
911 auto_vec<const_tree, 16> fieldsy;
912 while (TREE_CODE (y) == COMPONENT_REF)
914 tree field = TREE_OPERAND (y, 1);
915 tree type = DECL_FIELD_CONTEXT (field);
916 if (TREE_CODE (type) == RECORD_TYPE)
917 fieldsy.safe_push (TREE_OPERAND (y, 1));
918 y = TREE_OPERAND (y, 0);
920 if (fieldsy.length () == 0)
923 /* Most common case first. */
924 if (fieldsx.length () == 1
925 && fieldsy.length () == 1)
926 return ((DECL_FIELD_CONTEXT (fieldsx[0])
927 == DECL_FIELD_CONTEXT (fieldsy[0]))
928 && fieldsx[0] != fieldsy[0]
929 && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));
931 if (fieldsx.length () == 2)
933 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
934 std::swap (fieldsx[0], fieldsx[1]);
937 fieldsx.qsort (ncr_compar);
939 if (fieldsy.length () == 2)
941 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
942 std::swap (fieldsy[0], fieldsy[1]);
945 fieldsy.qsort (ncr_compar);
947 unsigned i = 0, j = 0;
950 const_tree fieldx = fieldsx[i];
951 const_tree fieldy = fieldsy[j];
952 tree typex = DECL_FIELD_CONTEXT (fieldx);
953 tree typey = DECL_FIELD_CONTEXT (fieldy);
956 /* We're left with accessing different fields of a structure,
957 no possible overlap, unless they are both bitfields. */
958 if (fieldx != fieldy)
959 return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
961 if (TYPE_UID (typex) < TYPE_UID (typey))
964 if (i == fieldsx.length ())
970 if (j == fieldsy.length ())
980 /* Return true if two memory references based on the variables BASE1
981 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
982 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
983 if non-NULL are the complete memory reference trees. */
986 decl_refs_may_alias_p (tree ref1, tree base1,
987 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
988 tree ref2, tree base2,
989 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
991 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
993 /* If both references are based on different variables, they cannot alias. */
994 if (compare_base_decls (base1, base2) == 0)
997 /* If both references are based on the same variable, they cannot alias if
998 the accesses do not overlap. */
999 if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
1002 /* For components with variable position, the above test isn't sufficient,
1003 so we disambiguate component references manually. */
1005 && handled_component_p (ref1) && handled_component_p (ref2)
1006 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1012 /* Return true if an indirect reference based on *PTR1 constrained
1013 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1014 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1015 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1016 in which case they are computed on-demand. REF1 and REF2
1017 if non-NULL are the complete memory reference trees. */
1020 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1021 HOST_WIDE_INT offset1,
1022 HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
1023 alias_set_type ref1_alias_set,
1024 alias_set_type base1_alias_set,
1025 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1026 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
1027 alias_set_type ref2_alias_set,
1028 alias_set_type base2_alias_set, bool tbaa_p)
1031 tree ptrtype1, dbase2;
1032 HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
1033 HOST_WIDE_INT doffset1, doffset2;
1035 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1036 || TREE_CODE (base1) == TARGET_MEM_REF)
1039 ptr1 = TREE_OPERAND (base1, 0);
1041 /* The offset embedded in MEM_REFs can be negative. Bias them
1042 so that the resulting offset adjustment is positive. */
1043 offset_int moff = mem_ref_offset (base1);
1044 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1045 if (wi::neg_p (moff))
1046 offset2p += (-moff).to_short_addr ();
1048 offset1p += moff.to_short_addr ();
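/* Worked example (illustrative): for MEM[ptr1 + -4] the byte offset -4
   gives moff = -32 bits; instead of making OFFSET1P negative we add 32
   to OFFSET2P, which keeps both offsets non-negative while preserving
   their relative distance for the overlap test below.  */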
1050 /* If only one reference is based on a variable, they cannot alias if
1051 the pointer access is beyond the extent of the variable access.
1052 (the pointer base cannot validly point to an offset less than zero of TYPE).
1054 ??? IVOPTs creates bases that do not honor this restriction,
1055 so do not apply this optimization for TARGET_MEM_REFs. */
1056 if (TREE_CODE (base1) != TARGET_MEM_REF
1057 && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
1059 /* They also cannot alias if the pointer may not point to the decl. */
1060 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1063 /* Disambiguations that rely on strict aliasing rules follow. */
1064 if (!flag_strict_aliasing || !tbaa_p)
1067 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1069 /* If the alias set for a pointer access is zero all bets are off. */
1070 if (base1_alias_set == 0)
1073 /* When we are trying to disambiguate an access with a pointer dereference
1074 as base versus one with a decl as base we can use both the size
1075 of the decl and its dynamic type for extra disambiguation.
1076 ??? We do not know anything about the dynamic type of the decl
1077 other than that its alias-set contains base2_alias_set as a subset
1078 which does not help us here. */
1079 /* As we know nothing useful about the dynamic type of the decl just
1080 use the usual conflict check rather than a subset test.
1081 ??? We could introduce -fvery-strict-aliasing when the language
1082 does not allow decls to have a dynamic type that differs from their
1083 static type. Then we can check
1084 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1085 if (base1_alias_set != base2_alias_set
1086 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1088 /* If the size of the access relevant for TBAA through the pointer
1089 is bigger than the size of the decl we can't possibly access the
1090 decl via that pointer. */
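/* E.g. (illustrative) a decl 'short s' of size 16 bits cannot be the
   object accessed through a pointer of type 'struct { int a; int b; } *'
   whose pointed-to size is 64 bits; union pointer types are excepted
   below because a union member may well be smaller than the union
   itself.  */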
1091 if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
1092 && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
1093 && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
1094 /* ??? This in turn may run afoul when a decl of type T which is
1095 a member of union type U is accessed through a pointer to
1096 type U and sizeof T is smaller than sizeof U. */
1097 && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1098 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1099 && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
1105 /* If the decl is accessed via a MEM_REF, reconstruct the base
1106 we can use for TBAA and an appropriately adjusted offset. */
1108 while (handled_component_p (dbase2))
1109 dbase2 = TREE_OPERAND (dbase2, 0);
1112 if (TREE_CODE (dbase2) == MEM_REF
1113 || TREE_CODE (dbase2) == TARGET_MEM_REF)
1115 offset_int moff = mem_ref_offset (dbase2);
1116 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1117 if (wi::neg_p (moff))
1118 doffset1 -= (-moff).to_short_addr ();
1120 doffset2 -= moff.to_short_addr ();
1123 /* If either reference is view-converted, give up now. */
1124 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1125 || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
1128 /* If both references are through the same type, they do not alias
1129 if the accesses do not overlap. This does extra disambiguation
1130 for mixed/pointer accesses but requires strict aliasing.
1131 For MEM_REFs we require that the component-ref offset we computed
1132 is relative to the start of the type which we ensure by
1133 comparing rvalue and access type and disregarding the constant
1135 if ((TREE_CODE (base1) != TARGET_MEM_REF
1136 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1137 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
1138 return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);
1141 && nonoverlapping_component_refs_p (ref1, ref2))
1144 /* Do access-path based disambiguation. */
1146 && (handled_component_p (ref1) || handled_component_p (ref2)))
1147 return aliasing_component_refs_p (ref1,
1148 ref1_alias_set, base1_alias_set,
1151 ref2_alias_set, base2_alias_set,
1152 offset2, max_size2, true);
1157 /* Return true if two indirect references based on *PTR1
1158 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1159 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1160 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1161 in which case they are computed on-demand. REF1 and REF2
1162 if non-NULL are the complete memory reference trees. */
1165 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1166 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
1167 alias_set_type ref1_alias_set,
1168 alias_set_type base1_alias_set,
1169 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1170 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
1171 alias_set_type ref2_alias_set,
1172 alias_set_type base2_alias_set, bool tbaa_p)
1176 tree ptrtype1, ptrtype2;
1178 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1179 || TREE_CODE (base1) == TARGET_MEM_REF)
1180 && (TREE_CODE (base2) == MEM_REF
1181 || TREE_CODE (base2) == TARGET_MEM_REF));
1183 ptr1 = TREE_OPERAND (base1, 0);
1184 ptr2 = TREE_OPERAND (base2, 0);
1186 /* If both bases are based on pointers they cannot alias if they may not
1187 point to the same memory object or if they point to the same object
1188 and the accesses do not overlap. */
1189 if ((!cfun || gimple_in_ssa_p (cfun))
1190 && operand_equal_p (ptr1, ptr2, 0)
1191 && (((TREE_CODE (base1) != TARGET_MEM_REF
1192 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1193 && (TREE_CODE (base2) != TARGET_MEM_REF
1194 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1195 || (TREE_CODE (base1) == TARGET_MEM_REF
1196 && TREE_CODE (base2) == TARGET_MEM_REF
1197 && (TMR_STEP (base1) == TMR_STEP (base2)
1198 || (TMR_STEP (base1) && TMR_STEP (base2)
1199 && operand_equal_p (TMR_STEP (base1),
1200 TMR_STEP (base2), 0)))
1201 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1202 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1203 && operand_equal_p (TMR_INDEX (base1),
1204 TMR_INDEX (base2), 0)))
1205 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1206 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1207 && operand_equal_p (TMR_INDEX2 (base1),
1208 TMR_INDEX2 (base2), 0))))))
1211 /* The offset embedded in MEM_REFs can be negative. Bias them
1212 so that the resulting offset adjustment is positive. */
1213 moff = mem_ref_offset (base1);
1214 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1215 if (wi::neg_p (moff))
1216 offset2 += (-moff).to_short_addr ();
1218 offset1 += moff.to_shwi ();
1219 moff = mem_ref_offset (base2);
1220 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1221 if (wi::neg_p (moff))
1222 offset1 += (-moff).to_short_addr ();
1224 offset2 += moff.to_short_addr ();
1225 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1227 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1230 /* Disambiguations that rely on strict aliasing rules follow. */
1231 if (!flag_strict_aliasing || !tbaa_p)
1234 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1235 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1237 /* If the alias set for a pointer access is zero all bets are off. */
1238 if (base1_alias_set == 0
1239 || base2_alias_set == 0)
1242 /* If both references are through the same type, they do not alias
1243 if the accesses do not overlap. This does extra disambiguation
1244 for mixed/pointer accesses but requires strict aliasing. */
1245 if ((TREE_CODE (base1) != TARGET_MEM_REF
1246 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1247 && (TREE_CODE (base2) != TARGET_MEM_REF
1248 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1249 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1250 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
1251 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1252 TREE_TYPE (ptrtype2)) == 1)
1253 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1255 /* Do type-based disambiguation. */
1256 if (base1_alias_set != base2_alias_set
1257 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1260 /* If either reference is view-converted, give up now. */
1261 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1262 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1266 && nonoverlapping_component_refs_p (ref1, ref2))
1269 /* Do access-path based disambiguation. */
1271 && (handled_component_p (ref1) || handled_component_p (ref2)))
1272 return aliasing_component_refs_p (ref1,
1273 ref1_alias_set, base1_alias_set,
1276 ref2_alias_set, base2_alias_set,
1277 offset2, max_size2, false);
1282 /* Return true if the two memory references REF1 and REF2 may alias. */
1285 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1288 HOST_WIDE_INT offset1 = 0, offset2 = 0;
1289 HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
1290 bool var1_p, var2_p, ind1_p, ind2_p;
1292 gcc_checking_assert ((!ref1->ref
1293 || TREE_CODE (ref1->ref) == SSA_NAME
1294 || DECL_P (ref1->ref)
1295 || TREE_CODE (ref1->ref) == STRING_CST
1296 || handled_component_p (ref1->ref)
1297 || TREE_CODE (ref1->ref) == MEM_REF
1298 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1300 || TREE_CODE (ref2->ref) == SSA_NAME
1301 || DECL_P (ref2->ref)
1302 || TREE_CODE (ref2->ref) == STRING_CST
1303 || handled_component_p (ref2->ref)
1304 || TREE_CODE (ref2->ref) == MEM_REF
1305 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1307 /* Decompose the references into their base objects and the access. */
1308 base1 = ao_ref_base (ref1);
1309 offset1 = ref1->offset;
1310 max_size1 = ref1->max_size;
1311 base2 = ao_ref_base (ref2);
1312 offset2 = ref2->offset;
1313 max_size2 = ref2->max_size;
1315 /* We can end up with registers or constants as bases for example from
1316 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1317 which is seen as a struct copy. */
1318 if (TREE_CODE (base1) == SSA_NAME
1319 || TREE_CODE (base1) == CONST_DECL
1320 || TREE_CODE (base1) == CONSTRUCTOR
1321 || TREE_CODE (base1) == ADDR_EXPR
1322 || CONSTANT_CLASS_P (base1)
1323 || TREE_CODE (base2) == SSA_NAME
1324 || TREE_CODE (base2) == CONST_DECL
1325 || TREE_CODE (base2) == CONSTRUCTOR
1326 || TREE_CODE (base2) == ADDR_EXPR
1327 || CONSTANT_CLASS_P (base2))
1330 /* We can end up referring to code via function and label decls.
1331 As we likely do not properly track code aliases, conservatively bail out. */
1333 if (TREE_CODE (base1) == FUNCTION_DECL
1334 || TREE_CODE (base1) == LABEL_DECL
1335 || TREE_CODE (base2) == FUNCTION_DECL
1336 || TREE_CODE (base2) == LABEL_DECL)
1339 /* Two volatile accesses always conflict. */
1340 if (ref1->volatile_p
1341 && ref2->volatile_p)
1344 /* Defer to simple offset based disambiguation if we have
1345 references based on two decls. Do this before deferring to
1346 TBAA to handle must-alias cases in conformance with the
1347 GCC extension of allowing type-punning through unions. */
1348 var1_p = DECL_P (base1);
1349 var2_p = DECL_P (base2);
1350 if (var1_p && var2_p)
1351 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1352 ref2->ref, base2, offset2, max_size2);
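/* Illustrative example (not from the original sources): for

     union U { int i; float f; } u;

   the accesses u.i and u.f have non-conflicting alias sets, yet they
   share the decl u and their ranges overlap, so the decl-vs-decl check
   above reports a possible alias.  Deferring to TBAA first would
   wrongly disambiguate this GCC-supported form of type punning.  */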
1354 /* Handle restrict based accesses.
1355 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
1357 tree rbase1 = base1;
1358 tree rbase2 = base2;
1363 while (handled_component_p (rbase1))
1364 rbase1 = TREE_OPERAND (rbase1, 0);
1370 while (handled_component_p (rbase2))
1371 rbase2 = TREE_OPERAND (rbase2, 0);
1373 if (rbase1 && rbase2
1374 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1375 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1376 /* If the accesses are in the same restrict clique... */
1377 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1378 /* But based on different pointers they do not alias. */
1379 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
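/* Illustrative example (not from the original sources): in

     void f (int *__restrict p, int *__restrict q) { p[0] = q[0]; }

   both dereferences end up in the same restrict clique but with
   different MR_DEPENDENCE_BASEs, so the check above disambiguates
   them; accesses based on the same restrict pointer keep the same
   base and are not affected.  */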
1382 ind1_p = (TREE_CODE (base1) == MEM_REF
1383 || TREE_CODE (base1) == TARGET_MEM_REF);
1384 ind2_p = (TREE_CODE (base2) == MEM_REF
1385 || TREE_CODE (base2) == TARGET_MEM_REF);
1387 /* Canonicalize the pointer-vs-decl case. */
1388 if (ind1_p && var2_p)
1390 std::swap (offset1, offset2);
1391 std::swap (max_size1, max_size2);
1392 std::swap (base1, base2);
1393 std::swap (ref1, ref2);
1400 /* First defer to TBAA if possible. */
1402 && flag_strict_aliasing
1403 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1404 ao_ref_alias_set (ref2)))
1407 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1408 if (var1_p && ind2_p)
1409 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1411 ao_ref_alias_set (ref2),
1412 ao_ref_base_alias_set (ref2),
1415 ao_ref_alias_set (ref1),
1416 ao_ref_base_alias_set (ref1),
1418 else if (ind1_p && ind2_p)
1419 return indirect_refs_may_alias_p (ref1->ref, base1,
1421 ao_ref_alias_set (ref1),
1422 ao_ref_base_alias_set (ref1),
1425 ao_ref_alias_set (ref2),
1426 ao_ref_base_alias_set (ref2),
1433 refs_may_alias_p (tree ref1, ao_ref *ref2)
1436 ao_ref_init (&r1, ref1);
1437 return refs_may_alias_p_1 (&r1, ref2, true);
1441 refs_may_alias_p (tree ref1, tree ref2)
1445 ao_ref_init (&r1, ref1);
1446 ao_ref_init (&r2, ref2);
1447 res = refs_may_alias_p_1 (&r1, &r2, true);
1449 ++alias_stats.refs_may_alias_p_may_alias;
1451 ++alias_stats.refs_may_alias_p_no_alias;
1455 /* Returns true if there is an anti-dependence for the STORE that
1456 executes after the LOAD. */
1459 refs_anti_dependent_p (tree load, tree store)
1462 ao_ref_init (&r1, load);
1463 ao_ref_init (&r2, store);
1464 return refs_may_alias_p_1 (&r1, &r2, false);
1467 /* Returns true if there is an output dependence for the stores
1468 STORE1 and STORE2. */
1471 refs_output_dependent_p (tree store1, tree store2)
1474 ao_ref_init (&r1, store1);
1475 ao_ref_init (&r2, store2);
1476 return refs_may_alias_p_1 (&r1, &r2, false);
1479 /* If the call CALL may use the memory reference REF return true,
1480 otherwise return false. */
1483 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
1487 int flags = gimple_call_flags (call);
1489 /* Const functions without a static chain do not implicitly use memory. */
1490 if (!gimple_call_chain (call)
1491 && (flags & (ECF_CONST|ECF_NOVOPS)))
1494 base = ao_ref_base (ref);
1498 /* A call that is not without side-effects might involve volatile
1499 accesses and thus conflicts with all other volatile accesses. */
1500 if (ref->volatile_p)
1503 /* If the reference is based on a decl that is not aliased the call
1504 cannot possibly use it. */
1506 && !may_be_aliased (base)
1507 /* But local statics can be used through recursion. */
1508 && !is_global_var (base))
1511 callee = gimple_call_fndecl (call);
1513 /* Handle those builtin functions explicitly that do not act as
1514 escape points. See tree-ssa-structalias.c:find_func_aliases
1515 for the list of builtins we might need to handle here. */
1516 if (callee != NULL_TREE
1517 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1518 switch (DECL_FUNCTION_CODE (callee))
1520 /* All the following functions read memory pointed to by
1521 their second argument. strcat/strncat additionally
1522 read memory pointed to by the first argument. */
1523 case BUILT_IN_STRCAT:
1524 case BUILT_IN_STRNCAT:
1527 ao_ref_init_from_ptr_and_size (&dref,
1528 gimple_call_arg (call, 0),
1530 if (refs_may_alias_p_1 (&dref, ref, false))
1534 case BUILT_IN_STRCPY:
1535 case BUILT_IN_STRNCPY:
1536 case BUILT_IN_MEMCPY:
1537 case BUILT_IN_MEMMOVE:
1538 case BUILT_IN_MEMPCPY:
1539 case BUILT_IN_STPCPY:
1540 case BUILT_IN_STPNCPY:
1541 case BUILT_IN_TM_MEMCPY:
1542 case BUILT_IN_TM_MEMMOVE:
1545 tree size = NULL_TREE;
1546 if (gimple_call_num_args (call) == 3)
1547 size = gimple_call_arg (call, 2);
1548 ao_ref_init_from_ptr_and_size (&dref,
1549 gimple_call_arg (call, 1),
1551 return refs_may_alias_p_1 (&dref, ref, false);
1553 case BUILT_IN_STRCAT_CHK:
1554 case BUILT_IN_STRNCAT_CHK:
1557 ao_ref_init_from_ptr_and_size (&dref,
1558 gimple_call_arg (call, 0),
1560 if (refs_may_alias_p_1 (&dref, ref, false))
1564 case BUILT_IN_STRCPY_CHK:
1565 case BUILT_IN_STRNCPY_CHK:
1566 case BUILT_IN_MEMCPY_CHK:
1567 case BUILT_IN_MEMMOVE_CHK:
1568 case BUILT_IN_MEMPCPY_CHK:
1569 case BUILT_IN_STPCPY_CHK:
1570 case BUILT_IN_STPNCPY_CHK:
1573 tree size = NULL_TREE;
1574 if (gimple_call_num_args (call) == 4)
1575 size = gimple_call_arg (call, 2);
1576 ao_ref_init_from_ptr_and_size (&dref,
1577 gimple_call_arg (call, 1),
1579 return refs_may_alias_p_1 (&dref, ref, false);
1581 case BUILT_IN_BCOPY:
1584 tree size = gimple_call_arg (call, 2);
1585 ao_ref_init_from_ptr_and_size (&dref,
1586 gimple_call_arg (call, 0),
1588 return refs_may_alias_p_1 (&dref, ref, false);
1591 /* The following functions read memory pointed to by their first argument. */
1593 CASE_BUILT_IN_TM_LOAD (1):
1594 CASE_BUILT_IN_TM_LOAD (2):
1595 CASE_BUILT_IN_TM_LOAD (4):
1596 CASE_BUILT_IN_TM_LOAD (8):
1597 CASE_BUILT_IN_TM_LOAD (FLOAT):
1598 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1599 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1600 CASE_BUILT_IN_TM_LOAD (M64):
1601 CASE_BUILT_IN_TM_LOAD (M128):
1602 CASE_BUILT_IN_TM_LOAD (M256):
1603 case BUILT_IN_TM_LOG:
1604 case BUILT_IN_TM_LOG_1:
1605 case BUILT_IN_TM_LOG_2:
1606 case BUILT_IN_TM_LOG_4:
1607 case BUILT_IN_TM_LOG_8:
1608 case BUILT_IN_TM_LOG_FLOAT:
1609 case BUILT_IN_TM_LOG_DOUBLE:
1610 case BUILT_IN_TM_LOG_LDOUBLE:
1611 case BUILT_IN_TM_LOG_M64:
1612 case BUILT_IN_TM_LOG_M128:
1613 case BUILT_IN_TM_LOG_M256:
1614 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1616 /* These read memory pointed to by the first argument. */
1617 case BUILT_IN_STRDUP:
1618 case BUILT_IN_STRNDUP:
1619 case BUILT_IN_REALLOC:
1622 tree size = NULL_TREE;
1623 if (gimple_call_num_args (call) == 2)
1624 size = gimple_call_arg (call, 1);
1625 ao_ref_init_from_ptr_and_size (&dref,
1626 gimple_call_arg (call, 0),
1628 return refs_may_alias_p_1 (&dref, ref, false);
1630 /* These read memory pointed to by the first argument. */
1631 case BUILT_IN_INDEX:
1632 case BUILT_IN_STRCHR:
1633 case BUILT_IN_STRRCHR:
1636 ao_ref_init_from_ptr_and_size (&dref,
1637 gimple_call_arg (call, 0),
1639 return refs_may_alias_p_1 (&dref, ref, false);
1641 /* These read memory pointed to by the first argument with size
1642 in the third argument. */
1643 case BUILT_IN_MEMCHR:
1646 ao_ref_init_from_ptr_and_size (&dref,
1647 gimple_call_arg (call, 0),
1648 gimple_call_arg (call, 2));
1649 return refs_may_alias_p_1 (&dref, ref, false);
1651 /* These read memory pointed to by the first and second arguments. */
1652 case BUILT_IN_STRSTR:
1653 case BUILT_IN_STRPBRK:
1656 ao_ref_init_from_ptr_and_size (&dref,
1657 gimple_call_arg (call, 0),
1659 if (refs_may_alias_p_1 (&dref, ref, false))
1661 ao_ref_init_from_ptr_and_size (&dref,
1662 gimple_call_arg (call, 1),
1664 return refs_may_alias_p_1 (&dref, ref, false);
1667 /* The following builtins do not read from memory. */
1669 case BUILT_IN_MALLOC:
1670 case BUILT_IN_POSIX_MEMALIGN:
1671 case BUILT_IN_ALIGNED_ALLOC:
1672 case BUILT_IN_CALLOC:
1673 case BUILT_IN_ALLOCA:
1674 case BUILT_IN_ALLOCA_WITH_ALIGN:
1675 case BUILT_IN_STACK_SAVE:
1676 case BUILT_IN_STACK_RESTORE:
1677 case BUILT_IN_MEMSET:
1678 case BUILT_IN_TM_MEMSET:
1679 case BUILT_IN_MEMSET_CHK:
1680 case BUILT_IN_FREXP:
1681 case BUILT_IN_FREXPF:
1682 case BUILT_IN_FREXPL:
1683 case BUILT_IN_GAMMA_R:
1684 case BUILT_IN_GAMMAF_R:
1685 case BUILT_IN_GAMMAL_R:
1686 case BUILT_IN_LGAMMA_R:
1687 case BUILT_IN_LGAMMAF_R:
1688 case BUILT_IN_LGAMMAL_R:
1690 case BUILT_IN_MODFF:
1691 case BUILT_IN_MODFL:
1692 case BUILT_IN_REMQUO:
1693 case BUILT_IN_REMQUOF:
1694 case BUILT_IN_REMQUOL:
1695 case BUILT_IN_SINCOS:
1696 case BUILT_IN_SINCOSF:
1697 case BUILT_IN_SINCOSL:
1698 case BUILT_IN_ASSUME_ALIGNED:
1699 case BUILT_IN_VA_END:
1701 /* __sync_* builtins and some OpenMP builtins act as threading barriers. */
1703 #undef DEF_SYNC_BUILTIN
1704 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1705 #include "sync-builtins.def"
1706 #undef DEF_SYNC_BUILTIN
1707 case BUILT_IN_GOMP_ATOMIC_START:
1708 case BUILT_IN_GOMP_ATOMIC_END:
1709 case BUILT_IN_GOMP_BARRIER:
1710 case BUILT_IN_GOMP_BARRIER_CANCEL:
1711 case BUILT_IN_GOMP_TASKWAIT:
1712 case BUILT_IN_GOMP_TASKGROUP_END:
1713 case BUILT_IN_GOMP_CRITICAL_START:
1714 case BUILT_IN_GOMP_CRITICAL_END:
1715 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1716 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1717 case BUILT_IN_GOMP_LOOP_END:
1718 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1719 case BUILT_IN_GOMP_ORDERED_START:
1720 case BUILT_IN_GOMP_ORDERED_END:
1721 case BUILT_IN_GOMP_SECTIONS_END:
1722 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1723 case BUILT_IN_GOMP_SINGLE_COPY_START:
1724 case BUILT_IN_GOMP_SINGLE_COPY_END:
1728 /* Fallthru to general call handling. */;
1731 /* Check if base is a global static variable that is not read by the callee. */
1733 if (callee != NULL_TREE
1734 && TREE_CODE (base) == VAR_DECL
1735 && TREE_STATIC (base))
1737 struct cgraph_node *node = cgraph_node::get (callee);
1740 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1741 node yet. We should enforce that there are nodes for all decls in the
1742 IL and remove this check instead. */
1744 && (not_read = ipa_reference_get_not_read_global (node))
1745 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1749 /* Check if the base variable is call-used. */
1752 if (pt_solution_includes (gimple_call_use_set (call), base))
1755 else if ((TREE_CODE (base) == MEM_REF
1756 || TREE_CODE (base) == TARGET_MEM_REF)
1757 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1759 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1763 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1769 /* Inspect call arguments for passed-by-value aliases. */
1771 for (i = 0; i < gimple_call_num_args (call); ++i)
1773 tree op = gimple_call_arg (call, i);
1774 int flags = gimple_call_arg_flags (call, i);
1776 if (flags & EAF_UNUSED)
1779 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1780 op = TREE_OPERAND (op, 0);
1782 if (TREE_CODE (op) != SSA_NAME
1783 && !is_gimple_min_invariant (op))
1786 ao_ref_init (&r, op);
1787 if (refs_may_alias_p_1 (&r, ref, true))
1796 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
1799 res = ref_maybe_used_by_call_p_1 (call, ref);
1801 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1803 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1808 /* If the statement STMT may use the memory reference REF return
1809 true, otherwise return false. */
1812 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
1814 if (is_gimple_assign (stmt))
1818 /* All memory assign statements are single. */
1819 if (!gimple_assign_single_p (stmt))
1822 rhs = gimple_assign_rhs1 (stmt);
1823 if (is_gimple_reg (rhs)
1824 || is_gimple_min_invariant (rhs)
1825 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1828 return refs_may_alias_p (rhs, ref);
1830 else if (is_gimple_call (stmt))
1831 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
1832 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1834 tree retval = gimple_return_retval (return_stmt);
1836 && TREE_CODE (retval) != SSA_NAME
1837 && !is_gimple_min_invariant (retval)
1838 && refs_may_alias_p (retval, ref))
1840 /* If ref escapes the function then the return acts as a use. */
1841 tree base = ao_ref_base (ref);
1844 else if (DECL_P (base))
1845 return is_global_var (base);
1846 else if (TREE_CODE (base) == MEM_REF
1847 || TREE_CODE (base) == TARGET_MEM_REF)
1848 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1856 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
1859 ao_ref_init (&r, ref);
1860 return ref_maybe_used_by_stmt_p (stmt, &r);
1863 /* If the call in statement CALL may clobber the memory reference REF
1864 return true, otherwise return false. */
1867 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1872 /* If the call is pure or const it cannot clobber anything. */
1873 if (gimple_call_flags (call)
1874 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1876 if (gimple_call_internal_p (call))
1877 switch (gimple_call_internal_fn (call))
1879 /* Treat these internal calls like ECF_PURE for aliasing,
1880 they don't write to any memory the program should care about.
1881 They have important other side-effects, and read memory,
1882 so can't be ECF_NOVOPS. */
1883 case IFN_UBSAN_NULL:
1884 case IFN_UBSAN_BOUNDS:
1885 case IFN_UBSAN_VPTR:
1886 case IFN_UBSAN_OBJECT_SIZE:
1887 case IFN_ASAN_CHECK:
1888 case IFN_ESAN_RECORD_ACCESS:
1894 base = ao_ref_base (ref);
1898 if (TREE_CODE (base) == SSA_NAME
1899 || CONSTANT_CLASS_P (base))
1902 /* A call that is not without side-effects might involve volatile
1903 accesses and thus conflicts with all other volatile accesses. */
1904 if (ref->volatile_p)
1907 /* If the reference is based on a decl that is not aliased the call
1908 cannot possibly clobber it. */
1910 && !may_be_aliased (base)
1911 /* But local non-readonly statics can be modified through recursion
1912 or the call may implement a threading barrier which we must
1913 treat as may-def. */
1914 && (TREE_READONLY (base)
1915 || !is_global_var (base)))
1918 callee = gimple_call_fndecl (call);
1920 /* Handle those builtin functions explicitly that do not act as
1921 escape points. See tree-ssa-structalias.c:find_func_aliases
1922 for the list of builtins we might need to handle here. */
1923 if (callee != NULL_TREE
1924 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1925 switch (DECL_FUNCTION_CODE (callee))
1927 /* All the following functions clobber memory pointed to by
1928 their first argument. */
1929 case BUILT_IN_STRCPY:
1930 case BUILT_IN_STRNCPY:
1931 case BUILT_IN_MEMCPY:
1932 case BUILT_IN_MEMMOVE:
1933 case BUILT_IN_MEMPCPY:
1934 case BUILT_IN_STPCPY:
1935 case BUILT_IN_STPNCPY:
1936 case BUILT_IN_STRCAT:
1937 case BUILT_IN_STRNCAT:
1938 case BUILT_IN_MEMSET:
1939 case BUILT_IN_TM_MEMSET:
1940 CASE_BUILT_IN_TM_STORE (1):
1941 CASE_BUILT_IN_TM_STORE (2):
1942 CASE_BUILT_IN_TM_STORE (4):
1943 CASE_BUILT_IN_TM_STORE (8):
1944 CASE_BUILT_IN_TM_STORE (FLOAT):
1945 CASE_BUILT_IN_TM_STORE (DOUBLE):
1946 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1947 CASE_BUILT_IN_TM_STORE (M64):
1948 CASE_BUILT_IN_TM_STORE (M128):
1949 CASE_BUILT_IN_TM_STORE (M256):
1950 case BUILT_IN_TM_MEMCPY:
1951 case BUILT_IN_TM_MEMMOVE:
1954 tree size = NULL_TREE;
1955 /* Don't pass in size for strncat, as the maximum size written
1956 is strlen (dest) + n + 1 rather than n, i.e. n + 1 bytes
1957 starting at dest + strlen (dest), and strlen (dest) isn't known here. */
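/* Worked example (illustrative): strncat (d, s, 4) may store up to
   5 bytes starting at d + strlen (d); since strlen (d) is unknown
   here, bounding the clobbered region by the constant 4 would be
   wrong, so the size is left unknown.  */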
1959 if (gimple_call_num_args (call) == 3
1960 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1961 size = gimple_call_arg (call, 2);
1962 ao_ref_init_from_ptr_and_size (&dref,
1963 gimple_call_arg (call, 0),
1965 return refs_may_alias_p_1 (&dref, ref, false);
1967 case BUILT_IN_STRCPY_CHK:
1968 case BUILT_IN_STRNCPY_CHK:
1969 case BUILT_IN_MEMCPY_CHK:
1970 case BUILT_IN_MEMMOVE_CHK:
1971 case BUILT_IN_MEMPCPY_CHK:
1972 case BUILT_IN_STPCPY_CHK:
1973 case BUILT_IN_STPNCPY_CHK:
1974 case BUILT_IN_STRCAT_CHK:
1975 case BUILT_IN_STRNCAT_CHK:
1976 case BUILT_IN_MEMSET_CHK:
1979 tree size = NULL_TREE;
1980 /* Don't pass in size for __strncat_chk, as the maximum size written
1981 is strlen (dest) + n + 1 rather than n, i.e. n + 1 bytes
1982 starting at dest + strlen (dest), and strlen (dest) isn't known here. */
1984 if (gimple_call_num_args (call) == 4
1985 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
1986 size = gimple_call_arg (call, 2);
1987 ao_ref_init_from_ptr_and_size (&dref,
1988 gimple_call_arg (call, 0),
1990 return refs_may_alias_p_1 (&dref, ref, false);
1992 case BUILT_IN_BCOPY:
1995 tree size = gimple_call_arg (call, 2);
1996 ao_ref_init_from_ptr_and_size (&dref,
1997 gimple_call_arg (call, 1),
1999 return refs_may_alias_p_1 (&dref, ref, false);
2001 /* Allocating memory does not have any side-effects apart from
2002 being the definition point for the pointer. */
2003 case BUILT_IN_MALLOC:
2004 case BUILT_IN_ALIGNED_ALLOC:
2005 case BUILT_IN_CALLOC:
2006 case BUILT_IN_STRDUP:
2007 case BUILT_IN_STRNDUP:
2008 /* Unix98 specifies that errno is set on allocation failure. */
2010 && targetm.ref_may_alias_errno (ref))
2013 case BUILT_IN_STACK_SAVE:
2014 case BUILT_IN_ALLOCA:
2015 case BUILT_IN_ALLOCA_WITH_ALIGN:
2016 case BUILT_IN_ASSUME_ALIGNED:
2018 /* But posix_memalign stores a pointer into the memory pointed to
2019 by its first argument. */
2020 case BUILT_IN_POSIX_MEMALIGN:
2022 tree ptrptr = gimple_call_arg (call, 0);
2024 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2025 TYPE_SIZE_UNIT (ptr_type_node));
2026 return (refs_may_alias_p_1 (&dref, ref, false)
2028 && targetm.ref_may_alias_errno (ref)));
2030 /* Freeing memory kills the pointed-to memory. More importantly
2031 the call has to serve as a barrier for moving loads and stores across it. */
2034 case BUILT_IN_VA_END:
2036 tree ptr = gimple_call_arg (call, 0);
2037 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2039 /* Realloc serves both as allocation point and deallocation point. */
2040 case BUILT_IN_REALLOC:
2041 {
2042 tree ptr = gimple_call_arg (call, 0);
2043 /* Unix98 specifies that errno is set on allocation failure. */
2044 return ((flag_errno_math
2045 && targetm.ref_may_alias_errno (ref))
2046 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2047 }
2048 case BUILT_IN_GAMMA_R:
2049 case BUILT_IN_GAMMAF_R:
2050 case BUILT_IN_GAMMAL_R:
2051 case BUILT_IN_LGAMMA_R:
2052 case BUILT_IN_LGAMMAF_R:
2053 case BUILT_IN_LGAMMAL_R:
2054 {
2055 tree out = gimple_call_arg (call, 1);
2056 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2057 return true;
2058 if (flag_errno_math)
2059 break;
2060 return false;
2061 }
2062 case BUILT_IN_FREXP:
2063 case BUILT_IN_FREXPF:
2064 case BUILT_IN_FREXPL:
2065 case BUILT_IN_MODF:
2066 case BUILT_IN_MODFF:
2067 case BUILT_IN_MODFL:
2068 {
2069 tree out = gimple_call_arg (call, 1);
2070 return ptr_deref_may_alias_ref_p_1 (out, ref);
2071 }
2072 case BUILT_IN_REMQUO:
2073 case BUILT_IN_REMQUOF:
2074 case BUILT_IN_REMQUOL:
2075 {
2076 tree out = gimple_call_arg (call, 2);
2077 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2078 return true;
2079 if (flag_errno_math)
2080 break;
2081 return false;
2082 }
2083 case BUILT_IN_SINCOS:
2084 case BUILT_IN_SINCOSF:
2085 case BUILT_IN_SINCOSL:
2086 {
2087 tree sin = gimple_call_arg (call, 1);
2088 tree cos = gimple_call_arg (call, 2);
2089 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2090 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2091 }
2092 /* __sync_* builtins and some OpenMP builtins act as threading
2093 barriers. */
2094 #undef DEF_SYNC_BUILTIN
2095 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2096 #include "sync-builtins.def"
2097 #undef DEF_SYNC_BUILTIN
2098 case BUILT_IN_GOMP_ATOMIC_START:
2099 case BUILT_IN_GOMP_ATOMIC_END:
2100 case BUILT_IN_GOMP_BARRIER:
2101 case BUILT_IN_GOMP_BARRIER_CANCEL:
2102 case BUILT_IN_GOMP_TASKWAIT:
2103 case BUILT_IN_GOMP_TASKGROUP_END:
2104 case BUILT_IN_GOMP_CRITICAL_START:
2105 case BUILT_IN_GOMP_CRITICAL_END:
2106 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2107 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2108 case BUILT_IN_GOMP_LOOP_END:
2109 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2110 case BUILT_IN_GOMP_ORDERED_START:
2111 case BUILT_IN_GOMP_ORDERED_END:
2112 case BUILT_IN_GOMP_SECTIONS_END:
2113 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2114 case BUILT_IN_GOMP_SINGLE_COPY_START:
2115 case BUILT_IN_GOMP_SINGLE_COPY_END:
2116 return true;
2117 default:
2118 /* Fallthru to general call handling. */;
2119 }
2121 /* Check if base is a global static variable that is not written
2122 by the function. */
2123 if (callee != NULL_TREE
2124 && TREE_CODE (base) == VAR_DECL
2125 && TREE_STATIC (base))
2126 {
2127 struct cgraph_node *node = cgraph_node::get (callee);
2128 bitmap not_written;
2130 if (node
2131 && (not_written = ipa_reference_get_not_written_global (node))
2132 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2133 return false;
2134 }
2136 /* Check if the base variable is call-clobbered. */
2137 if (DECL_P (base))
2138 return pt_solution_includes (gimple_call_clobber_set (call), base);
2139 else if ((TREE_CODE (base) == MEM_REF
2140 || TREE_CODE (base) == TARGET_MEM_REF)
2141 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2142 {
2143 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2144 if (!pi)
2145 return true;
2147 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2148 }
2150 return true;
2151 }
2153 /* If the call in statement CALL may clobber the memory reference REF
2154 return true, otherwise return false. */
2156 bool
2157 call_may_clobber_ref_p (gcall *call, tree ref)
2158 {
2159 bool res;
2160 ao_ref r;
2161 ao_ref_init (&r, ref);
2162 res = call_may_clobber_ref_p_1 (call, &r);
2163 if (res)
2164 ++alias_stats.call_may_clobber_ref_p_may_alias;
2165 else
2166 ++alias_stats.call_may_clobber_ref_p_no_alias;
2167 return res;
2168 }
2171 /* If the statement STMT may clobber the memory reference REF return true,
2172 otherwise return false. */
2174 bool
2175 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
2176 {
2177 if (is_gimple_call (stmt))
2178 {
2179 tree lhs = gimple_call_lhs (stmt);
2180 if (lhs
2181 && TREE_CODE (lhs) != SSA_NAME)
2182 {
2183 ao_ref r;
2184 ao_ref_init (&r, lhs);
2185 if (refs_may_alias_p_1 (ref, &r, true))
2186 return true;
2187 }
2189 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2190 }
2191 else if (gimple_assign_single_p (stmt))
2192 {
2193 tree lhs = gimple_assign_lhs (stmt);
2194 if (TREE_CODE (lhs) != SSA_NAME)
2195 {
2196 ao_ref r;
2197 ao_ref_init (&r, lhs);
2198 return refs_may_alias_p_1 (ref, &r, true);
2199 }
2200 }
2201 else if (gimple_code (stmt) == GIMPLE_ASM)
2202 return true;
2204 return false;
2205 }
2207 bool
2208 stmt_may_clobber_ref_p (gimple *stmt, tree ref)
2209 {
2210 ao_ref r;
2211 ao_ref_init (&r, ref);
2212 return stmt_may_clobber_ref_p_1 (stmt, &r);
2213 }
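/* A minimal usage sketch (hypothetical caller, names made up for
   illustration only):

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
          !gsi_end_p (gsi); gsi_next (&gsi))
       if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), &r))
         break;

   stops at the first statement in BB that may invalidate the memory
   designated by REF_TREE.  */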
2215 /* If STMT kills the memory reference REF return true, otherwise
2216 return false. */
2218 bool
2219 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2220 {
2221 if (!ao_ref_base (ref))
2222 return false;
2224 if (gimple_has_lhs (stmt)
2225 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2226 /* The assignment is not necessarily carried out if it can throw
2227 and we can catch it in the current function where we could inspect
2228 the previous value.
2229 ??? We only need to care about the RHS throwing. For aggregate
2230 assignments or similar calls and non-call exceptions the LHS
2231 might throw as well. */
2232 && !stmt_can_throw_internal (stmt))
2233 {
2234 tree lhs = gimple_get_lhs (stmt);
2235 /* If LHS is literally a base of the access we are done. */
2236 if (ref->ref && operand_equal_p (ref->ref, lhs, 0))
2237 return true;
2238 tree base = ref->ref;
2239 if (handled_component_p (base))
2240 {
2241 tree saved_lhs0 = NULL_TREE;
2242 if (handled_component_p (lhs))
2243 {
2244 saved_lhs0 = TREE_OPERAND (lhs, 0);
2245 TREE_OPERAND (lhs, 0) = integer_zero_node;
2246 }
2247 do
2248 {
2249 /* Just compare the outermost handled component, if
2250 they are equal we have found a possible common
2251 base. */
2252 tree saved_base0 = TREE_OPERAND (base, 0);
2253 TREE_OPERAND (base, 0) = integer_zero_node;
2254 bool res = operand_equal_p (lhs, base, 0);
2255 TREE_OPERAND (base, 0) = saved_base0;
2256 if (res)
2257 break;
2258 /* Otherwise drop handled components of the access. */
2259 base = saved_base0;
2260 }
2261 while (handled_component_p (base));
2262 if (saved_lhs0)
2263 TREE_OPERAND (lhs, 0) = saved_lhs0;
2265 /* Finally check if the lhs has the same address and size as the
2266 base candidate of the access. */
2267 if (lhs == base
2268 || (((TYPE_SIZE (TREE_TYPE (lhs))
2269 == TYPE_SIZE (TREE_TYPE (base)))
2270 || (TYPE_SIZE (TREE_TYPE (lhs))
2271 && TYPE_SIZE (TREE_TYPE (base))
2272 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2273 TYPE_SIZE (TREE_TYPE (base)), 0)))
2274 && operand_equal_p (lhs, base, OEP_ADDRESS_OF)))
2275 return true;
2276 }
2278 /* Now look for non-literal equal bases with the restriction of
2279 handling constant offset and size. */
2280 /* For a must-alias check we need to be able to constrain
2281 the access properly. */
2282 if (ref->max_size == -1)
2283 return false;
2284 HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
2285 bool reverse;
2286 tree base
2287 = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
2288 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2289 so base == ref->base does not always hold. */
2290 if (base != ref->base)
2291 {
2292 /* If both base and ref->base are MEM_REFs, only compare the
2293 first operand, and if the second operand isn't equal constant,
2294 try to add the offsets into offset and ref_offset. */
2295 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2296 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2297 {
2298 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2299 TREE_OPERAND (ref->base, 1)))
2300 {
2301 offset_int off1 = mem_ref_offset (base);
2302 off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
2303 off1 += offset;
2304 offset_int off2 = mem_ref_offset (ref->base);
2305 off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
2306 off2 += ref_offset;
2307 if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
2308 {
2309 offset = off1.to_shwi ();
2310 ref_offset = off2.to_shwi ();
2311 }
2312 else
2313 size = -1;
2314 }
2315 }
2316 else
2317 size = -1;
2318 }
2319 /* For a must-alias check we need to be able to constrain
2320 the access properly. */
2321 if (size != -1 && size == max_size)
2322 {
2323 if (offset <= ref_offset
2324 && offset + size >= ref_offset + ref->max_size)
2325 return true;
2326 }
2327 }
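/* For illustration (editorial example, not original source text): an
   assignment to MEM[p_1 + 8] of size 8 kills a 4-byte read from
   MEM[p_1 + 12].  The bases are not literally equal, but they share
   the pointer p_1, so the constant offsets are folded into OFFSET and
   REF_OFFSET and the stored range [8, 16) covers the read range
   [12, 16).  */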
2329 if (is_gimple_call (stmt))
2330 {
2331 tree callee = gimple_call_fndecl (stmt);
2332 if (callee != NULL_TREE
2333 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2334 switch (DECL_FUNCTION_CODE (callee))
2335 {
2336 case BUILT_IN_FREE:
2337 {
2338 tree ptr = gimple_call_arg (stmt, 0);
2339 tree base = ao_ref_base (ref);
2340 if (base && TREE_CODE (base) == MEM_REF
2341 && TREE_OPERAND (base, 0) == ptr)
2342 return true;
2343 break;
2344 }
2346 case BUILT_IN_MEMCPY:
2347 case BUILT_IN_MEMPCPY:
2348 case BUILT_IN_MEMMOVE:
2349 case BUILT_IN_MEMSET:
2350 case BUILT_IN_MEMCPY_CHK:
2351 case BUILT_IN_MEMPCPY_CHK:
2352 case BUILT_IN_MEMMOVE_CHK:
2353 case BUILT_IN_MEMSET_CHK:
2354 {
2355 /* For a must-alias check we need to be able to constrain
2356 the access properly. */
2357 if (ref->max_size == -1)
2358 return false;
2359 tree dest = gimple_call_arg (stmt, 0);
2360 tree len = gimple_call_arg (stmt, 2);
2361 if (!tree_fits_shwi_p (len))
2362 return false;
2363 tree rbase = ref->base;
2364 offset_int roffset = ref->offset;
2365 ao_ref dref;
2366 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2367 tree base = ao_ref_base (&dref);
2368 offset_int offset = dref.offset;
2369 if (!base || dref.size == -1)
2370 return false;
2371 if (TREE_CODE (base) == MEM_REF)
2372 {
2373 if (TREE_CODE (rbase) != MEM_REF)
2374 return false;
2375 // Compare pointers.
2376 offset += wi::lshift (mem_ref_offset (base),
2377 LOG2_BITS_PER_UNIT);
2378 roffset += wi::lshift (mem_ref_offset (rbase),
2379 LOG2_BITS_PER_UNIT);
2380 base = TREE_OPERAND (base, 0);
2381 rbase = TREE_OPERAND (rbase, 0);
2382 }
2383 if (base == rbase
2384 && wi::les_p (offset, roffset)
2385 && wi::les_p (roffset + ref->max_size,
2386 offset + wi::lshift (wi::to_offset (len),
2387 LOG2_BITS_PER_UNIT)))
2388 return true;
2389 break;
2390 }
2392 case BUILT_IN_VA_END:
2393 {
2394 tree ptr = gimple_call_arg (stmt, 0);
2395 if (TREE_CODE (ptr) == ADDR_EXPR)
2396 {
2397 tree base = ao_ref_base (ref);
2398 if (TREE_OPERAND (ptr, 0) == base)
2399 return true;
2400 }
2401 break;
2402 }
2404 default:;
2405 }
2406 }
2407 return false;
2408 }
2410 bool
2411 stmt_kills_ref_p (gimple *stmt, tree ref)
2412 {
2413 ao_ref r;
2414 ao_ref_init (&r, ref);
2415 return stmt_kills_ref_p (stmt, &r);
2416 }
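/* For illustration (editorial example, not original source text):
   memset (p, 0, 16) kills a 4-byte access at MEM[p + 8], because the
   store covers bytes 0..15 of *p and the access, once constrained to a
   constant offset and size, lies entirely within that range.  */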
2419 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2420 TARGET or a statement clobbering the memory reference REF in which
2421 case false is returned. The walk starts with VUSE, one argument of PHI. */
2423 static bool
2424 maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
2425 tree vuse, unsigned int *cnt, bitmap *visited,
2426 bool abort_on_visited,
2427 void *(*translate)(ao_ref *, tree, void *, bool *),
2428 void *data)
2429 {
2430 basic_block bb = gimple_bb (phi);
2432 if (!*visited)
2433 *visited = BITMAP_ALLOC (NULL);
2435 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2437 /* Walk until we hit the target. */
2438 while (vuse != target)
2439 {
2440 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2441 /* Recurse for PHI nodes. */
2442 if (gimple_code (def_stmt) == GIMPLE_PHI)
2443 {
2444 /* An already visited PHI node ends the walk successfully. */
2445 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2446 return !abort_on_visited;
2447 vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2448 visited, abort_on_visited,
2449 translate, data);
2450 if (!vuse)
2451 return false;
2452 continue;
2453 }
2454 else if (gimple_nop_p (def_stmt))
2455 return false;
2456 else
2457 {
2458 /* A clobbering statement or the end of the IL ends it failing. */
2459 ++*cnt;
2460 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2461 {
2462 bool disambiguate_only = true;
2463 if (translate
2464 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2465 ;
2466 else
2467 return false;
2468 }
2469 }
2470 /* If we reach a new basic-block see if we already skipped it
2471 in a previous walk that ended successfully. */
2472 if (gimple_bb (def_stmt) != bb)
2473 {
2474 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2475 return !abort_on_visited;
2476 bb = gimple_bb (def_stmt);
2477 }
2478 vuse = gimple_vuse (def_stmt);
2479 }
2480 return true;
2481 }
2483 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2484 until we hit the phi argument definition that dominates the other one.
2485 Return that, or NULL_TREE if there is no such definition. */
2487 static tree
2488 get_continuation_for_phi_1 (gimple *phi, tree arg0, tree arg1,
2489 ao_ref *ref, unsigned int *cnt,
2490 bitmap *visited, bool abort_on_visited,
2491 void *(*translate)(ao_ref *, tree, void *, bool *),
2492 void *data)
2493 {
2494 gimple *def0 = SSA_NAME_DEF_STMT (arg0);
2495 gimple *def1 = SSA_NAME_DEF_STMT (arg1);
2496 tree common_vuse;
2498 if (arg0 == arg1)
2499 return arg0;
2500 else if (gimple_nop_p (def0)
2501 || (!gimple_nop_p (def1)
2502 && dominated_by_p (CDI_DOMINATORS,
2503 gimple_bb (def1), gimple_bb (def0))))
2504 {
2505 if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
2506 visited, abort_on_visited, translate, data))
2507 return arg0;
2508 }
2509 else if (gimple_nop_p (def1)
2510 || dominated_by_p (CDI_DOMINATORS,
2511 gimple_bb (def0), gimple_bb (def1)))
2512 {
2513 if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2514 visited, abort_on_visited, translate, data))
2515 return arg1;
2516 }
2517 /* Special case of a diamond:
2518 MEM_1 = ...
2519 goto (cond) ? L1 : L2
2520 L1: store1 = ... #MEM_2 = vuse(MEM_1)
2521 goto L3
2522 L2: store2 = ... #MEM_3 = vuse(MEM_1)
2523 L3: MEM_4 = PHI<MEM_2, MEM_3>
2524 We were called with the PHI at L3, MEM_2 and MEM_3 don't
2525 dominate each other, but still we can easily skip this PHI node
2526 if we recognize that the vuse MEM operand is the same for both,
2527 and that we can skip both statements (they don't clobber us).
2528 This is still linear. Don't use maybe_skip_until, that might
2529 potentially be slow. */
2530 else if ((common_vuse = gimple_vuse (def0))
2531 && common_vuse == gimple_vuse (def1))
2532 {
2533 bool disambiguate_only = true;
2534 *cnt += 2;
2535 if ((!stmt_may_clobber_ref_p_1 (def0, ref)
2536 || (translate
2537 && (*translate) (ref, arg0, data, &disambiguate_only) == NULL))
2538 && (!stmt_may_clobber_ref_p_1 (def1, ref)
2539 || (translate
2540 && (*translate) (ref, arg1, data, &disambiguate_only) == NULL)))
2541 return common_vuse;
2542 }
2544 return NULL_TREE;
2545 }
2548 /* Starting from a PHI node for the virtual operand of the memory reference
2549 REF find a continuation virtual operand that allows us to continue walking
2550 statements dominating PHI skipping only statements that cannot possibly
2551 clobber REF. Increments *CNT for each alias disambiguation done.
2552 Returns NULL_TREE if no suitable virtual operand can be found. */
2554 tree
2555 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2556 unsigned int *cnt, bitmap *visited,
2557 bool abort_on_visited,
2558 void *(*translate)(ao_ref *, tree, void *, bool *),
2559 void *data)
2560 {
2561 unsigned nargs = gimple_phi_num_args (phi);
2563 /* Through a single-argument PHI we can simply look through. */
2564 if (nargs == 1)
2565 return PHI_ARG_DEF (phi, 0);
2567 /* For two or more arguments try to pairwise skip non-aliasing code
2568 until we hit the phi argument definition that dominates the other one. */
2569 else if (nargs >= 2)
2570 {
2571 tree arg0, arg1;
2572 unsigned i;
2574 /* Find a candidate for the virtual operand whose definition
2575 dominates those of all others. */
2576 arg0 = PHI_ARG_DEF (phi, 0);
2577 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2578 for (i = 1; i < nargs; ++i)
2579 {
2580 arg1 = PHI_ARG_DEF (phi, i);
2581 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2582 {
2583 arg0 = arg1;
2584 break;
2585 }
2586 if (dominated_by_p (CDI_DOMINATORS,
2587 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2588 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2589 arg0 = arg1;
2590 }
2592 /* Then pairwise reduce against the found candidate. */
2593 for (i = 0; i < nargs; ++i)
2594 {
2595 arg1 = PHI_ARG_DEF (phi, i);
2596 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2597 cnt, visited, abort_on_visited,
2598 translate, data);
2599 if (!arg0)
2600 return NULL_TREE;
2601 }
2603 return arg0;
2604 }
2606 return NULL_TREE;
2607 }
2609 /* Based on the memory reference REF and its virtual use VUSE call
2610 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2611 itself. That is, for each virtual use for which its defining statement
2612 does not clobber REF.
2614 WALKER is called with REF, the current virtual use and DATA. If
2615 WALKER returns non-NULL the walk stops and its result is returned.
2616 At the end of a non-successful walk NULL is returned.
2618 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2619 use whose definition is a statement that may clobber REF, and DATA.
2620 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2621 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2622 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2623 to adjust REF and *DATA to make that valid.
2625 VALUEIZE if non-NULL is called with the next VUSE that is considered
2626 and its return value is substituted for that. This can be used to
2627 implement optimistic value-numbering for example. Note that the
2628 VUSE argument is assumed to be valueized already.
2630 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
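/* A minimal sketch of a walker callback (hypothetical, for illustration
   only):

     static void *
     stop_at_vuse (ao_ref *, tree vuse, unsigned int, void *data)
     {
       return vuse == (tree) data ? data : NULL;
     }

   With that, walk_non_aliased_vuses (&r, vuse, stop_at_vuse, NULL, NULL,
   (void *) wanted) roughly answers whether WANTED is reachable from VUSE
   without crossing a statement that may clobber R.  */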
2632 void *
2633 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2634 void *(*walker)(ao_ref *, tree, unsigned int, void *),
2635 void *(*translate)(ao_ref *, tree, void *, bool *),
2636 tree (*valueize)(tree),
2637 void *data)
2638 {
2639 bitmap visited = NULL;
2640 void *res;
2641 unsigned int cnt = 0;
2642 bool translated = false;
2644 timevar_push (TV_ALIAS_STMT_WALK);
2646 do
2647 {
2648 gimple *def_stmt;
2650 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2651 res = (*walker) (ref, vuse, cnt, data);
2652 /* Abort walk. */
2653 if (res == (void *)-1)
2654 {
2655 res = NULL;
2656 break;
2657 }
2658 /* Lookup succeeded. */
2659 else if (res != NULL)
2660 break;
2662 if (valueize)
2663 vuse = valueize (vuse);
2664 def_stmt = SSA_NAME_DEF_STMT (vuse);
2665 if (gimple_nop_p (def_stmt))
2666 break;
2667 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2668 vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2669 &visited, translated, translate, data);
2670 else
2671 {
2672 cnt++;
2673 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2674 {
2675 if (!translate)
2676 break;
2677 bool disambiguate_only = false;
2678 res = (*translate) (ref, vuse, data, &disambiguate_only);
2679 /* Failed lookup and translation. */
2680 if (res == (void *)-1)
2681 {
2682 res = NULL;
2683 break;
2684 }
2685 /* Lookup succeeded. */
2686 else if (res != NULL)
2687 break;
2688 /* Translation succeeded, continue walking. */
2689 translated = translated || !disambiguate_only;
2690 }
2691 vuse = gimple_vuse (def_stmt);
2692 }
2693 }
2694 while (vuse);
2696 if (visited)
2697 BITMAP_FREE (visited);
2699 timevar_pop (TV_ALIAS_STMT_WALK);
2701 return res;
2702 }
2705 /* Based on the memory reference REF call WALKER for each vdef which
2706 defining statement may clobber REF, starting with VDEF. If REF
2707 is NULL_TREE, each defining statement is visited.
2709 WALKER is called with REF, the current vdef and DATA. If WALKER
2710 returns true the walk is stopped, otherwise it continues.
2712 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2713 The pointer may be NULL and then we do not track this information.
2715 At PHI nodes walk_aliased_vdefs forks into one walk for each
2716 PHI argument (but only one walk continues on merge points), the
2717 return value is true if any of the walks was successful.
2719 The function returns the number of statements walked. */
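/* A minimal sketch (hypothetical callback and caller, for illustration
   only):

     static bool
     count_vdef (ao_ref *, tree, void *data)
     {
       ++*(unsigned *) data;
       return false;
     }

     unsigned n = 0;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), count_vdef, &n, NULL, NULL);

   counts how many vdefs on the chain have a defining statement that may
   clobber R.  */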
2721 static unsigned int
2722 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2723 bool (*walker)(ao_ref *, tree, void *), void *data,
2724 bitmap *visited, unsigned int cnt,
2725 bool *function_entry_reached)
2726 {
2727 do
2728 {
2729 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2731 if (*visited
2732 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2733 return cnt;
2735 if (gimple_nop_p (def_stmt))
2736 {
2737 if (function_entry_reached)
2738 *function_entry_reached = true;
2739 return cnt;
2740 }
2741 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2742 {
2743 unsigned i;
2744 if (!*visited)
2745 *visited = BITMAP_ALLOC (NULL);
2746 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2747 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2748 walker, data, visited, 0,
2749 function_entry_reached);
2750 return cnt;
2751 }
2753 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2754 cnt++;
2755 if ((!ref
2756 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2757 && (*walker) (ref, vdef, data))
2758 return cnt;
2760 vdef = gimple_vuse (def_stmt);
2761 }
2762 while (vdef);
2763 return cnt;
2764 }
2765 unsigned int
2766 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2767 bool (*walker)(ao_ref *, tree, void *), void *data,
2768 bitmap *visited,
2769 bool *function_entry_reached)
2770 {
2771 bitmap local_visited = NULL;
2772 unsigned int ret;
2774 timevar_push (TV_ALIAS_STMT_WALK);
2776 if (function_entry_reached)
2777 *function_entry_reached = false;
2779 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2780 visited ? visited : &local_visited, 0,
2781 function_entry_reached);
2782 if (local_visited)
2783 BITMAP_FREE (local_visited);
2785 timevar_pop (TV_ALIAS_STMT_WALK);
2787 return ret;
2788 }