1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "fold-const.h"
33 #include "insn-config.h"
42 #include "stor-layout.h"
44 #include "dominance.h"
45 #include "internal-fn.h"
46 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "tree-into-ssa.h"
52 #include "tree-ssa-propagate.h"
55 #include "ipa-utils.h"
56 #include "gimple-pretty-print.h"
57 #include "tree-ssa-address.h"
58 #include "langhooks.h"
59 #include "gimplify-me.h"
64 #include "gimple-match.h"
65 #include "gomp-constants.h"
67 /* Return true when DECL can be referenced from current unit.
68 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
69 We can get declarations that are not possible to reference for various
72 1) When analyzing C++ virtual tables.
73 C++ virtual tables do have known constructors even
74 when they are keyed to other compilation unit.
75 Those tables can contain pointers to methods and vars
76 in other units. Those methods have both STATIC and EXTERNAL
78 2) In WHOPR mode devirtualization might lead to reference
79 to method that was partitioned elsewhere.
80 In this case we have static VAR_DECL or FUNCTION_DECL
81 that has no corresponding callgraph/varpool node
83 3) COMDAT functions referred by external vtables that
84 we devirtualize only during final compilation stage.
85 At this time we already decided that we will not output
86 the function body and thus we can't reference the symbol
90 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
93 struct cgraph_node *node;
96 if (DECL_ABSTRACT_P (decl))
99 /* We are concerned only about static/external vars and functions. */
100 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
101 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
104 /* Static objects can be referred only if they was not optimized out yet. */
105 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
107 /* Before we start optimizing unreachable code we can be sure all
108 static objects are defined. */
109 if (symtab->function_flags_ready)
111 snode = symtab_node::get (decl);
112 if (!snode || !snode->definition)
114 node = dyn_cast <cgraph_node *> (snode);
115 return !node || !node->global.inlined_to;
118 /* We will later output the initializer, so we can refer to it.
119 So we are concerned only when DECL comes from initializer of
120 external var or var that has been optimized out. */
122 || TREE_CODE (from_decl) != VAR_DECL
123 || (!DECL_EXTERNAL (from_decl)
124 && (vnode = varpool_node::get (from_decl)) != NULL
125 && vnode->definition)
127 && (vnode = varpool_node::get (from_decl)) != NULL
128 && vnode->in_other_partition))
130 /* We are folding reference from external vtable. The vtable may reffer
131 to a symbol keyed to other compilation unit. The other compilation
132 unit may be in separate DSO and the symbol may be hidden. */
133 if (DECL_VISIBILITY_SPECIFIED (decl)
134 && DECL_EXTERNAL (decl)
135 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
136 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
138 /* When function is public, we always can introduce new reference.
139 Exception are the COMDAT functions where introducing a direct
140 reference imply need to include function body in the curren tunit. */
141 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
143 /* We have COMDAT. We are going to check if we still have definition
144 or if the definition is going to be output in other partition.
145 Bypass this when gimplifying; all needed functions will be produced.
147 As observed in PR20991 for already optimized out comdat virtual functions
148 it may be tempting to not necessarily give up because the copy will be
149 output elsewhere when corresponding vtable is output.
150 This is however not possible - ABI specify that COMDATs are output in
151 units where they are used and when the other unit was compiled with LTO
152 it is possible that vtable was kept public while the function itself
154 if (!symtab->function_flags_ready)
157 snode = symtab_node::get (decl);
159 || ((!snode->definition || DECL_EXTERNAL (decl))
160 && (!snode->in_other_partition
161 || (!snode->forced_by_abi && !snode->force_output))))
163 node = dyn_cast <cgraph_node *> (snode);
164 return !node || !node->global.inlined_to;
167 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
168 acceptable form for is_gimple_min_invariant.
169 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
172 canonicalize_constructor_val (tree cval, tree from_decl)
174 tree orig_cval = cval;
176 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
177 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
179 tree ptr = TREE_OPERAND (cval, 0);
180 if (is_gimple_min_invariant (ptr))
181 cval = build1_loc (EXPR_LOCATION (cval),
182 ADDR_EXPR, TREE_TYPE (ptr),
183 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
185 fold_convert (ptr_type_node,
186 TREE_OPERAND (cval, 1))));
188 if (TREE_CODE (cval) == ADDR_EXPR)
190 tree base = NULL_TREE;
191 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
193 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
195 TREE_OPERAND (cval, 0) = base;
198 base = get_base_address (TREE_OPERAND (cval, 0));
202 if ((TREE_CODE (base) == VAR_DECL
203 || TREE_CODE (base) == FUNCTION_DECL)
204 && !can_refer_decl_in_current_unit_p (base, from_decl))
206 if (TREE_CODE (base) == VAR_DECL)
207 TREE_ADDRESSABLE (base) = 1;
208 else if (TREE_CODE (base) == FUNCTION_DECL)
210 /* Make sure we create a cgraph node for functions we'll reference.
211 They can be non-existent if the reference comes from an entry
212 of an external vtable for example. */
213 cgraph_node::get_create (base);
215 /* Fixup types in global initializers. */
216 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
217 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
219 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
220 cval = fold_convert (TREE_TYPE (orig_cval), cval);
223 if (TREE_OVERFLOW_P (cval))
224 return drop_tree_overflow (cval);
228 /* If SYM is a constant variable with known value, return the value.
229 NULL_TREE is returned otherwise. */
232 get_symbol_constant_value (tree sym)
234 tree val = ctor_for_folding (sym);
235 if (val != error_mark_node)
239 val = canonicalize_constructor_val (unshare_expr (val), sym);
240 if (val && is_gimple_min_invariant (val))
245 /* Variables declared 'const' without an initializer
246 have zero as the initializer if they may not be
247 overridden at link or run time. */
249 && is_gimple_reg_type (TREE_TYPE (sym)))
250 return build_zero_cst (TREE_TYPE (sym));
258 /* Subroutine of fold_stmt. We perform several simplifications of the
259 memory reference tree EXPR and make sure to re-gimplify them properly
260 after propagation of constant addresses. IS_LHS is true if the
261 reference is supposed to be an lvalue. */
264 maybe_fold_reference (tree expr, bool is_lhs)
268 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
269 || TREE_CODE (expr) == REALPART_EXPR
270 || TREE_CODE (expr) == IMAGPART_EXPR)
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_unary_loc (EXPR_LOCATION (expr),
275 TREE_OPERAND (expr, 0));
276 else if (TREE_CODE (expr) == BIT_FIELD_REF
277 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
278 return fold_ternary_loc (EXPR_LOCATION (expr),
281 TREE_OPERAND (expr, 0),
282 TREE_OPERAND (expr, 1),
283 TREE_OPERAND (expr, 2));
286 && (result = fold_const_aggregate_ref (expr))
287 && is_gimple_min_invariant (result))
294 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
295 replacement rhs for the statement or NULL_TREE if no simplification
296 could be made. It is assumed that the operands have been previously
300 fold_gimple_assign (gimple_stmt_iterator *si)
302 gimple *stmt = gsi_stmt (*si);
303 enum tree_code subcode = gimple_assign_rhs_code (stmt);
304 location_t loc = gimple_location (stmt);
306 tree result = NULL_TREE;
308 switch (get_gimple_rhs_class (subcode))
310 case GIMPLE_SINGLE_RHS:
312 tree rhs = gimple_assign_rhs1 (stmt);
314 if (TREE_CLOBBER_P (rhs))
317 if (REFERENCE_CLASS_P (rhs))
318 return maybe_fold_reference (rhs, false);
320 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
322 tree val = OBJ_TYPE_REF_EXPR (rhs);
323 if (is_gimple_min_invariant (val))
325 else if (flag_devirtualize && virtual_method_call_p (rhs))
328 vec <cgraph_node *>targets
329 = possible_polymorphic_call_targets (rhs, stmt, &final);
330 if (final && targets.length () <= 1 && dbg_cnt (devirt))
332 if (dump_enabled_p ())
334 location_t loc = gimple_location_safe (stmt);
335 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
336 "resolving virtual function address "
337 "reference to function %s\n",
338 targets.length () == 1
339 ? targets[0]->name ()
342 if (targets.length () == 1)
344 val = fold_convert (TREE_TYPE (val),
345 build_fold_addr_expr_loc
346 (loc, targets[0]->decl));
347 STRIP_USELESS_TYPE_CONVERSION (val);
350 /* We can not use __builtin_unreachable here because it
351 can not have address taken. */
352 val = build_int_cst (TREE_TYPE (val), 0);
358 else if (TREE_CODE (rhs) == ADDR_EXPR)
360 tree ref = TREE_OPERAND (rhs, 0);
361 tree tem = maybe_fold_reference (ref, true);
363 && TREE_CODE (tem) == MEM_REF
364 && integer_zerop (TREE_OPERAND (tem, 1)))
365 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
367 result = fold_convert (TREE_TYPE (rhs),
368 build_fold_addr_expr_loc (loc, tem));
369 else if (TREE_CODE (ref) == MEM_REF
370 && integer_zerop (TREE_OPERAND (ref, 1)))
371 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
374 else if (TREE_CODE (rhs) == CONSTRUCTOR
375 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
376 && (CONSTRUCTOR_NELTS (rhs)
377 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
379 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
383 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
384 if (TREE_CODE (val) != INTEGER_CST
385 && TREE_CODE (val) != REAL_CST
386 && TREE_CODE (val) != FIXED_CST)
389 return build_vector_from_ctor (TREE_TYPE (rhs),
390 CONSTRUCTOR_ELTS (rhs));
393 else if (DECL_P (rhs))
394 return get_symbol_constant_value (rhs);
396 /* If we couldn't fold the RHS, hand over to the generic
398 if (result == NULL_TREE)
401 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
402 that may have been added by fold, and "useless" type
403 conversions that might now be apparent due to propagation. */
404 STRIP_USELESS_TYPE_CONVERSION (result);
406 if (result != rhs && valid_gimple_rhs_p (result))
413 case GIMPLE_UNARY_RHS:
416 case GIMPLE_BINARY_RHS:
419 case GIMPLE_TERNARY_RHS:
420 result = fold_ternary_loc (loc, subcode,
421 TREE_TYPE (gimple_assign_lhs (stmt)),
422 gimple_assign_rhs1 (stmt),
423 gimple_assign_rhs2 (stmt),
424 gimple_assign_rhs3 (stmt));
428 STRIP_USELESS_TYPE_CONVERSION (result);
429 if (valid_gimple_rhs_p (result))
434 case GIMPLE_INVALID_RHS:
442 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
443 adjusting the replacement stmts location and virtual operands.
444 If the statement has a lhs the last stmt in the sequence is expected
445 to assign to that lhs. */
448 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
450 gimple *stmt = gsi_stmt (*si_p);
452 if (gimple_has_location (stmt))
453 annotate_all_with_location (stmts, gimple_location (stmt));
455 /* First iterate over the replacement statements backward, assigning
456 virtual operands to their defining statements. */
457 gimple *laststore = NULL;
458 for (gimple_stmt_iterator i = gsi_last (stmts);
459 !gsi_end_p (i); gsi_prev (&i))
461 gimple *new_stmt = gsi_stmt (i);
462 if ((gimple_assign_single_p (new_stmt)
463 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
464 || (is_gimple_call (new_stmt)
465 && (gimple_call_flags (new_stmt)
466 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
470 vdef = gimple_vdef (stmt);
472 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
473 gimple_set_vdef (new_stmt, vdef);
474 if (vdef && TREE_CODE (vdef) == SSA_NAME)
475 SSA_NAME_DEF_STMT (vdef) = new_stmt;
476 laststore = new_stmt;
480 /* Second iterate over the statements forward, assigning virtual
481 operands to their uses. */
482 tree reaching_vuse = gimple_vuse (stmt);
483 for (gimple_stmt_iterator i = gsi_start (stmts);
484 !gsi_end_p (i); gsi_next (&i))
486 gimple *new_stmt = gsi_stmt (i);
487 /* If the new statement possibly has a VUSE, update it with exact SSA
488 name we know will reach this one. */
489 if (gimple_has_mem_ops (new_stmt))
490 gimple_set_vuse (new_stmt, reaching_vuse);
491 gimple_set_modified (new_stmt, true);
492 if (gimple_vdef (new_stmt))
493 reaching_vuse = gimple_vdef (new_stmt);
496 /* If the new sequence does not do a store release the virtual
497 definition of the original statement. */
499 && reaching_vuse == gimple_vuse (stmt))
501 tree vdef = gimple_vdef (stmt);
503 && TREE_CODE (vdef) == SSA_NAME)
505 unlink_stmt_vdef (stmt);
506 release_ssa_name (vdef);
510 /* Finally replace the original statement with the sequence. */
511 gsi_replace_with_seq (si_p, stmts, false);
514 /* Convert EXPR into a GIMPLE value suitable for substitution on the
515 RHS of an assignment. Insert the necessary statements before
516 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
517 is replaced. If the call is expected to produce a result, then it
518 is replaced by an assignment of the new RHS to the result variable.
519 If the result is to be ignored, then the call is replaced by a
520 GIMPLE_NOP. A proper VDEF chain is retained by making the first
521 VUSE and the last VDEF of the whole sequence be the same as the replaced
522 statement and using new SSA names for stores in between. */
525 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
528 gimple *stmt, *new_stmt;
529 gimple_stmt_iterator i;
530 gimple_seq stmts = NULL;
532 stmt = gsi_stmt (*si_p);
534 gcc_assert (is_gimple_call (stmt));
536 push_gimplify_context (gimple_in_ssa_p (cfun));
538 lhs = gimple_call_lhs (stmt);
539 if (lhs == NULL_TREE)
541 gimplify_and_add (expr, &stmts);
542 /* We can end up with folding a memcpy of an empty class assignment
543 which gets optimized away by C++ gimplification. */
544 if (gimple_seq_empty_p (stmts))
546 pop_gimplify_context (NULL);
547 if (gimple_in_ssa_p (cfun))
549 unlink_stmt_vdef (stmt);
552 gsi_replace (si_p, gimple_build_nop (), false);
558 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
559 new_stmt = gimple_build_assign (lhs, tmp);
560 i = gsi_last (stmts);
561 gsi_insert_after_without_update (&i, new_stmt,
562 GSI_CONTINUE_LINKING);
565 pop_gimplify_context (NULL);
567 gsi_replace_with_seq_vops (si_p, stmts);
571 /* Replace the call at *GSI with the gimple value VAL. */
574 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
576 gimple *stmt = gsi_stmt (*gsi);
577 tree lhs = gimple_call_lhs (stmt);
581 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
582 val = fold_convert (TREE_TYPE (lhs), val);
583 repl = gimple_build_assign (lhs, val);
586 repl = gimple_build_nop ();
587 tree vdef = gimple_vdef (stmt);
588 if (vdef && TREE_CODE (vdef) == SSA_NAME)
590 unlink_stmt_vdef (stmt);
591 release_ssa_name (vdef);
593 gsi_replace (gsi, repl, false);
596 /* Replace the call at *GSI with the new call REPL and fold that
600 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
602 gimple *stmt = gsi_stmt (*gsi);
603 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
604 gimple_set_location (repl, gimple_location (stmt));
605 if (gimple_vdef (stmt)
606 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
608 gimple_set_vdef (repl, gimple_vdef (stmt));
609 gimple_set_vuse (repl, gimple_vuse (stmt));
610 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
612 gsi_replace (gsi, repl, false);
616 /* Return true if VAR is a VAR_DECL or a component thereof. */
619 var_decl_component_p (tree var)
622 while (handled_component_p (inner))
623 inner = TREE_OPERAND (inner, 0);
624 return SSA_VAR_P (inner);
627 /* Fold function call to builtin mem{{,p}cpy,move}. Return
628 false if no simplification can be made.
629 If ENDP is 0, return DEST (like memcpy).
630 If ENDP is 1, return DEST+LEN (like mempcpy).
631 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
632 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
636 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
637 tree dest, tree src, int endp)
639 gimple *stmt = gsi_stmt (*gsi);
640 tree lhs = gimple_call_lhs (stmt);
641 tree len = gimple_call_arg (stmt, 2);
642 tree destvar, srcvar;
643 location_t loc = gimple_location (stmt);
645 /* If the LEN parameter is zero, return DEST. */
646 if (integer_zerop (len))
649 if (gimple_call_lhs (stmt))
650 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
652 repl = gimple_build_nop ();
653 tree vdef = gimple_vdef (stmt);
654 if (vdef && TREE_CODE (vdef) == SSA_NAME)
656 unlink_stmt_vdef (stmt);
657 release_ssa_name (vdef);
659 gsi_replace (gsi, repl, false);
663 /* If SRC and DEST are the same (and not volatile), return
664 DEST{,+LEN,+LEN-1}. */
665 if (operand_equal_p (src, dest, 0))
667 unlink_stmt_vdef (stmt);
668 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
669 release_ssa_name (gimple_vdef (stmt));
672 gsi_replace (gsi, gimple_build_nop (), false);
679 tree srctype, desttype;
680 unsigned int src_align, dest_align;
683 /* Build accesses at offset zero with a ref-all character type. */
684 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
687 /* If we can perform the copy efficiently with first doing all loads
688 and then all stores inline it that way. Currently efficiently
689 means that we can load all the memory into a single integer
690 register which is what MOVE_MAX gives us. */
691 src_align = get_pointer_alignment (src);
692 dest_align = get_pointer_alignment (dest);
693 if (tree_fits_uhwi_p (len)
694 && compare_tree_int (len, MOVE_MAX) <= 0
695 /* ??? Don't transform copies from strings with known length this
696 confuses the tree-ssa-strlen.c. This doesn't handle
697 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
699 && !c_strlen (src, 2))
701 unsigned ilen = tree_to_uhwi (len);
702 if (exact_log2 (ilen) != -1)
704 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
706 && TYPE_MODE (type) != BLKmode
707 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
709 /* If the destination pointer is not aligned we must be able
710 to emit an unaligned store. */
711 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
712 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
715 tree desttype = type;
716 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
717 srctype = build_aligned_type (type, src_align);
718 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
719 tree tem = fold_const_aggregate_ref (srcmem);
722 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
723 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
729 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
731 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
732 if (gimple_in_ssa_p (cfun))
733 srcmem = make_ssa_name (TREE_TYPE (srcmem),
736 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
737 gimple_assign_set_lhs (new_stmt, srcmem);
738 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
739 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
741 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
742 desttype = build_aligned_type (type, dest_align);
744 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
747 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
748 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
749 if (gimple_vdef (new_stmt)
750 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
751 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
754 gsi_replace (gsi, new_stmt, false);
757 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
766 /* Both DEST and SRC must be pointer types.
767 ??? This is what old code did. Is the testing for pointer types
770 If either SRC is readonly or length is 1, we can use memcpy. */
771 if (!dest_align || !src_align)
773 if (readonly_data_expr (src)
774 || (tree_fits_uhwi_p (len)
775 && (MIN (src_align, dest_align) / BITS_PER_UNIT
776 >= tree_to_uhwi (len))))
778 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
781 gimple_call_set_fndecl (stmt, fn);
782 gimple_call_set_arg (stmt, 0, dest);
783 gimple_call_set_arg (stmt, 1, src);
788 /* If *src and *dest can't overlap, optimize into memcpy as well. */
789 if (TREE_CODE (src) == ADDR_EXPR
790 && TREE_CODE (dest) == ADDR_EXPR)
792 tree src_base, dest_base, fn;
793 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
794 HOST_WIDE_INT size = -1;
795 HOST_WIDE_INT maxsize = -1;
797 srcvar = TREE_OPERAND (src, 0);
798 src_base = get_ref_base_and_extent (srcvar, &src_offset,
800 destvar = TREE_OPERAND (dest, 0);
801 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
803 if (tree_fits_uhwi_p (len))
804 maxsize = tree_to_uhwi (len);
807 src_offset /= BITS_PER_UNIT;
808 dest_offset /= BITS_PER_UNIT;
809 if (SSA_VAR_P (src_base)
810 && SSA_VAR_P (dest_base))
812 if (operand_equal_p (src_base, dest_base, 0)
813 && ranges_overlap_p (src_offset, maxsize,
814 dest_offset, maxsize))
817 else if (TREE_CODE (src_base) == MEM_REF
818 && TREE_CODE (dest_base) == MEM_REF)
820 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
821 TREE_OPERAND (dest_base, 0), 0))
823 offset_int off = mem_ref_offset (src_base) + src_offset;
824 if (!wi::fits_shwi_p (off))
826 src_offset = off.to_shwi ();
828 off = mem_ref_offset (dest_base) + dest_offset;
829 if (!wi::fits_shwi_p (off))
831 dest_offset = off.to_shwi ();
832 if (ranges_overlap_p (src_offset, maxsize,
833 dest_offset, maxsize))
839 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
842 gimple_call_set_fndecl (stmt, fn);
843 gimple_call_set_arg (stmt, 0, dest);
844 gimple_call_set_arg (stmt, 1, src);
849 /* If the destination and source do not alias optimize into
851 if ((is_gimple_min_invariant (dest)
852 || TREE_CODE (dest) == SSA_NAME)
853 && (is_gimple_min_invariant (src)
854 || TREE_CODE (src) == SSA_NAME))
857 ao_ref_init_from_ptr_and_size (&destr, dest, len);
858 ao_ref_init_from_ptr_and_size (&srcr, src, len);
859 if (!refs_may_alias_p_1 (&destr, &srcr, false))
862 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
865 gimple_call_set_fndecl (stmt, fn);
866 gimple_call_set_arg (stmt, 0, dest);
867 gimple_call_set_arg (stmt, 1, src);
876 if (!tree_fits_shwi_p (len))
879 This logic lose for arguments like (type *)malloc (sizeof (type)),
880 since we strip the casts of up to VOID return value from malloc.
881 Perhaps we ought to inherit type from non-VOID argument here? */
884 if (!POINTER_TYPE_P (TREE_TYPE (src))
885 || !POINTER_TYPE_P (TREE_TYPE (dest)))
887 /* In the following try to find a type that is most natural to be
888 used for the memcpy source and destination and that allows
889 the most optimization when memcpy is turned into a plain assignment
890 using that type. In theory we could always use a char[len] type
891 but that only gains us that the destination and source possibly
892 no longer will have their address taken. */
893 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
894 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
896 tree tem = TREE_OPERAND (src, 0);
898 if (tem != TREE_OPERAND (src, 0))
899 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
901 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
903 tree tem = TREE_OPERAND (dest, 0);
905 if (tem != TREE_OPERAND (dest, 0))
906 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
908 srctype = TREE_TYPE (TREE_TYPE (src));
909 if (TREE_CODE (srctype) == ARRAY_TYPE
910 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
912 srctype = TREE_TYPE (srctype);
914 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
916 desttype = TREE_TYPE (TREE_TYPE (dest));
917 if (TREE_CODE (desttype) == ARRAY_TYPE
918 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
920 desttype = TREE_TYPE (desttype);
922 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
924 if (TREE_ADDRESSABLE (srctype)
925 || TREE_ADDRESSABLE (desttype))
928 /* Make sure we are not copying using a floating-point mode or
929 a type whose size possibly does not match its precision. */
930 if (FLOAT_MODE_P (TYPE_MODE (desttype))
931 || TREE_CODE (desttype) == BOOLEAN_TYPE
932 || TREE_CODE (desttype) == ENUMERAL_TYPE)
933 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
934 if (FLOAT_MODE_P (TYPE_MODE (srctype))
935 || TREE_CODE (srctype) == BOOLEAN_TYPE
936 || TREE_CODE (srctype) == ENUMERAL_TYPE)
937 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
945 src_align = get_pointer_alignment (src);
946 dest_align = get_pointer_alignment (dest);
947 if (dest_align < TYPE_ALIGN (desttype)
948 || src_align < TYPE_ALIGN (srctype))
952 STRIP_NOPS (destvar);
953 if (TREE_CODE (destvar) == ADDR_EXPR
954 && var_decl_component_p (TREE_OPERAND (destvar, 0))
955 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
956 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
962 if (TREE_CODE (srcvar) == ADDR_EXPR
963 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
964 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
967 || src_align >= TYPE_ALIGN (desttype))
968 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
970 else if (!STRICT_ALIGNMENT)
972 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
974 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
982 if (srcvar == NULL_TREE && destvar == NULL_TREE)
985 if (srcvar == NULL_TREE)
988 if (src_align >= TYPE_ALIGN (desttype))
989 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
992 if (STRICT_ALIGNMENT)
994 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
996 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
999 else if (destvar == NULL_TREE)
1002 if (dest_align >= TYPE_ALIGN (srctype))
1003 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1006 if (STRICT_ALIGNMENT)
1008 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1010 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1015 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1017 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1018 if (gimple_in_ssa_p (cfun))
1019 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1021 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1022 gimple_assign_set_lhs (new_stmt, srcvar);
1023 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1024 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1026 new_stmt = gimple_build_assign (destvar, srcvar);
1027 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1028 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1029 if (gimple_vdef (new_stmt)
1030 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1031 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1034 gsi_replace (gsi, new_stmt, false);
1037 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1041 if (endp == 0 || endp == 3)
1044 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1046 if (endp == 2 || endp == 1)
1047 dest = fold_build_pointer_plus_loc (loc, dest, len);
1049 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1051 gimple *repl = gimple_build_assign (lhs, dest);
1052 gsi_replace (gsi, repl, false);
1056 /* Fold function call to builtin memset or bzero at *GSI setting the
1057 memory of size LEN to VAL. Return whether a simplification was made. */
1060 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1062 gimple *stmt = gsi_stmt (*gsi);
1064 unsigned HOST_WIDE_INT length, cval;
1066 /* If the LEN parameter is zero, return DEST. */
1067 if (integer_zerop (len))
1069 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1073 if (! tree_fits_uhwi_p (len))
1076 if (TREE_CODE (c) != INTEGER_CST)
1079 tree dest = gimple_call_arg (stmt, 0);
1081 if (TREE_CODE (var) != ADDR_EXPR)
1084 var = TREE_OPERAND (var, 0);
1085 if (TREE_THIS_VOLATILE (var))
1088 etype = TREE_TYPE (var);
1089 if (TREE_CODE (etype) == ARRAY_TYPE)
1090 etype = TREE_TYPE (etype);
1092 if (!INTEGRAL_TYPE_P (etype)
1093 && !POINTER_TYPE_P (etype))
1096 if (! var_decl_component_p (var))
1099 length = tree_to_uhwi (len);
1100 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1101 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1104 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1107 if (integer_zerop (c))
1111 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1114 cval = TREE_INT_CST_LOW (c);
1118 cval |= (cval << 31) << 1;
1121 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1122 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1123 gimple_set_vuse (store, gimple_vuse (stmt));
1124 tree vdef = gimple_vdef (stmt);
1125 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1127 gimple_set_vdef (store, gimple_vdef (stmt));
1128 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1130 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1131 if (gimple_call_lhs (stmt))
1133 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1134 gsi_replace (gsi, asgn, false);
1138 gimple_stmt_iterator gsi2 = *gsi;
1140 gsi_remove (&gsi2, true);
1147 /* Return the string length, maximum string length or maximum value of
1149 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1150 is not NULL and, for TYPE == 0, its value is not equal to the length
1151 we determine or if we are unable to determine the length or value,
1152 return false. VISITED is a bitmap of visited variables.
1153 TYPE is 0 if string length should be returned, 1 for maximum string
1154 length and 2 for maximum value ARG can have. */
/* Worker for get_maxval_strlen: record in *LENGTH the string length or
   maximum value of ARG (see the comment above for TYPE's meaning),
   following SSA use-def chains.  *VISITED is lazily allocated and guards
   against cycles through PHI nodes.
   NOTE(review): several source lines are elided in this excerpt; the
   comments below describe only what the visible code establishes.  */
1157 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1162 if (TREE_CODE (arg) != SSA_NAME)
1164 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1165 if (TREE_CODE (arg) == ADDR_EXPR
1166 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1167 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
/* Strip the &(*ptr)[0] wrapper and recurse on the underlying SSA name.  */
1169 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1170 if (TREE_CODE (aop0) == INDIRECT_REF
1171 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1172 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1173 length, visited, type);
/* Only non-negative integer constants are acceptable values.  */
1179 if (TREE_CODE (val) != INTEGER_CST
1180 || tree_int_cst_sgn (val) < 0)
/* Otherwise compute the string length directly (c_strlen with
   "only stop at null" semantics).  */
1184 val = c_strlen (arg, 1);
/* Merging with a previously recorded *LENGTH requires both to be
   integer constants.  */
1192 if (TREE_CODE (*length) != INTEGER_CST
1193 || TREE_CODE (val) != INTEGER_CST)
/* For maximum-length queries keep the larger of the two values;
   for exact-length queries any mismatch is a failure.  */
1196 if (tree_int_cst_lt (*length, val))
1200 else if (simple_cst_equal (val, *length) != 1)
1208 /* If ARG is registered for SSA update we cannot look at its defining
1210 if (name_registered_for_update_p (arg))
1213 /* If we were already here, break the infinite cycle. */
1215 *visited = BITMAP_ALLOC (NULL);
/* bitmap_set_bit returns false when the bit was already set, i.e. ARG
   was visited before — bail out of the cycle.  */
1216 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1220 def_stmt = SSA_NAME_DEF_STMT (var);
/* Dispatch on the kind of statement defining ARG.  */
1222 switch (gimple_code (def_stmt))
1225 /* The RHS of the statement defining VAR must either have a
1226 constant length or come from another SSA_NAME with a constant
1228 if (gimple_assign_single_p (def_stmt)
1229 || gimple_assign_unary_nop_p (def_stmt))
1231 tree rhs = gimple_assign_rhs1 (def_stmt);
1232 return get_maxval_strlen (rhs, length, visited, type);
/* For a COND_EXPR both arms must yield a usable length.  */
1234 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1236 tree op2 = gimple_assign_rhs2 (def_stmt);
1237 tree op3 = gimple_assign_rhs3 (def_stmt);
1238 return get_maxval_strlen (op2, length, visited, type)
1239 && get_maxval_strlen (op3, length, visited, type);
1245 /* All the arguments of the PHI node must have the same constant
1249 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1251 tree arg = gimple_phi_arg (def_stmt, i)->def;
1253 /* If this PHI has itself as an argument, we cannot
1254 determine the string length of this argument. However,
1255 if we can find a constant string length for the other
1256 PHI args then we can still be sure that this is a
1257 constant string length. So be optimistic and just
1258 continue with the next argument. */
1259 if (arg == gimple_phi_result (def_stmt))
1262 if (!get_maxval_strlen (arg, length, visited, type))
/* Public overload: return the length/value of ARG for the given TYPE as
   a tree, or fail (elided return path) when the worker cannot determine
   it.  Owns the VISITED bitmap: allocated by the worker, freed here.  */
1274 get_maxval_strlen (tree arg, int type)
1276 bitmap visited = NULL;
1277 tree len = NULL_TREE;
1278 if (!get_maxval_strlen (arg, &len, &visited, type))
1281 BITMAP_FREE (visited);
1287 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1288 If LEN is not NULL, it represents the length of the string to be
1289 copied. Return NULL_TREE if no simplification can be made. */
/* Fold strcpy (DEST, SRC) at *GSI.  When the source length is a known
   constant, replace the call with memcpy (DEST, SRC, len + 1).  */
1292 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1293 tree dest, tree src)
1295 location_t loc = gimple_location (gsi_stmt (*gsi));
1298 /* If SRC and DEST are the same (and not volatile), return DEST. */
1299 if (operand_equal_p (src, dest, 0))
1301 replace_call_with_value (gsi, dest);
/* When optimizing for size, keep the (smaller) strcpy call.  */
1305 if (optimize_function_for_size_p (cfun))
1308 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
/* TYPE == 0: exact string length of SRC, or NULL_TREE if unknown.  */
1312 tree len = get_maxval_strlen (src, 0);
/* Copy len + 1 bytes so the terminating NUL is included.  */
1316 len = fold_convert_loc (loc, size_type_node, len);
1317 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1318 len = force_gimple_operand_gsi (gsi, len, true,
1319 NULL_TREE, true, GSI_SAME_STMT);
1320 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1321 replace_call_with_call_and_fold (gsi, repl);
1325 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1326 If SLEN is not NULL, it represents the length of the source string.
1327 Return NULL_TREE if no simplification can be made. */
/* Fold strncpy (DEST, SRC, LEN) at *GSI: into DEST when LEN is zero, or
   into memcpy when LEN is a constant at least as large as the constant
   source length + 1 (so no NUL padding would be needed).  */
1330 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1331 tree dest, tree src, tree len)
1333 location_t loc = gimple_location (gsi_stmt (*gsi));
1336 /* If the LEN parameter is zero, return DEST. */
1337 if (integer_zerop (len))
1339 replace_call_with_value (gsi, dest);
1343 /* We can't compare slen with len as constants below if len is not a
1345 if (TREE_CODE (len) != INTEGER_CST)
1348 /* Now, we must be passed a constant src ptr parameter. */
1349 tree slen = get_maxval_strlen (src, 0);
1350 if (!slen || TREE_CODE (slen) != INTEGER_CST)
/* Account for the terminating NUL: compare LEN against strlen + 1.  */
1353 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1355 /* We do not support simplification of this case, though we do
1356 support it when expanding trees into RTL. */
1357 /* FIXME: generate a call to __builtin_memset. */
1358 if (tree_int_cst_lt (slen, len))
1361 /* OK transform into builtin memcpy. */
1362 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1366 len = fold_convert_loc (loc, size_type_node, len);
1367 len = force_gimple_operand_gsi (gsi, len, true,
1368 NULL_TREE, true, GSI_SAME_STMT);
1369 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1370 replace_call_with_call_and_fold (gsi, repl);
1374 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1377 Return NULL_TREE if no simplification was possible, otherwise return the
1378 simplified form of the call as a tree.
1380 The simplified form may be a constant or other expression which
1381 computes the same value, but in a more efficient manner (including
1382 calls to other builtin functions).
1384 The call may contain arguments which need to be evaluated, but
1385 which are not useful to determine the result of the call. In
1386 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1387 COMPOUND_EXPR will be an argument which must be evaluated.
1388 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1389 COMPOUND_EXPR in the chain will contain the tree for the simplified
1390 form of the builtin function call. */
/* Fold strcat (DST, SRC) at *GSI.  A ""-source folds to DST; otherwise,
   when optimizing for speed and SRC's length is known, split the call
   into strlen (DST) followed by memcpy (DST + strlen (DST), SRC,
   len + 1).  */
1393 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1395 gimple *stmt = gsi_stmt (*gsi);
1396 location_t loc = gimple_location (stmt);
1398 const char *p = c_getstr (src);
1400 /* If the string length is zero, return the dst parameter. */
1401 if (p && *p == '\0')
1403 replace_call_with_value (gsi, dst);
/* The strlen+memcpy expansion only pays off when optimizing for speed.  */
1407 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1410 /* See if we can store by pieces into (dst + strlen(dst)). */
1412 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1413 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
/* Both replacement builtins must be available.  */
1415 if (!strlen_fn || !memcpy_fn)
1418 /* If the length of the source string isn't computable don't
1419 split strcat into strlen and memcpy. */
1420 tree len = get_maxval_strlen (src, 0);
1424 /* Create strlen (dst). */
1425 gimple_seq stmts = NULL, stmts2;
1426 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1427 gimple_set_location (repl, loc);
/* The strlen result needs an SSA name in SSA form, a temporary before.  */
1428 if (gimple_in_ssa_p (cfun))
1429 newdst = make_ssa_name (size_type_node);
1431 newdst = create_tmp_reg (size_type_node);
1432 gimple_call_set_lhs (repl, newdst);
1433 gimple_seq_add_stmt_without_update (&stmts, repl);
1435 /* Create (dst p+ strlen (dst)). */
1436 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1437 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1438 gimple_seq_add_seq_without_update (&stmts, stmts2);
/* Copy len + 1 bytes so the terminating NUL is appended too.  */
1440 len = fold_convert_loc (loc, size_type_node, len);
1441 len = size_binop_loc (loc, PLUS_EXPR, len,
1442 build_int_cst (size_type_node, 1));
1443 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1444 gimple_seq_add_seq_without_update (&stmts, stmts2);
1446 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1447 gimple_seq_add_stmt_without_update (&stmts, repl);
/* strcat returns DST; materialize that if the result is used.  */
1448 if (gimple_call_lhs (stmt))
1450 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1451 gimple_seq_add_stmt_without_update (&stmts, repl);
1452 gsi_replace_with_seq_vops (gsi, stmts);
1453 /* gsi now points at the assignment to the lhs, get a
1454 stmt iterator to the memcpy call.
1455 ??? We can't use gsi_for_stmt as that doesn't work when the
1456 CFG isn't built yet. */
1457 gimple_stmt_iterator gsi2 = *gsi;
1463 gsi_replace_with_seq_vops (gsi, stmts);
1469 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1470 are the arguments to the call. */
/* Fold __strcat_chk (DEST, SRC, SIZE) at *GSI: to DEST for a "" source,
   or to plain strcat when SIZE is the all-ones "unknown object size"
   sentinel (i.e. no checking is actually possible).  */
1473 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1475 gimple *stmt = gsi_stmt (*gsi);
1476 tree dest = gimple_call_arg (stmt, 0);
1477 tree src = gimple_call_arg (stmt, 1);
1478 tree size = gimple_call_arg (stmt, 2);
1484 /* If the SRC parameter is "", return DEST. */
1485 if (p && *p == '\0')
1487 replace_call_with_value (gsi, dest);
/* Only drop the check when SIZE is the unknown-size (-1) sentinel.  */
1491 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1494 /* If __builtin_strcat_chk is used, assume strcat is available. */
1495 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1499 gimple *repl = gimple_build_call (fn, 2, dest, src);
1500 replace_call_with_call_and_fold (gsi, repl);
1504 /* Simplify a call to the strncat builtin. */
/* Fold strncat (DST, SRC, LEN) at *GSI: to DST when LEN is zero or SRC
   is "", or to strcat when the constant LEN covers the whole constant
   source string.  */
1507 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1509 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1510 tree dst = gimple_call_arg (stmt, 0);
1511 tree src = gimple_call_arg (stmt, 1);
1512 tree len = gimple_call_arg (stmt, 2);
1514 const char *p = c_getstr (src);
1516 /* If the requested length is zero, or the src parameter string
1517 length is zero, return the dst parameter. */
1518 if (integer_zerop (len) || (p && *p == '\0'))
1520 replace_call_with_value (gsi, dst);
1524 /* If the requested len is greater than or equal to the string
1525 length, call strcat. */
1526 if (TREE_CODE (len) == INTEGER_CST && p
1527 && compare_tree_int (len, strlen (p)) >= 0)
1529 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1531 /* If the replacement _DECL isn't initialized, don't do the
1536 gcall *repl = gimple_build_call (fn, 2, dst, src);
1537 replace_call_with_call_and_fold (gsi, repl);
1544 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
/* Fold __strncat_chk (DEST, SRC, LEN, SIZE) at *GSI: to DEST for the
   trivial cases, to __strcat_chk when LEN provably covers the source,
   or to plain strncat when SIZE is the unknown-size sentinel.  */
1548 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1550 gimple *stmt = gsi_stmt (*gsi);
1551 tree dest = gimple_call_arg (stmt, 0);
1552 tree src = gimple_call_arg (stmt, 1);
1553 tree len = gimple_call_arg (stmt, 2);
1554 tree size = gimple_call_arg (stmt, 3);
1559 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1560 if ((p && *p == '\0')
1561 || integer_zerop (len))
1563 replace_call_with_value (gsi, dest);
/* A non-constant object size cannot be reasoned about; give up.  */
1567 if (! tree_fits_uhwi_p (size))
1570 if (! integer_all_onesp (size))
1572 tree src_len = c_strlen (src, 1);
1574 && tree_fits_uhwi_p (src_len)
1575 && tree_fits_uhwi_p (len)
1576 && ! tree_int_cst_lt (len, src_len)
1578 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1579 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK)
1583 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1584 replace_call_with_call_and_fold (gsi, repl);
1590 /* If __builtin_strncat_chk is used, assume strncat is available. */
1591 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1595 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1596 replace_call_with_call_and_fold (gsi, repl);
1600 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1601 to the call. IGNORE is true if the value returned
1602 by the builtin will be ignored. UNLOCKED is true if this is
1603 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1604 the known length of the string. Return NULL_TREE if no simplification
/* Fold fputs (ARG0, ARG1) at *GSI when the return value is unused:
   a "" string deletes the call, a 1-char string becomes fputc, and a
   longer known-length string becomes fwrite (unless optimizing for
   size).  UNLOCKED selects the *_unlocked replacement builtins.  */
1608 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1609 tree arg0, tree arg1,
1612 gimple *stmt = gsi_stmt (*gsi);
1614 /* If we're using an unlocked function, assume the other unlocked
1615 functions exist explicitly. */
1616 tree const fn_fputc = (unlocked
1617 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1618 : builtin_decl_implicit (BUILT_IN_FPUTC));
1619 tree const fn_fwrite = (unlocked
1620 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1621 : builtin_decl_implicit (BUILT_IN_FWRITE));
1623 /* If the return value is used, don't do the transformation. */
1624 if (gimple_call_lhs (stmt))
1627 /* Get the length of the string passed to fputs. If the length
1628 can't be determined, punt. */
1629 tree len = get_maxval_strlen (arg0, 0);
1631 || TREE_CODE (len) != INTEGER_CST)
/* compare_tree_int yields -1/0/1 for len < 1, == 1, > 1.  */
1634 switch (compare_tree_int (len, 1))
1636 case -1: /* length is 0, delete the call entirely . */
1637 replace_call_with_value (gsi, integer_zero_node);
1640 case 0: /* length is 1, call fputc. */
1642 const char *p = c_getstr (arg0);
/* Pass the single character by value; fputc takes an int.  */
1648 gimple *repl = gimple_build_call (fn_fputc, 2,
1650 (integer_type_node, p[0]), arg1);
1651 replace_call_with_call_and_fold (gsi, repl);
1656 case 1: /* length is greater than 1, call fwrite. */
1658 /* If optimizing for size keep fputs. */
1659 if (optimize_function_for_size_p (cfun))
1661 /* New argument list transforming fputs(string, stream) to
1662 fwrite(string, 1, len, stream). */
1666 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1667 size_one_node, len, arg1);
1668 replace_call_with_call_and_fold (gsi, repl);
1677 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1678 DEST, SRC, LEN, and SIZE are the arguments to the call.
1679 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1680 code of the builtin. If MAXLEN is not NULL, it is maximum length
1681 passed as third argument. */
/* Fold a __mem{cpy,pcpy,move,set}_chk call at *GSI into the unchecked
   variant when the check is provably redundant: SIZE is the unknown
   sentinel, or LEN (resp. its maximum MAXLEN) fits within SIZE.  */
1684 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1685 tree dest, tree src, tree len, tree size,
1686 enum built_in_function fcode)
1688 gimple *stmt = gsi_stmt (*gsi);
1689 location_t loc = gimple_location (stmt);
1690 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1693 /* If SRC and DEST are the same (and not volatile), return DEST
1694 (resp. DEST+LEN for __mempcpy_chk). */
1695 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1697 if (fcode != BUILT_IN_MEMPCPY_CHK)
1699 replace_call_with_value (gsi, dest);
/* __mempcpy_chk returns DEST + LEN rather than DEST.  */
1704 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1705 temp = force_gimple_operand_gsi (gsi, temp,
1706 false, NULL_TREE, true,
1708 replace_call_with_value (gsi, temp);
/* A non-constant object size cannot be reasoned about.  */
1713 if (! tree_fits_uhwi_p (size))
/* TYPE == 2: maximum value LEN can have.  */
1716 tree maxlen = get_maxval_strlen (len, 2);
1717 if (! integer_all_onesp (size))
1719 if (! tree_fits_uhwi_p (len))
1721 /* If LEN is not constant, try MAXLEN too.
1722 For MAXLEN only allow optimizing into non-_ocs function
1723 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1724 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1726 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1728 /* (void) __mempcpy_chk () can be optimized into
1729 (void) __memcpy_chk (). */
1730 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1734 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1735 replace_call_with_call_and_fold (gsi, repl);
/* SIZE < MAXLEN: the check may legitimately fire, keep it.  */
1744 if (tree_int_cst_lt (size, maxlen))
1749 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1750 mem{cpy,pcpy,move,set} is available. */
1753 case BUILT_IN_MEMCPY_CHK:
1754 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1756 case BUILT_IN_MEMPCPY_CHK:
1757 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1759 case BUILT_IN_MEMMOVE_CHK:
1760 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1762 case BUILT_IN_MEMSET_CHK:
1763 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1772 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1773 replace_call_with_call_and_fold (gsi, repl);
1777 /* Fold a call to the __st[rp]cpy_chk builtin.
1778 DEST, SRC, and SIZE are the arguments to the call.
1779 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1780 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1781 strings passed as second argument. */
/* Fold a __st[rp]cpy_chk call at *GSI into the unchecked st[rp]cpy (or
   into __memcpy_chk / __strcpy_chk in intermediate cases) when the
   object-size check is provably redundant.  FCODE distinguishes
   strcpy_chk from stpcpy_chk.  */
1784 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1786 tree src, tree size,
1787 enum built_in_function fcode)
1789 gimple *stmt = gsi_stmt (*gsi);
1790 location_t loc = gimple_location (stmt);
1791 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1794 /* If SRC and DEST are the same (and not volatile), return DEST. */
1795 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1797 replace_call_with_value (gsi, dest);
/* A non-constant object size cannot be reasoned about.  */
1801 if (! tree_fits_uhwi_p (size))
/* TYPE == 1: maximum string length of SRC.  */
1804 tree maxlen = get_maxval_strlen (src, 1);
1805 if (! integer_all_onesp (size))
1807 len = c_strlen (src, 1);
1808 if (! len || ! tree_fits_uhwi_p (len))
1810 /* If LEN is not constant, try MAXLEN too.
1811 For MAXLEN only allow optimizing into non-_ocs function
1812 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1813 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1815 if (fcode == BUILT_IN_STPCPY_CHK)
1820 /* If return value of __stpcpy_chk is ignored,
1821 optimize into __strcpy_chk. */
1822 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1826 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1827 replace_call_with_call_and_fold (gsi, repl);
/* Side effects in LEN would be evaluated twice below; punt.  */
1831 if (! len || TREE_SIDE_EFFECTS (len))
1834 /* If c_strlen returned something, but not a constant,
1835 transform __strcpy_chk into __memcpy_chk. */
1836 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
/* Copy len + 1 bytes to include the terminating NUL.  */
1840 len = fold_convert_loc (loc, size_type_node, len);
1841 len = size_binop_loc (loc, PLUS_EXPR, len,
1842 build_int_cst (size_type_node, 1));
1843 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1844 true, GSI_SAME_STMT);
1845 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1846 replace_call_with_call_and_fold (gsi, repl);
/* Only drop the check when SIZE >= MAXLEN.  */
1853 if (! tree_int_cst_lt (maxlen, size))
1857 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1858 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1859 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1863 gimple *repl = gimple_build_call (fn, 2, dest, src);
1864 replace_call_with_call_and_fold (gsi, repl);
1868 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1869 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1870 length passed as third argument. IGNORE is true if return value can be
1871 ignored. FCODE is the BUILT_IN_* code of the builtin. */
/* Fold a __st{r,p}ncpy_chk call at *GSI into the unchecked st{r,p}ncpy
   when the object-size check is provably redundant; a __stpncpy_chk
   whose result is unused degrades to __strncpy_chk first.  */
1874 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1875 tree dest, tree src,
1876 tree len, tree size,
1877 enum built_in_function fcode)
1879 gimple *stmt = gsi_stmt (*gsi);
1880 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1883 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1885 /* If return value of __stpncpy_chk is ignored,
1886 optimize into __strncpy_chk. */
1887 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1890 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1891 replace_call_with_call_and_fold (gsi, repl);
/* A non-constant object size cannot be reasoned about.  */
1896 if (! tree_fits_uhwi_p (size))
/* TYPE == 2: maximum value LEN can have.  */
1899 tree maxlen = get_maxval_strlen (len, 2);
1900 if (! integer_all_onesp (size))
1902 if (! tree_fits_uhwi_p (len))
1904 /* If LEN is not constant, try MAXLEN too.
1905 For MAXLEN only allow optimizing into non-_ocs function
1906 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1907 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
/* SIZE < MAXLEN: the check may legitimately fire, keep it.  */
1913 if (tree_int_cst_lt (size, maxlen))
1917 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1918 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1919 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1923 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1924 replace_call_with_call_and_fold (gsi, repl);
1928 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1929 Return NULL_TREE if no simplification can be made. */
/* Fold stpcpy (DEST, SRC) at *GSI.  If the result is unused, rewrite
   the call to strcpy in place; if the source length is a constant,
   expand into memcpy (DEST, SRC, len + 1) plus LHS = DEST + len.  */
1932 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1934 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1935 location_t loc = gimple_location (stmt);
1936 tree dest = gimple_call_arg (stmt, 0);
1937 tree src = gimple_call_arg (stmt, 1);
1938 tree fn, len, lenp1;
1940 /* If the result is unused, replace stpcpy with strcpy. */
1941 if (gimple_call_lhs (stmt) == NULL_TREE)
1943 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
/* Retarget the existing call statement instead of building a new one.  */
1946 gimple_call_set_fndecl (stmt, fn);
1951 len = c_strlen (src, 1);
1953 || TREE_CODE (len) != INTEGER_CST)
/* memcpy + pointer arithmetic is larger code than stpcpy; skip it when
   optimizing for size, except for the trivially small zero-length case.  */
1956 if (optimize_function_for_size_p (cfun)
1957 /* If length is zero it's small enough. */
1958 && !integer_zerop (len))
1961 /* If the source has a known length replace stpcpy with memcpy. */
1962 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
/* lenp1 = len + 1 so the terminating NUL is copied too.  */
1966 gimple_seq stmts = NULL;
1967 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1968 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1969 tem, build_int_cst (size_type_node, 1));
1970 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1971 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
/* Transfer the virtual operands from the old call to the new memcpy
   and keep the SSA def-stmt link consistent.  */
1972 gimple_set_vuse (repl, gimple_vuse (stmt));
1973 gimple_set_vdef (repl, gimple_vdef (stmt));
1974 if (gimple_vdef (repl)
1975 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1976 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1977 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1978 /* Replace the result with dest + len. */
1980 tem = gimple_convert (&stmts, loc, sizetype, len);
1981 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1982 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1983 POINTER_PLUS_EXPR, dest, tem);
1984 gsi_replace (gsi, ret, false);
1985 /* Finally fold the memcpy call. */
1986 gimple_stmt_iterator gsi2 = *gsi;
1992 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
1993 NULL_TREE if a normal call should be emitted rather than expanding
1994 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
1995 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
1996 passed as second argument. */
/* Fold __{,v}snprintf_chk at *GSI into plain {,v}snprintf when the
   object-size check is redundant and the flag/format permit it.  The
   call is rewritten in place, dropping the FLAG and SIZE arguments.  */
1999 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2000 enum built_in_function fcode)
2002 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2003 tree dest, size, len, fn, fmt, flag;
2004 const char *fmt_str;
2006 /* Verify the required arguments in the original call. */
2007 if (gimple_call_num_args (stmt) < 5)
2010 dest = gimple_call_arg (stmt, 0);
2011 len = gimple_call_arg (stmt, 1);
2012 flag = gimple_call_arg (stmt, 2);
2013 size = gimple_call_arg (stmt, 3);
2014 fmt = gimple_call_arg (stmt, 4);
/* A non-constant object size cannot be reasoned about.  */
2016 if (! tree_fits_uhwi_p (size))
2019 if (! integer_all_onesp (size))
2021 tree maxlen = get_maxval_strlen (len, 2);
2022 if (! tree_fits_uhwi_p (len))
2024 /* If LEN is not constant, try MAXLEN too.
2025 For MAXLEN only allow optimizing into non-_ocs function
2026 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2027 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
/* SIZE < MAXLEN: the check may legitimately fire, keep it.  */
2033 if (tree_int_cst_lt (size, maxlen))
2037 if (!init_target_chars ())
2040 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2041 or if format doesn't contain % chars or is "%s". */
2042 if (! integer_zerop (flag))
2044 fmt_str = c_getstr (fmt);
2045 if (fmt_str == NULL)
2047 if (strchr (fmt_str, target_percent) != NULL
2048 && strcmp (fmt_str, target_percent_s))
2052 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2054 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2055 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2059 /* Replace the called function and the first 5 argument by 3 retaining
2060 trailing varargs. */
2061 gimple_call_set_fndecl (stmt, fn);
2062 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2063 gimple_call_set_arg (stmt, 0, dest);
2064 gimple_call_set_arg (stmt, 1, len);
2065 gimple_call_set_arg (stmt, 2, fmt);
/* Shift the trailing varargs down over the dropped FLAG/SIZE slots.  */
2066 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2067 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2068 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2073 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2074 Return NULL_TREE if a normal call should be emitted rather than
2075 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2076 or BUILT_IN_VSPRINTF_CHK. */
/* Fold __{,v}sprintf_chk at *GSI into plain {,v}sprintf when the output
   length can be bounded below SIZE (or SIZE is unknown) and the
   flag/format permit it.  The call is rewritten in place, dropping the
   FLAG and SIZE arguments.  */
2079 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2080 enum built_in_function fcode)
2082 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2083 tree dest, size, len, fn, fmt, flag;
2084 const char *fmt_str;
2085 unsigned nargs = gimple_call_num_args (stmt);
2087 /* Verify the required arguments in the original call. */
2090 dest = gimple_call_arg (stmt, 0);
2091 flag = gimple_call_arg (stmt, 1);
2092 size = gimple_call_arg (stmt, 2);
2093 fmt = gimple_call_arg (stmt, 3);
/* A non-constant object size cannot be reasoned about.  */
2095 if (! tree_fits_uhwi_p (size))
2100 if (!init_target_chars ())
2103 /* Check whether the format is a literal string constant. */
2104 fmt_str = c_getstr (fmt);
2105 if (fmt_str != NULL)
2107 /* If the format doesn't contain % args or %%, we know the size. */
2108 if (strchr (fmt_str, target_percent) == 0)
2110 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2111 len = build_int_cstu (size_type_node, strlen (fmt_str));
2113 /* If the format is "%s" and first ... argument is a string literal,
2114 we know the size too. */
2115 else if (fcode == BUILT_IN_SPRINTF_CHK
2116 && strcmp (fmt_str, target_percent_s) == 0)
2122 arg = gimple_call_arg (stmt, 4);
2123 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2125 len = c_strlen (arg, 1);
2126 if (! len || ! tree_fits_uhwi_p (len))
/* Unless SIZE is the unknown sentinel, the computed LEN must be
   strictly below SIZE for the check to be droppable.  */
2133 if (! integer_all_onesp (size))
2135 if (! len || ! tree_int_cst_lt (len, size))
2139 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2140 or if format doesn't contain % chars or is "%s". */
2141 if (! integer_zerop (flag))
2143 if (fmt_str == NULL)
2145 if (strchr (fmt_str, target_percent) != NULL
2146 && strcmp (fmt_str, target_percent_s))
2150 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2151 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2152 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2156 /* Replace the called function and the first 4 argument by 2 retaining
2157 trailing varargs. */
2158 gimple_call_set_fndecl (stmt, fn);
2159 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2160 gimple_call_set_arg (stmt, 0, dest);
2161 gimple_call_set_arg (stmt, 1, fmt);
/* Shift the trailing varargs down over the dropped FLAG/SIZE slots.  */
2162 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2163 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2164 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2169 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2170 ORIG may be null if this is a 2-argument call. We don't attempt to
2171 simplify calls with more than 3 arguments.
2173 Return NULL_TREE if no simplification was possible, otherwise return the
2174 simplified form of the call as a tree. If IGNORED is true, it means that
2175 the caller does not use the returned value of the function. */
/* Fold sprintf at *GSI: a %-free format becomes strcpy (DEST, FMT), and
   "%s" with a string argument becomes strcpy (DEST, ORIG) — in both
   cases assigning the known output length to the LHS if it is used.  */
2178 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2180 gimple *stmt = gsi_stmt (*gsi);
2181 tree dest = gimple_call_arg (stmt, 0);
2182 tree fmt = gimple_call_arg (stmt, 1);
2183 tree orig = NULL_TREE;
2184 const char *fmt_str = NULL;
2186 /* Verify the required arguments in the original call. We deal with two
2187 types of sprintf() calls: 'sprintf (str, fmt)' and
2188 'sprintf (dest, "%s", orig)'. */
2189 if (gimple_call_num_args (stmt) > 3)
2192 if (gimple_call_num_args (stmt) == 3)
2193 orig = gimple_call_arg (stmt, 2);
2195 /* Check whether the format is a literal string constant. */
2196 fmt_str = c_getstr (fmt);
2197 if (fmt_str == NULL)
2200 if (!init_target_chars ())
2203 /* If the format doesn't contain % args or %%, use strcpy. */
2204 if (strchr (fmt_str, target_percent) == NULL)
2206 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2211 /* Don't optimize sprintf (buf, "abc", ptr++). */
2215 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2216 'format' is known to contain no % formats. */
2217 gimple_seq stmts = NULL;
2218 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2219 gimple_seq_add_stmt_without_update (&stmts, repl);
/* sprintf returns the number of characters written; materialize it.  */
2220 if (gimple_call_lhs (stmt))
2222 repl = gimple_build_assign (gimple_call_lhs (stmt),
2223 build_int_cst (integer_type_node,
2225 gimple_seq_add_stmt_without_update (&stmts, repl);
2226 gsi_replace_with_seq_vops (gsi, stmts);
2227 /* gsi now points at the assignment to the lhs, get a
2228 stmt iterator to the memcpy call.
2229 ??? We can't use gsi_for_stmt as that doesn't work when the
2230 CFG isn't built yet. */
2231 gimple_stmt_iterator gsi2 = *gsi;
2237 gsi_replace_with_seq_vops (gsi, stmts);
2243 /* If the format is "%s", use strcpy if the result isn't used. */
2244 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2247 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2252 /* Don't crash on sprintf (str1, "%s"). */
/* A used LHS requires the copied string's length to be computable.  */
2256 tree orig_len = NULL_TREE;
2257 if (gimple_call_lhs (stmt))
2259 orig_len = get_maxval_strlen (orig, 0);
2264 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2265 gimple_seq stmts = NULL;
2266 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2267 gimple_seq_add_stmt_without_update (&stmts, repl);
2268 if (gimple_call_lhs (stmt))
2270 if (!useless_type_conversion_p (integer_type_node,
2271 TREE_TYPE (orig_len)))
2272 orig_len = fold_convert (integer_type_node, orig_len);
2273 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2274 gimple_seq_add_stmt_without_update (&stmts, repl);
2275 gsi_replace_with_seq_vops (gsi, stmts);
2276 /* gsi now points at the assignment to the lhs, get a
2277 stmt iterator to the memcpy call.
2278 ??? We can't use gsi_for_stmt as that doesn't work when the
2279 CFG isn't built yet. */
2280 gimple_stmt_iterator gsi2 = *gsi;
2286 gsi_replace_with_seq_vops (gsi, stmts);
2294 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2295 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2296 attempt to simplify calls with more than 4 arguments.
2298 Return NULL_TREE if no simplification was possible, otherwise return the
2299 simplified form of the call as a tree. If IGNORED is true, it means that
2300 the caller does not use the returned value of the function. */
/* Fold snprintf at *GSI into strcpy when DESTSIZE is a constant that
   provably exceeds the output length: for a %-free format, or for "%s"
   with a source of known constant length.  The known length is assigned
   to the LHS if it is used.  */
2303 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2305 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2306 tree dest = gimple_call_arg (stmt, 0);
2307 tree destsize = gimple_call_arg (stmt, 1);
2308 tree fmt = gimple_call_arg (stmt, 2);
2309 tree orig = NULL_TREE;
2310 const char *fmt_str = NULL;
2312 if (gimple_call_num_args (stmt) > 4)
2315 if (gimple_call_num_args (stmt) == 4)
2316 orig = gimple_call_arg (stmt, 3);
/* Without a constant destination size the truncation behavior cannot
   be proven away.  */
2318 if (!tree_fits_uhwi_p (destsize))
2320 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2322 /* Check whether the format is a literal string constant. */
2323 fmt_str = c_getstr (fmt);
2324 if (fmt_str == NULL)
2327 if (!init_target_chars ())
2330 /* If the format doesn't contain % args or %%, use strcpy. */
2331 if (strchr (fmt_str, target_percent) == NULL)
2333 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2337 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2341 /* We could expand this as
2342 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2344 memcpy (str, fmt_with_nul_at_cstm1, cst);
2345 but in the former case that might increase code size
2346 and in the latter case grow .rodata section too much.
2348 size_t len = strlen (fmt_str);
2352 gimple_seq stmts = NULL;
2353 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2354 gimple_seq_add_stmt_without_update (&stmts, repl);
/* snprintf returns the number of characters (sans NUL) it would write.  */
2355 if (gimple_call_lhs (stmt))
2357 repl = gimple_build_assign (gimple_call_lhs (stmt),
2358 build_int_cst (integer_type_node, len));
2359 gimple_seq_add_stmt_without_update (&stmts, repl);
2360 gsi_replace_with_seq_vops (gsi, stmts);
2361 /* gsi now points at the assignment to the lhs, get a
2362 stmt iterator to the memcpy call.
2363 ??? We can't use gsi_for_stmt as that doesn't work when the
2364 CFG isn't built yet. */
2365 gimple_stmt_iterator gsi2 = *gsi;
2371 gsi_replace_with_seq_vops (gsi, stmts);
2377 /* If the format is "%s", use strcpy if the result isn't used. */
2378 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2380 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2384 /* Don't crash on snprintf (str1, cst, "%s"). */
/* The source length must be a known constant to compare with DESTLEN.  */
2388 tree orig_len = get_maxval_strlen (orig, 0);
2389 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2392 /* We could expand this as
2393 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2395 memcpy (str1, str2_with_nul_at_cstm1, cst);
2396 but in the former case that might increase code size
2397 and in the latter case grow .rodata section too much.
/* orig_len >= destlen would truncate; keep the snprintf call.  */
2399 if (compare_tree_int (orig_len, destlen) >= 0)
2402 /* Convert snprintf (str1, cst, "%s", str2) into
2403 strcpy (str1, str2) if strlen (str2) < cst. */
2404 gimple_seq stmts = NULL;
2405 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2406 gimple_seq_add_stmt_without_update (&stmts, repl);
2407 if (gimple_call_lhs (stmt))
2409 if (!useless_type_conversion_p (integer_type_node,
2410 TREE_TYPE (orig_len)))
2411 orig_len = fold_convert (integer_type_node, orig_len);
2412 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2413 gimple_seq_add_stmt_without_update (&stmts, repl);
2414 gsi_replace_with_seq_vops (gsi, stmts);
2415 /* gsi now points at the assignment to the lhs, get a
2416 stmt iterator to the memcpy call.
2417 ??? We can't use gsi_for_stmt as that doesn't work when the
2418 CFG isn't built yet. */
2419 gimple_stmt_iterator gsi2 = *gsi;
2425 gsi_replace_with_seq_vops (gsi, stmts);
2433 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2434 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2435 more than 3 arguments, and ARG may be null in the 2-argument case.
2437 Return NULL_TREE if no simplification was possible, otherwise return the
2438 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2439 code of the function to be simplified. */
/* Fold fprintf-family builtins (fprintf, fprintf_unlocked, vfprintf and
   their _chk variants) whose format string is a compile-time constant
   into cheaper fputs/fputc calls.  Only fires when the call's return
   value is unused, since fputs/fputc return different values.
   NOTE(review): this excerpt elides interior lines (early returns,
   closing braces); comments below describe only the visible logic.  */
2442 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2443 tree fp, tree fmt, tree arg,
2444 enum built_in_function fcode)
2446 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2447 tree fn_fputc, fn_fputs;
2448 const char *fmt_str = NULL;
2450 /* If the return value is used, don't do the transformation. */
2451 if (gimple_call_lhs (stmt) != NULL_TREE)
2454 /* Check whether the format is a literal string constant. */
2455 fmt_str = c_getstr (fmt);
2456 if (fmt_str == NULL)
2459 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2461 /* If we're using an unlocked function, assume the other
2462 unlocked functions exist explicitly. */
2463 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2464 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2468 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2469 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
/* init_target_chars caches the target's character values for '%', 's',
   'c' etc. so host and target charsets need not agree.  */
2472 if (!init_target_chars ())
2475 /* If the format doesn't contain % args or %%, use strcpy. */
2476 if (strchr (fmt_str, target_percent) == NULL)
2478 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2482 /* If the format specifier was "", fprintf does nothing. */
2483 if (fmt_str[0] == '\0')
2485 replace_call_with_value (gsi, NULL_TREE)
2489 /* When "string" doesn't contain %, replace all cases of
2490 fprintf (fp, string) with fputs (string, fp). The fputs
2491 builtin will take care of special cases like length == 1. */
2494 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2495 replace_call_with_call_and_fold (gsi, repl);
2500 /* The other optimizations can be done only on the non-va_list variants. */
2501 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2504 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2505 else if (strcmp (fmt_str, target_percent_s) == 0)
/* Guard: the %s argument must exist and be a pointer.  */
2507 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2511 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2512 replace_call_with_call_and_fold (gsi, repl);
2517 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2518 else if (strcmp (fmt_str, target_percent_c) == 0)
/* fputc takes an int; require the arg's type to convert losslessly.  */
2521 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2525 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2526 replace_call_with_call_and_fold (gsi, repl);
2534 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2535 FMT and ARG are the arguments to the call; we don't fold cases with
2536 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2538 Return NULL_TREE if no simplification was possible, otherwise return the
2539 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2540 code of the function to be simplified. */
/* Fold printf-family builtins (printf, printf_unlocked, vprintf and the
   _chk variants) with a constant format into putchar/puts calls when the
   return value is unused.  Mirrors gimple_fold_builtin_fprintf but for
   the stdout-implicit family.
   NOTE(review): interior lines are elided in this excerpt; comments
   describe only what is visible here.  */
2543 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2544 tree arg, enum built_in_function fcode)
2546 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2547 tree fn_putchar, fn_puts, newarg;
2548 const char *fmt_str = NULL;
2550 /* If the return value is used, don't do the transformation. */
2551 if (gimple_call_lhs (stmt) != NULL_TREE)
2554 /* Check whether the format is a literal string constant. */
2555 fmt_str = c_getstr (fmt);
2556 if (fmt_str == NULL)
2559 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2561 /* If we're using an unlocked function, assume the other
2562 unlocked functions exist explicitly. */
2563 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2564 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2568 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2569 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
/* Target charset values for '%', 's', 'c', '\n' must be known.  */
2572 if (!init_target_chars ())
/* Handle both "%s" (with STR taken from ARG) and a fully literal
   format with no '%' at all (STR is the format itself).  */
2575 if (strcmp (fmt_str, target_percent_s) == 0
2576 || strchr (fmt_str, target_percent) == NULL)
2580 if (strcmp (fmt_str, target_percent_s) == 0)
2582 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2585 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2588 str = c_getstr (arg);
2594 /* The format specifier doesn't contain any '%' characters. */
2595 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2601 /* If the string was "", printf does nothing. */
2604 replace_call_with_value (gsi, NULL_TREE);
2608 /* If the string has length of 1, call putchar. */
2611 /* Given printf("c"), (where c is any one character,)
2612 convert "c"[0] to an int and pass that to the replacement
2614 newarg = build_int_cst (integer_type_node, str[0]);
2617 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2618 replace_call_with_call_and_fold (gsi, repl);
2624 /* If the string was "string\n", call puts("string"). */
2625 size_t len = strlen (str);
/* Checks: last char is the target's '\n', and LEN fits in an int
   (puts' effective length argument domain).  */
2626 if ((unsigned char)str[len - 1] == target_newline
2627 && (size_t) (int) len == len
2631 tree offset_node, string_cst;
2633 /* Create a NUL-terminated string that's one char shorter
2634 than the original, stripping off the trailing '\n'. */
2635 newarg = build_string_literal (len, str);
2636 string_cst = string_constant (newarg, &offset_node);
2637 gcc_checking_assert (string_cst
2638 && (TREE_STRING_LENGTH (string_cst)
2640 && integer_zerop (offset_node)
2642 TREE_STRING_POINTER (string_cst)[len - 1]
2644 /* build_string_literal creates a new STRING_CST,
2645 modify it in place to avoid double copying. */
2646 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2647 newstr[len - 1] = '\0';
2650 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2651 replace_call_with_call_and_fold (gsi, repl);
2656 /* We'd like to arrange to call fputs(string,stdout) here,
2657 but we need stdout and don't have a way to get it yet. */
2662 /* The other optimizations can be done only on the non-va_list variants. */
2663 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2666 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2667 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2669 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2673 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2674 replace_call_with_call_and_fold (gsi, repl);
2679 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2680 else if (strcmp (fmt_str, target_percent_c) == 0)
/* putchar takes an int; the argument type must convert losslessly.  */
2682 if (!arg || ! useless_type_conversion_p (integer_type_node,
2687 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2688 replace_call_with_call_and_fold (gsi, repl);
2698 /* Fold a call to __builtin_strlen with known length LEN. */
/* Fold __builtin_strlen at *GSI: if get_maxval_strlen can compute the
   (maximal known) string length of argument 0, gimplify that length
   value before the statement and replace the call with it.  */
2701 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2703 gimple *stmt = gsi_stmt (*gsi);
2704 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
/* LEN may not be a valid GIMPLE operand; force it into one (emitting
   any needed statements before the call).  */
2707 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2708 replace_call_with_value (gsi, len);
2712 /* Fold a call to __builtin_acc_on_device. */
/* Fold __builtin_acc_on_device (ARG0) into
     (ARG0 == val_host) | (ARG0 == val_dev)
   where the two constants depend on whether this is the host or the
   accelerator (ACCEL_COMPILER) compiler.  Folding is deferred until
   symtab EXPANSION so we know which compiler we are.  */
2715 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2717 /* Defer folding until we know which compiler we're in. */
2718 if (symtab->state != EXPANSION)
2721 unsigned val_host = GOMP_DEVICE_HOST;
2722 unsigned val_dev = GOMP_DEVICE_NONE;
2724 #ifdef ACCEL_COMPILER
2725 val_host = GOMP_DEVICE_NOT_HOST;
2726 val_dev = ACCEL_COMPILER_acc_device;
2729 location_t loc = gimple_location (gsi_stmt (*gsi));
/* host_eq = (arg0 == val_host)  */
2731 tree host_eq = make_ssa_name (boolean_type_node);
2732 gimple *host_ass = gimple_build_assign
2733 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2734 gimple_set_location (host_ass, loc);
2735 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
/* dev_eq = (arg0 == val_dev)  */
2737 tree dev_eq = make_ssa_name (boolean_type_node);
2738 gimple *dev_ass = gimple_build_assign
2739 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2740 gimple_set_location (dev_ass, loc);
2741 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
/* result = host_eq | dev_eq; then replace the call with it.  */
2743 tree result = make_ssa_name (boolean_type_node);
2744 gimple *result_ass = gimple_build_assign
2745 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2746 gimple_set_location (result_ass, loc);
2747 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2749 replace_call_with_value (gsi, result);
2754 /* Fold the non-target builtin at *GSI and return whether any simplification
/* Central dispatcher: try to fold the normal (non-target) builtin call
   at *GSI by routing to the per-builtin gimple folders above, falling
   back to the generic tree-level folder (fold_call_stmt) at the end.
   Returns whether a simplification was made (return statements are
   elided in this excerpt).  */
2758 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2760 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2761 tree callee = gimple_call_fndecl (stmt);
2763 /* Give up for always_inline inline builtins until they are
2765 if (avoid_folding_inline_builtin (callee)
2768 unsigned n = gimple_call_num_args (stmt);
2769 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
/* bzero (p, n) is memset (p, 0, n).  */
2772 case BUILT_IN_BZERO:
2773 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2774 gimple_call_arg (stmt, 1));
2775 case BUILT_IN_MEMSET:
2776 return gimple_fold_builtin_memset (gsi,
2777 gimple_call_arg (stmt, 1),
2778 gimple_call_arg (stmt, 2));
/* bcopy has src/dest swapped relative to memmove; last argument of
   gimple_fold_builtin_memory_op encodes the variant (0=memcpy,
   1=mempcpy, 3=memmove).  */
2779 case BUILT_IN_BCOPY:
2780 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2781 gimple_call_arg (stmt, 0), 3);
2782 case BUILT_IN_MEMCPY:
2783 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2784 gimple_call_arg (stmt, 1), 0);
2785 case BUILT_IN_MEMPCPY:
2786 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2787 gimple_call_arg (stmt, 1), 1);
2788 case BUILT_IN_MEMMOVE:
2789 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2790 gimple_call_arg (stmt, 1), 3);
2791 case BUILT_IN_SPRINTF_CHK:
2792 case BUILT_IN_VSPRINTF_CHK:
2793 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2794 case BUILT_IN_STRCAT_CHK:
2795 return gimple_fold_builtin_strcat_chk (gsi);
2796 case BUILT_IN_STRNCAT_CHK:
2797 return gimple_fold_builtin_strncat_chk (gsi);
2798 case BUILT_IN_STRLEN:
2799 return gimple_fold_builtin_strlen (gsi);
2800 case BUILT_IN_STRCPY:
2801 return gimple_fold_builtin_strcpy (gsi,
2802 gimple_call_arg (stmt, 0),
2803 gimple_call_arg (stmt, 1));
2804 case BUILT_IN_STRNCPY:
2805 return gimple_fold_builtin_strncpy (gsi,
2806 gimple_call_arg (stmt, 0),
2807 gimple_call_arg (stmt, 1),
2808 gimple_call_arg (stmt, 2));
2809 case BUILT_IN_STRCAT:
2810 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2811 gimple_call_arg (stmt, 1));
2812 case BUILT_IN_STRNCAT:
2813 return gimple_fold_builtin_strncat (gsi);
/* FPUTS variants pass a bool distinguishing the _UNLOCKED form.  */
2814 case BUILT_IN_FPUTS:
2815 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2816 gimple_call_arg (stmt, 1), false);
2817 case BUILT_IN_FPUTS_UNLOCKED:
2818 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2819 gimple_call_arg (stmt, 1), true);
2820 case BUILT_IN_MEMCPY_CHK:
2821 case BUILT_IN_MEMPCPY_CHK:
2822 case BUILT_IN_MEMMOVE_CHK:
2823 case BUILT_IN_MEMSET_CHK:
2824 return gimple_fold_builtin_memory_chk (gsi,
2825 gimple_call_arg (stmt, 0),
2826 gimple_call_arg (stmt, 1),
2827 gimple_call_arg (stmt, 2),
2828 gimple_call_arg (stmt, 3),
2830 case BUILT_IN_STPCPY:
2831 return gimple_fold_builtin_stpcpy (gsi);
2832 case BUILT_IN_STRCPY_CHK:
2833 case BUILT_IN_STPCPY_CHK:
2834 return gimple_fold_builtin_stxcpy_chk (gsi,
2835 gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1),
2837 gimple_call_arg (stmt, 2),
2839 case BUILT_IN_STRNCPY_CHK:
2840 case BUILT_IN_STPNCPY_CHK:
2841 return gimple_fold_builtin_stxncpy_chk (gsi,
2842 gimple_call_arg (stmt, 0),
2843 gimple_call_arg (stmt, 1),
2844 gimple_call_arg (stmt, 2),
2845 gimple_call_arg (stmt, 3),
2847 case BUILT_IN_SNPRINTF_CHK:
2848 case BUILT_IN_VSNPRINTF_CHK:
2849 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2850 case BUILT_IN_SNPRINTF:
2851 return gimple_fold_builtin_snprintf (gsi);
2852 case BUILT_IN_SPRINTF:
2853 return gimple_fold_builtin_sprintf (gsi);
/* fprintf-family: (fp, fmt[, arg]) — only 2- or 3-argument forms.  */
2854 case BUILT_IN_FPRINTF:
2855 case BUILT_IN_FPRINTF_UNLOCKED:
2856 case BUILT_IN_VFPRINTF:
2857 if (n == 2 || n == 3)
2858 return gimple_fold_builtin_fprintf (gsi,
2859 gimple_call_arg (stmt, 0),
2860 gimple_call_arg (stmt, 1),
2862 ? gimple_call_arg (stmt, 2)
/* _chk forms carry an extra flag argument before the format, hence
   the shifted argument indices.  */
2866 case BUILT_IN_FPRINTF_CHK:
2867 case BUILT_IN_VFPRINTF_CHK:
2868 if (n == 3 || n == 4)
2869 return gimple_fold_builtin_fprintf (gsi,
2870 gimple_call_arg (stmt, 0),
2871 gimple_call_arg (stmt, 2),
2873 ? gimple_call_arg (stmt, 3)
2877 case BUILT_IN_PRINTF:
2878 case BUILT_IN_PRINTF_UNLOCKED:
2879 case BUILT_IN_VPRINTF:
2880 if (n == 1 || n == 2)
2881 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2883 ? gimple_call_arg (stmt, 1)
2884 : NULL_TREE, fcode);
2886 case BUILT_IN_PRINTF_CHK:
2887 case BUILT_IN_VPRINTF_CHK:
2888 if (n == 2 || n == 3)
2889 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2891 ? gimple_call_arg (stmt, 2)
2892 : NULL_TREE, fcode);
2894 case BUILT_IN_ACC_ON_DEVICE:
2895 return gimple_fold_builtin_acc_on_device (gsi,
2896 gimple_call_arg (stmt, 0));
2900 /* Try the generic builtin folder. */
2901 bool ignore = (gimple_call_lhs (stmt) == NULL);
2902 tree result = fold_call_stmt (stmt, ignore);
2906 STRIP_NOPS (result);
2908 result = fold_convert (gimple_call_return_type (stmt), result);
/* Prefer the cheap in-place update; gimplify the result only if the
   replacement tree is not simple enough.  */
2909 if (!update_call_from_tree (gsi, result))
2910 gimplify_and_update_call_from_tree (gsi, result);
2917 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2918 doesn't fit into TYPE. The test for overflow should be regardless of
2919 -fwrapv, and even for unsigned types. */
/* Return whether ARG0 CODE ARG1, computed in infinite signed precision,
   does not fit in TYPE.  Uses a double-max-precision wide int so no
   intermediate can itself overflow; deliberately independent of -fwrapv
   and valid for unsigned types too.  */
2922 arith_overflowed_p (enum tree_code code, const_tree type,
2923 const_tree arg0, const_tree arg1)
2925 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2926 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2928 widest2_int warg0 = widest2_int_cst (arg0);
2929 widest2_int warg1 = widest2_int_cst (arg1);
/* Only +, - and * are supported; anything else is a caller bug.  */
2933 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2934 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2935 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2936 default: gcc_unreachable ();
2938 signop sign = TYPE_SIGN (type);
/* A negative result can never fit in an unsigned TYPE.  */
2939 if (sign == UNSIGNED && wi::neg_p (wres))
2941 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2944 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2945 The statement may be replaced by another statement, e.g., if the call
2946 simplifies to a constant value. Return true if any changes were made.
2947 It is assumed that the operands have been previously folded. */
/* Attempt to fold the call statement at *GSI: fold *& in arguments,
   devirtualize OBJ_TYPE_REF calls, drop stale static chains, dispatch
   builtins to gimple_fold_builtin / targetm, and simplify internal fns
   (UBSAN checks, *_OVERFLOW).  INPLACE restricts folds to ones that do
   not grow the IL.  Returns whether anything changed.
   NOTE(review): interior lines (returns, braces) are elided here.  */
2950 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2952 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2954 bool changed = false;
2957 /* Fold *& in call arguments. */
2958 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2959 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2961 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2964 gimple_call_set_arg (stmt, i, tmp);
2969 /* Check for virtual calls that became direct calls. */
2970 callee = gimple_call_fn (stmt);
2971 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2973 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
/* Diagnostics-only: warn in the dump file when the resolved target
   contradicts the type-inheritance analysis.  */
2975 if (dump_file && virtual_method_call_p (callee)
2976 && !possible_polymorphic_call_target_p
2977 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2978 (OBJ_TYPE_REF_EXPR (callee)))))
2981 "Type inheritance inconsistent devirtualization of ");
2982 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2983 fprintf (dump_file, " to ");
2984 print_generic_expr (dump_file, callee, TDF_SLIM);
2985 fprintf (dump_file, "\n");
/* Strip the OBJ_TYPE_REF wrapper, leaving the direct callee.  */
2988 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2991 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2994 vec <cgraph_node *>targets
2995 = possible_polymorphic_call_targets (callee, stmt, &final);
/* FINAL with <= 1 targets: either exactly one possible method (fold
   to a direct call) or none (the call is unreachable).  */
2996 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2998 tree lhs = gimple_call_lhs (stmt);
2999 if (dump_enabled_p ())
3001 location_t loc = gimple_location_safe (stmt);
3002 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3003 "folding virtual function call to %s\n",
3004 targets.length () == 1
3005 ? targets[0]->name ()
3006 : "__builtin_unreachable");
3008 if (targets.length () == 1)
3010 gimple_call_set_fndecl (stmt, targets[0]->decl);
3012 /* If the call becomes noreturn, remove the lhs. */
3013 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3015 if (TREE_CODE (lhs) == SSA_NAME)
/* Keep the SSA form valid: give the orphaned lhs a default
   definition before detaching it from the call.  */
3017 tree var = create_tmp_var (TREE_TYPE (lhs));
3018 tree def = get_or_create_ssa_default_def (cfun, var);
3019 gimple *new_stmt = gimple_build_assign (lhs, def);
3020 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3022 gimple_call_set_lhs (stmt, NULL_TREE);
3024 maybe_remove_unused_call_args (cfun, stmt);
/* No possible target: replace the call with __builtin_unreachable.  */
3028 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3029 gimple *new_stmt = gimple_build_call (fndecl, 0);
3030 gimple_set_location (new_stmt, gimple_location (stmt));
3031 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3033 tree var = create_tmp_var (TREE_TYPE (lhs));
3034 tree def = get_or_create_ssa_default_def (cfun, var);
3036 /* To satisfy condition for
3037 cgraph_update_edges_for_call_stmt_node,
3038 we need to preserve GIMPLE_CALL statement
3039 at position of GSI iterator. */
3040 update_call_from_tree (gsi, def);
3041 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
/* Carry the virtual operands over to the replacement.  */
3045 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3046 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3047 gsi_replace (gsi, new_stmt, false);
3055 /* Check for indirect calls that became direct calls, and then
3056 no longer require a static chain. */
3057 if (gimple_call_chain (stmt))
3059 tree fn = gimple_call_fndecl (stmt);
3060 if (fn && !DECL_STATIC_CHAIN (fn))
3062 gimple_call_set_chain (stmt, NULL);
3067 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3070 gimple_call_set_chain (stmt, tmp);
3079 /* Check for builtins that CCP can handle using information not
3080 available in the generic fold routines. */
3081 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3083 if (gimple_fold_builtin (gsi))
3086 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
/* Machine-dependent builtins go to the target hook.  */
3088 changed |= targetm.gimple_fold_builtin (gsi);
3090 else if (gimple_call_internal_p (stmt))
3092 enum tree_code subcode = ERROR_MARK;
3093 tree result = NULL_TREE;
/* CPLX_RESULT distinguishes IFN_*_OVERFLOW (which return a complex
   {value, overflow-flag} pair) from the UBSAN checks.  */
3094 bool cplx_result = false;
3095 tree overflow = NULL_TREE;
3096 switch (gimple_call_internal_fn (stmt))
3098 case IFN_BUILTIN_EXPECT:
3099 result = fold_builtin_expect (gimple_location (stmt),
3100 gimple_call_arg (stmt, 0),
3101 gimple_call_arg (stmt, 1),
3102 gimple_call_arg (stmt, 2));
3104 case IFN_UBSAN_OBJECT_SIZE:
/* The check is trivially true (size all-ones = unknown, or the
   constant offset is within the constant size): delete it.  */
3105 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3106 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3107 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3108 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3109 gimple_call_arg (stmt, 2))))
3111 gsi_replace (gsi, gimple_build_nop (), false);
3112 unlink_stmt_vdef (stmt);
3113 release_defs (stmt);
3117 case IFN_UBSAN_CHECK_ADD:
3118 subcode = PLUS_EXPR;
3120 case IFN_UBSAN_CHECK_SUB:
3121 subcode = MINUS_EXPR;
3123 case IFN_UBSAN_CHECK_MUL:
3124 subcode = MULT_EXPR;
3126 case IFN_ADD_OVERFLOW:
3127 subcode = PLUS_EXPR;
3130 case IFN_SUB_OVERFLOW:
3131 subcode = MINUS_EXPR;
3134 case IFN_MUL_OVERFLOW:
3135 subcode = MULT_EXPR;
3141 if (subcode != ERROR_MARK)
3143 tree arg0 = gimple_call_arg (stmt, 0);
3144 tree arg1 = gimple_call_arg (stmt, 1);
3145 tree type = TREE_TYPE (arg0);
3148 tree lhs = gimple_call_lhs (stmt);
3149 if (lhs == NULL_TREE)
/* For the *_OVERFLOW fns the scalar type is the element type of
   the complex lhs.  */
3152 type = TREE_TYPE (TREE_TYPE (lhs));
3154 if (type == NULL_TREE)
3156 /* x = y + 0; x = y - 0; x = y * 0; */
3157 else if (integer_zerop (arg1))
3158 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3159 /* x = 0 + y; x = 0 * y; */
3160 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3161 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3163 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3164 result = integer_zero_node;
3165 /* x = y * 1; x = 1 * y; */
3166 else if (subcode == MULT_EXPR && integer_onep (arg1))
3168 else if (subcode == MULT_EXPR && integer_onep (arg0))
3170 else if (TREE_CODE (arg0) == INTEGER_CST
3171 && TREE_CODE (arg1) == INTEGER_CST)
3174 result = int_const_binop (subcode, fold_convert (type, arg0),
3175 fold_convert (type, arg1));
3177 result = int_const_binop (subcode, arg0, arg1);
3178 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3181 overflow = build_one_cst (type);
3188 if (result == integer_zero_node)
3189 result = build_zero_cst (type);
3190 else if (cplx_result && TREE_TYPE (result) != type)
3192 if (TREE_CODE (result) == INTEGER_CST)
3194 if (arith_overflowed_p (PLUS_EXPR, type, result,
3196 overflow = build_one_cst (type);
/* Conversions that could change the value (sign change to
   unsigned, or narrowing) are not safe to fold through.  */
3198 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3199 && TYPE_UNSIGNED (type))
3200 || (TYPE_PRECISION (type)
3201 < (TYPE_PRECISION (TREE_TYPE (result))
3202 + (TYPE_UNSIGNED (TREE_TYPE (result))
3203 && !TYPE_UNSIGNED (type)))))
3206 result = fold_convert (type, result);
3213 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3214 result = drop_tree_overflow (result);
3217 if (overflow == NULL_TREE)
3218 overflow = build_zero_cst (TREE_TYPE (result));
/* Repackage value + overflow flag as the complex result the
   IFN_*_OVERFLOW callers expect.  */
3219 tree ctype = build_complex_type (TREE_TYPE (result));
3220 if (TREE_CODE (result) == INTEGER_CST
3221 && TREE_CODE (overflow) == INTEGER_CST)
3222 result = build_complex (ctype, result, overflow);
3224 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3225 ctype, result, overflow);
3227 if (!update_call_from_tree (gsi, result))
3228 gimplify_and_update_call_from_tree (gsi, result);
3237 /* Return true whether NAME has a use on STMT. */
/* Return whether SSA name NAME is used by STMT, by scanning NAME's
   immediate uses.  */
3240 has_use_on_stmt (tree name, gimple *stmt)
3242 imm_use_iterator iter;
3243 use_operand_p use_p;
3244 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3245 if (USE_STMT (use_p) == stmt)
3250 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3253 Replaces *GSI with the simplification result in RCODE and OPS
3254 and the associated statements in *SEQ. Does the replacement
3255 according to INPLACE and returns true if the operation succeeded. */
/* Worker for fold_stmt_1: install the match-and-simplify result
   (RCODE, OPS, plus preparatory statements in *SEQ) over the statement
   at *GSI.  Handles GIMPLE_COND, assignments, and builtin calls
   separately; honors INPLACE.  Returns whether replacement succeeded
   (return statements are elided in this excerpt).  */
3258 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3259 code_helper rcode, tree *ops,
3260 gimple_seq *seq, bool inplace)
3262 gimple *stmt = gsi_stmt (*gsi);
3264 /* Play safe and do not allow abnormals to be mentioned in
3265 newly created statements. See also maybe_push_res_to_seq.
3266 As an exception allow such uses if there was a use of the
3267 same SSA name on the old stmt. */
3268 if ((TREE_CODE (ops[0]) == SSA_NAME
3269 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3270 && !has_use_on_stmt (ops[0], stmt))
3272 && TREE_CODE (ops[1]) == SSA_NAME
3273 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3274 && !has_use_on_stmt (ops[1], stmt))
3276 && TREE_CODE (ops[2]) == SSA_NAME
3277 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3278 && !has_use_on_stmt (ops[2], stmt)))
3281 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3283 gcc_assert (rcode.is_tree_code ());
3284 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3285 /* GIMPLE_CONDs condition may not throw. */
3286 && (!flag_exceptions
3287 || !cfun->can_throw_non_call_exceptions
3288 || !operation_could_trap_p (rcode,
3289 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3291 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
/* Simplified to a bare SSA name: test it against zero.  */
3292 else if (rcode == SSA_NAME)
3293 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3294 build_zero_cst (TREE_TYPE (ops[0])));
/* Simplified to a constant: make the condition trivially false/true.  */
3295 else if (rcode == INTEGER_CST)
3297 if (integer_zerop (ops[0]))
3298 gimple_cond_make_false (cond_stmt);
3300 gimple_cond_make_true (cond_stmt);
/* Otherwise materialize the simplified expression into *SEQ and
   compare its boolean result against zero.  */
3304 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3308 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3309 build_zero_cst (TREE_TYPE (res)));
3313 if (dump_file && (dump_flags & TDF_DETAILS))
3315 fprintf (dump_file, "gimple_simplified to ");
3316 if (!gimple_seq_empty_p (*seq))
3317 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3318 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3321 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3324 else if (is_gimple_assign (stmt)
3325 && rcode.is_tree_code ())
/* INPLACE requires the new rhs to not need more operand slots than
   the old statement provides.  */
3328 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3330 maybe_build_generic_op (rcode,
3331 TREE_TYPE (gimple_assign_lhs (stmt)),
3332 &ops[0], ops[1], ops[2]);
3333 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3334 if (dump_file && (dump_flags & TDF_DETAILS))
3336 fprintf (dump_file, "gimple_simplified to ");
3337 if (!gimple_seq_empty_p (*seq))
3338 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3339 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3342 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
/* A call simplified to the same builtin with new arguments: update
   the arguments in place.  */
3346 else if (rcode.is_fn_code ()
3347 && gimple_call_builtin_p (stmt, rcode))
3350 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3352 gcc_assert (ops[i] != NULL_TREE);
3353 gimple_call_set_arg (stmt, i, ops[i]);
3356 gcc_assert (ops[i] == NULL_TREE);
3357 gcc_assert (gimple_seq_empty_p (*seq));
3362 if (gimple_has_lhs (stmt))
3364 tree lhs = gimple_get_lhs (stmt);
3365 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3368 if (dump_file && (dump_flags & TDF_DETAILS))
3370 fprintf (dump_file, "gimple_simplified to ");
3371 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
/* Replace the whole statement, preserving virtual operands.  */
3373 gsi_replace_with_seq_vops (gsi, *seq);
3383 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* Canonicalize the [TARGET_]MEM_REF rooted at *T after constant/address
   propagation: fold a constant component-path address into the
   MEM_REF's offset, turn trivial MEM_REFs back into plain decl
   references, and re-fold TARGET_MEM_REFs whose indexes became
   constant.  Modifies *T in place; returns whether it changed
   (return statements are elided in this excerpt).  */
3386 maybe_canonicalize_mem_ref_addr (tree *t)
/* Walk down through ADDR_EXPR and any handled components to the base
   memory reference.  */
3390 if (TREE_CODE (*t) == ADDR_EXPR)
3391 t = &TREE_OPERAND (*t, 0);
3393 while (handled_component_p (*t))
3394 t = &TREE_OPERAND (*t, 0);
3396 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3397 of invariant addresses into a SSA name MEM_REF address. */
3398 if (TREE_CODE (*t) == MEM_REF
3399 || TREE_CODE (*t) == TARGET_MEM_REF)
3401 tree addr = TREE_OPERAND (*t, 0);
3402 if (TREE_CODE (addr) == ADDR_EXPR
3403 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3404 || handled_component_p (TREE_OPERAND (addr, 0))))
3407 HOST_WIDE_INT coffset;
3408 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
/* Fold the component path's constant byte offset COFFSET into the
   MEM_REF's offset operand.  */
3413 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3414 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3415 TREE_OPERAND (*t, 1),
3416 size_int (coffset));
3419 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3420 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3423 /* Canonicalize back MEM_REFs to plain reference trees if the object
3424 accessed is a decl that has the same access semantics as the MEM_REF. */
3425 if (TREE_CODE (*t) == MEM_REF
3426 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3427 && integer_zerop (TREE_OPERAND (*t, 1))
3428 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3430 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3431 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3432 if (/* Same volatile qualification. */
3433 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3434 /* Same TBAA behavior with -fstrict-aliasing. */
3435 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3436 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3437 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3438 /* Same alignment. */
3439 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3440 /* We have to look out here to not drop a required conversion
3441 from the rhs to the lhs if *t appears on the lhs or vice-versa
3442 if it appears on the rhs. Thus require strict type
3444 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3446 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3451 /* Canonicalize TARGET_MEM_REF in particular with respect to
3452 the indexes becoming constant. */
3453 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3455 tree tem = maybe_fold_tmr (*t);
3466 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3467 distinguishes both cases. */
3470 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3472 bool changed = false;
3473 gimple *stmt = gsi_stmt (*gsi);
3476 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3478 ??? This shouldn't be done in generic folding but in the
3479 propagation helpers which also know whether an address was
3481 Also canonicalize operand order. */
3482 switch (gimple_code (stmt))
3485 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3487 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3488 if ((REFERENCE_CLASS_P (*rhs)
3489 || TREE_CODE (*rhs) == ADDR_EXPR)
3490 && maybe_canonicalize_mem_ref_addr (rhs))
3492 tree *lhs = gimple_assign_lhs_ptr (stmt);
3493 if (REFERENCE_CLASS_P (*lhs)
3494 && maybe_canonicalize_mem_ref_addr (lhs))
3499 /* Canonicalize operand order. */
3500 enum tree_code code = gimple_assign_rhs_code (stmt);
3501 if (TREE_CODE_CLASS (code) == tcc_comparison
3502 || commutative_tree_code (code)
3503 || commutative_ternary_tree_code (code))
3505 tree rhs1 = gimple_assign_rhs1 (stmt);
3506 tree rhs2 = gimple_assign_rhs2 (stmt);
3507 if (tree_swap_operands_p (rhs1, rhs2, false))
3509 gimple_assign_set_rhs1 (stmt, rhs2);
3510 gimple_assign_set_rhs2 (stmt, rhs1);
3511 if (TREE_CODE_CLASS (code) == tcc_comparison)
3512 gimple_assign_set_rhs_code (stmt,
3513 swap_tree_comparison (code));
3521 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3523 tree *arg = gimple_call_arg_ptr (stmt, i);
3524 if (REFERENCE_CLASS_P (*arg)
3525 && maybe_canonicalize_mem_ref_addr (arg))
3528 tree *lhs = gimple_call_lhs_ptr (stmt);
3530 && REFERENCE_CLASS_P (*lhs)
3531 && maybe_canonicalize_mem_ref_addr (lhs))
3537 gasm *asm_stmt = as_a <gasm *> (stmt);
3538 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3540 tree link = gimple_asm_output_op (asm_stmt, i);
3541 tree op = TREE_VALUE (link);
3542 if (REFERENCE_CLASS_P (op)
3543 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3546 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3548 tree link = gimple_asm_input_op (asm_stmt, i);
3549 tree op = TREE_VALUE (link);
3550 if ((REFERENCE_CLASS_P (op)
3551 || TREE_CODE (op) == ADDR_EXPR)
3552 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3558 if (gimple_debug_bind_p (stmt))
3560 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3562 && (REFERENCE_CLASS_P (*val)
3563 || TREE_CODE (*val) == ADDR_EXPR)
3564 && maybe_canonicalize_mem_ref_addr (val))
3570 /* Canonicalize operand order. */
3571 tree lhs = gimple_cond_lhs (stmt);
3572 tree rhs = gimple_cond_rhs (stmt);
3573 if (tree_swap_operands_p (lhs, rhs, false))
3575 gcond *gc = as_a <gcond *> (stmt);
3576 gimple_cond_set_lhs (gc, rhs);
3577 gimple_cond_set_rhs (gc, lhs);
3578 gimple_cond_set_code (gc,
3579 swap_tree_comparison (gimple_cond_code (gc)));
3586 /* Dispatch to pattern-based folding. */
3588 || is_gimple_assign (stmt)
3589 || gimple_code (stmt) == GIMPLE_COND)
3591 gimple_seq seq = NULL;
3594 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3595 valueize, valueize))
3597 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3600 gimple_seq_discard (seq);
3604 stmt = gsi_stmt (*gsi);
3606 /* Fold the main computation performed by the statement. */
3607 switch (gimple_code (stmt))
3611 /* Try to canonicalize for boolean-typed X the comparisons
3612 X == 0, X == 1, X != 0, and X != 1. */
3613 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3614 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3616 tree lhs = gimple_assign_lhs (stmt);
3617 tree op1 = gimple_assign_rhs1 (stmt);
3618 tree op2 = gimple_assign_rhs2 (stmt);
3619 tree type = TREE_TYPE (op1);
3621 /* Check whether the comparison operands are of the same boolean
3622 type as the result type is.
3623 Check that second operand is an integer-constant with value
3625 if (TREE_CODE (op2) == INTEGER_CST
3626 && (integer_zerop (op2) || integer_onep (op2))
3627 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3629 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3630 bool is_logical_not = false;
3632 /* X == 0 and X != 1 is a logical-not.of X
3633 X == 1 and X != 0 is X */
3634 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3635 || (cmp_code == NE_EXPR && integer_onep (op2)))
3636 is_logical_not = true;
3638 if (is_logical_not == false)
3639 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3640 /* Only for one-bit precision typed X the transformation
3641 !X -> ~X is valied. */
3642 else if (TYPE_PRECISION (type) == 1)
3643 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3644 /* Otherwise we use !X -> X ^ 1. */
3646 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3647 build_int_cst (type, 1));
3653 unsigned old_num_ops = gimple_num_ops (stmt);
3654 tree lhs = gimple_assign_lhs (stmt);
3655 tree new_rhs = fold_gimple_assign (gsi);
3657 && !useless_type_conversion_p (TREE_TYPE (lhs),
3658 TREE_TYPE (new_rhs)))
3659 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3662 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3664 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3671 changed |= gimple_fold_call (gsi, inplace);
3675 /* Fold *& in asm operands. */
3677 gasm *asm_stmt = as_a <gasm *> (stmt);
3679 const char **oconstraints;
3680 const char *constraint;
3681 bool allows_mem, allows_reg;
3683 noutputs = gimple_asm_noutputs (asm_stmt);
3684 oconstraints = XALLOCAVEC (const char *, noutputs);
3686 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3688 tree link = gimple_asm_output_op (asm_stmt, i);
3689 tree op = TREE_VALUE (link);
3691 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3692 if (REFERENCE_CLASS_P (op)
3693 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3695 TREE_VALUE (link) = op;
3699 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3701 tree link = gimple_asm_input_op (asm_stmt, i);
3702 tree op = TREE_VALUE (link);
3704 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3705 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3706 oconstraints, &allows_mem, &allows_reg);
3707 if (REFERENCE_CLASS_P (op)
3708 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3711 TREE_VALUE (link) = op;
3719 if (gimple_debug_bind_p (stmt))
3721 tree val = gimple_debug_bind_get_value (stmt);
3723 && REFERENCE_CLASS_P (val))
3725 tree tem = maybe_fold_reference (val, false);
3728 gimple_debug_bind_set_value (stmt, tem);
3733 && TREE_CODE (val) == ADDR_EXPR)
3735 tree ref = TREE_OPERAND (val, 0);
3736 tree tem = maybe_fold_reference (ref, false);
3739 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3740 gimple_debug_bind_set_value (stmt, tem);
3750 stmt = gsi_stmt (*gsi);
3752 /* Fold *& on the lhs. */
3753 if (gimple_has_lhs (stmt))
3755 tree lhs = gimple_get_lhs (stmt);
3756 if (lhs && REFERENCE_CLASS_P (lhs))
3758 tree new_lhs = maybe_fold_reference (lhs, true);
3761 gimple_set_lhs (stmt, new_lhs);
3770 /* Valueization callback that ends up not following SSA edges. */
3773 no_follow_ssa_edges (tree)
/* The parameter is intentionally unnamed: it is ignored.  */
3778 /* Valueization callback that ends up following single-use SSA edges only. */
3781 follow_single_use_edges (tree val)
3783 if (TREE_CODE (val) == SSA_NAME
/* Reject SSA names with more than one use; only single-use
   definitions are followed here.  */
3784 && !has_single_use (val))
3789 /* Fold the statement pointed to by GSI.  In some cases, this function may
3790 replace the whole statement with a new one.  Returns true iff folding
3792 The statement pointed to by GSI should be in valid gimple form but may
3793 be in unfolded state as resulting from for example constant propagation
3794 which can produce *&x = 0. */
3797 fold_stmt (gimple_stmt_iterator *gsi)
/* Delegate to the worker: INPLACE is false (the statement may be
   replaced) and no SSA edges are followed during valueization.  */
3799 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
/* Overload of fold_stmt that folds the statement pointed to by GSI
   using a caller-provided valueization callback VALUEIZE.  */
3803 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3805 return fold_stmt_1 (gsi, false, valueize);
3808 /* Perform the minimal folding on statement *GSI.  Only operations like
3809 *&x created by constant propagation are handled.  The statement cannot
3810 be replaced with a new one.  Return true if the statement was
3811 changed, false otherwise.
3812 The statement *GSI should be in valid gimple form but may
3813 be in unfolded state as resulting from for example constant propagation
3814 which can produce *&x = 0. */
3817 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3819 gimple *stmt = gsi_stmt (*gsi);
3820 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
/* In-place folding must not have replaced the statement object.  */
3821 gcc_assert (gsi_stmt (*gsi) == stmt);
3825 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3826 if EXPR is null or we don't know how.
3827 If non-null, the result always has boolean type. */
3830 canonicalize_bool (tree expr, bool invert)
/* NOTE(review): the arm below maps nonzero -> false and builds the
   inverted comparisons (EQ 0, invert_tree_comparison), so it evidently
   implements the INVERT case -- confirm against the enclosing guard.  */
3836 if (integer_nonzerop (expr))
3837 return boolean_false_node;
3838 else if (integer_zerop (expr))
3839 return boolean_true_node;
3840 else if (TREE_CODE (expr) == SSA_NAME)
3841 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3842 build_int_cst (TREE_TYPE (expr), 0));
3843 else if (COMPARISON_CLASS_P (expr))
3844 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3846 TREE_OPERAND (expr, 0),
3847 TREE_OPERAND (expr, 1));
/* Non-inverting arm: only boolean-typed EXPR is handled; nonzero maps
   to true, SSA names become EXPR != 0, and comparisons are rebuilt
   with the same code in boolean type.  */
3853 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3855 if (integer_nonzerop (expr))
3856 return boolean_true_node;
3857 else if (integer_zerop (expr))
3858 return boolean_false_node;
3859 else if (TREE_CODE (expr) == SSA_NAME)
3860 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3861 build_int_cst (TREE_TYPE (expr), 0));
3862 else if (COMPARISON_CLASS_P (expr))
3863 return fold_build2 (TREE_CODE (expr),
3865 TREE_OPERAND (expr, 0),
3866 TREE_OPERAND (expr, 1));
3872 /* Check to see if a boolean expression EXPR is logically equivalent to the
3873 comparison (OP1 CODE OP2).  Check for various identities involving
3877 same_bool_comparison_p (const_tree expr, enum tree_code code,
3878 const_tree op1, const_tree op2)
3882 /* The obvious case. */
3883 if (TREE_CODE (expr) == code
3884 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3885 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3888 /* Check for comparing (name, name != 0) and the case where expr
3889 is an SSA_NAME with a definition matching the comparison. */
3890 if (TREE_CODE (expr) == SSA_NAME
3891 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3893 if (operand_equal_p (expr, op1, 0))
3894 return ((code == NE_EXPR && integer_zerop (op2))
3895 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look through EXPR's defining statement for a textually
   identical comparison.  */
3896 s = SSA_NAME_DEF_STMT (expr);
3897 if (is_gimple_assign (s)
3898 && gimple_assign_rhs_code (s) == code
3899 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3900 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3904 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3905 of name is a comparison, recurse. */
3906 if (TREE_CODE (op1) == SSA_NAME
3907 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3909 s = SSA_NAME_DEF_STMT (op1)
3910 if (is_gimple_assign (s)
3911 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3913 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1): compare EXPR against the defining
   comparison as-is.  */
3914 if ((c == NE_EXPR && integer_zerop (op2))
3915 || (c == EQ_EXPR && integer_nonzerop (op2)))
3916 return same_bool_comparison_p (expr, c,
3917 gimple_assign_rhs1 (s),
3918 gimple_assign_rhs2 (s));
/* (name == 0) / (name != 1): compare EXPR against the inverted
   defining comparison.  */
3919 if ((c == EQ_EXPR && integer_zerop (op2))
3920 || (c == NE_EXPR && integer_nonzerop (op2)))
3921 return same_bool_comparison_p (expr,
3922 invert_tree_comparison (c, false),
3923 gimple_assign_rhs1 (s),
3924 gimple_assign_rhs2 (s));
3930 /* Check to see if two boolean expressions OP1 and OP2 are logically
3934 same_bool_result_p (const_tree op1, const_tree op2)
3936 /* Simple cases first. */
3937 if (operand_equal_p (op1, op2, 0))
3940 /* Check the cases where at least one of the operands is a comparison.
3941 These are a bit smarter than operand_equal_p in that they apply some
3942 identities on SSA_NAMEs. */
3943 if (COMPARISON_CLASS_P (op2)
3944 && same_bool_comparison_p (op1, TREE_CODE (op2),
3945 TREE_OPERAND (op2, 0),
3946 TREE_OPERAND (op2, 1)))
3948 if (COMPARISON_CLASS_P (op1)
3949 && same_bool_comparison_p (op2, TREE_CODE (op1),
3950 TREE_OPERAND (op1, 0),
3951 TREE_OPERAND (op1, 1)))
3958 /* Forward declarations for some mutually recursive functions. */
3961 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3962 enum tree_code code2, tree op2a, tree op2b);
3964 and_var_with_comparison (tree var, bool invert,
3965 enum tree_code code2, tree op2a, tree op2b);
3967 and_var_with_comparison_1 (gimple *stmt,
3968 enum tree_code code2, tree op2a, tree op2b);
3970 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3971 enum tree_code code2, tree op2a, tree op2b);
3973 or_var_with_comparison (tree var, bool invert,
3974 enum tree_code code2, tree op2a, tree op2b);
3976 or_var_with_comparison_1 (gimple *stmt,
3977 enum tree_code code2, tree op2a, tree op2b);
3979 /* Helper function for and_comparisons_1: try to simplify the AND of the
3980 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3981 If INVERT is true, invert the value of the VAR before doing the AND.
3982 Return NULL_EXPR if we can't simplify this to a single expression. */
3985 and_var_with_comparison (tree var, bool invert,
3986 enum tree_code code2, tree op2a, tree op2b)
3989 gimple *stmt = SSA_NAME_DEF_STMT (var);
3991 /* We can only deal with variables whose definitions are assignments. */
3992 if (!is_gimple_assign (stmt))
3995 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3996 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3997 Then we only have to consider the simpler non-inverted cases. */
3999 t = or_var_with_comparison_1 (stmt,
4000 invert_tree_comparison (code2, false),
4003 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool applies the pending inversion (if any) and
   guarantees a boolean-typed result.  */
4004 return canonicalize_bool (t, invert);
4007 /* Try to simplify the AND of the ssa variable defined by the assignment
4008 STMT with the comparison specified by (OP2A CODE2 OP2B).
4009 Return NULL_EXPR if we can't simplify this to a single expression. */
4012 and_var_with_comparison_1 (gimple *stmt,
4013 enum tree_code code2, tree op2a, tree op2b)
4015 tree var = gimple_assign_lhs (stmt);
4016 tree true_test_var = NULL_TREE;
4017 tree false_test_var = NULL_TREE;
4018 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4020 /* Check for identities like (var AND (var == 0)) => false. */
4021 if (TREE_CODE (op2a) == SSA_NAME
4022 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
4024 if ((code2 == NE_EXPR && integer_zerop (op2b))
4025 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4027 true_test_var = op2a;
4028 if (var == true_test_var)
/* ... while (op2a == 0) / (op2a != 1) tests op2a for falsehood.  */
4031 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4032 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4034 false_test_var = op2a;
4035 if (var == false_test_var)
4036 return boolean_false_node;
4040 /* If the definition is a comparison, recurse on it. */
4041 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4043 tree t = and_comparisons_1 (innercode,
4044 gimple_assign_rhs1 (stmt),
4045 gimple_assign_rhs2 (stmt),
4053 /* If the definition is an AND or OR expression, we may be able to
4054 simplify by reassociating. */
4055 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4056 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4058 tree inner1 = gimple_assign_rhs1 (stmt);
4059 tree inner2 = gimple_assign_rhs2 (stmt);
4062 tree partial = NULL_TREE;
4063 bool is_and = (innercode == BIT_AND_EXPR);
4065 /* Check for boolean identities that don't require recursive examination
4067 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4068 inner1 AND (inner1 OR inner2) => inner1
4069 !inner1 AND (inner1 AND inner2) => false
4070 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4071 Likewise for similar cases involving inner2. */
4072 if (inner1 == true_test_var)
4073 return (is_and ? var : inner1);
4074 else if (inner2 == true_test_var)
4075 return (is_and ? var : inner2);
4076 else if (inner1 == false_test_var)
4078 ? boolean_false_node
4079 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4080 else if (inner2 == false_test_var)
4082 ? boolean_false_node
4083 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4085 /* Next, redistribute/reassociate the AND across the inner tests.
4086 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4087 if (TREE_CODE (inner1) == SSA_NAME
4088 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4089 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4090 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4091 gimple_assign_rhs1 (s),
4092 gimple_assign_rhs2 (s),
4093 code2, op2a, op2b)))
4095 /* Handle the AND case, where we are reassociating:
4096 (inner1 AND inner2) AND (op2a code2 op2b)
4098 If the partial result t is a constant, we win.  Otherwise
4099 continue on to try reassociating with the other inner test. */
4102 if (integer_onep (t))
4104 else if (integer_zerop (t))
4105 return boolean_false_node;
4108 /* Handle the OR case, where we are redistributing:
4109 (inner1 OR inner2) AND (op2a code2 op2b)
4110 => (t OR (inner2 AND (op2a code2 op2b))) */
4111 else if (integer_onep (t))
4112 return boolean_true_node;
4114 /* Save partial result for later. */
4118 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4119 if (TREE_CODE (inner2) == SSA_NAME
4120 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4121 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4122 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4123 gimple_assign_rhs1 (s),
4124 gimple_assign_rhs2 (s),
4125 code2, op2a, op2b)))
4127 /* Handle the AND case, where we are reassociating:
4128 (inner1 AND inner2) AND (op2a code2 op2b)
4129 => (inner1 AND t) */
4132 if (integer_onep (t))
4134 else if (integer_zerop (t))
4135 return boolean_false_node;
4136 /* If both are the same, we can apply the identity
4138 else if (partial && same_bool_result_p (t, partial))
4142 /* Handle the OR case, where we are redistributing:
4143 (inner1 OR inner2) AND (op2a code2 op2b)
4144 => (t OR (inner1 AND (op2a code2 op2b)))
4145 => (t OR partial) */
4148 if (integer_onep (t))
4149 return boolean_true_node;
4152 /* We already got a simplification for the other
4153 operand to the redistributed OR expression.  The
4154 interesting case is when at least one is false.
4155 Or, if both are the same, we can apply the identity
4157 if (integer_zerop (partial))
4159 else if (integer_zerop (t))
4161 else if (same_bool_result_p (t, partial))
4170 /* Try to simplify the AND of two comparisons defined by
4171 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4172 If this can be done without constructing an intermediate value,
4173 return the resulting tree; otherwise NULL_TREE is returned.
4174 This function is deliberately asymmetric as it recurses on SSA_DEFs
4175 in the first comparison but not the second. */
4178 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4179 enum tree_code code2, tree op2a, tree op2b)
4181 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4183 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4184 if (operand_equal_p (op1a, op2a, 0)
4185 && operand_equal_p (op1b, op2b, 0))
4187 /* Result will be either NULL_TREE, or a combined comparison. */
4188 tree t = combine_comparisons (UNKNOWN_LOCATION,
4189 TRUTH_ANDIF_EXPR, code1, code2,
4190 truth_type, op1a, op1b);
4195 /* Likewise the swapped case of the above. */
4196 if (operand_equal_p (op1a, op2b, 0)
4197 && operand_equal_p (op1b, op2a, 0))
4199 /* Result will be either NULL_TREE, or a combined comparison. */
4200 tree t = combine_comparisons (UNKNOWN_LOCATION,
4201 TRUTH_ANDIF_EXPR, code1,
4202 swap_tree_comparison (code2),
4203 truth_type, op1a, op1b);
4208 /* If both comparisons are of the same value against constants, we might
4209 be able to merge them. */
4210 if (operand_equal_p (op1a, op2a, 0)
4211 && TREE_CODE (op1b) == INTEGER_CST
4212 && TREE_CODE (op2b) == INTEGER_CST)
4214 int cmp = tree_int_cst_compare (op1b, op2b);
4216 /* If we have (op1a == op1b), we should either be able to
4217 return that or FALSE, depending on whether the constant op1b
4218 also satisfies the other comparison against op2b. */
4219 if (code1 == EQ_EXPR)
4225 case EQ_EXPR: val = (cmp == 0); break;
4226 case NE_EXPR: val = (cmp != 0); break;
4227 case LT_EXPR: val = (cmp < 0); break;
4228 case GT_EXPR: val = (cmp > 0); break;
4229 case LE_EXPR: val = (cmp <= 0); break;
4230 case GE_EXPR: val = (cmp >= 0); break;
4231 default: done = false;
4236 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4238 return boolean_false_node;
4241 /* Likewise if the second comparison is an == comparison. */
4242 else if (code2 == EQ_EXPR)
4248 case EQ_EXPR: val = (cmp == 0); break;
4249 case NE_EXPR: val = (cmp != 0); break;
4250 case LT_EXPR: val = (cmp > 0); break;
4251 case GT_EXPR: val = (cmp < 0); break;
4252 case LE_EXPR: val = (cmp >= 0); break;
4253 case GE_EXPR: val = (cmp <= 0); break;
4254 default: done = false;
4259 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4261 return boolean_false_node;
4265 /* Same business with inequality tests. */
4266 else if (code1 == NE_EXPR)
4271 case EQ_EXPR: val = (cmp != 0); break;
4272 case NE_EXPR: val = (cmp == 0); break;
4273 case LT_EXPR: val = (cmp >= 0); break;
4274 case GT_EXPR: val = (cmp <= 0); break;
4275 case LE_EXPR: val = (cmp > 0); break;
4276 case GE_EXPR: val = (cmp < 0); break;
4281 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4283 else if (code2 == NE_EXPR)
4288 case EQ_EXPR: val = (cmp == 0); break;
4289 case NE_EXPR: val = (cmp != 0); break;
4290 case LT_EXPR: val = (cmp <= 0); break;
4291 case GT_EXPR: val = (cmp >= 0); break;
4292 case LE_EXPR: val = (cmp < 0); break;
4293 case GE_EXPR: val = (cmp > 0); break;
4298 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4301 /* Choose the more restrictive of two < or <= comparisons. */
4302 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4303 && (code2 == LT_EXPR || code2 == LE_EXPR))
4305 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4306 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4308 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4311 /* Likewise choose the more restrictive of two > or >= comparisons. */
4312 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4313 && (code2 == GT_EXPR || code2 == GE_EXPR))
4315 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4316 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4318 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4321 /* Check for singleton ranges. */
4323 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4324 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4325 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4327 /* Check for disjoint ranges. */
4329 && (code1 == LT_EXPR || code1 == LE_EXPR)
4330 && (code2 == GT_EXPR || code2 == GE_EXPR))
4331 return boolean_false_node;
4333 && (code1 == GT_EXPR || code1 == GE_EXPR)
4334 && (code2 == LT_EXPR || code2 == LE_EXPR))
4335 return boolean_false_node;
4338 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4339 NAME's definition is a truth value.  See if there are any simplifications
4340 that can be done against the NAME's definition. */
4341 if (TREE_CODE (op1a) == SSA_NAME
4342 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4343 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true for (NAME == 0) / (NAME != 1), i.e. when the first
   comparison really tests !NAME.  */
4345 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4346 || (code1 == NE_EXPR && integer_onep (op1b)));
4347 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4348 switch (gimple_code (stmt))
4351 /* Try to simplify by copy-propagating the definition. */
4352 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4355 /* If every argument to the PHI produces the same result when
4356 ANDed with the second comparison, we win.
4357 Do not do this unless the type is bool since we need a bool
4358 result here anyway. */
4359 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4361 tree result = NULL_TREE;
4363 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4365 tree arg = gimple_phi_arg_def (stmt, i);
4367 /* If this PHI has itself as an argument, ignore it.
4368 If all the other args produce the same result,
4370 if (arg == gimple_phi_result (stmt))
4372 else if (TREE_CODE (arg) == INTEGER_CST)
4374 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4377 result = boolean_false_node;
4378 else if (!integer_zerop (result))
4382 result = fold_build2 (code2, boolean_type_node,
4384 else if (!same_bool_comparison_p (result,
4388 else if (TREE_CODE (arg) == SSA_NAME
4389 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4392 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4393 /* In simple cases we can look through PHI nodes,
4394 but we have to be careful with loops.
/* NOTE(review): this dominance test guards the recursion below; the
   case where the arg's definition does not dominate the PHI presumably
   gives up to avoid cycling through loop headers -- confirm.  */
4396 if (! dom_info_available_p (CDI_DOMINATORS)
4397 || gimple_bb (def_stmt) == gimple_bb (stmt)
4398 || dominated_by_p (CDI_DOMINATORS,
4399 gimple_bb (def_stmt),
4402 temp = and_var_with_comparison (arg, invert, code2,
4408 else if (!same_bool_result_p (result, temp))
4424 /* Try to simplify the AND of two comparisons, specified by
4425 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4426 If this can be simplified to a single expression (without requiring
4427 introducing more SSA variables to hold intermediate values),
4428 return the resulting tree.  Otherwise return NULL_TREE.
4429 If the result expression is non-null, it has boolean type. */
4432 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4433 enum tree_code code2, tree op2a, tree op2b)
4435 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
/* and_comparisons_1 is deliberately asymmetric (it only follows SSA
   defs in its first comparison), so retry with the operands swapped.  */
4439 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4442 /* Helper function for or_comparisons_1: try to simplify the OR of the
4443 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4444 If INVERT is true, invert the value of VAR before doing the OR.
4445 Return NULL_EXPR if we can't simplify this to a single expression. */
4448 or_var_with_comparison (tree var, bool invert,
4449 enum tree_code code2, tree op2a, tree op2b)
4452 gimple *stmt = SSA_NAME_DEF_STMT (var);
4454 /* We can only deal with variables whose definitions are assignments. */
4455 if (!is_gimple_assign (stmt))
4458 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4459 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4460 Then we only have to consider the simpler non-inverted cases. */
4462 t = and_var_with_comparison_1 (stmt,
4463 invert_tree_comparison (code2, false),
4466 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool applies the pending inversion (if any) and
   guarantees a boolean-typed result.  */
4467 return canonicalize_bool (t, invert);
4470 /* Try to simplify the OR of the ssa variable defined by the assignment
4471 STMT with the comparison specified by (OP2A CODE2 OP2B).
4472 Return NULL_EXPR if we can't simplify this to a single expression. */
4475 or_var_with_comparison_1 (gimple *stmt,
4476 enum tree_code code2, tree op2a, tree op2b)
4478 tree var = gimple_assign_lhs (stmt);
4479 tree true_test_var = NULL_TREE;
4480 tree false_test_var = NULL_TREE;
4481 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4483 /* Check for identities like (var OR (var != 0)) => true. */
4484 if (TREE_CODE (op2a) == SSA_NAME
4485 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
4487 if ((code2 == NE_EXPR && integer_zerop (op2b))
4488 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4490 true_test_var = op2a;
4491 if (var == true_test_var)
/* ... while (op2a == 0) / (op2a != 1) tests op2a for falsehood.  */
4494 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4495 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4497 false_test_var = op2a;
4498 if (var == false_test_var)
4499 return boolean_true_node;
4503 /* If the definition is a comparison, recurse on it. */
4504 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4506 tree t = or_comparisons_1 (innercode,
4507 gimple_assign_rhs1 (stmt),
4508 gimple_assign_rhs2 (stmt),
4516 /* If the definition is an AND or OR expression, we may be able to
4517 simplify by reassociating. */
4518 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4519 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4521 tree inner1 = gimple_assign_rhs1 (stmt);
4522 tree inner2 = gimple_assign_rhs2 (stmt);
4525 tree partial = NULL_TREE;
4526 bool is_or = (innercode == BIT_IOR_EXPR);
4528 /* Check for boolean identities that don't require recursive examination
4530 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4531 inner1 OR (inner1 AND inner2) => inner1
4532 !inner1 OR (inner1 OR inner2) => true
4533 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4535 if (inner1 == true_test_var)
4536 return (is_or ? var : inner1);
4537 else if (inner2 == true_test_var)
4538 return (is_or ? var : inner2);
4539 else if (inner1 == false_test_var)
4542 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4543 else if (inner2 == false_test_var)
4546 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4548 /* Next, redistribute/reassociate the OR across the inner tests.
4549 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4550 if (TREE_CODE (inner1) == SSA_NAME
4551 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4552 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4553 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4554 gimple_assign_rhs1 (s),
4555 gimple_assign_rhs2 (s),
4556 code2, op2a, op2b)))
4558 /* Handle the OR case, where we are reassociating:
4559 (inner1 OR inner2) OR (op2a code2 op2b)
4561 If the partial result t is a constant, we win.  Otherwise
4562 continue on to try reassociating with the other inner test. */
4565 if (integer_onep (t))
4566 return boolean_true_node;
4567 else if (integer_zerop (t))
4571 /* Handle the AND case, where we are redistributing:
4572 (inner1 AND inner2) OR (op2a code2 op2b)
4573 => (t AND (inner2 OR (op2a code op2b))) */
4574 else if (integer_zerop (t))
4575 return boolean_false_node;
4577 /* Save partial result for later. */
4581 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4582 if (TREE_CODE (inner2) == SSA_NAME
4583 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4584 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4585 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4586 gimple_assign_rhs1 (s),
4587 gimple_assign_rhs2 (s),
4588 code2, op2a, op2b)))
4590 /* Handle the OR case, where we are reassociating:
4591 (inner1 OR inner2) OR (op2a code2 op2b)
4593 => (t OR partial) */
4596 if (integer_zerop (t))
4598 else if (integer_onep (t))
4599 return boolean_true_node;
4600 /* If both are the same, we can apply the identity
4602 else if (partial && same_bool_result_p (t, partial))
4606 /* Handle the AND case, where we are redistributing:
4607 (inner1 AND inner2) OR (op2a code2 op2b)
4608 => (t AND (inner1 OR (op2a code2 op2b)))
4609 => (t AND partial) */
4612 if (integer_zerop (t))
4613 return boolean_false_node;
4616 /* We already got a simplification for the other
4617 operand to the redistributed AND expression.  The
4618 interesting case is when at least one is true.
4619 Or, if both are the same, we can apply the identity
4621 if (integer_onep (partial))
4623 else if (integer_onep (t))
4625 else if (same_bool_result_p (t, partial))
4634 /* Try to simplify the OR of two comparisons defined by
4635 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4636 If this can be done without constructing an intermediate value,
4637 return the resulting tree; otherwise NULL_TREE is returned.
4638 This function is deliberately asymmetric as it recurses on SSA_DEFs
4639 in the first comparison but not the second. */
4642 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4643 enum tree_code code2, tree op2a, tree op2b)
4645 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4647 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4648 if (operand_equal_p (op1a, op2a, 0)
4649 && operand_equal_p (op1b, op2b, 0))
4651 /* Result will be either NULL_TREE, or a combined comparison. */
4652 tree t = combine_comparisons (UNKNOWN_LOCATION,
4653 TRUTH_ORIF_EXPR, code1, code2,
4654 truth_type, op1a, op1b);
4659 /* Likewise the swapped case of the above. */
4660 if (operand_equal_p (op1a, op2b, 0)
4661 && operand_equal_p (op1b, op2a, 0))
4663 /* Result will be either NULL_TREE, or a combined comparison. */
4664 tree t = combine_comparisons (UNKNOWN_LOCATION,
4665 TRUTH_ORIF_EXPR, code1,
4666 swap_tree_comparison (code2),
4667 truth_type, op1a, op1b);
4672 /* If both comparisons are of the same value against constants, we might
4673 be able to merge them. */
4674 if (operand_equal_p (op1a, op2a, 0)
4675 && TREE_CODE (op1b) == INTEGER_CST
4676 && TREE_CODE (op2b) == INTEGER_CST)
4678 int cmp = tree_int_cst_compare (op1b, op2b);
4680 /* If we have (op1a != op1b), we should either be able to
4681 return that or TRUE, depending on whether the constant op1b
4682 also satisfies the other comparison against op2b. */
4683 if (code1 == NE_EXPR)
4689 case EQ_EXPR: val = (cmp == 0); break;
4690 case NE_EXPR: val = (cmp != 0); break;
4691 case LT_EXPR: val = (cmp < 0); break;
4692 case GT_EXPR: val = (cmp > 0); break;
4693 case LE_EXPR: val = (cmp <= 0); break;
4694 case GE_EXPR: val = (cmp >= 0); break;
4695 default: done = false;
4700 return boolean_true_node;
4702 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4705 /* Likewise if the second comparison is a != comparison. */
4706 else if (code2 == NE_EXPR)
4712 case EQ_EXPR: val = (cmp == 0); break;
4713 case NE_EXPR: val = (cmp != 0); break;
4714 case LT_EXPR: val = (cmp > 0); break;
4715 case GT_EXPR: val = (cmp < 0); break;
4716 case LE_EXPR: val = (cmp >= 0); break;
4717 case GE_EXPR: val = (cmp <= 0); break;
4718 default: done = false;
4723 return boolean_true_node;
4725 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4729 /* See if an equality test is redundant with the other comparison. */
4730 else if (code1 == EQ_EXPR)
4735 case EQ_EXPR: val = (cmp == 0); break;
4736 case NE_EXPR: val = (cmp != 0); break;
4737 case LT_EXPR: val = (cmp < 0); break;
4738 case GT_EXPR: val = (cmp > 0); break;
4739 case LE_EXPR: val = (cmp <= 0); break;
4740 case GE_EXPR: val = (cmp >= 0); break;
4745 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4747 else if (code2 == EQ_EXPR)
4752 case EQ_EXPR: val = (cmp == 0); break;
4753 case NE_EXPR: val = (cmp != 0); break;
4754 case LT_EXPR: val = (cmp > 0); break;
4755 case GT_EXPR: val = (cmp < 0); break;
4756 case LE_EXPR: val = (cmp >= 0); break;
4757 case GE_EXPR: val = (cmp <= 0); break;
4762 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4765 /* Choose the less restrictive of two < or <= comparisons. */
4766 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4767 && (code2 == LT_EXPR || code2 == LE_EXPR))
4769 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4770 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4772 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4775 /* Likewise choose the less restrictive of two > or >= comparisons. */
4776 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4777 && (code2 == GT_EXPR || code2 == GE_EXPR))
4779 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4780 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4782 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4785 /* Check for singleton ranges. */
4787 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4788 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4789 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4791 /* Check for less/greater pairs that don't restrict the range at all. */
4793 && (code1 == LT_EXPR || code1 == LE_EXPR)
4794 && (code2 == GT_EXPR || code2 == GE_EXPR))
4795 return boolean_true_node;
4797 && (code1 == GT_EXPR || code1 == GE_EXPR)
4798 && (code2 == LT_EXPR || code2 == LE_EXPR))
4799 return boolean_true_node;
4802 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4803 NAME's definition is a truth value.  See if there are any simplifications
4804 that can be done against the NAME's definition. */
4805 if (TREE_CODE (op1a) == SSA_NAME
4806 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4807 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true for (NAME == 0) / (NAME != 1), i.e. when the first
   comparison really tests !NAME.  */
4809 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4810 || (code1 == NE_EXPR && integer_onep (op1b)));
4811 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4812 switch (gimple_code (stmt))
4815 /* Try to simplify by copy-propagating the definition. */
4816 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4819 /* If every argument to the PHI produces the same result when
4820 ORed with the second comparison, we win.
4821 Do not do this unless the type is bool since we need a bool
4822 result here anyway. */
4823 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4825 tree result = NULL_TREE;
4827 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4829 tree arg = gimple_phi_arg_def (stmt, i);
4831 /* If this PHI has itself as an argument, ignore it.
4832 If all the other args produce the same result,
4834 if (arg == gimple_phi_result (stmt))
4836 else if (TREE_CODE (arg) == INTEGER_CST)
4838 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4841 result = boolean_true_node;
4842 else if (!integer_onep (result))
4846 result = fold_build2 (code2, boolean_type_node,
4848 else if (!same_bool_comparison_p (result,
4852 else if (TREE_CODE (arg) == SSA_NAME
4853 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4856 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4857 /* In simple cases we can look through PHI nodes,
4858 but we have to be careful with loops.
/* NOTE(review): this dominance test guards the recursion below; the
   case where the arg's definition does not dominate the PHI presumably
   gives up to avoid cycling through loop headers -- confirm.  */
4860 if (! dom_info_available_p (CDI_DOMINATORS)
4861 || gimple_bb (def_stmt) == gimple_bb (stmt)
4862 || dominated_by_p (CDI_DOMINATORS,
4863 gimple_bb (def_stmt),
4866 temp = or_var_with_comparison (arg, invert, code2,
4872 else if (!same_bool_result_p (result, temp))
4888 /* Try to simplify the OR of two comparisons, specified by
4889 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4890 If this can be simplified to a single expression (without requiring
4891 introducing more SSA variables to hold intermediate values),
4892 return the resulting tree. Otherwise return NULL_TREE.
4893 If the result expression is non-null, it has boolean type. */
4896 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4897 enum tree_code code2, tree op2a, tree op2b)
4899 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4903 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4907 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4909 Either NULL_TREE, a simplified but non-constant or a constant
4912 ??? This should go into a gimple-fold-inline.h file to be eventually
4913 privatized with the single valueize function used in the various TUs
4914 to avoid the indirect function call overhead. */
4917 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4918 tree (*gvalueize) (tree))
4922 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4923 edges if there are intermediate VARYING defs. For this reason
4924 do not follow SSA edges here even though SCCVN can technically
4925 just deal fine with that. */
4926 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4928 tree res = NULL_TREE;
4929 if (rcode.is_tree_code ()
4930 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4931 || ((tree_code) rcode) == ADDR_EXPR)
4932 && is_gimple_val (ops[0]))
4934 else if (mprts_hook)
4935 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4938 if (dump_file && dump_flags & TDF_DETAILS)
4940 fprintf (dump_file, "Match-and-simplified ");
4941 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4942 fprintf (dump_file, " to ");
4943 print_generic_expr (dump_file, res, 0);
4944 fprintf (dump_file, "\n");
4950 location_t loc = gimple_location (stmt);
4951 switch (gimple_code (stmt))
4955 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4957 switch (get_gimple_rhs_class (subcode))
4959 case GIMPLE_SINGLE_RHS:
4961 tree rhs = gimple_assign_rhs1 (stmt);
4962 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4964 if (TREE_CODE (rhs) == SSA_NAME)
4966 /* If the RHS is an SSA_NAME, return its known constant value,
4968 return (*valueize) (rhs);
4970 /* Handle propagating invariant addresses into address
4972 else if (TREE_CODE (rhs) == ADDR_EXPR
4973 && !is_gimple_min_invariant (rhs))
4975 HOST_WIDE_INT offset = 0;
4977 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4981 && (CONSTANT_CLASS_P (base)
4982 || decl_address_invariant_p (base)))
4983 return build_invariant_address (TREE_TYPE (rhs),
4986 else if (TREE_CODE (rhs) == CONSTRUCTOR
4987 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4988 && (CONSTRUCTOR_NELTS (rhs)
4989 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4994 vec = XALLOCAVEC (tree,
4995 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4996 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4998 val = (*valueize) (val);
4999 if (TREE_CODE (val) == INTEGER_CST
5000 || TREE_CODE (val) == REAL_CST
5001 || TREE_CODE (val) == FIXED_CST)
5007 return build_vector (TREE_TYPE (rhs), vec);
5009 if (subcode == OBJ_TYPE_REF)
5011 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5012 /* If callee is constant, we can fold away the wrapper. */
5013 if (is_gimple_min_invariant (val))
5017 if (kind == tcc_reference)
5019 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5020 || TREE_CODE (rhs) == REALPART_EXPR
5021 || TREE_CODE (rhs) == IMAGPART_EXPR)
5022 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5024 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5025 return fold_unary_loc (EXPR_LOCATION (rhs),
5027 TREE_TYPE (rhs), val);
5029 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5030 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5032 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5033 return fold_ternary_loc (EXPR_LOCATION (rhs),
5035 TREE_TYPE (rhs), val,
5036 TREE_OPERAND (rhs, 1),
5037 TREE_OPERAND (rhs, 2));
5039 else if (TREE_CODE (rhs) == MEM_REF
5040 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5042 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5043 if (TREE_CODE (val) == ADDR_EXPR
5044 && is_gimple_min_invariant (val))
5046 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5048 TREE_OPERAND (rhs, 1));
5053 return fold_const_aggregate_ref_1 (rhs, valueize);
5055 else if (kind == tcc_declaration)
5056 return get_symbol_constant_value (rhs);
5060 case GIMPLE_UNARY_RHS:
5063 case GIMPLE_BINARY_RHS:
5064 /* Translate &x + CST into an invariant form suitable for
5065 further propagation. */
5066 if (subcode == POINTER_PLUS_EXPR)
5068 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5069 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5070 if (TREE_CODE (op0) == ADDR_EXPR
5071 && TREE_CODE (op1) == INTEGER_CST)
5073 tree off = fold_convert (ptr_type_node, op1);
5074 return build_fold_addr_expr_loc
5076 fold_build2 (MEM_REF,
5077 TREE_TYPE (TREE_TYPE (op0)),
5078 unshare_expr (op0), off));
5081 /* Canonicalize bool != 0 and bool == 0 appearing after
5082 valueization. While gimple_simplify handles this
5083 it can get confused by the ~X == 1 -> X == 0 transform
5084 which we cant reduce to a SSA name or a constant
5085 (and we have no way to tell gimple_simplify to not
5086 consider those transforms in the first place). */
5087 else if (subcode == EQ_EXPR
5088 || subcode == NE_EXPR)
5090 tree lhs = gimple_assign_lhs (stmt);
5091 tree op0 = gimple_assign_rhs1 (stmt);
5092 if (useless_type_conversion_p (TREE_TYPE (lhs),
5095 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5096 op0 = (*valueize) (op0);
5097 if (TREE_CODE (op0) == INTEGER_CST)
5098 std::swap (op0, op1);
5099 if (TREE_CODE (op1) == INTEGER_CST
5100 && ((subcode == NE_EXPR && integer_zerop (op1))
5101 || (subcode == EQ_EXPR && integer_onep (op1))))
5107 case GIMPLE_TERNARY_RHS:
5109 /* Handle ternary operators that can appear in GIMPLE form. */
5110 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5111 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5112 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5113 return fold_ternary_loc (loc, subcode,
5114 gimple_expr_type (stmt), op0, op1, op2);
5125 gcall *call_stmt = as_a <gcall *> (stmt);
5127 if (gimple_call_internal_p (stmt))
5129 enum tree_code subcode = ERROR_MARK;
5130 switch (gimple_call_internal_fn (stmt))
5132 case IFN_UBSAN_CHECK_ADD:
5133 subcode = PLUS_EXPR;
5135 case IFN_UBSAN_CHECK_SUB:
5136 subcode = MINUS_EXPR;
5138 case IFN_UBSAN_CHECK_MUL:
5139 subcode = MULT_EXPR;
5144 tree arg0 = gimple_call_arg (stmt, 0);
5145 tree arg1 = gimple_call_arg (stmt, 1);
5146 tree op0 = (*valueize) (arg0);
5147 tree op1 = (*valueize) (arg1);
5149 if (TREE_CODE (op0) != INTEGER_CST
5150 || TREE_CODE (op1) != INTEGER_CST)
5155 /* x * 0 = 0 * x = 0 without overflow. */
5156 if (integer_zerop (op0) || integer_zerop (op1))
5157 return build_zero_cst (TREE_TYPE (arg0));
5160 /* y - y = 0 without overflow. */
5161 if (operand_equal_p (op0, op1, 0))
5162 return build_zero_cst (TREE_TYPE (arg0));
5169 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5171 && TREE_CODE (res) == INTEGER_CST
5172 && !TREE_OVERFLOW (res))
5177 fn = (*valueize) (gimple_call_fn (stmt));
5178 if (TREE_CODE (fn) == ADDR_EXPR
5179 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5180 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5181 && gimple_builtin_call_types_compatible_p (stmt,
5182 TREE_OPERAND (fn, 0)))
5184 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5187 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5188 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5189 retval = fold_builtin_call_array (loc,
5190 gimple_call_return_type (call_stmt),
5191 fn, gimple_call_num_args (stmt), args);
5194 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5195 STRIP_NOPS (retval);
5196 retval = fold_convert (gimple_call_return_type (call_stmt),
5209 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5210 Returns NULL_TREE if folding to a constant is not possible, otherwise
5211 returns a constant according to is_gimple_min_invariant. */
5214 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5216 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5217 if (res && is_gimple_min_invariant (res))
5223 /* The following set of functions are supposed to fold references using
5224 their constant initializers. */
5226 /* See if we can find constructor defining value of BASE.
5227 When we know the consructor with constant offset (such as
5228 base is array[40] and we do know constructor of array), then
5229 BIT_OFFSET is adjusted accordingly.
5231 As a special case, return error_mark_node when constructor
5232 is not explicitly available, but it is known to be zero
5233 such as 'static const int a;'. */
5235 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5236 tree (*valueize)(tree))
5238 HOST_WIDE_INT bit_offset2, size, max_size;
5239 if (TREE_CODE (base) == MEM_REF)
5241 if (!integer_zerop (TREE_OPERAND (base, 1)))
5243 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5245 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5250 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5251 base = valueize (TREE_OPERAND (base, 0));
5252 if (!base || TREE_CODE (base) != ADDR_EXPR)
5254 base = TREE_OPERAND (base, 0);
5257 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5258 DECL_INITIAL. If BASE is a nested reference into another
5259 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5260 the inner reference. */
5261 switch (TREE_CODE (base))
5266 tree init = ctor_for_folding (base);
5268 /* Our semantic is exact opposite of ctor_for_folding;
5269 NULL means unknown, while error_mark_node is 0. */
5270 if (init == error_mark_node)
5273 return error_mark_node;
5279 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5280 if (max_size == -1 || size != max_size)
5282 *bit_offset += bit_offset2;
5283 return get_base_constructor (base, bit_offset, valueize);
5294 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5295 SIZE to the memory at bit OFFSET. */
5298 fold_array_ctor_reference (tree type, tree ctor,
5299 unsigned HOST_WIDE_INT offset,
5300 unsigned HOST_WIDE_INT size,
5303 unsigned HOST_WIDE_INT cnt;
5305 offset_int low_bound;
5306 offset_int elt_size;
5307 offset_int index, max_index;
5308 offset_int access_index;
5309 tree domain_type = NULL_TREE, index_type = NULL_TREE;
5310 HOST_WIDE_INT inner_offset;
5312 /* Compute low bound and elt size. */
5313 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5314 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5315 if (domain_type && TYPE_MIN_VALUE (domain_type))
5317 /* Static constructors for variably sized objects makes no sense. */
5318 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5319 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
5320 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5324 /* Static constructors for variably sized objects makes no sense. */
5325 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5327 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5329 /* We can handle only constantly sized accesses that are known to not
5330 be larger than size of array element. */
5331 if (!TYPE_SIZE_UNIT (type)
5332 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5333 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5337 /* Compute the array index we look for. */
5338 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5340 access_index += low_bound;
5342 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
5343 TYPE_SIGN (index_type));
5345 /* And offset within the access. */
5346 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5348 /* See if the array field is large enough to span whole access. We do not
5349 care to fold accesses spanning multiple array indexes. */
5350 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5353 index = low_bound - 1;
5355 index = wi::ext (index, TYPE_PRECISION (index_type),
5356 TYPE_SIGN (index_type));
5358 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
5360 /* Array constructor might explicitely set index, or specify range
5361 or leave index NULL meaning that it is next index after previous
5365 if (TREE_CODE (cfield) == INTEGER_CST)
5366 max_index = index = wi::to_offset (cfield);
5369 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
5370 index = wi::to_offset (TREE_OPERAND (cfield, 0));
5371 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
5378 index = wi::ext (index, TYPE_PRECISION (index_type),
5379 TYPE_SIGN (index_type));
5383 /* Do we have match? */
5384 if (wi::cmpu (access_index, index) >= 0
5385 && wi::cmpu (access_index, max_index) <= 0)
5386 return fold_ctor_reference (type, cval, inner_offset, size,
5389 /* When memory is not explicitely mentioned in constructor,
5390 it is 0 (or out of range). */
5391 return build_zero_cst (type);
5394 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5395 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5398 fold_nonarray_ctor_reference (tree type, tree ctor,
5399 unsigned HOST_WIDE_INT offset,
5400 unsigned HOST_WIDE_INT size,
5403 unsigned HOST_WIDE_INT cnt;
5406 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5409 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5410 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5411 tree field_size = DECL_SIZE (cfield);
5412 offset_int bitoffset;
5413 offset_int bitoffset_end, access_end;
5415 /* Variable sized objects in static constructors makes no sense,
5416 but field_size can be NULL for flexible array members. */
5417 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5418 && TREE_CODE (byte_offset) == INTEGER_CST
5419 && (field_size != NULL_TREE
5420 ? TREE_CODE (field_size) == INTEGER_CST
5421 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5423 /* Compute bit offset of the field. */
5424 bitoffset = (wi::to_offset (field_offset)
5425 + wi::lshift (wi::to_offset (byte_offset),
5426 LOG2_BITS_PER_UNIT));
5427 /* Compute bit offset where the field ends. */
5428 if (field_size != NULL_TREE)
5429 bitoffset_end = bitoffset + wi::to_offset (field_size);
5433 access_end = offset_int (offset) + size;
5435 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5436 [BITOFFSET, BITOFFSET_END)? */
5437 if (wi::cmps (access_end, bitoffset) > 0
5438 && (field_size == NULL_TREE
5439 || wi::lts_p (offset, bitoffset_end)))
5441 offset_int inner_offset = offset_int (offset) - bitoffset;
5442 /* We do have overlap. Now see if field is large enough to
5443 cover the access. Give up for accesses spanning multiple
5445 if (wi::cmps (access_end, bitoffset_end) > 0)
5447 if (wi::lts_p (offset, bitoffset))
5449 return fold_ctor_reference (type, cval,
5450 inner_offset.to_uhwi (), size,
5454 /* When memory is not explicitely mentioned in constructor, it is 0. */
5455 return build_zero_cst (type);
5458 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5459 to the memory at bit OFFSET. */
5462 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5463 unsigned HOST_WIDE_INT size, tree from_decl)
5467 /* We found the field with exact match. */
5468 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5470 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5472 /* We are at the end of walk, see if we can view convert the
5474 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5475 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5476 && !compare_tree_int (TYPE_SIZE (type), size)
5477 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5479 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5480 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5482 STRIP_USELESS_TYPE_CONVERSION (ret);
5485 /* For constants and byte-aligned/sized reads try to go through
5486 native_encode/interpret. */
5487 if (CONSTANT_CLASS_P (ctor)
5488 && BITS_PER_UNIT == 8
5489 && offset % BITS_PER_UNIT == 0
5490 && size % BITS_PER_UNIT == 0
5491 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5493 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5494 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5495 offset / BITS_PER_UNIT) > 0)
5496 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5498 if (TREE_CODE (ctor) == CONSTRUCTOR)
5501 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5502 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5503 return fold_array_ctor_reference (type, ctor, offset, size,
5506 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5513 /* Return the tree representing the element referenced by T if T is an
5514 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5515 names using VALUEIZE. Return NULL_TREE otherwise. */
5518 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5520 tree ctor, idx, base;
5521 HOST_WIDE_INT offset, size, max_size;
5524 if (TREE_THIS_VOLATILE (t))
5528 return get_symbol_constant_value (t);
5530 tem = fold_read_from_constant_string (t);
5534 switch (TREE_CODE (t))
5537 case ARRAY_RANGE_REF:
5538 /* Constant indexes are handled well by get_base_constructor.
5539 Only special case variable offsets.
5540 FIXME: This code can't handle nested references with variable indexes
5541 (they will be handled only by iteration of ccp). Perhaps we can bring
5542 get_ref_base_and_extent here and make it use a valueize callback. */
5543 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5545 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5546 && TREE_CODE (idx) == INTEGER_CST)
5548 tree low_bound, unit_size;
5550 /* If the resulting bit-offset is constant, track it. */
5551 if ((low_bound = array_ref_low_bound (t),
5552 TREE_CODE (low_bound) == INTEGER_CST)
5553 && (unit_size = array_ref_element_size (t),
5554 tree_fits_uhwi_p (unit_size)))
5557 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5558 TYPE_PRECISION (TREE_TYPE (idx)));
5560 if (wi::fits_shwi_p (woffset))
5562 offset = woffset.to_shwi ();
5563 /* TODO: This code seems wrong, multiply then check
5564 to see if it fits. */
5565 offset *= tree_to_uhwi (unit_size);
5566 offset *= BITS_PER_UNIT;
5568 base = TREE_OPERAND (t, 0);
5569 ctor = get_base_constructor (base, &offset, valueize);
5570 /* Empty constructor. Always fold to 0. */
5571 if (ctor == error_mark_node)
5572 return build_zero_cst (TREE_TYPE (t));
5573 /* Out of bound array access. Value is undefined,
5577 /* We can not determine ctor. */
5580 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5581 tree_to_uhwi (unit_size)
5591 case TARGET_MEM_REF:
5593 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5594 ctor = get_base_constructor (base, &offset, valueize);
5596 /* Empty constructor. Always fold to 0. */
5597 if (ctor == error_mark_node)
5598 return build_zero_cst (TREE_TYPE (t));
5599 /* We do not know precise address. */
5600 if (max_size == -1 || max_size != size)
5602 /* We can not determine ctor. */
5606 /* Out of bound array access. Value is undefined, but don't fold. */
5610 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5616 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5617 if (c && TREE_CODE (c) == COMPLEX_CST)
5618 return fold_build1_loc (EXPR_LOCATION (t),
5619 TREE_CODE (t), TREE_TYPE (t), c);
5631 fold_const_aggregate_ref (tree t)
5633 return fold_const_aggregate_ref_1 (t, NULL);
5636 /* Lookup virtual method with index TOKEN in a virtual table V
5638 Set CAN_REFER if non-NULL to false if method
5639 is not referable or if the virtual table is ill-formed (such as rewriten
5640 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5643 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5645 unsigned HOST_WIDE_INT offset,
5648 tree vtable = v, init, fn;
5649 unsigned HOST_WIDE_INT size;
5650 unsigned HOST_WIDE_INT elt_size, access_index;
5656 /* First of all double check we have virtual table. */
5657 if (TREE_CODE (v) != VAR_DECL
5658 || !DECL_VIRTUAL_P (v))
5660 /* Pass down that we lost track of the target. */
5666 init = ctor_for_folding (v);
5668 /* The virtual tables should always be born with constructors
5669 and we always should assume that they are avaialble for
5670 folding. At the moment we do not stream them in all cases,
5671 but it should never happen that ctor seem unreachable. */
5673 if (init == error_mark_node)
5675 gcc_assert (in_lto_p);
5676 /* Pass down that we lost track of the target. */
5681 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5682 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5683 offset *= BITS_PER_UNIT;
5684 offset += token * size;
5686 /* Lookup the value in the constructor that is assumed to be array.
5687 This is equivalent to
5688 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5689 offset, size, NULL);
5690 but in a constant time. We expect that frontend produced a simple
5691 array without indexed initializers. */
5693 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5694 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5695 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5696 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5698 access_index = offset / BITS_PER_UNIT / elt_size;
5699 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5701 /* This code makes an assumption that there are no
5702 indexed fileds produced by C++ FE, so we can directly index the array. */
5703 if (access_index < CONSTRUCTOR_NELTS (init))
5705 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5706 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5712 /* For type inconsistent program we may end up looking up virtual method
5713 in virtual table that does not contain TOKEN entries. We may overrun
5714 the virtual table and pick up a constant or RTTI info pointer.
5715 In any case the call is undefined. */
5717 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5718 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5719 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5722 fn = TREE_OPERAND (fn, 0);
5724 /* When cgraph node is missing and function is not public, we cannot
5725 devirtualize. This can happen in WHOPR when the actual method
5726 ends up in other partition, because we found devirtualization
5727 possibility too late. */
5728 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5739 /* Make sure we create a cgraph node for functions we'll reference.
5740 They can be non-existent if the reference comes from an entry
5741 of an external vtable for example. */
5742 cgraph_node::get_create (fn);
5747 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5748 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5749 KNOWN_BINFO carries the binfo describing the true type of
5750 OBJ_TYPE_REF_OBJECT(REF).
5751 Set CAN_REFER if non-NULL to false if method
5752 is not referable or if the virtual table is ill-formed (such as rewriten
5753 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5756 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5759 unsigned HOST_WIDE_INT offset;
5762 v = BINFO_VTABLE (known_binfo);
5763 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5767 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5773 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5776 /* Given a pointer value OP0, return a simplified version of an
5777 indirection through OP0, or NULL_TREE if no simplification is
5778 possible. Note that the resulting type may be different from
5779 the type pointed to in the sense that it is still compatible
5780 from the langhooks point of view. */
5783 gimple_fold_indirect_ref (tree t)
5785 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5790 subtype = TREE_TYPE (sub);
5791 if (!POINTER_TYPE_P (subtype))
5794 if (TREE_CODE (sub) == ADDR_EXPR)
5796 tree op = TREE_OPERAND (sub, 0);
5797 tree optype = TREE_TYPE (op);
5799 if (useless_type_conversion_p (type, optype))
5802 /* *(foo *)&fooarray => fooarray[0] */
5803 if (TREE_CODE (optype) == ARRAY_TYPE
5804 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5805 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5807 tree type_domain = TYPE_DOMAIN (optype);
5808 tree min_val = size_zero_node;
5809 if (type_domain && TYPE_MIN_VALUE (type_domain))
5810 min_val = TYPE_MIN_VALUE (type_domain);
5811 if (TREE_CODE (min_val) == INTEGER_CST)
5812 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5814 /* *(foo *)&complexfoo => __real__ complexfoo */
5815 else if (TREE_CODE (optype) == COMPLEX_TYPE
5816 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5817 return fold_build1 (REALPART_EXPR, type, op);
5818 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5819 else if (TREE_CODE (optype) == VECTOR_TYPE
5820 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5822 tree part_width = TYPE_SIZE (type);
5823 tree index = bitsize_int (0);
5824 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5828 /* *(p + CST) -> ... */
5829 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5830 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5832 tree addr = TREE_OPERAND (sub, 0);
5833 tree off = TREE_OPERAND (sub, 1);
5837 addrtype = TREE_TYPE (addr);
5839 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5840 if (TREE_CODE (addr) == ADDR_EXPR
5841 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5842 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5843 && tree_fits_uhwi_p (off))
5845 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5846 tree part_width = TYPE_SIZE (type);
5847 unsigned HOST_WIDE_INT part_widthi
5848 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5849 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5850 tree index = bitsize_int (indexi);
5851 if (offset / part_widthi
5852 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5853 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5857 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5858 if (TREE_CODE (addr) == ADDR_EXPR
5859 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5860 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5862 tree size = TYPE_SIZE_UNIT (type);
5863 if (tree_int_cst_equal (size, off))
5864 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5867 /* *(p + CST) -> MEM_REF <p, CST>. */
5868 if (TREE_CODE (addr) != ADDR_EXPR
5869 || DECL_P (TREE_OPERAND (addr, 0)))
5870 return fold_build2 (MEM_REF, type,
5872 wide_int_to_tree (ptype, off));
5875 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5876 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5877 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5878 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5881 tree min_val = size_zero_node;
5883 sub = gimple_fold_indirect_ref (sub);
5885 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5886 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5887 if (type_domain && TYPE_MIN_VALUE (type_domain))
5888 min_val = TYPE_MIN_VALUE (type_domain);
5889 if (TREE_CODE (min_val) == INTEGER_CST)
5890 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5896 /* Return true if CODE is an operation that when operating on signed
5897 integer types involves undefined behavior on overflow and the
5898 operation can be expressed with unsigned arithmetic. */
5901 arith_code_with_undefined_signed_overflow (tree_code code)
5909 case POINTER_PLUS_EXPR:
5916 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5917 operation that can be transformed to unsigned arithmetic by converting
5918 its operand, carrying out the operation in the corresponding unsigned
5919 type and converting the result back to the original type.
5921 Returns a sequence of statements that replace STMT and also contain
5922 a modified form of STMT itself. */
5925 rewrite_to_defined_overflow (gimple *stmt)
5927 if (dump_file && (dump_flags & TDF_DETAILS))
5929 fprintf (dump_file, "rewriting stmt with undefined signed "
5931 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5934 tree lhs = gimple_assign_lhs (stmt);
5935 tree type = unsigned_type_for (TREE_TYPE (lhs));
5936 gimple_seq stmts = NULL;
5937 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5939 gimple_seq stmts2 = NULL;
5940 gimple_set_op (stmt, i,
5941 force_gimple_operand (fold_convert (type,
5942 gimple_op (stmt, i)),
5943 &stmts2, true, NULL_TREE));
5944 gimple_seq_add_seq (&stmts, stmts2);
5946 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5947 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5948 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5949 gimple_seq_add_stmt (&stmts, stmt);
5950 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
5951 gimple_seq_add_stmt (&stmts, cvt);
5957 /* The valueization hook we use for the gimple_build API simplification.
5958 This makes us match fold_buildN behavior by only combining with
5959 statements in the sequence(s) we are currently building. */
5962 gimple_build_valueize (tree op)
5964 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5969 /* Build the expression CODE OP0 of type TYPE with location LOC,
5970 simplifying it first if possible. Returns the built
5971 expression value and appends statements possibly defining it
5975 gimple_build (gimple_seq *seq, location_t loc,
5976 enum tree_code code, tree type, tree op0)
5978 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
5981 if (gimple_in_ssa_p (cfun))
5982 res = make_ssa_name (type);
5984 res = create_tmp_reg (type);
5986 if (code == REALPART_EXPR
5987 || code == IMAGPART_EXPR
5988 || code == VIEW_CONVERT_EXPR)
5989 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
5991 stmt = gimple_build_assign (res, code, op0);
5992 gimple_set_location (stmt, loc);
5993 gimple_seq_add_stmt_without_update (seq, stmt);
5998 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5999 simplifying it first if possible. Returns the built
6000 expression value and appends statements possibly defining it
6004 gimple_build (gimple_seq *seq, location_t loc,
6005 enum tree_code code, tree type, tree op0, tree op1)
6007 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6010 if (gimple_in_ssa_p (cfun))
6011 res = make_ssa_name (type);
6013 res = create_tmp_reg (type);
6014 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6015 gimple_set_location (stmt, loc);
6016 gimple_seq_add_stmt_without_update (seq, stmt);
6021 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6022 simplifying it first if possible. Returns the built
6023 expression value and appends statements possibly defining it
6027 gimple_build (gimple_seq *seq, location_t loc,
6028 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6030 tree res = gimple_simplify (code, type, op0, op1, op2,
6031 seq, gimple_build_valueize);
6034 if (gimple_in_ssa_p (cfun))
6035 res = make_ssa_name (type);
6037 res = create_tmp_reg (type);
6039 if (code == BIT_FIELD_REF)
6040 stmt = gimple_build_assign (res, code,
6041 build3 (code, type, op0, op1, op2));
6043 stmt = gimple_build_assign (res, code, op0, op1, op2);
6044 gimple_set_location (stmt, loc);
6045 gimple_seq_add_stmt_without_update (seq, stmt);
6050 /* Build the call FN (ARG0) with a result of type TYPE
6051 (or no result if TYPE is void) with location LOC,
6052 simplifying it first if possible. Returns the built
6053 expression value (or NULL_TREE if TYPE is void) and appends
6054 statements possibly defining it to SEQ. */
6057 gimple_build (gimple_seq *seq, location_t loc,
6058 enum built_in_function fn, tree type, tree arg0)
6060 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6063 tree decl = builtin_decl_implicit (fn);
6064 gimple *stmt = gimple_build_call (decl, 1, arg0);
6065 if (!VOID_TYPE_P (type))
6067 if (gimple_in_ssa_p (cfun))
6068 res = make_ssa_name (type);
6070 res = create_tmp_reg (type);
6071 gimple_call_set_lhs (stmt, res);
6073 gimple_set_location (stmt, loc);
6074 gimple_seq_add_stmt_without_update (seq, stmt);
6079 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6080 (or no result if TYPE is void) with location LOC,
6081 simplifying it first if possible. Returns the built
6082 expression value (or NULL_TREE if TYPE is void) and appends
6083 statements possibly defining it to SEQ. */
6086 gimple_build (gimple_seq *seq, location_t loc,
6087 enum built_in_function fn, tree type, tree arg0, tree arg1)
6089 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6092 tree decl = builtin_decl_implicit (fn);
6093 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6094 if (!VOID_TYPE_P (type))
6096 if (gimple_in_ssa_p (cfun))
6097 res = make_ssa_name (type);
6099 res = create_tmp_reg (type);
6100 gimple_call_set_lhs (stmt, res);
6102 gimple_set_location (stmt, loc);
6103 gimple_seq_add_stmt_without_update (seq, stmt);
6108 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6109 (or no result if TYPE is void) with location LOC,
6110 simplifying it first if possible. Returns the built
6111 expression value (or NULL_TREE if TYPE is void) and appends
6112 statements possibly defining it to SEQ. */
6115 gimple_build (gimple_seq *seq, location_t loc,
6116 enum built_in_function fn, tree type,
6117 tree arg0, tree arg1, tree arg2)
6119 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6120 seq, gimple_build_valueize);
6123 tree decl = builtin_decl_implicit (fn);
6124 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6125 if (!VOID_TYPE_P (type))
6127 if (gimple_in_ssa_p (cfun))
6128 res = make_ssa_name (type);
6130 res = create_tmp_reg (type);
6131 gimple_call_set_lhs (stmt, res);
6133 gimple_set_location (stmt, loc);
6134 gimple_seq_add_stmt_without_update (seq, stmt);
6139 /* Build the conversion (TYPE) OP with a result of type TYPE
6140 with location LOC if such conversion is necessary in GIMPLE,
6141 simplifying it first.
6142 Returns the built expression value and appends
6143 statements possibly defining it to SEQ. */
6146 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6148 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6150 return gimple_build (seq, loc, NOP_EXPR, type, op);
6153 /* Return true if the result of assignment STMT is known to be non-negative.
6154 If the return value is based on the assumption that signed overflow is
6155 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6156 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6159 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6162 enum tree_code code = gimple_assign_rhs_code (stmt);
6163 switch (get_gimple_rhs_class (code))
6165 case GIMPLE_UNARY_RHS:
6166 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6167 gimple_expr_type (stmt),
6168 gimple_assign_rhs1 (stmt),
6169 strict_overflow_p, depth);
6170 case GIMPLE_BINARY_RHS:
6171 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6172 gimple_expr_type (stmt),
6173 gimple_assign_rhs1 (stmt),
6174 gimple_assign_rhs2 (stmt),
6175 strict_overflow_p, depth);
6176 case GIMPLE_TERNARY_RHS:
6178 case GIMPLE_SINGLE_RHS:
6179 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6180 strict_overflow_p, depth);
6181 case GIMPLE_INVALID_RHS:
6187 /* Return true if return value of call STMT is known to be non-negative.
6188 If the return value is based on the assumption that signed overflow is
6189 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6190 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6193 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6196 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6197 gimple_call_arg (stmt, 0) : NULL_TREE;
6198 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6199 gimple_call_arg (stmt, 1) : NULL_TREE;
6201 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6202 gimple_call_fndecl (stmt),
6205 strict_overflow_p, depth);
6208 /* Return true if STMT is known to compute a non-negative value.
6209 If the return value is based on the assumption that signed overflow is
6210 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6211 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6214 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6217 switch (gimple_code (stmt))
6220 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6223 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,