1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "insn-config.h"
35 #include "gimple-pretty-print.h"
37 #include "fold-const.h"
45 #include "stor-layout.h"
47 #include "internal-fn.h"
48 #include "gimple-fold.h"
50 #include "gimple-iterator.h"
51 #include "tree-into-ssa.h"
54 #include "tree-ssa-propagate.h"
55 #include "ipa-utils.h"
56 #include "tree-ssa-address.h"
57 #include "langhooks.h"
58 #include "gimplify-me.h"
63 #include "gimple-match.h"
64 #include "gomp-constants.h"
65 #include "optabs-query.h"
68 /* Return true when DECL can be referenced from current unit.
69 FROM_DECL (if non-null) specifies the variable from whose constructor DECL was taken.
70 We can get declarations that are not possible to reference for various
73 1) When analyzing C++ virtual tables.
74 C++ virtual tables do have known constructors even
75 when they are keyed to another compilation unit.
76 Those tables can contain pointers to methods and vars
77 in other units. Those methods have both STATIC and EXTERNAL
79 2) In WHOPR mode devirtualization might lead to a reference
80 to a method that was partitioned elsewhere.
81 In this case we have static VAR_DECL or FUNCTION_DECL
82 that has no corresponding callgraph/varpool node
84 3) COMDAT functions referred to by external vtables that
85 we devirtualize only during the final compilation stage.
86 At this time we already decided that we will not output
87 the function body and thus we can't reference the symbol
91 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 struct cgraph_node *node;
97 if (DECL_ABSTRACT_P (decl))
100 /* We are concerned only about static/external vars and functions. */
101 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
102 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
105 /* Static objects can be referred to only if they have not been optimized out yet. */
106 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
108 /* Before we start optimizing unreachable code we can be sure all
109 static objects are defined. */
110 if (symtab->function_flags_ready)
112 snode = symtab_node::get (decl);
113 if (!snode || !snode->definition)
115 node = dyn_cast <cgraph_node *> (snode);
116 return !node || !node->global.inlined_to;
119 /* We will later output the initializer, so we can refer to it.
120 So we are concerned only when DECL comes from the initializer of
121 an external var or a var that has been optimized out. */
123 || TREE_CODE (from_decl) != VAR_DECL
124 || (!DECL_EXTERNAL (from_decl)
125 && (vnode = varpool_node::get (from_decl)) != NULL
126 && vnode->definition)
128 && (vnode = varpool_node::get (from_decl)) != NULL
129 && vnode->in_other_partition))
131 /* We are folding a reference from an external vtable. The vtable may refer
132 to a symbol keyed to another compilation unit. The other compilation
133 unit may be in a separate DSO and the symbol may be hidden. */
134 if (DECL_VISIBILITY_SPECIFIED (decl)
135 && DECL_EXTERNAL (decl)
136 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
137 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 /* When the function is public, we can always introduce a new reference.
140 The exception is COMDAT functions, where introducing a direct
141 reference implies the need to include the function body in the current unit. */
142 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 /* We have a COMDAT. We are going to check if we still have a definition
145 or if the definition is going to be output in another partition.
146 Bypass this when gimplifying; all needed functions will be produced.
148 As observed in PR20991 for already optimized out comdat virtual functions,
149 it may be tempting to not necessarily give up because the copy will be
150 output elsewhere when the corresponding vtable is output.
151 This is however not possible - the ABI specifies that COMDATs are output in
152 units where they are used, and when the other unit was compiled with LTO
153 it is possible that the vtable was kept public while the function itself
155 if (!symtab->function_flags_ready)
158 snode = symtab_node::get (decl);
160 || ((!snode->definition || DECL_EXTERNAL (decl))
161 && (!snode->in_other_partition
162 || (!snode->forced_by_abi && !snode->force_output))))
164 node = dyn_cast <cgraph_node *> (snode);
165 return !node || !node->global.inlined_to;
168 /* CVAL is a value taken from DECL_INITIAL of a variable. Try to transform it into
169 a form acceptable to is_gimple_min_invariant.
170 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
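/* Illustrative sketch (operand names are made up): a constructor value of
   the form "&var p+ 4" (a POINTER_PLUS_EXPR with a constant offset) may be
   rewritten below as roughly "&MEM[(char *)&var + 4]" so that it satisfies
   is_gimple_min_invariant; the precise form depends on the types involved.  */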
173 canonicalize_constructor_val (tree cval, tree from_decl)
175 tree orig_cval = cval;
177 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
178 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
180 tree ptr = TREE_OPERAND (cval, 0);
181 if (is_gimple_min_invariant (ptr))
182 cval = build1_loc (EXPR_LOCATION (cval),
183 ADDR_EXPR, TREE_TYPE (ptr),
184 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
186 fold_convert (ptr_type_node,
187 TREE_OPERAND (cval, 1))));
189 if (TREE_CODE (cval) == ADDR_EXPR)
191 tree base = NULL_TREE;
192 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
194 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
196 TREE_OPERAND (cval, 0) = base;
199 base = get_base_address (TREE_OPERAND (cval, 0));
203 if ((TREE_CODE (base) == VAR_DECL
204 || TREE_CODE (base) == FUNCTION_DECL)
205 && !can_refer_decl_in_current_unit_p (base, from_decl))
207 if (TREE_CODE (base) == VAR_DECL)
208 TREE_ADDRESSABLE (base) = 1;
209 else if (TREE_CODE (base) == FUNCTION_DECL)
211 /* Make sure we create a cgraph node for functions we'll reference.
212 They can be non-existent if the reference comes from an entry
213 of an external vtable for example. */
214 cgraph_node::get_create (base);
216 /* Fixup types in global initializers. */
217 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
218 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
220 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
221 cval = fold_convert (TREE_TYPE (orig_cval), cval);
224 if (TREE_OVERFLOW_P (cval))
225 return drop_tree_overflow (cval);
229 /* If SYM is a constant variable with known value, return the value.
230 NULL_TREE is returned otherwise. */
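/* For illustration ("answer" is just an example name): given

     static const int answer = 42;

   a use of "answer" can be folded to the constant 42, and a non-overridable
   "const" variable of register type without an initializer folds to a zero
   constant of its type (see below).  */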
233 get_symbol_constant_value (tree sym)
235 tree val = ctor_for_folding (sym);
236 if (val != error_mark_node)
240 val = canonicalize_constructor_val (unshare_expr (val), sym);
241 if (val && is_gimple_min_invariant (val))
246 /* Variables declared 'const' without an initializer
247 have zero as the initializer if they may not be
248 overridden at link or run time. */
250 && is_gimple_reg_type (TREE_TYPE (sym)))
251 return build_zero_cst (TREE_TYPE (sym));
259 /* Subroutine of fold_stmt. We perform several simplifications of the
260 memory reference tree EXPR and make sure to re-gimplify them properly
261 after propagation of constant addresses. IS_LHS is true if the
262 reference is supposed to be an lvalue. */
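/* Examples of what this handles: a REALPART_EXPR/IMAGPART_EXPR or
   VIEW_CONVERT_EXPR of a constant, a BIT_FIELD_REF of a constant, and
   (via fold_const_aggregate_ref) reads from constant aggregate
   initializers, each folded to the resulting constant when possible.  */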
265 maybe_fold_reference (tree expr, bool is_lhs)
269 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
270 || TREE_CODE (expr) == REALPART_EXPR
271 || TREE_CODE (expr) == IMAGPART_EXPR)
272 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
273 return fold_unary_loc (EXPR_LOCATION (expr),
276 TREE_OPERAND (expr, 0));
277 else if (TREE_CODE (expr) == BIT_FIELD_REF
278 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
279 return fold_ternary_loc (EXPR_LOCATION (expr),
282 TREE_OPERAND (expr, 0),
283 TREE_OPERAND (expr, 1),
284 TREE_OPERAND (expr, 2));
287 && (result = fold_const_aggregate_ref (expr))
288 && is_gimple_min_invariant (result))
295 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
296 replacement rhs for the statement or NULL_TREE if no simplification
297 could be made. It is assumed that the operands have been previously
301 fold_gimple_assign (gimple_stmt_iterator *si)
303 gimple *stmt = gsi_stmt (*si);
304 enum tree_code subcode = gimple_assign_rhs_code (stmt);
305 location_t loc = gimple_location (stmt);
307 tree result = NULL_TREE;
309 switch (get_gimple_rhs_class (subcode))
311 case GIMPLE_SINGLE_RHS:
313 tree rhs = gimple_assign_rhs1 (stmt);
315 if (TREE_CLOBBER_P (rhs))
318 if (REFERENCE_CLASS_P (rhs))
319 return maybe_fold_reference (rhs, false);
321 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
323 tree val = OBJ_TYPE_REF_EXPR (rhs);
324 if (is_gimple_min_invariant (val))
326 else if (flag_devirtualize && virtual_method_call_p (rhs))
329 vec <cgraph_node *>targets
330 = possible_polymorphic_call_targets (rhs, stmt, &final);
331 if (final && targets.length () <= 1 && dbg_cnt (devirt))
333 if (dump_enabled_p ())
335 location_t loc = gimple_location_safe (stmt);
336 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
337 "resolving virtual function address "
338 "reference to function %s\n",
339 targets.length () == 1
340 ? targets[0]->name ()
343 if (targets.length () == 1)
345 val = fold_convert (TREE_TYPE (val),
346 build_fold_addr_expr_loc
347 (loc, targets[0]->decl));
348 STRIP_USELESS_TYPE_CONVERSION (val);
351 /* We cannot use __builtin_unreachable here because it
352 cannot have its address taken. */
353 val = build_int_cst (TREE_TYPE (val), 0);
359 else if (TREE_CODE (rhs) == ADDR_EXPR)
361 tree ref = TREE_OPERAND (rhs, 0);
362 tree tem = maybe_fold_reference (ref, true);
364 && TREE_CODE (tem) == MEM_REF
365 && integer_zerop (TREE_OPERAND (tem, 1)))
366 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
368 result = fold_convert (TREE_TYPE (rhs),
369 build_fold_addr_expr_loc (loc, tem));
370 else if (TREE_CODE (ref) == MEM_REF
371 && integer_zerop (TREE_OPERAND (ref, 1)))
372 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
375 else if (TREE_CODE (rhs) == CONSTRUCTOR
376 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
377 && (CONSTRUCTOR_NELTS (rhs)
378 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
380 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
384 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
385 if (TREE_CODE (val) != INTEGER_CST
386 && TREE_CODE (val) != REAL_CST
387 && TREE_CODE (val) != FIXED_CST)
390 return build_vector_from_ctor (TREE_TYPE (rhs),
391 CONSTRUCTOR_ELTS (rhs));
394 else if (DECL_P (rhs))
395 return get_symbol_constant_value (rhs);
397 /* If we couldn't fold the RHS, hand over to the generic
399 if (result == NULL_TREE)
402 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
403 that may have been added by fold, and "useless" type
404 conversions that might now be apparent due to propagation. */
405 STRIP_USELESS_TYPE_CONVERSION (result);
407 if (result != rhs && valid_gimple_rhs_p (result))
414 case GIMPLE_UNARY_RHS:
417 case GIMPLE_BINARY_RHS:
420 case GIMPLE_TERNARY_RHS:
421 result = fold_ternary_loc (loc, subcode,
422 TREE_TYPE (gimple_assign_lhs (stmt)),
423 gimple_assign_rhs1 (stmt),
424 gimple_assign_rhs2 (stmt),
425 gimple_assign_rhs3 (stmt));
429 STRIP_USELESS_TYPE_CONVERSION (result);
430 if (valid_gimple_rhs_p (result))
435 case GIMPLE_INVALID_RHS:
443 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
444 adjusting the replacement stmts' location and virtual operands.
445 If the statement has an lhs, the last stmt in the sequence is expected
446 to assign to that lhs. */
449 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
451 gimple *stmt = gsi_stmt (*si_p);
453 if (gimple_has_location (stmt))
454 annotate_all_with_location (stmts, gimple_location (stmt));
456 /* First iterate over the replacement statements backward, assigning
457 virtual operands to their defining statements. */
458 gimple *laststore = NULL;
459 for (gimple_stmt_iterator i = gsi_last (stmts);
460 !gsi_end_p (i); gsi_prev (&i))
462 gimple *new_stmt = gsi_stmt (i);
463 if ((gimple_assign_single_p (new_stmt)
464 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
465 || (is_gimple_call (new_stmt)
466 && (gimple_call_flags (new_stmt)
467 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
471 vdef = gimple_vdef (stmt);
473 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
474 gimple_set_vdef (new_stmt, vdef);
475 if (vdef && TREE_CODE (vdef) == SSA_NAME)
476 SSA_NAME_DEF_STMT (vdef) = new_stmt;
477 laststore = new_stmt;
481 /* Second iterate over the statements forward, assigning virtual
482 operands to their uses. */
483 tree reaching_vuse = gimple_vuse (stmt);
484 for (gimple_stmt_iterator i = gsi_start (stmts);
485 !gsi_end_p (i); gsi_next (&i))
487 gimple *new_stmt = gsi_stmt (i);
488 /* If the new statement possibly has a VUSE, update it with the exact SSA
489 name we know will reach this one. */
490 if (gimple_has_mem_ops (new_stmt))
491 gimple_set_vuse (new_stmt, reaching_vuse);
492 gimple_set_modified (new_stmt, true);
493 if (gimple_vdef (new_stmt))
494 reaching_vuse = gimple_vdef (new_stmt);
497 /* If the new sequence does not do a store, release the virtual
498 definition of the original statement. */
500 && reaching_vuse == gimple_vuse (stmt))
502 tree vdef = gimple_vdef (stmt);
504 && TREE_CODE (vdef) == SSA_NAME)
506 unlink_stmt_vdef (stmt);
507 release_ssa_name (vdef);
511 /* Finally replace the original statement with the sequence. */
512 gsi_replace_with_seq (si_p, stmts, false);
515 /* Convert EXPR into a GIMPLE value suitable for substitution on the
516 RHS of an assignment. Insert the necessary statements before
517 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
518 is replaced. If the call is expected to produce a result, then it
519 is replaced by an assignment of the new RHS to the result variable.
520 If the result is to be ignored, then the call is replaced by a
521 GIMPLE_NOP. A proper VDEF chain is retained by making the first
522 VUSE and the last VDEF of the whole sequence be the same as the replaced
523 statement and using new SSA names for stores in between. */
526 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
529 gimple *stmt, *new_stmt;
530 gimple_stmt_iterator i;
531 gimple_seq stmts = NULL;
533 stmt = gsi_stmt (*si_p);
535 gcc_assert (is_gimple_call (stmt));
537 push_gimplify_context (gimple_in_ssa_p (cfun));
539 lhs = gimple_call_lhs (stmt);
540 if (lhs == NULL_TREE)
542 gimplify_and_add (expr, &stmts);
543 /* We can end up folding a memcpy of an empty class assignment
544 which gets optimized away by C++ gimplification. */
545 if (gimple_seq_empty_p (stmts))
547 pop_gimplify_context (NULL);
548 if (gimple_in_ssa_p (cfun))
550 unlink_stmt_vdef (stmt);
553 gsi_replace (si_p, gimple_build_nop (), false);
559 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
560 new_stmt = gimple_build_assign (lhs, tmp);
561 i = gsi_last (stmts);
562 gsi_insert_after_without_update (&i, new_stmt,
563 GSI_CONTINUE_LINKING);
566 pop_gimplify_context (NULL);
568 gsi_replace_with_seq_vops (si_p, stmts);
572 /* Replace the call at *GSI with the gimple value VAL. */
575 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
577 gimple *stmt = gsi_stmt (*gsi);
578 tree lhs = gimple_call_lhs (stmt);
582 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
583 val = fold_convert (TREE_TYPE (lhs), val);
584 repl = gimple_build_assign (lhs, val);
587 repl = gimple_build_nop ();
588 tree vdef = gimple_vdef (stmt);
589 if (vdef && TREE_CODE (vdef) == SSA_NAME)
591 unlink_stmt_vdef (stmt);
592 release_ssa_name (vdef);
594 gsi_replace (gsi, repl, false);
597 /* Replace the call at *GSI with the new call REPL and fold that
601 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
603 gimple *stmt = gsi_stmt (*gsi);
604 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
605 gimple_set_location (repl, gimple_location (stmt));
606 if (gimple_vdef (stmt)
607 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
609 gimple_set_vdef (repl, gimple_vdef (stmt));
610 gimple_set_vuse (repl, gimple_vuse (stmt));
611 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
613 gsi_replace (gsi, repl, false);
617 /* Return true if VAR is a VAR_DECL or a component thereof. */
620 var_decl_component_p (tree var)
623 while (handled_component_p (inner))
624 inner = TREE_OPERAND (inner, 0);
625 return SSA_VAR_P (inner);
628 /* Fold function call to builtin mem{{,p}cpy,move}. Return
629 false if no simplification can be made.
630 If ENDP is 0, return DEST (like memcpy).
631 If ENDP is 1, return DEST+LEN (like mempcpy).
632 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
633 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
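/* Informal sketch with made-up operands d/s/n: a successful fold of

     p = mempcpy (d, s, n);      (ENDP == 1)

   leaves the copy itself plus "p = d + n", while for memcpy/memmove
   (ENDP == 0 or 3) the result, if used, is simply DEST.  */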
637 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
638 tree dest, tree src, int endp)
640 gimple *stmt = gsi_stmt (*gsi);
641 tree lhs = gimple_call_lhs (stmt);
642 tree len = gimple_call_arg (stmt, 2);
643 tree destvar, srcvar;
644 location_t loc = gimple_location (stmt);
646 /* If the LEN parameter is zero, return DEST. */
647 if (integer_zerop (len))
650 if (gimple_call_lhs (stmt))
651 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
653 repl = gimple_build_nop ();
654 tree vdef = gimple_vdef (stmt);
655 if (vdef && TREE_CODE (vdef) == SSA_NAME)
657 unlink_stmt_vdef (stmt);
658 release_ssa_name (vdef);
660 gsi_replace (gsi, repl, false);
664 /* If SRC and DEST are the same (and not volatile), return
665 DEST{,+LEN,+LEN-1}. */
666 if (operand_equal_p (src, dest, 0))
668 unlink_stmt_vdef (stmt);
669 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
670 release_ssa_name (gimple_vdef (stmt));
673 gsi_replace (gsi, gimple_build_nop (), false);
680 tree srctype, desttype;
681 unsigned int src_align, dest_align;
684 /* Build accesses at offset zero with a ref-all character type. */
685 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
688 /* If we can perform the copy efficiently by first doing all loads
689 and then all stores, inline it that way. Currently "efficiently"
690 means that we can load all the memory into a single integer
691 register, which is what MOVE_MAX gives us. */
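/* E.g. a copy whose constant length fits in one integer register may be
   emitted as a single load followed by a single store instead of a call;
   this is only a sketch, the exact mode/alignment conditions are checked
   below.  */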
692 src_align = get_pointer_alignment (src);
693 dest_align = get_pointer_alignment (dest);
694 if (tree_fits_uhwi_p (len)
695 && compare_tree_int (len, MOVE_MAX) <= 0
696 /* ??? Don't transform copies from strings with known length; this
697 confuses tree-ssa-strlen.c. This doesn't handle
698 the case in gcc.dg/strlenopt-8.c, which is XFAILed for that
700 && !c_strlen (src, 2))
702 unsigned ilen = tree_to_uhwi (len);
703 if (exact_log2 (ilen) != -1)
705 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
707 && TYPE_MODE (type) != BLKmode
708 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
710 /* If the destination pointer is not aligned we must be able
711 to emit an unaligned store. */
712 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
713 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
714 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
715 != CODE_FOR_nothing)))
718 tree desttype = type;
719 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
720 srctype = build_aligned_type (type, src_align);
721 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
722 tree tem = fold_const_aggregate_ref (srcmem);
725 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
726 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
728 && (optab_handler (movmisalign_optab,
730 == CODE_FOR_nothing))
735 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
737 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
738 if (gimple_in_ssa_p (cfun))
739 srcmem = make_ssa_name (TREE_TYPE (srcmem),
742 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
743 gimple_assign_set_lhs (new_stmt, srcmem);
744 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
745 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
747 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
748 desttype = build_aligned_type (type, dest_align);
750 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
753 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
754 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
755 if (gimple_vdef (new_stmt)
756 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
757 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
760 gsi_replace (gsi, new_stmt, false);
763 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
772 /* Both DEST and SRC must be pointer types.
773 ??? This is what old code did. Is the testing for pointer types
776 If either SRC is readonly or length is 1, we can use memcpy. */
777 if (!dest_align || !src_align)
779 if (readonly_data_expr (src)
780 || (tree_fits_uhwi_p (len)
781 && (MIN (src_align, dest_align) / BITS_PER_UNIT
782 >= tree_to_uhwi (len))))
784 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
787 gimple_call_set_fndecl (stmt, fn);
788 gimple_call_set_arg (stmt, 0, dest);
789 gimple_call_set_arg (stmt, 1, src);
794 /* If *src and *dest can't overlap, optimize into memcpy as well. */
795 if (TREE_CODE (src) == ADDR_EXPR
796 && TREE_CODE (dest) == ADDR_EXPR)
798 tree src_base, dest_base, fn;
799 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
800 HOST_WIDE_INT size = -1;
801 HOST_WIDE_INT maxsize = -1;
803 srcvar = TREE_OPERAND (src, 0);
804 src_base = get_ref_base_and_extent (srcvar, &src_offset,
806 destvar = TREE_OPERAND (dest, 0);
807 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
809 if (tree_fits_uhwi_p (len))
810 maxsize = tree_to_uhwi (len);
813 src_offset /= BITS_PER_UNIT;
814 dest_offset /= BITS_PER_UNIT;
815 if (SSA_VAR_P (src_base)
816 && SSA_VAR_P (dest_base))
818 if (operand_equal_p (src_base, dest_base, 0)
819 && ranges_overlap_p (src_offset, maxsize,
820 dest_offset, maxsize))
823 else if (TREE_CODE (src_base) == MEM_REF
824 && TREE_CODE (dest_base) == MEM_REF)
826 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
827 TREE_OPERAND (dest_base, 0), 0))
829 offset_int off = mem_ref_offset (src_base) + src_offset;
830 if (!wi::fits_shwi_p (off))
832 src_offset = off.to_shwi ();
834 off = mem_ref_offset (dest_base) + dest_offset;
835 if (!wi::fits_shwi_p (off))
837 dest_offset = off.to_shwi ();
838 if (ranges_overlap_p (src_offset, maxsize,
839 dest_offset, maxsize))
845 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
848 gimple_call_set_fndecl (stmt, fn);
849 gimple_call_set_arg (stmt, 0, dest);
850 gimple_call_set_arg (stmt, 1, src);
855 /* If the destination and source do not alias, optimize into memcpy as well. */
857 if ((is_gimple_min_invariant (dest)
858 || TREE_CODE (dest) == SSA_NAME)
859 && (is_gimple_min_invariant (src)
860 || TREE_CODE (src) == SSA_NAME))
863 ao_ref_init_from_ptr_and_size (&destr, dest, len);
864 ao_ref_init_from_ptr_and_size (&srcr, src, len);
865 if (!refs_may_alias_p_1 (&destr, &srcr, false))
868 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
871 gimple_call_set_fndecl (stmt, fn);
872 gimple_call_set_arg (stmt, 0, dest);
873 gimple_call_set_arg (stmt, 1, src);
882 if (!tree_fits_shwi_p (len))
885 This logic loses for arguments like (type *)malloc (sizeof (type)),
886 since we strip the casts of up to VOID return value from malloc.
887 Perhaps we ought to inherit type from non-VOID argument here? */
890 if (!POINTER_TYPE_P (TREE_TYPE (src))
891 || !POINTER_TYPE_P (TREE_TYPE (dest)))
893 /* In the following, try to find a type that is most natural to be
894 used for the memcpy source and destination and that allows
895 the most optimization when memcpy is turned into a plain assignment
896 using that type. In theory we could always use a char[len] type,
897 but that only gains us that the destination and source will possibly
898 no longer have their address taken. */
899 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
900 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
902 tree tem = TREE_OPERAND (src, 0);
904 if (tem != TREE_OPERAND (src, 0))
905 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
907 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
909 tree tem = TREE_OPERAND (dest, 0);
911 if (tem != TREE_OPERAND (dest, 0))
912 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
914 srctype = TREE_TYPE (TREE_TYPE (src));
915 if (TREE_CODE (srctype) == ARRAY_TYPE
916 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
918 srctype = TREE_TYPE (srctype);
920 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
922 desttype = TREE_TYPE (TREE_TYPE (dest));
923 if (TREE_CODE (desttype) == ARRAY_TYPE
924 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
926 desttype = TREE_TYPE (desttype);
928 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
930 if (TREE_ADDRESSABLE (srctype)
931 || TREE_ADDRESSABLE (desttype))
934 /* Make sure we are not copying using a floating-point mode or
935 a type whose size possibly does not match its precision. */
936 if (FLOAT_MODE_P (TYPE_MODE (desttype))
937 || TREE_CODE (desttype) == BOOLEAN_TYPE
938 || TREE_CODE (desttype) == ENUMERAL_TYPE)
939 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
940 if (FLOAT_MODE_P (TYPE_MODE (srctype))
941 || TREE_CODE (srctype) == BOOLEAN_TYPE
942 || TREE_CODE (srctype) == ENUMERAL_TYPE)
943 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
951 src_align = get_pointer_alignment (src);
952 dest_align = get_pointer_alignment (dest);
953 if (dest_align < TYPE_ALIGN (desttype)
954 || src_align < TYPE_ALIGN (srctype))
958 STRIP_NOPS (destvar);
959 if (TREE_CODE (destvar) == ADDR_EXPR
960 && var_decl_component_p (TREE_OPERAND (destvar, 0))
961 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
968 if (TREE_CODE (srcvar) == ADDR_EXPR
969 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
970 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
973 || src_align >= TYPE_ALIGN (desttype))
974 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
976 else if (!STRICT_ALIGNMENT)
978 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
980 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
988 if (srcvar == NULL_TREE && destvar == NULL_TREE)
991 if (srcvar == NULL_TREE)
994 if (src_align >= TYPE_ALIGN (desttype))
995 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
998 if (STRICT_ALIGNMENT)
1000 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1002 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1005 else if (destvar == NULL_TREE)
1008 if (dest_align >= TYPE_ALIGN (srctype))
1009 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1012 if (STRICT_ALIGNMENT)
1014 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1016 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1021 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1023 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1024 if (gimple_in_ssa_p (cfun))
1025 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1027 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1028 gimple_assign_set_lhs (new_stmt, srcvar);
1029 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1030 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1032 new_stmt = gimple_build_assign (destvar, srcvar);
1033 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1034 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1035 if (gimple_vdef (new_stmt)
1036 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1037 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1040 gsi_replace (gsi, new_stmt, false);
1043 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1047 gimple_seq stmts = NULL;
1048 if (endp == 0 || endp == 3)
1051 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1053 if (endp == 2 || endp == 1)
1055 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1056 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1057 TREE_TYPE (dest), dest, len);
1060 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1061 gimple *repl = gimple_build_assign (lhs, dest);
1062 gsi_replace (gsi, repl, false);
1066 /* Fold function call to builtin memset or bzero at *GSI setting the
1067 memory of size LEN to VAL. Return whether a simplification was made. */
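/* Rough illustration, assuming a 4-byte int destination object:

     memset (&i, 0, 4);      ->   i = 0;
     memset (&i, 0xab, 4);   ->   i = 0xabababab;

   i.e. the byte value is replicated across the stored word; otherwise the
   call is left untouched.  */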
1070 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1072 gimple *stmt = gsi_stmt (*gsi);
1074 unsigned HOST_WIDE_INT length, cval;
1076 /* If the LEN parameter is zero, return DEST. */
1077 if (integer_zerop (len))
1079 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1083 if (! tree_fits_uhwi_p (len))
1086 if (TREE_CODE (c) != INTEGER_CST)
1089 tree dest = gimple_call_arg (stmt, 0);
1091 if (TREE_CODE (var) != ADDR_EXPR)
1094 var = TREE_OPERAND (var, 0);
1095 if (TREE_THIS_VOLATILE (var))
1098 etype = TREE_TYPE (var);
1099 if (TREE_CODE (etype) == ARRAY_TYPE)
1100 etype = TREE_TYPE (etype);
1102 if (!INTEGRAL_TYPE_P (etype)
1103 && !POINTER_TYPE_P (etype))
1106 if (! var_decl_component_p (var))
1109 length = tree_to_uhwi (len);
1110 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1111 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1114 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1117 if (integer_zerop (c))
1121 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1124 cval = TREE_INT_CST_LOW (c);
1128 cval |= (cval << 31) << 1;
1131 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1132 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1133 gimple_set_vuse (store, gimple_vuse (stmt));
1134 tree vdef = gimple_vdef (stmt);
1135 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1137 gimple_set_vdef (store, gimple_vdef (stmt));
1138 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1140 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1141 if (gimple_call_lhs (stmt))
1143 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1144 gsi_replace (gsi, asgn, false);
1148 gimple_stmt_iterator gsi2 = *gsi;
1150 gsi_remove (&gsi2, true);
1157 /* Return the string length, maximum string length or maximum value of
1159 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1160 is not NULL and, for TYPE == 0, its value is not equal to the length
1161 we determine or if we are unable to determine the length or value,
1162 return false. VISITED is a bitmap of visited variables.
1163 TYPE is 0 if string length should be returned, 1 for maximum string
1164 length and 2 for maximum value ARG can have. */
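/* Informal example for TYPE == 0: an SSA name defined as

     s_3 = PHI <"foo", "bar">

   yields length 3 because every PHI argument has that same constant
   length; if the argument lengths differ, the walk fails.  */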
1167 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1172 if (TREE_CODE (arg) != SSA_NAME)
1174 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1175 if (TREE_CODE (arg) == ADDR_EXPR
1176 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1177 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1179 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1180 if (TREE_CODE (aop0) == INDIRECT_REF
1181 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1182 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1183 length, visited, type);
1189 if (TREE_CODE (val) != INTEGER_CST
1190 || tree_int_cst_sgn (val) < 0)
1194 val = c_strlen (arg, 1);
1202 if (TREE_CODE (*length) != INTEGER_CST
1203 || TREE_CODE (val) != INTEGER_CST)
1206 if (tree_int_cst_lt (*length, val))
1210 else if (simple_cst_equal (val, *length) != 1)
1218 /* If ARG is registered for SSA update we cannot look at its defining
1220 if (name_registered_for_update_p (arg))
1223 /* If we were already here, break the infinite cycle. */
1225 *visited = BITMAP_ALLOC (NULL);
1226 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1230 def_stmt = SSA_NAME_DEF_STMT (var);
1232 switch (gimple_code (def_stmt))
1235 /* The RHS of the statement defining VAR must either have a
1236 constant length or come from another SSA_NAME with a constant
1238 if (gimple_assign_single_p (def_stmt)
1239 || gimple_assign_unary_nop_p (def_stmt))
1241 tree rhs = gimple_assign_rhs1 (def_stmt);
1242 return get_maxval_strlen (rhs, length, visited, type);
1244 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1246 tree op2 = gimple_assign_rhs2 (def_stmt);
1247 tree op3 = gimple_assign_rhs3 (def_stmt);
1248 return get_maxval_strlen (op2, length, visited, type)
1249 && get_maxval_strlen (op3, length, visited, type);
1255 /* All the arguments of the PHI node must have the same constant
1259 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1261 tree arg = gimple_phi_arg (def_stmt, i)->def;
1263 /* If this PHI has itself as an argument, we cannot
1264 determine the string length of this argument. However,
1265 if we can find a constant string length for the other
1266 PHI args then we can still be sure that this is a
1267 constant string length. So be optimistic and just
1268 continue with the next argument. */
1269 if (arg == gimple_phi_result (def_stmt))
1272 if (!get_maxval_strlen (arg, length, visited, type))
1284 get_maxval_strlen (tree arg, int type)
1286 bitmap visited = NULL;
1287 tree len = NULL_TREE;
1288 if (!get_maxval_strlen (arg, &len, &visited, type))
1291 BITMAP_FREE (visited);
1297 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1298 If LEN is not NULL, it represents the length of the string to be
1299 copied. Return NULL_TREE if no simplification can be made. */
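/* Sketch of the transformation (skipped when optimizing for size): with a
   known source length,

     strcpy (dst, "abc");   ->   memcpy (dst, "abc", 4);

   i.e. the string length plus one for the terminating NUL.  */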
1302 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1303 tree dest, tree src)
1305 location_t loc = gimple_location (gsi_stmt (*gsi));
1308 /* If SRC and DEST are the same (and not volatile), return DEST. */
1309 if (operand_equal_p (src, dest, 0))
1311 replace_call_with_value (gsi, dest);
1315 if (optimize_function_for_size_p (cfun))
1318 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1322 tree len = get_maxval_strlen (src, 0);
1326 len = fold_convert_loc (loc, size_type_node, len);
1327 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1328 len = force_gimple_operand_gsi (gsi, len, true,
1329 NULL_TREE, true, GSI_SAME_STMT);
1330 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1331 replace_call_with_call_and_fold (gsi, repl);
1335 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1336 If SLEN is not NULL, it represents the length of the source string.
1337 Return NULL_TREE if no simplification can be made. */
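/* Sketch: with LEN and the source length both constant and LEN not larger
   than strlen (SRC) + 1, e.g.

     strncpy (dst, "abc", 3);   ->   memcpy (dst, "abc", 3);

   a larger LEN, which would require zero padding, is not handled here.  */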
1340 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1341 tree dest, tree src, tree len)
1343 location_t loc = gimple_location (gsi_stmt (*gsi));
1346 /* If the LEN parameter is zero, return DEST. */
1347 if (integer_zerop (len))
1349 replace_call_with_value (gsi, dest);
1353 /* We can't compare slen with len as constants below if len is not a
1355 if (TREE_CODE (len) != INTEGER_CST)
1358 /* Now, we must be passed a constant src ptr parameter. */
1359 tree slen = get_maxval_strlen (src, 0);
1360 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1363 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1365 /* We do not support simplification of this case, though we do
1366 support it when expanding trees into RTL. */
1367 /* FIXME: generate a call to __builtin_memset. */
1368 if (tree_int_cst_lt (slen, len))
1371 /* OK transform into builtin memcpy. */
1372 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1376 len = fold_convert_loc (loc, size_type_node, len);
1377 len = force_gimple_operand_gsi (gsi, len, true,
1378 NULL_TREE, true, GSI_SAME_STMT);
1379 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1380 replace_call_with_call_and_fold (gsi, repl);
1384 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1387 Return NULL_TREE if no simplification was possible, otherwise return the
1388 simplified form of the call as a tree.
1390 The simplified form may be a constant or other expression which
1391 computes the same value, but in a more efficient manner (including
1392 calls to other builtin functions).
1394 The call may contain arguments which need to be evaluated, but
1395 which are not useful to determine the result of the call. In
1396 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1397 COMPOUND_EXPR will be an argument which must be evaluated.
1398 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1399 COMPOUND_EXPR in the chain will contain the tree for the simplified
1400 form of the builtin function call. */
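/* Sketch of the store-by-pieces variant built below: when strlen (SRC)
   is known, the call

     strcat (dst, src);

   roughly becomes

     tmp = strlen (dst);
     memcpy (dst + tmp, src, strlen (src) + 1);

   with the original DST used as the result if the lhs is needed.  */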
1403 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1405 gimple *stmt = gsi_stmt (*gsi);
1406 location_t loc = gimple_location (stmt);
1408 const char *p = c_getstr (src);
1410 /* If the string length is zero, return the dst parameter. */
1411 if (p && *p == '\0')
1413 replace_call_with_value (gsi, dst);
1417 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1420 /* See if we can store by pieces into (dst + strlen(dst)). */
1422 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1423 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1425 if (!strlen_fn || !memcpy_fn)
1428 /* If the length of the source string isn't computable don't
1429 split strcat into strlen and memcpy. */
1430 tree len = get_maxval_strlen (src, 0);
1434 /* Create strlen (dst). */
1435 gimple_seq stmts = NULL, stmts2;
1436 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1437 gimple_set_location (repl, loc);
1438 if (gimple_in_ssa_p (cfun))
1439 newdst = make_ssa_name (size_type_node);
1441 newdst = create_tmp_reg (size_type_node);
1442 gimple_call_set_lhs (repl, newdst);
1443 gimple_seq_add_stmt_without_update (&stmts, repl);
1445 /* Create (dst p+ strlen (dst)). */
1446 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1447 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1448 gimple_seq_add_seq_without_update (&stmts, stmts2);
1450 len = fold_convert_loc (loc, size_type_node, len);
1451 len = size_binop_loc (loc, PLUS_EXPR, len,
1452 build_int_cst (size_type_node, 1));
1453 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1454 gimple_seq_add_seq_without_update (&stmts, stmts2);
1456 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1457 gimple_seq_add_stmt_without_update (&stmts, repl);
1458 if (gimple_call_lhs (stmt))
1460 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1461 gimple_seq_add_stmt_without_update (&stmts, repl);
1462 gsi_replace_with_seq_vops (gsi, stmts);
1463 /* gsi now points at the assignment to the lhs, get a
1464 stmt iterator to the memcpy call.
1465 ??? We can't use gsi_for_stmt as that doesn't work when the
1466 CFG isn't built yet. */
1467 gimple_stmt_iterator gsi2 = *gsi;
1473 gsi_replace_with_seq_vops (gsi, stmts);
1479 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1480 are the arguments to the call. */
1483 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1485 gimple *stmt = gsi_stmt (*gsi);
1486 tree dest = gimple_call_arg (stmt, 0);
1487 tree src = gimple_call_arg (stmt, 1);
1488 tree size = gimple_call_arg (stmt, 2);
1494 /* If the SRC parameter is "", return DEST. */
1495 if (p && *p == '\0')
1497 replace_call_with_value (gsi, dest);
1501 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1504 /* If __builtin_strcat_chk is used, assume strcat is available. */
1505 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1509 gimple *repl = gimple_build_call (fn, 2, dest, src);
1510 replace_call_with_call_and_fold (gsi, repl);
1514 /* Simplify a call to the strncat builtin. */
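/* E.g. strncat (d, "ab", 5) may become strcat (d, "ab") because the bound
   is at least the source length, while a zero bound or an empty source
   simply yields D.  */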
1517 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1519 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1520 tree dst = gimple_call_arg (stmt, 0);
1521 tree src = gimple_call_arg (stmt, 1);
1522 tree len = gimple_call_arg (stmt, 2);
1524 const char *p = c_getstr (src);
1526 /* If the requested length is zero, or the src parameter string
1527 length is zero, return the dst parameter. */
1528 if (integer_zerop (len) || (p && *p == '\0'))
1530 replace_call_with_value (gsi, dst);
1534 /* If the requested len is greater than or equal to the string
1535 length, call strcat. */
1536 if (TREE_CODE (len) == INTEGER_CST && p
1537 && compare_tree_int (len, strlen (p)) >= 0)
1539 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1541 /* If the replacement _DECL isn't initialized, don't do the
1546 gcall *repl = gimple_build_call (fn, 2, dst, src);
1547 replace_call_with_call_and_fold (gsi, repl);
1554 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1558 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1560 gimple *stmt = gsi_stmt (*gsi);
1561 tree dest = gimple_call_arg (stmt, 0);
1562 tree src = gimple_call_arg (stmt, 1);
1563 tree len = gimple_call_arg (stmt, 2);
1564 tree size = gimple_call_arg (stmt, 3);
1569 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1570 if ((p && *p == '\0')
1571 || integer_zerop (len))
1573 replace_call_with_value (gsi, dest);
1577 if (! tree_fits_uhwi_p (size))
1580 if (! integer_all_onesp (size))
1582 tree src_len = c_strlen (src, 1);
1584 && tree_fits_uhwi_p (src_len)
1585 && tree_fits_uhwi_p (len)
1586 && ! tree_int_cst_lt (len, src_len))
1588 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1589 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1593 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1594 replace_call_with_call_and_fold (gsi, repl);
1600 /* If __builtin_strncat_chk is used, assume strncat is available. */
1601 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1605 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1606 replace_call_with_call_and_fold (gsi, repl);
1610 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1611 to the call. IGNORE is true if the value returned
1612 by the builtin will be ignored. UNLOCKED is true if this is
1613 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
1614 the known length of the string. Return NULL_TREE if no simplification
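/* Depending on the known string length, the call is roughly rewritten as

     fputs ("", f);      ->   (call removed)
     fputs ("x", f);     ->   fputc ('x', f);
     fputs ("abc", f);   ->   fwrite ("abc", 1, 3, f);

   and only when the return value is unused (checked below).  */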
1618 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1619 tree arg0, tree arg1,
1622 gimple *stmt = gsi_stmt (*gsi);
1624 /* If we're using an unlocked function, assume the other unlocked
1625 functions exist explicitly. */
1626 tree const fn_fputc = (unlocked
1627 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1628 : builtin_decl_implicit (BUILT_IN_FPUTC));
1629 tree const fn_fwrite = (unlocked
1630 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1631 : builtin_decl_implicit (BUILT_IN_FWRITE));
1633 /* If the return value is used, don't do the transformation. */
1634 if (gimple_call_lhs (stmt))
1637 /* Get the length of the string passed to fputs. If the length
1638 can't be determined, punt. */
1639 tree len = get_maxval_strlen (arg0, 0);
1641 || TREE_CODE (len) != INTEGER_CST)
1644 switch (compare_tree_int (len, 1))
1646 case -1: /* length is 0, delete the call entirely. */
1647 replace_call_with_value (gsi, integer_zero_node);
1650 case 0: /* length is 1, call fputc. */
1652 const char *p = c_getstr (arg0);
1658 gimple *repl = gimple_build_call (fn_fputc, 2,
1660 (integer_type_node, p[0]), arg1);
1661 replace_call_with_call_and_fold (gsi, repl);
1666 case 1: /* length is greater than 1, call fwrite. */
1668 /* If optimizing for size keep fputs. */
1669 if (optimize_function_for_size_p (cfun))
1671 /* New argument list transforming fputs(string, stream) to
1672 fwrite(string, 1, len, stream). */
1676 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1677 size_one_node, len, arg1);
1678 replace_call_with_call_and_fold (gsi, repl);
1687 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1688 DEST, SRC, LEN, and SIZE are the arguments to the call.
1689 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
1690 code of the builtin. If MAXLEN is not NULL, it is the maximum length
1691 passed as the third argument. */
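/* Informal sketch: when the object-size check can be decided at compile
   time, e.g. LEN constant and not larger than SIZE,

     __memcpy_chk (d, s, n, sz);   ->   memcpy (d, s, n);

   and similarly for the other _chk variants handled here.  */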
1694 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1695 tree dest, tree src, tree len, tree size,
1696 enum built_in_function fcode)
1698 gimple *stmt = gsi_stmt (*gsi);
1699 location_t loc = gimple_location (stmt);
1700 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1703 /* If SRC and DEST are the same (and not volatile), return DEST
1704 (resp. DEST+LEN for __mempcpy_chk). */
1705 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1707 if (fcode != BUILT_IN_MEMPCPY_CHK)
1709 replace_call_with_value (gsi, dest);
1714 gimple_seq stmts = NULL;
1715 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1716 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, dest, len);
1717 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1718 replace_call_with_value (gsi, temp);
1723 if (! tree_fits_uhwi_p (size))
1726 tree maxlen = get_maxval_strlen (len, 2);
1727 if (! integer_all_onesp (size))
1729 if (! tree_fits_uhwi_p (len))
1731 /* If LEN is not constant, try MAXLEN too.
1732 For MAXLEN only allow optimizing into non-_ocs function
1733 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1734 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1736 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1738 /* (void) __mempcpy_chk () can be optimized into
1739 (void) __memcpy_chk (). */
1740 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1744 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1745 replace_call_with_call_and_fold (gsi, repl);
1754 if (tree_int_cst_lt (size, maxlen))
1759 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1760 mem{cpy,pcpy,move,set} is available. */
1763 case BUILT_IN_MEMCPY_CHK:
1764 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1766 case BUILT_IN_MEMPCPY_CHK:
1767 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1769 case BUILT_IN_MEMMOVE_CHK:
1770 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1772 case BUILT_IN_MEMSET_CHK:
1773 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1782 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1783 replace_call_with_call_and_fold (gsi, repl);
1787 /* Fold a call to the __st[rp]cpy_chk builtin.
1788 DEST, SRC, and SIZE are the arguments to the call.
1789 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
1790 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
1791 the strings passed as the second argument. */
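/* Sketch: if the source length is known to be smaller than SIZE,

     __strcpy_chk (d, s, sz);   ->   strcpy (d, s);

   and a __strcpy_chk with a non-constant but computable length may instead
   become a __memcpy_chk of length strlen (s) + 1 (see below).  */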
1794 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1796 tree src, tree size,
1797 enum built_in_function fcode)
1799 gimple *stmt = gsi_stmt (*gsi);
1800 location_t loc = gimple_location (stmt);
1801 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1804 /* If SRC and DEST are the same (and not volatile), return DEST. */
1805 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1807 replace_call_with_value (gsi, dest);
1811 if (! tree_fits_uhwi_p (size))
1814 tree maxlen = get_maxval_strlen (src, 1);
1815 if (! integer_all_onesp (size))
1817 len = c_strlen (src, 1);
1818 if (! len || ! tree_fits_uhwi_p (len))
1820 /* If LEN is not constant, try MAXLEN too.
1821 For MAXLEN only allow optimizing into non-_ocs function
1822 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1823 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1825 if (fcode == BUILT_IN_STPCPY_CHK)
1830 /* If return value of __stpcpy_chk is ignored,
1831 optimize into __strcpy_chk. */
1832 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1836 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1837 replace_call_with_call_and_fold (gsi, repl);
1841 if (! len || TREE_SIDE_EFFECTS (len))
1844 /* If c_strlen returned something, but not a constant,
1845 transform __strcpy_chk into __memcpy_chk. */
1846 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1850 gimple_seq stmts = NULL;
1851 len = gimple_convert (&stmts, loc, size_type_node, len);
1852 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1853 build_int_cst (size_type_node, 1));
1854 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1855 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1856 replace_call_with_call_and_fold (gsi, repl);
1863 if (! tree_int_cst_lt (maxlen, size))
1867 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1868 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1869 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1873 gimple *repl = gimple_build_call (fn, 2, dest, src);
1874 replace_call_with_call_and_fold (gsi, repl);
1878 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1880 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
1881 length passed as the third argument. IGNORE is true if the return value can be
1881 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1884 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1885 tree dest, tree src,
1886 tree len, tree size,
1887 enum built_in_function fcode)
1889 gimple *stmt = gsi_stmt (*gsi);
1890 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1893 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1895 /* If return value of __stpncpy_chk is ignored,
1896 optimize into __strncpy_chk. */
1897 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1900 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1901 replace_call_with_call_and_fold (gsi, repl);
1906 if (! tree_fits_uhwi_p (size))
1909 tree maxlen = get_maxval_strlen (len, 2);
1910 if (! integer_all_onesp (size))
1912 if (! tree_fits_uhwi_p (len))
1914 /* If LEN is not constant, try MAXLEN too.
1915 For MAXLEN only allow optimizing into non-_ocs function
1916 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1917 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1923 if (tree_int_cst_lt (size, maxlen))
1927 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1928 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1929 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1933 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1934 replace_call_with_call_and_fold (gsi, repl);
1938 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1939 Return NULL_TREE if no simplification can be made. */
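/* Sketch: with a known constant source length N, the call

     p = stpcpy (d, s);

   is turned into roughly

     memcpy (d, s, N + 1);  p = d + N;

   and if the result is unused, stpcpy simply degrades to strcpy.  */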
1942 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1944 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1945 location_t loc = gimple_location (stmt);
1946 tree dest = gimple_call_arg (stmt, 0);
1947 tree src = gimple_call_arg (stmt, 1);
1948 tree fn, len, lenp1;
1950 /* If the result is unused, replace stpcpy with strcpy. */
1951 if (gimple_call_lhs (stmt) == NULL_TREE)
1953 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1956 gimple_call_set_fndecl (stmt, fn);
1961 len = c_strlen (src, 1);
1963 || TREE_CODE (len) != INTEGER_CST)
1966 if (optimize_function_for_size_p (cfun)
1967 /* If length is zero it's small enough. */
1968 && !integer_zerop (len))
1971 /* If the source has a known length replace stpcpy with memcpy. */
1972 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1976 gimple_seq stmts = NULL;
1977 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1978 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1979 tem, build_int_cst (size_type_node, 1));
1980 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1981 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1982 gimple_set_vuse (repl, gimple_vuse (stmt));
1983 gimple_set_vdef (repl, gimple_vdef (stmt));
1984 if (gimple_vdef (repl)
1985 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1986 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1987 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1988 /* Replace the result with dest + len. */
1990 tem = gimple_convert (&stmts, loc, sizetype, len);
1991 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1992 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1993 POINTER_PLUS_EXPR, dest, tem);
1994 gsi_replace (gsi, ret, false);
1995 /* Finally fold the memcpy call. */
1996 gimple_stmt_iterator gsi2 = *gsi;
2002 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2003 NULL_TREE if a normal call should be emitted rather than expanding
2004 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2005 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
2006 passed as the second argument. */
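/* Illustrative example: with a zero flag and a size check that can be
   discharged at compile time,

     __snprintf_chk (d, n, 0, sz, "%s", s);   ->   snprintf (d, n, "%s", s);

   keeping any trailing varargs in place.  */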
2009 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2010 enum built_in_function fcode)
2012 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2013 tree dest, size, len, fn, fmt, flag;
2014 const char *fmt_str;
2016 /* Verify the required arguments in the original call. */
2017 if (gimple_call_num_args (stmt) < 5)
2020 dest = gimple_call_arg (stmt, 0);
2021 len = gimple_call_arg (stmt, 1);
2022 flag = gimple_call_arg (stmt, 2);
2023 size = gimple_call_arg (stmt, 3);
2024 fmt = gimple_call_arg (stmt, 4);
2026 if (! tree_fits_uhwi_p (size))
2029 if (! integer_all_onesp (size))
2031 tree maxlen = get_maxval_strlen (len, 2);
2032 if (! tree_fits_uhwi_p (len))
2034 /* If LEN is not constant, try MAXLEN too.
2035 For MAXLEN only allow optimizing into non-_ocs function
2036 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2037 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2043 if (tree_int_cst_lt (size, maxlen))
2047 if (!init_target_chars ())
2050 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2051 or if format doesn't contain % chars or is "%s". */
2052 if (! integer_zerop (flag))
2054 fmt_str = c_getstr (fmt);
2055 if (fmt_str == NULL)
2057 if (strchr (fmt_str, target_percent) != NULL
2058 && strcmp (fmt_str, target_percent_s))
2062 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2064 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2065 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2069 /* Replace the called function and the first 5 arguments by 3, retaining
2070 trailing varargs. */
2071 gimple_call_set_fndecl (stmt, fn);
2072 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2073 gimple_call_set_arg (stmt, 0, dest);
2074 gimple_call_set_arg (stmt, 1, len);
2075 gimple_call_set_arg (stmt, 2, fmt);
2076 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2077 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2078 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2083 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2084 Return NULL_TREE if a normal call should be emitted rather than
2085 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2086 or BUILT_IN_VSPRINTF_CHK. */
2089 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2090 enum built_in_function fcode)
2092 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2093 tree dest, size, len, fn, fmt, flag;
2094 const char *fmt_str;
2095 unsigned nargs = gimple_call_num_args (stmt);
2097 /* Verify the required arguments in the original call. */
2100 dest = gimple_call_arg (stmt, 0);
2101 flag = gimple_call_arg (stmt, 1);
2102 size = gimple_call_arg (stmt, 2);
2103 fmt = gimple_call_arg (stmt, 3);
2105 if (! tree_fits_uhwi_p (size))
2110 if (!init_target_chars ())
2113 /* Check whether the format is a literal string constant. */
2114 fmt_str = c_getstr (fmt);
2115 if (fmt_str != NULL)
2117 /* If the format doesn't contain % args or %%, we know the size. */
2118 if (strchr (fmt_str, target_percent) == 0)
2120 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2121 len = build_int_cstu (size_type_node, strlen (fmt_str));
2123 /* If the format is "%s" and first ... argument is a string literal,
2124 we know the size too. */
2125 else if (fcode == BUILT_IN_SPRINTF_CHK
2126 && strcmp (fmt_str, target_percent_s) == 0)
2132 arg = gimple_call_arg (stmt, 4);
2133 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2135 len = c_strlen (arg, 1);
2136 if (! len || ! tree_fits_uhwi_p (len))
2143 if (! integer_all_onesp (size))
2145 if (! len || ! tree_int_cst_lt (len, size))
2149 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2150 or if format doesn't contain % chars or is "%s". */
2151 if (! integer_zerop (flag))
2153 if (fmt_str == NULL)
2155 if (strchr (fmt_str, target_percent) != NULL
2156 && strcmp (fmt_str, target_percent_s))
2160 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2161 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2162 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2166 /* Replace the called function and the first 4 arguments by 2, retaining
2167 trailing varargs. */
2168 gimple_call_set_fndecl (stmt, fn);
2169 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2170 gimple_call_set_arg (stmt, 0, dest);
2171 gimple_call_set_arg (stmt, 1, fmt);
2172 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2173 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2174 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2179 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2180 ORIG may be null if this is a 2-argument call. We don't attempt to
2181 simplify calls with more than 3 arguments.
2183 Return true if any simplification was made, false otherwise. */
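/* Illustrative examples (not in the original sources) of the two cases
   handled below:
     sprintf (buf, "hello")    ->  strcpy (buf, "hello"), lhs (if used) set to 5
     sprintf (buf, "%s", str)  ->  strcpy (buf, str), lhs folded only when the
                                   length of str is known. */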
2188 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2190 gimple *stmt = gsi_stmt (*gsi);
2191 tree dest = gimple_call_arg (stmt, 0);
2192 tree fmt = gimple_call_arg (stmt, 1);
2193 tree orig = NULL_TREE;
2194 const char *fmt_str = NULL;
2196 /* Verify the required arguments in the original call. We deal with two
2197 types of sprintf() calls: 'sprintf (str, fmt)' and
2198 'sprintf (dest, "%s", orig)'. */
2199 if (gimple_call_num_args (stmt) > 3)
2202 if (gimple_call_num_args (stmt) == 3)
2203 orig = gimple_call_arg (stmt, 2);
2205 /* Check whether the format is a literal string constant. */
2206 fmt_str = c_getstr (fmt);
2207 if (fmt_str == NULL)
2210 if (!init_target_chars ())
2213 /* If the format doesn't contain % args or %%, use strcpy. */
2214 if (strchr (fmt_str, target_percent) == NULL)
2216 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2221 /* Don't optimize sprintf (buf, "abc", ptr++). */
2225 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2226 'format' is known to contain no % formats. */
2227 gimple_seq stmts = NULL;
2228 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2229 gimple_seq_add_stmt_without_update (&stmts, repl);
2230 if (gimple_call_lhs (stmt))
2232 repl = gimple_build_assign (gimple_call_lhs (stmt),
2233 build_int_cst (integer_type_node,
2235 gimple_seq_add_stmt_without_update (&stmts, repl);
2236 gsi_replace_with_seq_vops (gsi, stmts);
2237 /* gsi now points at the assignment to the lhs, get a
2238 stmt iterator to the strcpy call.
2239 ??? We can't use gsi_for_stmt as that doesn't work when the
2240 CFG isn't built yet. */
2241 gimple_stmt_iterator gsi2 = *gsi;
2247 gsi_replace_with_seq_vops (gsi, stmts);
2253 /* If the format is "%s", use strcpy if the result isn't used. */
2254 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2257 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2262 /* Don't crash on sprintf (str1, "%s"). */
2266 tree orig_len = NULL_TREE;
2267 if (gimple_call_lhs (stmt))
2269 orig_len = get_maxval_strlen (orig, 0);
2274 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2275 gimple_seq stmts = NULL;
2276 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2277 gimple_seq_add_stmt_without_update (&stmts, repl);
2278 if (gimple_call_lhs (stmt))
2280 if (!useless_type_conversion_p (integer_type_node,
2281 TREE_TYPE (orig_len)))
2282 orig_len = fold_convert (integer_type_node, orig_len);
2283 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2284 gimple_seq_add_stmt_without_update (&stmts, repl);
2285 gsi_replace_with_seq_vops (gsi, stmts);
2286 /* gsi now points at the assignment to the lhs, get a
2287 stmt iterator to the strcpy call.
2288 ??? We can't use gsi_for_stmt as that doesn't work when the
2289 CFG isn't built yet. */
2290 gimple_stmt_iterator gsi2 = *gsi;
2296 gsi_replace_with_seq_vops (gsi, stmts);
2304 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2305 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2306 attempt to simplify calls with more than 4 arguments.
2308 Return true if any simplification was made, false otherwise. */
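/* Illustrative examples (not in the original sources) of the cases handled
   below, with the destination size known to be 32:
     snprintf (buf, 32, "abc")      ->  strcpy (buf, "abc")
     snprintf (buf, 32, "%s", str)  ->  strcpy (buf, str), only when the
                                        length of str is known to be < 32. */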
2313 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2315 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2316 tree dest = gimple_call_arg (stmt, 0);
2317 tree destsize = gimple_call_arg (stmt, 1);
2318 tree fmt = gimple_call_arg (stmt, 2);
2319 tree orig = NULL_TREE;
2320 const char *fmt_str = NULL;
2322 if (gimple_call_num_args (stmt) > 4)
2325 if (gimple_call_num_args (stmt) == 4)
2326 orig = gimple_call_arg (stmt, 3);
2328 if (!tree_fits_uhwi_p (destsize))
2330 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2332 /* Check whether the format is a literal string constant. */
2333 fmt_str = c_getstr (fmt);
2334 if (fmt_str == NULL)
2337 if (!init_target_chars ())
2340 /* If the format doesn't contain % args or %%, use strcpy. */
2341 if (strchr (fmt_str, target_percent) == NULL)
2343 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2347 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2351 /* We could expand this as
2352 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2354 or to memcpy (str, fmt_with_nul_at_cstm1, cst);
2355 but in the former case that might increase code size
2356 and in the latter case grow .rodata section too much. So punt for now. */
2358 size_t len = strlen (fmt_str);
2362 gimple_seq stmts = NULL;
2363 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2364 gimple_seq_add_stmt_without_update (&stmts, repl);
2365 if (gimple_call_lhs (stmt))
2367 repl = gimple_build_assign (gimple_call_lhs (stmt),
2368 build_int_cst (integer_type_node, len));
2369 gimple_seq_add_stmt_without_update (&stmts, repl);
2370 gsi_replace_with_seq_vops (gsi, stmts);
2371 /* gsi now points at the assignment to the lhs, get a
2372 stmt iterator to the strcpy call.
2373 ??? We can't use gsi_for_stmt as that doesn't work when the
2374 CFG isn't built yet. */
2375 gimple_stmt_iterator gsi2 = *gsi;
2381 gsi_replace_with_seq_vops (gsi, stmts);
2387 /* If the format is "%s", use strcpy if the result isn't used. */
2388 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2390 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2394 /* Don't crash on snprintf (str1, cst, "%s"). */
2398 tree orig_len = get_maxval_strlen (orig, 0);
2399 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2402 /* We could expand this as
2403 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2405 or to memcpy (str1, str2_with_nul_at_cstm1, cst);
2406 but in the former case that might increase code size
2407 and in the latter case grow .rodata section too much. So punt for now. */
2409 if (compare_tree_int (orig_len, destlen) >= 0)
2412 /* Convert snprintf (str1, cst, "%s", str2) into
2413 strcpy (str1, str2) if strlen (str2) < cst. */
2414 gimple_seq stmts = NULL;
2415 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2416 gimple_seq_add_stmt_without_update (&stmts, repl);
2417 if (gimple_call_lhs (stmt))
2419 if (!useless_type_conversion_p (integer_type_node,
2420 TREE_TYPE (orig_len)))
2421 orig_len = fold_convert (integer_type_node, orig_len);
2422 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2423 gimple_seq_add_stmt_without_update (&stmts, repl);
2424 gsi_replace_with_seq_vops (gsi, stmts);
2425 /* gsi now points at the assignment to the lhs, get a
2426 stmt iterator to the strcpy call.
2427 ??? We can't use gsi_for_stmt as that doesn't work when the
2428 CFG isn't built yet. */
2429 gimple_stmt_iterator gsi2 = *gsi;
2435 gsi_replace_with_seq_vops (gsi, stmts);
2443 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
2444 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2445 more than 3 arguments, and ARG may be null in the 2-argument case.
2447 Return true if any simplification was made, false otherwise. FCODE is
2448 the BUILT_IN_* code of the function to be simplified. */
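/* Illustrative examples (not in the original sources), applied only when
   the call's return value is unused:
     fprintf (fp, "hello")    ->  fputs ("hello", fp)
     fprintf (fp, "%s", str)  ->  fputs (str, fp)
     fprintf (fp, "%c", c)    ->  fputc (c, fp) */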
2452 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2453 tree fp, tree fmt, tree arg,
2454 enum built_in_function fcode)
2456 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2457 tree fn_fputc, fn_fputs;
2458 const char *fmt_str = NULL;
2460 /* If the return value is used, don't do the transformation. */
2461 if (gimple_call_lhs (stmt) != NULL_TREE)
2464 /* Check whether the format is a literal string constant. */
2465 fmt_str = c_getstr (fmt);
2466 if (fmt_str == NULL)
2469 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2471 /* If we're using an unlocked function, assume the other
2472 unlocked functions exist explicitly. */
2473 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2474 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2478 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2479 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2482 if (!init_target_chars ())
2485 /* If the format doesn't contain % args or %%, use fputs. */
2486 if (strchr (fmt_str, target_percent) == NULL)
2488 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2492 /* If the format specifier was "", fprintf does nothing. */
2493 if (fmt_str[0] == '\0')
2495 replace_call_with_value (gsi, NULL_TREE);
2499 /* When "string" doesn't contain %, replace all cases of
2500 fprintf (fp, string) with fputs (string, fp). The fputs
2501 builtin will take care of special cases like length == 1. */
2504 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2505 replace_call_with_call_and_fold (gsi, repl);
2510 /* The other optimizations can be done only on the non-va_list variants. */
2511 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2514 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2515 else if (strcmp (fmt_str, target_percent_s) == 0)
2517 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2521 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2522 replace_call_with_call_and_fold (gsi, repl);
2527 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2528 else if (strcmp (fmt_str, target_percent_c) == 0)
2531 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2535 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2536 replace_call_with_call_and_fold (gsi, repl);
2544 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2545 FMT and ARG are the arguments to the call; we don't fold cases with
2546 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2548 Return true if any simplification was made, false otherwise. FCODE is
2549 the BUILT_IN_* code of the function to be simplified. */
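/* Illustrative examples (not in the original sources), applied only when
   the call's return value is unused:
     printf ("hello\n")    ->  puts ("hello")
     printf ("%s\n", str)  ->  puts (str)
     printf ("%c", c)      ->  putchar (c) */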
2553 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2554 tree arg, enum built_in_function fcode)
2556 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2557 tree fn_putchar, fn_puts, newarg;
2558 const char *fmt_str = NULL;
2560 /* If the return value is used, don't do the transformation. */
2561 if (gimple_call_lhs (stmt) != NULL_TREE)
2564 /* Check whether the format is a literal string constant. */
2565 fmt_str = c_getstr (fmt);
2566 if (fmt_str == NULL)
2569 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2571 /* If we're using an unlocked function, assume the other
2572 unlocked functions exist explicitly. */
2573 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2574 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2578 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2579 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2582 if (!init_target_chars ())
2585 if (strcmp (fmt_str, target_percent_s) == 0
2586 || strchr (fmt_str, target_percent) == NULL)
2590 if (strcmp (fmt_str, target_percent_s) == 0)
2592 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2595 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2598 str = c_getstr (arg);
2604 /* The format specifier doesn't contain any '%' characters. */
2605 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2611 /* If the string was "", printf does nothing. */
2614 replace_call_with_value (gsi, NULL_TREE);
2618 /* If the string has length of 1, call putchar. */
2621 /* Given printf("c"), (where c is any one character,)
2622 convert "c"[0] to an int and pass that to the replacement function. */
2624 newarg = build_int_cst (integer_type_node, str[0]);
2627 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2628 replace_call_with_call_and_fold (gsi, repl);
2634 /* If the string was "string\n", call puts("string"). */
2635 size_t len = strlen (str);
2636 if ((unsigned char)str[len - 1] == target_newline
2637 && (size_t) (int) len == len
2641 tree offset_node, string_cst;
2643 /* Create a NUL-terminated string that's one char shorter
2644 than the original, stripping off the trailing '\n'. */
2645 newarg = build_string_literal (len, str);
2646 string_cst = string_constant (newarg, &offset_node);
2647 gcc_checking_assert (string_cst
2648 && (TREE_STRING_LENGTH (string_cst)
2650 && integer_zerop (offset_node)
2652 TREE_STRING_POINTER (string_cst)[len - 1]
2654 /* build_string_literal creates a new STRING_CST,
2655 modify it in place to avoid double copying. */
2656 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2657 newstr[len - 1] = '\0';
2660 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2661 replace_call_with_call_and_fold (gsi, repl);
2666 /* We'd like to arrange to call fputs(string,stdout) here,
2667 but we need stdout and don't have a way to get it yet. */
2672 /* The other optimizations can be done only on the non-va_list variants. */
2673 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2676 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2677 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2679 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2683 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2684 replace_call_with_call_and_fold (gsi, repl);
2689 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2690 else if (strcmp (fmt_str, target_percent_c) == 0)
2692 if (!arg || ! useless_type_conversion_p (integer_type_node,
2697 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2698 replace_call_with_call_and_fold (gsi, repl);
2708 /* Fold a call to __builtin_strlen when the length of the argument is known. */
2711 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2713 gimple *stmt = gsi_stmt (*gsi);
2714 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2717 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2718 replace_call_with_value (gsi, len);
2722 /* Fold a call to __builtin_acc_on_device. */
2725 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2727 /* Defer folding until we know which compiler we're in. */
2728 if (symtab->state != EXPANSION)
2731 unsigned val_host = GOMP_DEVICE_HOST;
2732 unsigned val_dev = GOMP_DEVICE_NONE;
2734 #ifdef ACCEL_COMPILER
2735 val_host = GOMP_DEVICE_NOT_HOST;
2736 val_dev = ACCEL_COMPILER_acc_device;
2739 location_t loc = gimple_location (gsi_stmt (*gsi));
2741 tree host_eq = make_ssa_name (boolean_type_node);
2742 gimple *host_ass = gimple_build_assign
2743 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2744 gimple_set_location (host_ass, loc);
2745 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2747 tree dev_eq = make_ssa_name (boolean_type_node);
2748 gimple *dev_ass = gimple_build_assign
2749 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2750 gimple_set_location (dev_ass, loc);
2751 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2753 tree result = make_ssa_name (boolean_type_node);
2754 gimple *result_ass = gimple_build_assign
2755 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2756 gimple_set_location (result_ass, loc);
2757 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2759 replace_call_with_value (gsi, result);
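/* The builtin call has thus been lowered to the boolean computation
     host_eq = arg0 == val_host;
     dev_eq  = arg0 == val_dev;
     result  = host_eq | dev_eq;
   and RESULT replaces the original call. */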
2764 /* Fold the non-target builtin at *GSI and return whether any simplification was made. */
2768 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2770 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2771 tree callee = gimple_call_fndecl (stmt);
2773 /* Give up for always_inline inline builtins until they are inlinable. */
2775 if (avoid_folding_inline_builtin (callee))
2778 unsigned n = gimple_call_num_args (stmt);
2779 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2782 case BUILT_IN_BZERO:
2783 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2784 gimple_call_arg (stmt, 1));
2785 case BUILT_IN_MEMSET:
2786 return gimple_fold_builtin_memset (gsi,
2787 gimple_call_arg (stmt, 1),
2788 gimple_call_arg (stmt, 2));
2789 case BUILT_IN_BCOPY:
2790 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2791 gimple_call_arg (stmt, 0), 3);
2792 case BUILT_IN_MEMCPY:
2793 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2794 gimple_call_arg (stmt, 1), 0);
2795 case BUILT_IN_MEMPCPY:
2796 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2797 gimple_call_arg (stmt, 1), 1);
2798 case BUILT_IN_MEMMOVE:
2799 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2800 gimple_call_arg (stmt, 1), 3);
2801 case BUILT_IN_SPRINTF_CHK:
2802 case BUILT_IN_VSPRINTF_CHK:
2803 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2804 case BUILT_IN_STRCAT_CHK:
2805 return gimple_fold_builtin_strcat_chk (gsi);
2806 case BUILT_IN_STRNCAT_CHK:
2807 return gimple_fold_builtin_strncat_chk (gsi);
2808 case BUILT_IN_STRLEN:
2809 return gimple_fold_builtin_strlen (gsi);
2810 case BUILT_IN_STRCPY:
2811 return gimple_fold_builtin_strcpy (gsi,
2812 gimple_call_arg (stmt, 0),
2813 gimple_call_arg (stmt, 1));
2814 case BUILT_IN_STRNCPY:
2815 return gimple_fold_builtin_strncpy (gsi,
2816 gimple_call_arg (stmt, 0),
2817 gimple_call_arg (stmt, 1),
2818 gimple_call_arg (stmt, 2));
2819 case BUILT_IN_STRCAT:
2820 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2821 gimple_call_arg (stmt, 1));
2822 case BUILT_IN_STRNCAT:
2823 return gimple_fold_builtin_strncat (gsi);
2824 case BUILT_IN_FPUTS:
2825 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2826 gimple_call_arg (stmt, 1), false);
2827 case BUILT_IN_FPUTS_UNLOCKED:
2828 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2829 gimple_call_arg (stmt, 1), true);
2830 case BUILT_IN_MEMCPY_CHK:
2831 case BUILT_IN_MEMPCPY_CHK:
2832 case BUILT_IN_MEMMOVE_CHK:
2833 case BUILT_IN_MEMSET_CHK:
2834 return gimple_fold_builtin_memory_chk (gsi,
2835 gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1),
2837 gimple_call_arg (stmt, 2),
2838 gimple_call_arg (stmt, 3),
2840 case BUILT_IN_STPCPY:
2841 return gimple_fold_builtin_stpcpy (gsi);
2842 case BUILT_IN_STRCPY_CHK:
2843 case BUILT_IN_STPCPY_CHK:
2844 return gimple_fold_builtin_stxcpy_chk (gsi,
2845 gimple_call_arg (stmt, 0),
2846 gimple_call_arg (stmt, 1),
2847 gimple_call_arg (stmt, 2),
2849 case BUILT_IN_STRNCPY_CHK:
2850 case BUILT_IN_STPNCPY_CHK:
2851 return gimple_fold_builtin_stxncpy_chk (gsi,
2852 gimple_call_arg (stmt, 0),
2853 gimple_call_arg (stmt, 1),
2854 gimple_call_arg (stmt, 2),
2855 gimple_call_arg (stmt, 3),
2857 case BUILT_IN_SNPRINTF_CHK:
2858 case BUILT_IN_VSNPRINTF_CHK:
2859 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2860 case BUILT_IN_SNPRINTF:
2861 return gimple_fold_builtin_snprintf (gsi);
2862 case BUILT_IN_SPRINTF:
2863 return gimple_fold_builtin_sprintf (gsi);
2864 case BUILT_IN_FPRINTF:
2865 case BUILT_IN_FPRINTF_UNLOCKED:
2866 case BUILT_IN_VFPRINTF:
2867 if (n == 2 || n == 3)
2868 return gimple_fold_builtin_fprintf (gsi,
2869 gimple_call_arg (stmt, 0),
2870 gimple_call_arg (stmt, 1),
2872 ? gimple_call_arg (stmt, 2)
2876 case BUILT_IN_FPRINTF_CHK:
2877 case BUILT_IN_VFPRINTF_CHK:
2878 if (n == 3 || n == 4)
2879 return gimple_fold_builtin_fprintf (gsi,
2880 gimple_call_arg (stmt, 0),
2881 gimple_call_arg (stmt, 2),
2883 ? gimple_call_arg (stmt, 3)
2887 case BUILT_IN_PRINTF:
2888 case BUILT_IN_PRINTF_UNLOCKED:
2889 case BUILT_IN_VPRINTF:
2890 if (n == 1 || n == 2)
2891 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2893 ? gimple_call_arg (stmt, 1)
2894 : NULL_TREE, fcode);
2896 case BUILT_IN_PRINTF_CHK:
2897 case BUILT_IN_VPRINTF_CHK:
2898 if (n == 2 || n == 3)
2899 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2901 ? gimple_call_arg (stmt, 2)
2902 : NULL_TREE, fcode);
2904 case BUILT_IN_ACC_ON_DEVICE:
2905 return gimple_fold_builtin_acc_on_device (gsi,
2906 gimple_call_arg (stmt, 0));
2910 /* Try the generic builtin folder. */
2911 bool ignore = (gimple_call_lhs (stmt) == NULL);
2912 tree result = fold_call_stmt (stmt, ignore);
2916 STRIP_NOPS (result);
2918 result = fold_convert (gimple_call_return_type (stmt), result);
2919 if (!update_call_from_tree (gsi, result))
2920 gimplify_and_update_call_from_tree (gsi, result);
2927 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
2928 doesn't fit into TYPE. The overflow test is performed regardless of
2929 -fwrapv, and even for unsigned types. */
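/* Worked example (illustrative, not in the original sources): for an
   8-bit unsigned TYPE, 200 + 100 = 300 needs 9 value bits and therefore
   overflows, so the function returns true; 0 - 1 is negative and thus
   also overflows any unsigned type. */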
2932 arith_overflowed_p (enum tree_code code, const_tree type,
2933 const_tree arg0, const_tree arg1)
2935 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2936 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2938 widest2_int warg0 = widest2_int_cst (arg0);
2939 widest2_int warg1 = widest2_int_cst (arg1);
2943 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2944 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2945 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2946 default: gcc_unreachable ();
2948 signop sign = TYPE_SIGN (type);
2949 if (sign == UNSIGNED && wi::neg_p (wres))
2951 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2954 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2955 The statement may be replaced by another statement, e.g., if the call
2956 simplifies to a constant value. Return true if any changes were made.
2957 It is assumed that the operands have been previously folded. */
2960 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2962 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2964 bool changed = false;
2967 /* Fold *& in call arguments. */
2968 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2969 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2971 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2974 gimple_call_set_arg (stmt, i, tmp);
2979 /* Check for virtual calls that became direct calls. */
2980 callee = gimple_call_fn (stmt);
2981 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2983 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2985 if (dump_file && virtual_method_call_p (callee)
2986 && !possible_polymorphic_call_target_p
2987 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2988 (OBJ_TYPE_REF_EXPR (callee)))))
2991 "Type inheritance inconsistent devirtualization of ");
2992 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2993 fprintf (dump_file, " to ");
2994 print_generic_expr (dump_file, callee, TDF_SLIM);
2995 fprintf (dump_file, "\n");
2998 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3001 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3004 vec <cgraph_node *>targets
3005 = possible_polymorphic_call_targets (callee, stmt, &final);
3006 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3008 tree lhs = gimple_call_lhs (stmt);
3009 if (dump_enabled_p ())
3011 location_t loc = gimple_location_safe (stmt);
3012 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3013 "folding virtual function call to %s\n",
3014 targets.length () == 1
3015 ? targets[0]->name ()
3016 : "__builtin_unreachable");
3018 if (targets.length () == 1)
3020 gimple_call_set_fndecl (stmt, targets[0]->decl);
3022 /* If the call becomes noreturn, remove the lhs. */
3023 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3025 if (TREE_CODE (lhs) == SSA_NAME)
3027 tree var = create_tmp_var (TREE_TYPE (lhs));
3028 tree def = get_or_create_ssa_default_def (cfun, var);
3029 gimple *new_stmt = gimple_build_assign (lhs, def);
3030 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3032 gimple_call_set_lhs (stmt, NULL_TREE);
3034 maybe_remove_unused_call_args (cfun, stmt);
3038 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3039 gimple *new_stmt = gimple_build_call (fndecl, 0);
3040 gimple_set_location (new_stmt, gimple_location (stmt));
3041 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3043 tree var = create_tmp_var (TREE_TYPE (lhs));
3044 tree def = get_or_create_ssa_default_def (cfun, var);
3046 /* To satisfy the condition for
3047 cgraph_update_edges_for_call_stmt_node,
3048 we need to preserve the GIMPLE_CALL statement
3049 at the position of the GSI iterator. */
3050 update_call_from_tree (gsi, def);
3051 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3055 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3056 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3057 gsi_replace (gsi, new_stmt, false);
3065 /* Check for indirect calls that became direct calls, and thus
3066 no longer require a static chain. */
3067 if (gimple_call_chain (stmt))
3069 tree fn = gimple_call_fndecl (stmt);
3070 if (fn && !DECL_STATIC_CHAIN (fn))
3072 gimple_call_set_chain (stmt, NULL);
3077 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3080 gimple_call_set_chain (stmt, tmp);
3089 /* Check for builtins that CCP can handle using information not
3090 available in the generic fold routines. */
3091 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3093 if (gimple_fold_builtin (gsi))
3096 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3098 changed |= targetm.gimple_fold_builtin (gsi);
3100 else if (gimple_call_internal_p (stmt))
3102 enum tree_code subcode = ERROR_MARK;
3103 tree result = NULL_TREE;
3104 bool cplx_result = false;
3105 tree overflow = NULL_TREE;
3106 switch (gimple_call_internal_fn (stmt))
3108 case IFN_BUILTIN_EXPECT:
3109 result = fold_builtin_expect (gimple_location (stmt),
3110 gimple_call_arg (stmt, 0),
3111 gimple_call_arg (stmt, 1),
3112 gimple_call_arg (stmt, 2));
3114 case IFN_UBSAN_OBJECT_SIZE:
3115 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3116 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3117 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3118 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3119 gimple_call_arg (stmt, 2))))
3121 gsi_replace (gsi, gimple_build_nop (), false);
3122 unlink_stmt_vdef (stmt);
3123 release_defs (stmt);
3127 case IFN_UBSAN_CHECK_ADD:
3128 subcode = PLUS_EXPR;
3130 case IFN_UBSAN_CHECK_SUB:
3131 subcode = MINUS_EXPR;
3133 case IFN_UBSAN_CHECK_MUL:
3134 subcode = MULT_EXPR;
3136 case IFN_ADD_OVERFLOW:
3137 subcode = PLUS_EXPR;
3140 case IFN_SUB_OVERFLOW:
3141 subcode = MINUS_EXPR;
3144 case IFN_MUL_OVERFLOW:
3145 subcode = MULT_EXPR;
3151 if (subcode != ERROR_MARK)
3153 tree arg0 = gimple_call_arg (stmt, 0);
3154 tree arg1 = gimple_call_arg (stmt, 1);
3155 tree type = TREE_TYPE (arg0);
3158 tree lhs = gimple_call_lhs (stmt);
3159 if (lhs == NULL_TREE)
3162 type = TREE_TYPE (TREE_TYPE (lhs));
3164 if (type == NULL_TREE)
3166 /* x = y + 0; x = y - 0; x = y * 0; */
3167 else if (integer_zerop (arg1))
3168 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3169 /* x = 0 + y; x = 0 * y; */
3170 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3171 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3173 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3174 result = integer_zero_node;
3175 /* x = y * 1; x = 1 * y; */
3176 else if (subcode == MULT_EXPR && integer_onep (arg1))
3178 else if (subcode == MULT_EXPR && integer_onep (arg0))
3180 else if (TREE_CODE (arg0) == INTEGER_CST
3181 && TREE_CODE (arg1) == INTEGER_CST)
3184 result = int_const_binop (subcode, fold_convert (type, arg0),
3185 fold_convert (type, arg1));
3187 result = int_const_binop (subcode, arg0, arg1);
3188 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3191 overflow = build_one_cst (type);
3198 if (result == integer_zero_node)
3199 result = build_zero_cst (type);
3200 else if (cplx_result && TREE_TYPE (result) != type)
3202 if (TREE_CODE (result) == INTEGER_CST)
3204 if (arith_overflowed_p (PLUS_EXPR, type, result,
3206 overflow = build_one_cst (type);
3208 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3209 && TYPE_UNSIGNED (type))
3210 || (TYPE_PRECISION (type)
3211 < (TYPE_PRECISION (TREE_TYPE (result))
3212 + (TYPE_UNSIGNED (TREE_TYPE (result))
3213 && !TYPE_UNSIGNED (type)))))
3216 result = fold_convert (type, result);
3223 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3224 result = drop_tree_overflow (result);
3227 if (overflow == NULL_TREE)
3228 overflow = build_zero_cst (TREE_TYPE (result));
3229 tree ctype = build_complex_type (TREE_TYPE (result));
3230 if (TREE_CODE (result) == INTEGER_CST
3231 && TREE_CODE (overflow) == INTEGER_CST)
3232 result = build_complex (ctype, result, overflow);
3234 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3235 ctype, result, overflow);
3237 if (!update_call_from_tree (gsi, result))
3238 gimplify_and_update_call_from_tree (gsi, result);
3250 /* Return true if NAME has a use on STMT. */
3250 has_use_on_stmt (tree name, gimple *stmt)
3252 imm_use_iterator iter;
3253 use_operand_p use_p;
3254 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3255 if (USE_STMT (use_p) == stmt)
3260 /* Worker for fold_stmt_1 dispatch to pattern based folding with gimple_simplify.
3263 Replaces *GSI with the simplification result in RCODE and OPS
3264 and the associated statements in *SEQ. Does the replacement
3265 according to INPLACE and returns true if the operation succeeded. */
3268 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3269 code_helper rcode, tree *ops,
3270 gimple_seq *seq, bool inplace)
3272 gimple *stmt = gsi_stmt (*gsi);
3274 /* Play safe and do not allow abnormals to be mentioned in
3275 newly created statements. See also maybe_push_res_to_seq.
3276 As an exception allow such uses if there was a use of the
3277 same SSA name on the old stmt. */
3278 if ((TREE_CODE (ops[0]) == SSA_NAME
3279 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3280 && !has_use_on_stmt (ops[0], stmt))
3282 && TREE_CODE (ops[1]) == SSA_NAME
3283 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3284 && !has_use_on_stmt (ops[1], stmt))
3286 && TREE_CODE (ops[2]) == SSA_NAME
3287 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3288 && !has_use_on_stmt (ops[2], stmt)))
3291 /* Don't insert new statements when INPLACE is true, even if we could
3292 reuse STMT for the final statement. */
3293 if (inplace && !gimple_seq_empty_p (*seq))
3296 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3298 gcc_assert (rcode.is_tree_code ());
3299 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3300 /* A GIMPLE_COND's condition may not throw. */
3301 && (!flag_exceptions
3302 || !cfun->can_throw_non_call_exceptions
3303 || !operation_could_trap_p (rcode,
3304 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3306 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3307 else if (rcode == SSA_NAME)
3308 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3309 build_zero_cst (TREE_TYPE (ops[0])));
3310 else if (rcode == INTEGER_CST)
3312 if (integer_zerop (ops[0]))
3313 gimple_cond_make_false (cond_stmt);
3315 gimple_cond_make_true (cond_stmt);
3319 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3323 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3324 build_zero_cst (TREE_TYPE (res)));
3328 if (dump_file && (dump_flags & TDF_DETAILS))
3330 fprintf (dump_file, "gimple_simplified to ");
3331 if (!gimple_seq_empty_p (*seq))
3332 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3333 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3336 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3339 else if (is_gimple_assign (stmt)
3340 && rcode.is_tree_code ())
3343 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3345 maybe_build_generic_op (rcode,
3346 TREE_TYPE (gimple_assign_lhs (stmt)),
3347 &ops[0], ops[1], ops[2]);
3348 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3349 if (dump_file && (dump_flags & TDF_DETAILS))
3351 fprintf (dump_file, "gimple_simplified to ");
3352 if (!gimple_seq_empty_p (*seq))
3353 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3354 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3357 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3361 else if (rcode.is_fn_code ()
3362 && gimple_call_builtin_p (stmt, rcode))
3365 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3367 gcc_assert (ops[i] != NULL_TREE);
3368 gimple_call_set_arg (stmt, i, ops[i]);
3371 gcc_assert (ops[i] == NULL_TREE);
3372 if (dump_file && (dump_flags & TDF_DETAILS))
3374 fprintf (dump_file, "gimple_simplified to ");
3375 if (!gimple_seq_empty_p (*seq))
3376 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3377 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3379 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3384 if (gimple_has_lhs (stmt))
3386 tree lhs = gimple_get_lhs (stmt);
3387 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3390 if (dump_file && (dump_flags & TDF_DETAILS))
3392 fprintf (dump_file, "gimple_simplified to ");
3393 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3395 gsi_replace_with_seq_vops (gsi, *seq);
3405 /* Canonicalize a MEM_REF's invariant address operand after propagation. */
3408 maybe_canonicalize_mem_ref_addr (tree *t)
3412 if (TREE_CODE (*t) == ADDR_EXPR)
3413 t = &TREE_OPERAND (*t, 0);
3415 while (handled_component_p (*t))
3416 t = &TREE_OPERAND (*t, 0);
3418 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
3419 of invariant addresses into an SSA name MEM_REF address. */
3420 if (TREE_CODE (*t) == MEM_REF
3421 || TREE_CODE (*t) == TARGET_MEM_REF)
3423 tree addr = TREE_OPERAND (*t, 0);
3424 if (TREE_CODE (addr) == ADDR_EXPR
3425 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3426 || handled_component_p (TREE_OPERAND (addr, 0))))
3429 HOST_WIDE_INT coffset;
3430 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3435 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3436 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3437 TREE_OPERAND (*t, 1),
3438 size_int (coffset));
3441 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3442 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3445 /* Canonicalize back MEM_REFs to plain reference trees if the object
3446 accessed is a decl that has the same access semantics as the MEM_REF. */
3447 if (TREE_CODE (*t) == MEM_REF
3448 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3449 && integer_zerop (TREE_OPERAND (*t, 1))
3450 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3452 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3453 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3454 if (/* Same volatile qualification. */
3455 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3456 /* Same TBAA behavior with -fstrict-aliasing. */
3457 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3458 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3459 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3460 /* Same alignment. */
3461 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3462 /* We have to look out here to not drop a required conversion
3463 from the rhs to the lhs if *t appears on the lhs or vice-versa
3464 if it appears on the rhs. Thus require strict type compatibility. */
3466 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3468 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3473 /* Canonicalize TARGET_MEM_REF in particular with respect to
3474 the indexes becoming constant. */
3475 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3477 tree tem = maybe_fold_tmr (*t);
3488 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3489 distinguishes both cases. */
3492 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3494 bool changed = false;
3495 gimple *stmt = gsi_stmt (*gsi);
3498 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3500 ??? This shouldn't be done in generic folding but in the
3501 propagation helpers which also know whether an address was simplified.
3503 Also canonicalize operand order. */
3504 switch (gimple_code (stmt))
3507 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3509 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3510 if ((REFERENCE_CLASS_P (*rhs)
3511 || TREE_CODE (*rhs) == ADDR_EXPR)
3512 && maybe_canonicalize_mem_ref_addr (rhs))
3514 tree *lhs = gimple_assign_lhs_ptr (stmt);
3515 if (REFERENCE_CLASS_P (*lhs)
3516 && maybe_canonicalize_mem_ref_addr (lhs))
3521 /* Canonicalize operand order. */
3522 enum tree_code code = gimple_assign_rhs_code (stmt);
3523 if (TREE_CODE_CLASS (code) == tcc_comparison
3524 || commutative_tree_code (code)
3525 || commutative_ternary_tree_code (code))
3527 tree rhs1 = gimple_assign_rhs1 (stmt);
3528 tree rhs2 = gimple_assign_rhs2 (stmt);
3529 if (tree_swap_operands_p (rhs1, rhs2, false))
3531 gimple_assign_set_rhs1 (stmt, rhs2);
3532 gimple_assign_set_rhs2 (stmt, rhs1);
3533 if (TREE_CODE_CLASS (code) == tcc_comparison)
3534 gimple_assign_set_rhs_code (stmt,
3535 swap_tree_comparison (code));
3543 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3545 tree *arg = gimple_call_arg_ptr (stmt, i);
3546 if (REFERENCE_CLASS_P (*arg)
3547 && maybe_canonicalize_mem_ref_addr (arg))
3550 tree *lhs = gimple_call_lhs_ptr (stmt);
3552 && REFERENCE_CLASS_P (*lhs)
3553 && maybe_canonicalize_mem_ref_addr (lhs))
3559 gasm *asm_stmt = as_a <gasm *> (stmt);
3560 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3562 tree link = gimple_asm_output_op (asm_stmt, i);
3563 tree op = TREE_VALUE (link);
3564 if (REFERENCE_CLASS_P (op)
3565 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3568 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3570 tree link = gimple_asm_input_op (asm_stmt, i);
3571 tree op = TREE_VALUE (link);
3572 if ((REFERENCE_CLASS_P (op)
3573 || TREE_CODE (op) == ADDR_EXPR)
3574 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3580 if (gimple_debug_bind_p (stmt))
3582 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3584 && (REFERENCE_CLASS_P (*val)
3585 || TREE_CODE (*val) == ADDR_EXPR)
3586 && maybe_canonicalize_mem_ref_addr (val))
3592 /* Canonicalize operand order. */
3593 tree lhs = gimple_cond_lhs (stmt);
3594 tree rhs = gimple_cond_rhs (stmt);
3595 if (tree_swap_operands_p (lhs, rhs, false))
3597 gcond *gc = as_a <gcond *> (stmt);
3598 gimple_cond_set_lhs (gc, rhs);
3599 gimple_cond_set_rhs (gc, lhs);
3600 gimple_cond_set_code (gc,
3601 swap_tree_comparison (gimple_cond_code (gc)));
3608 /* Dispatch to pattern-based folding. */
3610 || is_gimple_assign (stmt)
3611 || gimple_code (stmt) == GIMPLE_COND)
3613 gimple_seq seq = NULL;
3616 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3617 valueize, valueize))
3619 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3622 gimple_seq_discard (seq);
3626 stmt = gsi_stmt (*gsi);
3628 /* Fold the main computation performed by the statement. */
3629 switch (gimple_code (stmt))
3633 /* Try to canonicalize for boolean-typed X the comparisons
3634 X == 0, X == 1, X != 0, and X != 1. */
3635 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3636 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3638 tree lhs = gimple_assign_lhs (stmt);
3639 tree op1 = gimple_assign_rhs1 (stmt);
3640 tree op2 = gimple_assign_rhs2 (stmt);
3641 tree type = TREE_TYPE (op1);
3643 /* Check whether the comparison operands are of the same boolean
3644 type as the result type.
3645 Check that the second operand is an integer constant with value one or zero. */
3647 if (TREE_CODE (op2) == INTEGER_CST
3648 && (integer_zerop (op2) || integer_onep (op2))
3649 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3651 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3652 bool is_logical_not = false;
3654 /* X == 0 and X != 1 is a logical-not of X
3655 X == 1 and X != 0 is X */
3656 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3657 || (cmp_code == NE_EXPR && integer_onep (op2)))
3658 is_logical_not = true;
3660 if (is_logical_not == false)
3661 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3662 /* Only for one-bit precision typed X is the transformation
3663 !X -> ~X valid. */
3664 else if (TYPE_PRECISION (type) == 1)
3665 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3666 /* Otherwise we use !X -> X ^ 1. */
3668 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3669 build_int_cst (type, 1));
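/* Summarizing the rewrites above (illustrative): for boolean-typed X,
     X == 1 and X != 0 become plain X,
     X == 0 and X != 1 become ~X for one-bit precision types
     and X ^ 1 otherwise. */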
3675 unsigned old_num_ops = gimple_num_ops (stmt);
3676 tree lhs = gimple_assign_lhs (stmt);
3677 tree new_rhs = fold_gimple_assign (gsi);
3679 && !useless_type_conversion_p (TREE_TYPE (lhs),
3680 TREE_TYPE (new_rhs)))
3681 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3684 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3686 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3693 changed |= gimple_fold_call (gsi, inplace);
3697 /* Fold *& in asm operands. */
3699 gasm *asm_stmt = as_a <gasm *> (stmt);
3701 const char **oconstraints;
3702 const char *constraint;
3703 bool allows_mem, allows_reg;
3705 noutputs = gimple_asm_noutputs (asm_stmt);
3706 oconstraints = XALLOCAVEC (const char *, noutputs);
3708 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3710 tree link = gimple_asm_output_op (asm_stmt, i);
3711 tree op = TREE_VALUE (link);
3713 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3714 if (REFERENCE_CLASS_P (op)
3715 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3717 TREE_VALUE (link) = op;
3721 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3723 tree link = gimple_asm_input_op (asm_stmt, i);
3724 tree op = TREE_VALUE (link);
3726 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3727 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3728 oconstraints, &allows_mem, &allows_reg);
3729 if (REFERENCE_CLASS_P (op)
3730 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3733 TREE_VALUE (link) = op;
3741 if (gimple_debug_bind_p (stmt))
3743 tree val = gimple_debug_bind_get_value (stmt);
3745 && REFERENCE_CLASS_P (val))
3747 tree tem = maybe_fold_reference (val, false);
3750 gimple_debug_bind_set_value (stmt, tem);
3755 && TREE_CODE (val) == ADDR_EXPR)
3757 tree ref = TREE_OPERAND (val, 0);
3758 tree tem = maybe_fold_reference (ref, false);
3761 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3762 gimple_debug_bind_set_value (stmt, tem);
3772 stmt = gsi_stmt (*gsi);
3774 /* Fold *& on the lhs. */
3775 if (gimple_has_lhs (stmt))
3777 tree lhs = gimple_get_lhs (stmt);
3778 if (lhs && REFERENCE_CLASS_P (lhs))
3780 tree new_lhs = maybe_fold_reference (lhs, true);
3783 gimple_set_lhs (stmt, new_lhs);
3792 /* Valueization callback that ends up not following SSA edges. */
3795 no_follow_ssa_edges (tree)
3800 /* Valueization callback that ends up following single-use SSA edges only. */
3803 follow_single_use_edges (tree val)
3805 if (TREE_CODE (val) == SSA_NAME
3806 && !has_single_use (val))
3811 /* Fold the statement pointed to by GSI. In some cases, this function may
3812 replace the whole statement with a new one. Returns true iff folding makes any changes.
3814 The statement pointed to by GSI should be in valid gimple form but may
3815 be in an unfolded state resulting from, for example, constant propagation,
3816 which can produce *&x = 0. */
3819 fold_stmt (gimple_stmt_iterator *gsi)
3821 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3825 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3827 return fold_stmt_1 (gsi, false, valueize);
3830 /* Perform the minimal folding on statement *GSI. Only operations like
3831 *&x created by constant propagation are handled. The statement cannot
3832 be replaced with a new one. Return true if the statement was
3833 changed, false otherwise.
3834 The statement *GSI should be in valid gimple form but may
3835 be in an unfolded state resulting from, for example, constant propagation,
3836 which can produce *&x = 0. */
3839 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3841 gimple *stmt = gsi_stmt (*gsi);
3842 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3843 gcc_assert (gsi_stmt (*gsi) == stmt);
3847 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3848 if EXPR is null or we don't know how.
3849 If non-null, the result always has boolean type. */
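/* For instance (illustrative): with INVERT false, a comparison a < b is
   rebuilt unchanged with boolean type; with INVERT true it becomes a >= b,
   a nonzero constant becomes boolean_false_node, and an SSA name X becomes
   the comparison X == 0. */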
3852 canonicalize_bool (tree expr, bool invert)
3858 if (integer_nonzerop (expr))
3859 return boolean_false_node;
3860 else if (integer_zerop (expr))
3861 return boolean_true_node;
3862 else if (TREE_CODE (expr) == SSA_NAME)
3863 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3864 build_int_cst (TREE_TYPE (expr), 0));
3865 else if (COMPARISON_CLASS_P (expr))
3866 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3868 TREE_OPERAND (expr, 0),
3869 TREE_OPERAND (expr, 1));
3875 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3877 if (integer_nonzerop (expr))
3878 return boolean_true_node;
3879 else if (integer_zerop (expr))
3880 return boolean_false_node;
3881 else if (TREE_CODE (expr) == SSA_NAME)
3882 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3883 build_int_cst (TREE_TYPE (expr), 0));
3884 else if (COMPARISON_CLASS_P (expr))
3885 return fold_build2 (TREE_CODE (expr),
3887 TREE_OPERAND (expr, 0),
3888 TREE_OPERAND (expr, 1));
3894 /* Check to see if a boolean expression EXPR is logically equivalent to the
3895 comparison (OP1 CODE OP2). Check for various identities involving SSA_NAMEs. */
3899 same_bool_comparison_p (const_tree expr, enum tree_code code,
3900 const_tree op1, const_tree op2)
3904 /* The obvious case. */
3905 if (TREE_CODE (expr) == code
3906 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3907 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3910 /* Check for comparing (name, name != 0) and the case where expr
3911 is an SSA_NAME with a definition matching the comparison. */
3912 if (TREE_CODE (expr) == SSA_NAME
3913 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3915 if (operand_equal_p (expr, op1, 0))
3916 return ((code == NE_EXPR && integer_zerop (op2))
3917 || (code == EQ_EXPR && integer_nonzerop (op2)));
3918 s = SSA_NAME_DEF_STMT (expr);
3919 if (is_gimple_assign (s)
3920 && gimple_assign_rhs_code (s) == code
3921 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3922 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3926 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3927 of name is a comparison, recurse. */
3928 if (TREE_CODE (op1) == SSA_NAME
3929 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3931 s = SSA_NAME_DEF_STMT (op1);
3932 if (is_gimple_assign (s)
3933 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3935 enum tree_code c = gimple_assign_rhs_code (s);
3936 if ((c == NE_EXPR && integer_zerop (op2))
3937 || (c == EQ_EXPR && integer_nonzerop (op2)))
3938 return same_bool_comparison_p (expr, c,
3939 gimple_assign_rhs1 (s),
3940 gimple_assign_rhs2 (s));
3941 if ((c == EQ_EXPR && integer_zerop (op2))
3942 || (c == NE_EXPR && integer_nonzerop (op2)))
3943 return same_bool_comparison_p (expr,
3944 invert_tree_comparison (c, false),
3945 gimple_assign_rhs1 (s),
3946 gimple_assign_rhs2 (s));
3952 /* Check to see if two boolean expressions OP1 and OP2 are logically equivalent. */
3956 same_bool_result_p (const_tree op1, const_tree op2)
3958 /* Simple cases first. */
3959 if (operand_equal_p (op1, op2, 0))
3962 /* Check the cases where at least one of the operands is a comparison.
3963 These are a bit smarter than operand_equal_p in that they apply some
3964 identities on SSA_NAMEs. */
3965 if (COMPARISON_CLASS_P (op2)
3966 && same_bool_comparison_p (op1, TREE_CODE (op2),
3967 TREE_OPERAND (op2, 0),
3968 TREE_OPERAND (op2, 1)))
3970 if (COMPARISON_CLASS_P (op1)
3971 && same_bool_comparison_p (op2, TREE_CODE (op1),
3972 TREE_OPERAND (op1, 0),
3973 TREE_OPERAND (op1, 1)))
3980 /* Forward declarations for some mutually recursive functions. */
3983 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3984 enum tree_code code2, tree op2a, tree op2b);
3986 and_var_with_comparison (tree var, bool invert,
3987 enum tree_code code2, tree op2a, tree op2b);
3989 and_var_with_comparison_1 (gimple *stmt,
3990 enum tree_code code2, tree op2a, tree op2b);
3992 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3993 enum tree_code code2, tree op2a, tree op2b);
3995 or_var_with_comparison (tree var, bool invert,
3996 enum tree_code code2, tree op2a, tree op2b);
3998 or_var_with_comparison_1 (gimple *stmt,
3999 enum tree_code code2, tree op2a, tree op2b);
4001 /* Helper function for and_comparisons_1: try to simplify the AND of the
4002 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4003 If INVERT is true, invert the value of VAR before doing the AND.
4004 Return NULL_TREE if we can't simplify this to a single expression. */
4007 and_var_with_comparison (tree var, bool invert,
4008 enum tree_code code2, tree op2a, tree op2b)
4011 gimple *stmt = SSA_NAME_DEF_STMT (var);
4013 /* We can only deal with variables whose definitions are assignments. */
4014 if (!is_gimple_assign (stmt))
4017 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4018 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4019 Then we only have to consider the simpler non-inverted cases. */
4021 t = or_var_with_comparison_1 (stmt,
4022 invert_tree_comparison (code2, false),
4025 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4026 return canonicalize_bool (t, invert);
4029 /* Try to simplify the AND of the ssa variable defined by the assignment
4030 STMT with the comparison specified by (OP2A CODE2 OP2B).
4031 Return NULL_TREE if we can't simplify this to a single expression. */
4034 and_var_with_comparison_1 (gimple *stmt,
4035 enum tree_code code2, tree op2a, tree op2b)
4037 tree var = gimple_assign_lhs (stmt);
4038 tree true_test_var = NULL_TREE;
4039 tree false_test_var = NULL_TREE;
4040 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4042 /* Check for identities like (var AND (var == 0)) => false. */
4043 if (TREE_CODE (op2a) == SSA_NAME
4044 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4046 if ((code2 == NE_EXPR && integer_zerop (op2b))
4047 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4049 true_test_var = op2a;
4050 if (var == true_test_var)
4053 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4054 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4056 false_test_var = op2a;
4057 if (var == false_test_var)
4058 return boolean_false_node;
4062 /* If the definition is a comparison, recurse on it. */
4063 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4065 tree t = and_comparisons_1 (innercode,
4066 gimple_assign_rhs1 (stmt),
4067 gimple_assign_rhs2 (stmt),
4075 /* If the definition is an AND or OR expression, we may be able to
4076 simplify by reassociating. */
4077 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4078 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4080 tree inner1 = gimple_assign_rhs1 (stmt);
4081 tree inner2 = gimple_assign_rhs2 (stmt);
4084 tree partial = NULL_TREE;
4085 bool is_and = (innercode == BIT_AND_EXPR);
4087 /* Check for boolean identities that don't require recursive examination of inner1/inner2:
4089 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4090 inner1 AND (inner1 OR inner2) => inner1
4091 !inner1 AND (inner1 AND inner2) => false
4092 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4093 Likewise for similar cases involving inner2. */
4094 if (inner1 == true_test_var)
4095 return (is_and ? var : inner1);
4096 else if (inner2 == true_test_var)
4097 return (is_and ? var : inner2);
4098 else if (inner1 == false_test_var)
4100 ? boolean_false_node
4101 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4102 else if (inner2 == false_test_var)
4104 ? boolean_false_node
4105 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4107 /* Next, redistribute/reassociate the AND across the inner tests.
4108 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4109 if (TREE_CODE (inner1) == SSA_NAME
4110 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4111 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4112 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4113 gimple_assign_rhs1 (s),
4114 gimple_assign_rhs2 (s),
4115 code2, op2a, op2b)))
4117 /* Handle the AND case, where we are reassociating:
4118 (inner1 AND inner2) AND (op2a code2 op2b)
4119 => (t AND inner2)
4120 If the partial result t is a constant, we win. Otherwise
4121 continue on to try reassociating with the other inner test. */
4124 if (integer_onep (t))
4126 else if (integer_zerop (t))
4127 return boolean_false_node;
4130 /* Handle the OR case, where we are redistributing:
4131 (inner1 OR inner2) AND (op2a code2 op2b)
4132 => (t OR (inner2 AND (op2a code2 op2b))) */
4133 else if (integer_onep (t))
4134 return boolean_true_node;
4136 /* Save partial result for later. */
4140 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4141 if (TREE_CODE (inner2) == SSA_NAME
4142 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4143 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4144 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4145 gimple_assign_rhs1 (s),
4146 gimple_assign_rhs2 (s),
4147 code2, op2a, op2b)))
4149 /* Handle the AND case, where we are reassociating:
4150 (inner1 AND inner2) AND (op2a code2 op2b)
4151 => (inner1 AND t) */
4154 if (integer_onep (t))
4156 else if (integer_zerop (t))
4157 return boolean_false_node;
4158 /* If both are the same, we can apply the identity (x AND x) == x. */
4160 else if (partial && same_bool_result_p (t, partial))
4164 /* Handle the OR case, where we are redistributing:
4165 (inner1 OR inner2) AND (op2a code2 op2b)
4166 => (t OR (inner1 AND (op2a code2 op2b)))
4167 => (t OR partial) */
4170 if (integer_onep (t))
4171 return boolean_true_node;
4174 /* We already got a simplification for the other
4175 operand to the redistributed OR expression. The
4176 interesting case is when at least one is false.
4177 Or, if both are the same, we can apply the identity (x OR x) == x. */
4179 if (integer_zerop (partial))
4181 else if (integer_zerop (t))
4183 else if (same_bool_result_p (t, partial))
4192 /* Try to simplify the AND of two comparisons defined by
4193 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4194 If this can be done without constructing an intermediate value,
4195 return the resulting tree; otherwise NULL_TREE is returned.
4196 This function is deliberately asymmetric as it recurses on SSA_DEFs
4197 in the first comparison but not the second. */
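/* Illustrative examples (not in the original sources) of combinations
   handled below:
     (x > 5) AND (x > 3)    ->  x > 5    (the more restrictive bound)
     (x == 2) AND (x == 3)  ->  false
     (x <= 5) AND (x >= 5)  ->  x == 5   (singleton range) */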
4200 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4201 enum tree_code code2, tree op2a, tree op2b)
4203 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4205 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4206 if (operand_equal_p (op1a, op2a, 0)
4207 && operand_equal_p (op1b, op2b, 0))
4209 /* Result will be either NULL_TREE, or a combined comparison. */
4210 tree t = combine_comparisons (UNKNOWN_LOCATION,
4211 TRUTH_ANDIF_EXPR, code1, code2,
4212 truth_type, op1a, op1b);
4217 /* Likewise the swapped case of the above. */
4218 if (operand_equal_p (op1a, op2b, 0)
4219 && operand_equal_p (op1b, op2a, 0))
4221 /* Result will be either NULL_TREE, or a combined comparison. */
4222 tree t = combine_comparisons (UNKNOWN_LOCATION,
4223 TRUTH_ANDIF_EXPR, code1,
4224 swap_tree_comparison (code2),
4225 truth_type, op1a, op1b);
4230 /* If both comparisons are of the same value against constants, we might
4231 be able to merge them. */
4232 if (operand_equal_p (op1a, op2a, 0)
4233 && TREE_CODE (op1b) == INTEGER_CST
4234 && TREE_CODE (op2b) == INTEGER_CST)
4236 int cmp = tree_int_cst_compare (op1b, op2b);
4238 /* If we have (op1a == op1b), we should either be able to
4239 return that or FALSE, depending on whether the constant op1b
4240 also satisfies the other comparison against op2b. */
4241 if (code1 == EQ_EXPR)
4247 case EQ_EXPR: val = (cmp == 0); break;
4248 case NE_EXPR: val = (cmp != 0); break;
4249 case LT_EXPR: val = (cmp < 0); break;
4250 case GT_EXPR: val = (cmp > 0); break;
4251 case LE_EXPR: val = (cmp <= 0); break;
4252 case GE_EXPR: val = (cmp >= 0); break;
4253 default: done = false;
4258 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4260 return boolean_false_node;
4263 /* Likewise if the second comparison is an == comparison. */
4264 else if (code2 == EQ_EXPR)
4270 case EQ_EXPR: val = (cmp == 0); break;
4271 case NE_EXPR: val = (cmp != 0); break;
4272 case LT_EXPR: val = (cmp > 0); break;
4273 case GT_EXPR: val = (cmp < 0); break;
4274 case LE_EXPR: val = (cmp >= 0); break;
4275 case GE_EXPR: val = (cmp <= 0); break;
4276 default: done = false;
4281 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4283 return boolean_false_node;
4287 /* Same business with inequality tests. */
4288 else if (code1 == NE_EXPR)
4293 case EQ_EXPR: val = (cmp != 0); break;
4294 case NE_EXPR: val = (cmp == 0); break;
4295 case LT_EXPR: val = (cmp >= 0); break;
4296 case GT_EXPR: val = (cmp <= 0); break;
4297 case LE_EXPR: val = (cmp > 0); break;
4298 case GE_EXPR: val = (cmp < 0); break;
4303 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4305 else if (code2 == NE_EXPR)
4310 case EQ_EXPR: val = (cmp == 0); break;
4311 case NE_EXPR: val = (cmp != 0); break;
4312 case LT_EXPR: val = (cmp <= 0); break;
4313 case GT_EXPR: val = (cmp >= 0); break;
4314 case LE_EXPR: val = (cmp < 0); break;
4315 case GE_EXPR: val = (cmp > 0); break;
4320 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4323 /* Choose the more restrictive of two < or <= comparisons. */
4324 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4325 && (code2 == LT_EXPR || code2 == LE_EXPR))
4327 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4328 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4330 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4333 /* Likewise choose the more restrictive of two > or >= comparisons. */
4334 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4335 && (code2 == GT_EXPR || code2 == GE_EXPR))
4337 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4338 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4340 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4343 /* Check for singleton ranges. */
4345 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4346 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4347 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4349 /* Check for disjoint ranges. */
4351 && (code1 == LT_EXPR || code1 == LE_EXPR)
4352 && (code2 == GT_EXPR || code2 == GE_EXPR))
4353 return boolean_false_node;
4355 && (code1 == GT_EXPR || code1 == GE_EXPR)
4356 && (code2 == LT_EXPR || code2 == LE_EXPR))
4357 return boolean_false_node;
4360 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4361 NAME's definition is a truth value. See if there are any simplifications
4362 that can be done against the NAME's definition. */
4363 if (TREE_CODE (op1a) == SSA_NAME
4364 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4365 && (integer_zerop (op1b) || integer_onep (op1b)))
4367 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4368 || (code1 == NE_EXPR && integer_onep (op1b)));
4369 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4370 switch (gimple_code (stmt))
4373 /* Try to simplify by copy-propagating the definition. */
4374 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4377 /* If every argument to the PHI produces the same result when
4378 ANDed with the second comparison, we win.
4379 Do not do this unless the type is bool since we need a bool
4380 result here anyway. */
4381 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4383 tree result = NULL_TREE;
4385 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4387 tree arg = gimple_phi_arg_def (stmt, i);
4389 /* If this PHI has itself as an argument, ignore it.
4390 If all the other args produce the same result,
4392 if (arg == gimple_phi_result (stmt))
4394 else if (TREE_CODE (arg) == INTEGER_CST)
4396 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4399 result = boolean_false_node;
4400 else if (!integer_zerop (result))
4404 result = fold_build2 (code2, boolean_type_node,
4406 else if (!same_bool_comparison_p (result,
4410 else if (TREE_CODE (arg) == SSA_NAME
4411 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4414 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4415 /* In simple cases we can look through PHI nodes,
4416 but we have to be careful with loops.
4418 if (! dom_info_available_p (CDI_DOMINATORS)
4419 || gimple_bb (def_stmt) == gimple_bb (stmt)
4420 || dominated_by_p (CDI_DOMINATORS,
4421 gimple_bb (def_stmt),
4424 temp = and_var_with_comparison (arg, invert, code2,
4430 else if (!same_bool_result_p (result, temp))
4446 /* Try to simplify the AND of two comparisons, specified by
4447 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4448 If this can be simplified to a single expression (without requiring
4449 introducing more SSA variables to hold intermediate values),
4450 return the resulting tree. Otherwise return NULL_TREE.
4451 If the result expression is non-null, it has boolean type. */
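/* Illustrative use (x and ten stand for arbitrary operand trees): a caller
   combining two guarding conditions could expect

     maybe_fold_and_comparisons (LT_EXPR, x, ten, GE_EXPR, x, ten)

   to yield boolean_false_node.  Because and_comparisons_1 only follows SSA
   definitions reached through its first comparison, this wrapper retries
   with the two comparisons swapped when the first attempt fails.  */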
4454 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4455 enum tree_code code2, tree op2a, tree op2b)
4457 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4461 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4464 /* Helper function for or_comparisons_1: try to simplify the OR of the
4465 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4466 If INVERT is true, invert the value of VAR before doing the OR.
4467 Return NULL_TREE if we can't simplify this to a single expression. */
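/* For example, with a boolean VAR the identities recognized in the helper
   below include (VAR OR (VAR != 0)) => VAR and (VAR OR (VAR == 0)) => true;
   an inverted VAR is first rewritten through De Morgan's law so only the
   non-inverted forms need handling.  */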
4470 or_var_with_comparison (tree var, bool invert,
4471 enum tree_code code2, tree op2a, tree op2b)
4474 gimple *stmt = SSA_NAME_DEF_STMT (var);
4476 /* We can only deal with variables whose definitions are assignments. */
4477 if (!is_gimple_assign (stmt))
4480 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4481 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4482 Then we only have to consider the simpler non-inverted cases. */
4484 t = and_var_with_comparison_1 (stmt,
4485 invert_tree_comparison (code2, false),
4488 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4489 return canonicalize_bool (t, invert);
4492 /* Try to simplify the OR of the ssa variable defined by the assignment
4493 STMT with the comparison specified by (OP2A CODE2 OP2B).
4494 Return NULL_TREE if we can't simplify this to a single expression. */
4497 or_var_with_comparison_1 (gimple *stmt,
4498 enum tree_code code2, tree op2a, tree op2b)
4500 tree var = gimple_assign_lhs (stmt);
4501 tree true_test_var = NULL_TREE;
4502 tree false_test_var = NULL_TREE;
4503 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4505 /* Check for identities like (var OR (var != 0)) => var and (var OR (var == 0)) => true. */
4506 if (TREE_CODE (op2a) == SSA_NAME
4507 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4509 if ((code2 == NE_EXPR && integer_zerop (op2b))
4510 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4512 true_test_var = op2a;
4513 if (var == true_test_var)
4516 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4517 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4519 false_test_var = op2a;
4520 if (var == false_test_var)
4521 return boolean_true_node;
4525 /* If the definition is a comparison, recurse on it. */
4526 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4528 tree t = or_comparisons_1 (innercode,
4529 gimple_assign_rhs1 (stmt),
4530 gimple_assign_rhs2 (stmt),
4538 /* If the definition is an AND or OR expression, we may be able to
4539 simplify by reassociating. */
4540 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4541 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4543 tree inner1 = gimple_assign_rhs1 (stmt);
4544 tree inner2 = gimple_assign_rhs2 (stmt);
4547 tree partial = NULL_TREE;
4548 bool is_or = (innercode == BIT_IOR_EXPR);
4550 /* Check for boolean identities that don't require recursive examination
4552 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4553 inner1 OR (inner1 AND inner2) => inner1
4554 !inner1 OR (inner1 OR inner2) => true
4555 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4557 if (inner1 == true_test_var)
4558 return (is_or ? var : inner1);
4559 else if (inner2 == true_test_var)
4560 return (is_or ? var : inner2);
4561 else if (inner1 == false_test_var)
4564 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4565 else if (inner2 == false_test_var)
4568 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4570 /* Next, redistribute/reassociate the OR across the inner tests.
4571 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4572 if (TREE_CODE (inner1) == SSA_NAME
4573 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4574 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4575 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4576 gimple_assign_rhs1 (s),
4577 gimple_assign_rhs2 (s),
4578 code2, op2a, op2b)))
4580 /* Handle the OR case, where we are reassociating:
4581 (inner1 OR inner2) OR (op2a code2 op2b)
4583 If the partial result t is a constant, we win. Otherwise
4584 continue on to try reassociating with the other inner test. */
4587 if (integer_onep (t))
4588 return boolean_true_node;
4589 else if (integer_zerop (t))
4593 /* Handle the AND case, where we are redistributing:
4594 (inner1 AND inner2) OR (op2a code2 op2b)
4595 => (t AND (inner2 OR (op2a code op2b))) */
4596 else if (integer_zerop (t))
4597 return boolean_false_node;
4599 /* Save partial result for later. */
4603 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4604 if (TREE_CODE (inner2) == SSA_NAME
4605 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4606 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4607 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4608 gimple_assign_rhs1 (s),
4609 gimple_assign_rhs2 (s),
4610 code2, op2a, op2b)))
4612 /* Handle the OR case, where we are reassociating:
4613 (inner1 OR inner2) OR (op2a code2 op2b)
4615 => (t OR partial) */
4618 if (integer_zerop (t))
4620 else if (integer_onep (t))
4621 return boolean_true_node;
4622 /* If both are the same, we can apply the identity
4624 else if (partial && same_bool_result_p (t, partial))
4628 /* Handle the AND case, where we are redistributing:
4629 (inner1 AND inner2) OR (op2a code2 op2b)
4630 => (t AND (inner1 OR (op2a code2 op2b)))
4631 => (t AND partial) */
4634 if (integer_zerop (t))
4635 return boolean_false_node;
4638 /* We already got a simplification for the other
4639 operand to the redistributed AND expression. The
4640 interesting case is when at least one is true.
4641 Or, if both are the same, we can apply the identity
4643 if (integer_onep (partial))
4645 else if (integer_onep (t))
4647 else if (same_bool_result_p (t, partial))
4656 /* Try to simplify the OR of two comparisons defined by
4657 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4658 If this can be done without constructing an intermediate value,
4659 return the resulting tree; otherwise NULL_TREE is returned.
4660 This function is deliberately asymmetric as it recurses on SSA_DEFs
4661 in the first comparison but not the second. */
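/* For illustration, typical foldings when both comparisons test the same
   value X against integer constants:

     (X != 2) || (X <  5)  =>  true        the constant 2 satisfies X < 5
     (X <  4) || (X >  4)  =>  X != 4      complement of a singleton range
     (X <  3) || (X <  7)  =>  X < 7       less restrictive bound wins
     (X <= 1) || (X >= 9)  =>  NULL_TREE   no single comparison covers it  */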
4664 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4665 enum tree_code code2, tree op2a, tree op2b)
4667 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4669 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4670 if (operand_equal_p (op1a, op2a, 0)
4671 && operand_equal_p (op1b, op2b, 0))
4673 /* Result will be either NULL_TREE, or a combined comparison. */
4674 tree t = combine_comparisons (UNKNOWN_LOCATION,
4675 TRUTH_ORIF_EXPR, code1, code2,
4676 truth_type, op1a, op1b);
4681 /* Likewise the swapped case of the above. */
4682 if (operand_equal_p (op1a, op2b, 0)
4683 && operand_equal_p (op1b, op2a, 0))
4685 /* Result will be either NULL_TREE, or a combined comparison. */
4686 tree t = combine_comparisons (UNKNOWN_LOCATION,
4687 TRUTH_ORIF_EXPR, code1,
4688 swap_tree_comparison (code2),
4689 truth_type, op1a, op1b);
4694 /* If both comparisons are of the same value against constants, we might
4695 be able to merge them. */
4696 if (operand_equal_p (op1a, op2a, 0)
4697 && TREE_CODE (op1b) == INTEGER_CST
4698 && TREE_CODE (op2b) == INTEGER_CST)
4700 int cmp = tree_int_cst_compare (op1b, op2b);
4702 /* If we have (op1a != op1b), we should either be able to
4703 return that or TRUE, depending on whether the constant op1b
4704 also satisfies the other comparison against op2b. */
4705 if (code1 == NE_EXPR)
4711 case EQ_EXPR: val = (cmp == 0); break;
4712 case NE_EXPR: val = (cmp != 0); break;
4713 case LT_EXPR: val = (cmp < 0); break;
4714 case GT_EXPR: val = (cmp > 0); break;
4715 case LE_EXPR: val = (cmp <= 0); break;
4716 case GE_EXPR: val = (cmp >= 0); break;
4717 default: done = false;
4722 return boolean_true_node;
4724 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4727 /* Likewise if the second comparison is a != comparison. */
4728 else if (code2 == NE_EXPR)
4734 case EQ_EXPR: val = (cmp == 0); break;
4735 case NE_EXPR: val = (cmp != 0); break;
4736 case LT_EXPR: val = (cmp > 0); break;
4737 case GT_EXPR: val = (cmp < 0); break;
4738 case LE_EXPR: val = (cmp >= 0); break;
4739 case GE_EXPR: val = (cmp <= 0); break;
4740 default: done = false;
4745 return boolean_true_node;
4747 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4751 /* See if an equality test is redundant with the other comparison. */
4752 else if (code1 == EQ_EXPR)
4757 case EQ_EXPR: val = (cmp == 0); break;
4758 case NE_EXPR: val = (cmp != 0); break;
4759 case LT_EXPR: val = (cmp < 0); break;
4760 case GT_EXPR: val = (cmp > 0); break;
4761 case LE_EXPR: val = (cmp <= 0); break;
4762 case GE_EXPR: val = (cmp >= 0); break;
4767 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4769 else if (code2 == EQ_EXPR)
4774 case EQ_EXPR: val = (cmp == 0); break;
4775 case NE_EXPR: val = (cmp != 0); break;
4776 case LT_EXPR: val = (cmp > 0); break;
4777 case GT_EXPR: val = (cmp < 0); break;
4778 case LE_EXPR: val = (cmp >= 0); break;
4779 case GE_EXPR: val = (cmp <= 0); break;
4784 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4787 /* Choose the less restrictive of two < or <= comparisons. */
4788 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4789 && (code2 == LT_EXPR || code2 == LE_EXPR))
4791 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4792 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4794 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4797 /* Likewise choose the less restrictive of two > or >= comparisons. */
4798 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4799 && (code2 == GT_EXPR || code2 == GE_EXPR))
4801 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4802 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4804 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4807 /* Check for singleton ranges. */
4809 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4810 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4811 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4813 /* Check for less/greater pairs that don't restrict the range at all. */
4815 && (code1 == LT_EXPR || code1 == LE_EXPR)
4816 && (code2 == GT_EXPR || code2 == GE_EXPR))
4817 return boolean_true_node;
4819 && (code1 == GT_EXPR || code1 == GE_EXPR)
4820 && (code2 == LT_EXPR || code2 == LE_EXPR))
4821 return boolean_true_node;
4824 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4825 NAME's definition is a truth value. See if there are any simplifications
4826 that can be done against the NAME's definition. */
4827 if (TREE_CODE (op1a) == SSA_NAME
4828 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4829 && (integer_zerop (op1b) || integer_onep (op1b)))
4831 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4832 || (code1 == NE_EXPR && integer_onep (op1b)));
4833 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4834 switch (gimple_code (stmt))
4837 /* Try to simplify by copy-propagating the definition. */
4838 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4841 /* If every argument to the PHI produces the same result when
4842 ORed with the second comparison, we win.
4843 Do not do this unless the type is bool since we need a bool
4844 result here anyway. */
4845 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4847 tree result = NULL_TREE;
4849 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4851 tree arg = gimple_phi_arg_def (stmt, i);
4853 /* If this PHI has itself as an argument, ignore it.
4854 If all the other args produce the same result,
4856 if (arg == gimple_phi_result (stmt))
4858 else if (TREE_CODE (arg) == INTEGER_CST)
4860 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4863 result = boolean_true_node;
4864 else if (!integer_onep (result))
4868 result = fold_build2 (code2, boolean_type_node,
4870 else if (!same_bool_comparison_p (result,
4874 else if (TREE_CODE (arg) == SSA_NAME
4875 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4878 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4879 /* In simple cases we can look through PHI nodes,
4880 but we have to be careful with loops.
4882 if (! dom_info_available_p (CDI_DOMINATORS)
4883 || gimple_bb (def_stmt) == gimple_bb (stmt)
4884 || dominated_by_p (CDI_DOMINATORS,
4885 gimple_bb (def_stmt),
4888 temp = or_var_with_comparison (arg, invert, code2,
4894 else if (!same_bool_result_p (result, temp))
4910 /* Try to simplify the OR of two comparisons, specified by
4911 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4912 If this can be simplified to a single expression (without requiring
4913 introducing more SSA variables to hold intermediate values),
4914 return the resulting tree. Otherwise return NULL_TREE.
4915 If the result expression is non-null, it has boolean type. */
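/* As with the AND variant above, or_comparisons_1 is asymmetric, so this
   wrapper simply tries both operand orders.  For example, a caller could
   expect maybe_fold_or_comparisons (GT_EXPR, x, five, LE_EXPR, x, five),
   with x and five arbitrary operand trees, to yield boolean_true_node.  */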
4918 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4919 enum tree_code code2, tree op2a, tree op2b)
4921 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4925 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4929 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4931 Returns either NULL_TREE, a simplified but non-constant tree, or a constant.
4934 ??? This should go into a gimple-fold-inline.h file to be eventually
4935 privatized with the single valueize function used in the various TUs
4936 to avoid the indirect function call overhead. */
4939 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4940 tree (*gvalueize) (tree))
4944 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4945 edges if there are intermediate VARYING defs. For this reason
4946 do not follow SSA edges here even though SCCVN can technically
4947 just deal fine with that. */
4948 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4950 tree res = NULL_TREE;
4951 if (gimple_simplified_result_is_gimple_val (rcode, ops))
4953 else if (mprts_hook)
4954 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4957 if (dump_file && dump_flags & TDF_DETAILS)
4959 fprintf (dump_file, "Match-and-simplified ");
4960 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4961 fprintf (dump_file, " to ");
4962 print_generic_expr (dump_file, res, 0);
4963 fprintf (dump_file, "\n");
4969 location_t loc = gimple_location (stmt);
4970 switch (gimple_code (stmt))
4974 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4976 switch (get_gimple_rhs_class (subcode))
4978 case GIMPLE_SINGLE_RHS:
4980 tree rhs = gimple_assign_rhs1 (stmt);
4981 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4983 if (TREE_CODE (rhs) == SSA_NAME)
4985 /* If the RHS is an SSA_NAME, return its known constant value,
4987 return (*valueize) (rhs);
4989 /* Handle propagating invariant addresses into address
4991 else if (TREE_CODE (rhs) == ADDR_EXPR
4992 && !is_gimple_min_invariant (rhs))
4994 HOST_WIDE_INT offset = 0;
4996 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5000 && (CONSTANT_CLASS_P (base)
5001 || decl_address_invariant_p (base)))
5002 return build_invariant_address (TREE_TYPE (rhs),
5005 else if (TREE_CODE (rhs) == CONSTRUCTOR
5006 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5007 && (CONSTRUCTOR_NELTS (rhs)
5008 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5013 vec = XALLOCAVEC (tree,
5014 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5015 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5017 val = (*valueize) (val);
5018 if (TREE_CODE (val) == INTEGER_CST
5019 || TREE_CODE (val) == REAL_CST
5020 || TREE_CODE (val) == FIXED_CST)
5026 return build_vector (TREE_TYPE (rhs), vec);
5028 if (subcode == OBJ_TYPE_REF)
5030 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5031 /* If callee is constant, we can fold away the wrapper. */
5032 if (is_gimple_min_invariant (val))
5036 if (kind == tcc_reference)
5038 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5039 || TREE_CODE (rhs) == REALPART_EXPR
5040 || TREE_CODE (rhs) == IMAGPART_EXPR)
5041 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5043 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5044 return fold_unary_loc (EXPR_LOCATION (rhs),
5046 TREE_TYPE (rhs), val);
5048 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5049 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5051 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5052 return fold_ternary_loc (EXPR_LOCATION (rhs),
5054 TREE_TYPE (rhs), val,
5055 TREE_OPERAND (rhs, 1),
5056 TREE_OPERAND (rhs, 2));
5058 else if (TREE_CODE (rhs) == MEM_REF
5059 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5061 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5062 if (TREE_CODE (val) == ADDR_EXPR
5063 && is_gimple_min_invariant (val))
5065 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5067 TREE_OPERAND (rhs, 1));
5072 return fold_const_aggregate_ref_1 (rhs, valueize);
5074 else if (kind == tcc_declaration)
5075 return get_symbol_constant_value (rhs);
5079 case GIMPLE_UNARY_RHS:
5082 case GIMPLE_BINARY_RHS:
5083 /* Translate &x + CST into an invariant form suitable for
5084 further propagation. */
5085 if (subcode == POINTER_PLUS_EXPR)
5087 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5088 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5089 if (TREE_CODE (op0) == ADDR_EXPR
5090 && TREE_CODE (op1) == INTEGER_CST)
5092 tree off = fold_convert (ptr_type_node, op1);
5093 return build_fold_addr_expr_loc
5095 fold_build2 (MEM_REF,
5096 TREE_TYPE (TREE_TYPE (op0)),
5097 unshare_expr (op0), off));
5100 /* Canonicalize bool != 0 and bool == 0 appearing after
5101 valueization. While gimple_simplify handles this
5102 it can get confused by the ~X == 1 -> X == 0 transform
5103 which we can't reduce to an SSA name or a constant
5104 (and we have no way to tell gimple_simplify to not
5105 consider those transforms in the first place). */
5106 else if (subcode == EQ_EXPR
5107 || subcode == NE_EXPR)
5109 tree lhs = gimple_assign_lhs (stmt);
5110 tree op0 = gimple_assign_rhs1 (stmt);
5111 if (useless_type_conversion_p (TREE_TYPE (lhs),
5114 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5115 op0 = (*valueize) (op0);
5116 if (TREE_CODE (op0) == INTEGER_CST)
5117 std::swap (op0, op1);
5118 if (TREE_CODE (op1) == INTEGER_CST
5119 && ((subcode == NE_EXPR && integer_zerop (op1))
5120 || (subcode == EQ_EXPR && integer_onep (op1))))
5126 case GIMPLE_TERNARY_RHS:
5128 /* Handle ternary operators that can appear in GIMPLE form. */
5129 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5130 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5131 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5132 return fold_ternary_loc (loc, subcode,
5133 gimple_expr_type (stmt), op0, op1, op2);
5144 gcall *call_stmt = as_a <gcall *> (stmt);
5146 if (gimple_call_internal_p (stmt))
5148 enum tree_code subcode = ERROR_MARK;
5149 switch (gimple_call_internal_fn (stmt))
5151 case IFN_UBSAN_CHECK_ADD:
5152 subcode = PLUS_EXPR;
5154 case IFN_UBSAN_CHECK_SUB:
5155 subcode = MINUS_EXPR;
5157 case IFN_UBSAN_CHECK_MUL:
5158 subcode = MULT_EXPR;
5163 tree arg0 = gimple_call_arg (stmt, 0);
5164 tree arg1 = gimple_call_arg (stmt, 1);
5165 tree op0 = (*valueize) (arg0);
5166 tree op1 = (*valueize) (arg1);
5168 if (TREE_CODE (op0) != INTEGER_CST
5169 || TREE_CODE (op1) != INTEGER_CST)
5174 /* x * 0 = 0 * x = 0 without overflow. */
5175 if (integer_zerop (op0) || integer_zerop (op1))
5176 return build_zero_cst (TREE_TYPE (arg0));
5179 /* y - y = 0 without overflow. */
5180 if (operand_equal_p (op0, op1, 0))
5181 return build_zero_cst (TREE_TYPE (arg0));
5188 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5190 && TREE_CODE (res) == INTEGER_CST
5191 && !TREE_OVERFLOW (res))
5196 fn = (*valueize) (gimple_call_fn (stmt));
5197 if (TREE_CODE (fn) == ADDR_EXPR
5198 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5199 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5200 && gimple_builtin_call_types_compatible_p (stmt,
5201 TREE_OPERAND (fn, 0)))
5203 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5206 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5207 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5208 retval = fold_builtin_call_array (loc,
5209 gimple_call_return_type (call_stmt),
5210 fn, gimple_call_num_args (stmt), args);
5213 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5214 STRIP_NOPS (retval);
5215 retval = fold_convert (gimple_call_return_type (call_stmt),
5228 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5229 Returns NULL_TREE if folding to a constant is not possible, otherwise
5230 returns a constant according to is_gimple_min_invariant. */
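/* Illustrative use (my_valueize and my_lattice_value are hypothetical
   names, not part of this file): a propagator with its own constant
   lattice might fold a statement via

     static tree
     my_valueize (tree name)
     {
       tree val = my_lattice_value (name);
       return val ? val : name;
     }
     ...
     tree cst = gimple_fold_stmt_to_constant (stmt, my_valueize);

   where CST ends up either NULL_TREE or a constant satisfying
   is_gimple_min_invariant.  */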
5233 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5235 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5236 if (res && is_gimple_min_invariant (res))
5242 /* The following set of functions are supposed to fold references using
5243 their constant initializers. */
5245 /* See if we can find a constructor defining the value of BASE.
5246 When we know the constructor with a constant offset (such as
5247 BASE being array[40] where we do know the constructor of the array), then
5248 BIT_OFFSET is adjusted accordingly.
5250 As a special case, return error_mark_node when constructor
5251 is not explicitly available, but it is known to be zero
5252 such as 'static const int a;'. */
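/* For example, when BASE resolves to a static variable whose initializer is
   known, that initializer's CONSTRUCTOR is returned and the caller picks the
   piece at BIT_OFFSET out of it; for 'static const int a;' the explicit
   error_mark_node result lets callers fold the read to zero (see the
   build_zero_cst calls in fold_const_aggregate_ref_1 below).  */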
5254 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5255 tree (*valueize)(tree))
5257 HOST_WIDE_INT bit_offset2, size, max_size;
5258 if (TREE_CODE (base) == MEM_REF)
5260 if (!integer_zerop (TREE_OPERAND (base, 1)))
5262 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5264 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5269 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5270 base = valueize (TREE_OPERAND (base, 0));
5271 if (!base || TREE_CODE (base) != ADDR_EXPR)
5273 base = TREE_OPERAND (base, 0);
5276 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5277 DECL_INITIAL. If BASE is a nested reference into another
5278 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5279 the inner reference. */
5280 switch (TREE_CODE (base))
5285 tree init = ctor_for_folding (base);
5287 /* Our semantics are the exact opposite of ctor_for_folding's:
5288 NULL means unknown, while error_mark_node means 0. */
5289 if (init == error_mark_node)
5292 return error_mark_node;
5298 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5299 if (max_size == -1 || size != max_size)
5301 *bit_offset += bit_offset2;
5302 return get_base_constructor (base, bit_offset, valueize);
5313 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5314 SIZE to the memory at bit OFFSET. */
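/* A worked example (assuming BITS_PER_UNIT == 8 and 4-byte elements): for a
   read of one element at bit OFFSET 96 from 'static const int a[8]', elt_size
   is 4, access_index becomes (96/8)/4 = 3 plus the domain's low bound, and
   inner_offset is 96 % 32 = 0, so the CONSTRUCTOR entry for index 3 is
   returned via fold_ctor_reference.  */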
5317 fold_array_ctor_reference (tree type, tree ctor,
5318 unsigned HOST_WIDE_INT offset,
5319 unsigned HOST_WIDE_INT size,
5322 unsigned HOST_WIDE_INT cnt;
5324 offset_int low_bound;
5325 offset_int elt_size;
5326 offset_int index, max_index;
5327 offset_int access_index;
5328 tree domain_type = NULL_TREE, index_type = NULL_TREE;
5329 HOST_WIDE_INT inner_offset;
5331 /* Compute low bound and elt size. */
5332 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5333 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5334 if (domain_type && TYPE_MIN_VALUE (domain_type))
5336 /* Static constructors for variably sized objects make no sense. */
5337 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5338 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
5339 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5343 /* Static constructors for variably sized objects make no sense. */
5344 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5346 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5348 /* We can handle only constantly sized accesses that are known not to
5349 be larger than the size of the array element. */
5350 if (!TYPE_SIZE_UNIT (type)
5351 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5352 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5356 /* Compute the array index we look for. */
5357 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5359 access_index += low_bound;
5361 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
5362 TYPE_SIGN (index_type));
5364 /* And offset within the access. */
5365 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5367 /* See if the array element is large enough to span the whole access. We do not
5368 care to fold accesses spanning multiple array indexes. */
5369 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5372 index = low_bound - 1;
5374 index = wi::ext (index, TYPE_PRECISION (index_type),
5375 TYPE_SIGN (index_type));
5377 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
5379 /* An array constructor might explicitly set the index, specify a range,
5380 or leave the index NULL, meaning that it is the next index after the previous
5384 if (TREE_CODE (cfield) == INTEGER_CST)
5385 max_index = index = wi::to_offset (cfield);
5388 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
5389 index = wi::to_offset (TREE_OPERAND (cfield, 0));
5390 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
5397 index = wi::ext (index, TYPE_PRECISION (index_type),
5398 TYPE_SIGN (index_type));
5402 /* Do we have a match? */
5403 if (wi::cmpu (access_index, index) >= 0
5404 && wi::cmpu (access_index, max_index) <= 0)
5405 return fold_ctor_reference (type, cval, inner_offset, size,
5408 /* When memory is not explicitly mentioned in the constructor,
5409 it is 0 (or out of range). */
5410 return build_zero_cst (type);
5413 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5414 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
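/* A worked example: for 'struct { char c; int i; }' on a typical target the
   field 'i' starts 32 bits into the object, so however stor-layout splits
   that position between DECL_FIELD_OFFSET and DECL_FIELD_BIT_OFFSET,
   bitoffset comes out as 32 and bitoffset_end as 64; a read with OFFSET 32
   and SIZE 32 then overlaps exactly one field and is folded from that
   field's initializer value.  */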
5417 fold_nonarray_ctor_reference (tree type, tree ctor,
5418 unsigned HOST_WIDE_INT offset,
5419 unsigned HOST_WIDE_INT size,
5422 unsigned HOST_WIDE_INT cnt;
5425 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5428 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5429 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5430 tree field_size = DECL_SIZE (cfield);
5431 offset_int bitoffset;
5432 offset_int bitoffset_end, access_end;
5434 /* Variable-sized objects in static constructors make no sense,
5435 but field_size can be NULL for flexible array members. */
5436 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5437 && TREE_CODE (byte_offset) == INTEGER_CST
5438 && (field_size != NULL_TREE
5439 ? TREE_CODE (field_size) == INTEGER_CST
5440 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5442 /* Compute bit offset of the field. */
5443 bitoffset = (wi::to_offset (field_offset)
5444 + wi::lshift (wi::to_offset (byte_offset),
5445 LOG2_BITS_PER_UNIT));
5446 /* Compute bit offset where the field ends. */
5447 if (field_size != NULL_TREE)
5448 bitoffset_end = bitoffset + wi::to_offset (field_size);
5452 access_end = offset_int (offset) + size;
5454 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5455 [BITOFFSET, BITOFFSET_END)? */
5456 if (wi::cmps (access_end, bitoffset) > 0
5457 && (field_size == NULL_TREE
5458 || wi::lts_p (offset, bitoffset_end)))
5460 offset_int inner_offset = offset_int (offset) - bitoffset;
5461 /* We do have an overlap. Now see if the field is large enough to
5462 cover the access. Give up for accesses spanning multiple
5464 if (wi::cmps (access_end, bitoffset_end) > 0)
5466 if (wi::lts_p (offset, bitoffset))
5468 return fold_ctor_reference (type, cval,
5469 inner_offset.to_uhwi (), size,
5473 /* When memory is not explicitly mentioned in the constructor, it is 0. */
5474 return build_zero_cst (type);
5477 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5478 to the memory at bit OFFSET. */
5481 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5482 unsigned HOST_WIDE_INT size, tree from_decl)
5486 /* We found the field with exact match. */
5487 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5489 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5491 /* We are at the end of walk, see if we can view convert the
5493 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5494 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5495 && !compare_tree_int (TYPE_SIZE (type), size)
5496 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5498 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5499 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5501 STRIP_USELESS_TYPE_CONVERSION (ret);
5504 /* For constants and byte-aligned/sized reads try to go through
5505 native_encode/interpret. */
5506 if (CONSTANT_CLASS_P (ctor)
5507 && BITS_PER_UNIT == 8
5508 && offset % BITS_PER_UNIT == 0
5509 && size % BITS_PER_UNIT == 0
5510 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5512 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5513 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5514 offset / BITS_PER_UNIT) > 0)
5515 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5517 if (TREE_CODE (ctor) == CONSTRUCTOR)
5520 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5521 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5522 return fold_array_ctor_reference (type, ctor, offset, size,
5525 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5532 /* Return the tree representing the element referenced by T if T is an
5533 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
5534 names using VALUEIZE. Return NULL_TREE otherwise. */
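/* For example (assuming 32-bit int), given 'static const int a[4] = {1, 2,
   3, 5};' and a load a[i_2] where VALUEIZE maps i_2 to 3, the ARRAY_REF case
   below computes a bit offset of 3 * 32, finds a's CONSTRUCTOR through
   get_base_constructor and folds the load to the constant 5.  */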
5537 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5539 tree ctor, idx, base;
5540 HOST_WIDE_INT offset, size, max_size;
5543 if (TREE_THIS_VOLATILE (t))
5547 return get_symbol_constant_value (t);
5549 tem = fold_read_from_constant_string (t);
5553 switch (TREE_CODE (t))
5556 case ARRAY_RANGE_REF:
5557 /* Constant indexes are handled well by get_base_constructor.
5558 Only special case variable offsets.
5559 FIXME: This code can't handle nested references with variable indexes
5560 (they will be handled only by iteration of ccp). Perhaps we can bring
5561 get_ref_base_and_extent here and make it use a valueize callback. */
5562 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5564 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5565 && TREE_CODE (idx) == INTEGER_CST)
5567 tree low_bound, unit_size;
5569 /* If the resulting bit-offset is constant, track it. */
5570 if ((low_bound = array_ref_low_bound (t),
5571 TREE_CODE (low_bound) == INTEGER_CST)
5572 && (unit_size = array_ref_element_size (t),
5573 tree_fits_uhwi_p (unit_size)))
5576 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5577 TYPE_PRECISION (TREE_TYPE (idx)));
5579 if (wi::fits_shwi_p (woffset))
5581 offset = woffset.to_shwi ();
5582 /* TODO: This code seems wrong, multiply then check
5583 to see if it fits. */
5584 offset *= tree_to_uhwi (unit_size);
5585 offset *= BITS_PER_UNIT;
5587 base = TREE_OPERAND (t, 0);
5588 ctor = get_base_constructor (base, &offset, valueize);
5589 /* Empty constructor. Always fold to 0. */
5590 if (ctor == error_mark_node)
5591 return build_zero_cst (TREE_TYPE (t));
5592 /* Out-of-bounds array access. Value is undefined,
5596 /* We cannot determine the ctor. */
5599 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5600 tree_to_uhwi (unit_size)
5610 case TARGET_MEM_REF:
5612 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5613 ctor = get_base_constructor (base, &offset, valueize);
5615 /* Empty constructor. Always fold to 0. */
5616 if (ctor == error_mark_node)
5617 return build_zero_cst (TREE_TYPE (t));
5618 /* We do not know the precise address. */
5619 if (max_size == -1 || max_size != size)
5621 /* We cannot determine the ctor. */
5625 /* Out-of-bounds array access. Value is undefined, but don't fold. */
5629 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5635 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5636 if (c && TREE_CODE (c) == COMPLEX_CST)
5637 return fold_build1_loc (EXPR_LOCATION (t),
5638 TREE_CODE (t), TREE_TYPE (t), c);
5650 fold_const_aggregate_ref (tree t)
5652 return fold_const_aggregate_ref_1 (t, NULL);
5655 /* Look up the virtual method with index TOKEN in a virtual table V
5657 Set CAN_REFER, if non-NULL, to false if the method
5658 is not referable or if the virtual table is ill-formed (such as rewritten
5659 by a non-C++ producer). Otherwise just return NULL in that case. */
5662 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5664 unsigned HOST_WIDE_INT offset,
5667 tree vtable = v, init, fn;
5668 unsigned HOST_WIDE_INT size;
5669 unsigned HOST_WIDE_INT elt_size, access_index;
5675 /* First of all, double-check that we have a virtual table. */
5676 if (TREE_CODE (v) != VAR_DECL
5677 || !DECL_VIRTUAL_P (v))
5679 /* Pass down that we lost track of the target. */
5685 init = ctor_for_folding (v);
5687 /* The virtual tables should always be born with constructors
5688 and we should always assume that they are available for
5689 folding. At the moment we do not stream them in all cases,
5690 but it should never happen that the ctor seems unreachable. */
5692 if (init == error_mark_node)
5694 gcc_assert (in_lto_p);
5695 /* Pass down that we lost track of the target. */
5700 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5701 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5702 offset *= BITS_PER_UNIT;
5703 offset += token * size;
5705 /* Look up the value in the constructor that is assumed to be an array.
5706 This is equivalent to
5707 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5708 offset, size, NULL);
5709 but in constant time. We expect that the frontend produced a simple
5710 array without indexed initializers. */
5712 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5713 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5714 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5715 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5717 access_index = offset / BITS_PER_UNIT / elt_size;
5718 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5720 /* This code makes an assumption that there are no
5721 indexed fields produced by the C++ FE, so we can directly index the array. */
5722 if (access_index < CONSTRUCTOR_NELTS (init))
5724 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5725 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5731 /* For a type-inconsistent program we may end up looking up a virtual method
5732 in a virtual table that does not contain TOKEN entries. We may overrun
5733 the virtual table and pick up a constant or RTTI info pointer.
5734 In any case the call is undefined. */
5736 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5737 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5738 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5741 fn = TREE_OPERAND (fn, 0);
5743 /* When the cgraph node is missing and the function is not public, we cannot
5744 devirtualize. This can happen in WHOPR when the actual method
5745 ends up in another partition, because we found the devirtualization
5746 possibility too late. */
5747 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5758 /* Make sure we create a cgraph node for functions we'll reference.
5759 They can be non-existent if the reference comes from an entry
5760 of an external vtable for example. */
5761 cgraph_node::get_create (fn);
5766 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5767 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5768 KNOWN_BINFO carries the binfo describing the true type of
5769 OBJ_TYPE_REF_OBJECT(REF).
5770 Set CAN_REFER, if non-NULL, to false if the method
5771 is not referable or if the virtual table is ill-formed (such as rewritten
5772 by a non-C++ producer). Otherwise just return NULL in that case. */
5775 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5778 unsigned HOST_WIDE_INT offset;
5781 v = BINFO_VTABLE (known_binfo);
5782 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5786 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5792 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5795 /* Given a pointer value OP0, return a simplified version of an
5796 indirection through OP0, or NULL_TREE if no simplification is
5797 possible. Note that the resulting type may be different from
5798 the type pointed to in the sense that it is still compatible
5799 from the langhooks point of view. */
5802 gimple_fold_indirect_ref (tree t)
5804 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5809 subtype = TREE_TYPE (sub);
5810 if (!POINTER_TYPE_P (subtype))
5813 if (TREE_CODE (sub) == ADDR_EXPR)
5815 tree op = TREE_OPERAND (sub, 0);
5816 tree optype = TREE_TYPE (op);
5818 if (useless_type_conversion_p (type, optype))
5821 /* *(foo *)&fooarray => fooarray[0] */
5822 if (TREE_CODE (optype) == ARRAY_TYPE
5823 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5824 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5826 tree type_domain = TYPE_DOMAIN (optype);
5827 tree min_val = size_zero_node;
5828 if (type_domain && TYPE_MIN_VALUE (type_domain))
5829 min_val = TYPE_MIN_VALUE (type_domain);
5830 if (TREE_CODE (min_val) == INTEGER_CST)
5831 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5833 /* *(foo *)&complexfoo => __real__ complexfoo */
5834 else if (TREE_CODE (optype) == COMPLEX_TYPE
5835 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5836 return fold_build1 (REALPART_EXPR, type, op);
5837 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5838 else if (TREE_CODE (optype) == VECTOR_TYPE
5839 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5841 tree part_width = TYPE_SIZE (type);
5842 tree index = bitsize_int (0);
5843 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5847 /* *(p + CST) -> ... */
5848 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5849 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5851 tree addr = TREE_OPERAND (sub, 0);
5852 tree off = TREE_OPERAND (sub, 1);
5856 addrtype = TREE_TYPE (addr);
5858 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5859 if (TREE_CODE (addr) == ADDR_EXPR
5860 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5861 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5862 && tree_fits_uhwi_p (off))
5864 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5865 tree part_width = TYPE_SIZE (type);
5866 unsigned HOST_WIDE_INT part_widthi
5867 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5868 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5869 tree index = bitsize_int (indexi);
5870 if (offset / part_widthi
5871 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5872 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5876 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5877 if (TREE_CODE (addr) == ADDR_EXPR
5878 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5879 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5881 tree size = TYPE_SIZE_UNIT (type);
5882 if (tree_int_cst_equal (size, off))
5883 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5886 /* *(p + CST) -> MEM_REF <p, CST>. */
5887 if (TREE_CODE (addr) != ADDR_EXPR
5888 || DECL_P (TREE_OPERAND (addr, 0)))
5889 return fold_build2 (MEM_REF, type,
5891 wide_int_to_tree (ptype, off));
5894 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5895 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5896 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5897 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5900 tree min_val = size_zero_node;
5902 sub = gimple_fold_indirect_ref (sub);
5904 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5905 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5906 if (type_domain && TYPE_MIN_VALUE (type_domain))
5907 min_val = TYPE_MIN_VALUE (type_domain);
5908 if (TREE_CODE (min_val) == INTEGER_CST)
5909 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5915 /* Return true if CODE is an operation that when operating on signed
5916 integer types involves undefined behavior on overflow and the
5917 operation can be expressed with unsigned arithmetic. */
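/* For example, signed PLUS_EXPR, MINUS_EXPR, MULT_EXPR and POINTER_PLUS_EXPR
   all qualify: each has undefined behavior on signed overflow but can be
   carried out in the corresponding unsigned type instead.  */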
5920 arith_code_with_undefined_signed_overflow (tree_code code)
5928 case POINTER_PLUS_EXPR:
5935 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5936 operation that can be transformed to unsigned arithmetic by converting
5937 its operands, carrying out the operation in the corresponding unsigned
5938 type and converting the result back to the original type.
5940 Returns a sequence of statements that replace STMT and also contain
5941 a modified form of STMT itself. */
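/* For example (with illustrative SSA names), a signed int statement

     c_1 = a_2 + b_3;

   is rewritten into the returned sequence roughly as

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     c_1 = (int) _6;

   with the original STMT reused for the unsigned addition.  */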
5944 rewrite_to_defined_overflow (gimple *stmt)
5946 if (dump_file && (dump_flags & TDF_DETAILS))
5948 fprintf (dump_file, "rewriting stmt with undefined signed "
5950 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5953 tree lhs = gimple_assign_lhs (stmt);
5954 tree type = unsigned_type_for (TREE_TYPE (lhs));
5955 gimple_seq stmts = NULL;
5956 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5958 tree op = gimple_op (stmt, i);
5959 op = gimple_convert (&stmts, type, op);
5960 gimple_set_op (stmt, i, op);
5962 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5963 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5964 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5965 gimple_seq_add_stmt (&stmts, stmt);
5966 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
5967 gimple_seq_add_stmt (&stmts, cvt);
5973 /* The valueization hook we use for the gimple_build API simplification.
5974 This makes us match fold_buildN behavior by only combining with
5975 statements in the sequence(s) we are currently building. */
5978 gimple_build_valueize (tree op)
5980 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5985 /* Build the expression CODE OP0 of type TYPE with location LOC,
5986 simplifying it first if possible. Returns the built
5987 expression value and appends statements possibly defining it
5991 gimple_build (gimple_seq *seq, location_t loc,
5992 enum tree_code code, tree type, tree op0)
5994 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
5997 if (gimple_in_ssa_p (cfun))
5998 res = make_ssa_name (type);
6000 res = create_tmp_reg (type);
6002 if (code == REALPART_EXPR
6003 || code == IMAGPART_EXPR
6004 || code == VIEW_CONVERT_EXPR)
6005 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6007 stmt = gimple_build_assign (res, code, op0);
6008 gimple_set_location (stmt, loc);
6009 gimple_seq_add_stmt_without_update (seq, stmt);
6014 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6015 simplifying it first if possible. Returns the built
6016 expression value and appends statements possibly defining it
6020 gimple_build (gimple_seq *seq, location_t loc,
6021 enum tree_code code, tree type, tree op0, tree op1)
6023 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6026 if (gimple_in_ssa_p (cfun))
6027 res = make_ssa_name (type);
6029 res = create_tmp_reg (type);
6030 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6031 gimple_set_location (stmt, loc);
6032 gimple_seq_add_stmt_without_update (seq, stmt);
6037 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6038 simplifying it first if possible. Returns the built
6039 expression value and appends statements possibly defining it
6043 gimple_build (gimple_seq *seq, location_t loc,
6044 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6046 tree res = gimple_simplify (code, type, op0, op1, op2,
6047 seq, gimple_build_valueize);
6050 if (gimple_in_ssa_p (cfun))
6051 res = make_ssa_name (type);
6053 res = create_tmp_reg (type);
6055 if (code == BIT_FIELD_REF)
6056 stmt = gimple_build_assign (res, code,
6057 build3 (code, type, op0, op1, op2));
6059 stmt = gimple_build_assign (res, code, op0, op1, op2);
6060 gimple_set_location (stmt, loc);
6061 gimple_seq_add_stmt_without_update (seq, stmt);
6066 /* Build the call FN (ARG0) with a result of type TYPE
6067 (or no result if TYPE is void) with location LOC,
6068 simplifying it first if possible. Returns the built
6069 expression value (or NULL_TREE if TYPE is void) and appends
6070 statements possibly defining it to SEQ. */
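/* Illustrative use (hypothetical caller): emitting sqrt (x) ahead of the
   statement GSI points to might look like

     gimple_seq stmts = NULL;
     tree s = gimple_build (&stmts, loc, BUILT_IN_SQRT, type, x);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   If gimple_simplify already folds the call (e.g. for a constant X), no call
   statement is emitted and S is simply the folded value.  */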
6073 gimple_build (gimple_seq *seq, location_t loc,
6074 enum built_in_function fn, tree type, tree arg0)
6076 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6079 tree decl = builtin_decl_implicit (fn);
6080 gimple *stmt = gimple_build_call (decl, 1, arg0);
6081 if (!VOID_TYPE_P (type))
6083 if (gimple_in_ssa_p (cfun))
6084 res = make_ssa_name (type);
6086 res = create_tmp_reg (type);
6087 gimple_call_set_lhs (stmt, res);
6089 gimple_set_location (stmt, loc);
6090 gimple_seq_add_stmt_without_update (seq, stmt);
6095 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6096 (or no result if TYPE is void) with location LOC,
6097 simplifying it first if possible. Returns the built
6098 expression value (or NULL_TREE if TYPE is void) and appends
6099 statements possibly defining it to SEQ. */
6102 gimple_build (gimple_seq *seq, location_t loc,
6103 enum built_in_function fn, tree type, tree arg0, tree arg1)
6105 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6108 tree decl = builtin_decl_implicit (fn);
6109 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6110 if (!VOID_TYPE_P (type))
6112 if (gimple_in_ssa_p (cfun))
6113 res = make_ssa_name (type);
6115 res = create_tmp_reg (type);
6116 gimple_call_set_lhs (stmt, res);
6118 gimple_set_location (stmt, loc);
6119 gimple_seq_add_stmt_without_update (seq, stmt);
6124 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6125 (or no result if TYPE is void) with location LOC,
6126 simplifying it first if possible. Returns the built
6127 expression value (or NULL_TREE if TYPE is void) and appends
6128 statements possibly defining it to SEQ. */
6131 gimple_build (gimple_seq *seq, location_t loc,
6132 enum built_in_function fn, tree type,
6133 tree arg0, tree arg1, tree arg2)
6135 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6136 seq, gimple_build_valueize);
6139 tree decl = builtin_decl_implicit (fn);
6140 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6141 if (!VOID_TYPE_P (type))
6143 if (gimple_in_ssa_p (cfun))
6144 res = make_ssa_name (type);
6146 res = create_tmp_reg (type);
6147 gimple_call_set_lhs (stmt, res);
6149 gimple_set_location (stmt, loc);
6150 gimple_seq_add_stmt_without_update (seq, stmt);
6155 /* Build the conversion (TYPE) OP with a result of type TYPE
6156 with location LOC if such conversion is necessary in GIMPLE,
6157 simplifying it first.
6158 Returns the built expression value and appends
6159 statements possibly defining it to SEQ. */
6162 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6164 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6166 return gimple_build (seq, loc, NOP_EXPR, type, op);
6169 /* Build the conversion (ptrofftype) OP with a result of a type
6170 compatible with ptrofftype with location LOC if such conversion
6171 is necessary in GIMPLE, simplifying it first.
6172 Returns the built expression value and appends
6173 statements possibly defining it to SEQ. */
6176 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6178 if (ptrofftype_p (TREE_TYPE (op)))
6180 return gimple_convert (seq, loc, sizetype, op);
6183 /* Return true if the result of assignment STMT is known to be non-negative.
6184 If the return value is based on the assumption that signed overflow is
6185 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6186 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6189 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6192 enum tree_code code = gimple_assign_rhs_code (stmt);
6193 switch (get_gimple_rhs_class (code))
6195 case GIMPLE_UNARY_RHS:
6196 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6197 gimple_expr_type (stmt),
6198 gimple_assign_rhs1 (stmt),
6199 strict_overflow_p, depth);
6200 case GIMPLE_BINARY_RHS:
6201 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6202 gimple_expr_type (stmt),
6203 gimple_assign_rhs1 (stmt),
6204 gimple_assign_rhs2 (stmt),
6205 strict_overflow_p, depth);
6206 case GIMPLE_TERNARY_RHS:
6208 case GIMPLE_SINGLE_RHS:
6209 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6210 strict_overflow_p, depth);
6211 case GIMPLE_INVALID_RHS:
6217 /* Return true if the return value of call STMT is known to be non-negative.
6218 If the return value is based on the assumption that signed overflow is
6219 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6220 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6223 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6226 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6227 gimple_call_arg (stmt, 0) : NULL_TREE;
6228 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6229 gimple_call_arg (stmt, 1) : NULL_TREE;
6231 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6232 gimple_call_fndecl (stmt),
6235 strict_overflow_p, depth);
6238 /* Return true if the result of PHI STMT is known to be non-negative.
6239 If the return value is based on the assumption that signed overflow is
6240 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6241 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6244 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6247 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6249 tree arg = gimple_phi_arg_def (stmt, i);
6250 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6256 /* Return true if STMT is known to compute a non-negative value.
6257 If the return value is based on the assumption that signed overflow is
6258 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6259 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6262 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6265 switch (gimple_code (stmt))
6268 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6271 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6274 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6281 /* Return true if the floating-point value computed by assignment STMT
6282 is known to have an integer value. We also allow +Inf, -Inf and NaN
6283 to be considered integer values.
6285 DEPTH is the current nesting depth of the query. */
6288 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6290 enum tree_code code = gimple_assign_rhs_code (stmt);
6291 switch (get_gimple_rhs_class (code))
6293 case GIMPLE_UNARY_RHS:
6294 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6295 gimple_assign_rhs1 (stmt), depth);
6296 case GIMPLE_BINARY_RHS:
6297 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6298 gimple_assign_rhs1 (stmt),
6299 gimple_assign_rhs2 (stmt), depth);
6300 case GIMPLE_TERNARY_RHS:
6302 case GIMPLE_SINGLE_RHS:
6303 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6304 case GIMPLE_INVALID_RHS:
6310 /* Return true if the floating-point value computed by call STMT is known
6311 to have an integer value. We also allow +Inf, -Inf and NaN to be
6312 considered integer values.
6314 DEPTH is the current nesting depth of the query. */
6317 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6319 tree arg0 = (gimple_call_num_args (stmt) > 0
6320 ? gimple_call_arg (stmt, 0)
6322 tree arg1 = (gimple_call_num_args (stmt) > 1
6323 ? gimple_call_arg (stmt, 1)
6325 return integer_valued_real_call_p (gimple_call_fndecl (stmt),
6329 /* Return true if the floating-point result of phi STMT is known to have
6330 an integer value. We also allow +Inf, -Inf and NaN to be considered
6333 DEPTH is the current nesting depth of the query. */
6336 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6338 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6340 tree arg = gimple_phi_arg_def (stmt, i);
6341 if (!integer_valued_real_single_p (arg, depth + 1))
6347 /* Return true if the floating-point value computed by STMT is known
6348 to have an integer value. We also allow +Inf, -Inf and NaN to be
6349 considered integer values.
6351 DEPTH is the current nesting depth of the query. */
6354 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6356 switch (gimple_code (stmt))
6359 return gimple_assign_integer_valued_real_p (stmt, depth);
6361 return gimple_call_integer_valued_real_p (stmt, depth);
6363 return gimple_phi_integer_valued_real_p (stmt, depth);