1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
36 #include "stor-layout.h"
38 #include "gimple-fold.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
60 /* Return true when DECL can be referenced from current unit.
61 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
62 We can get declarations that are not possible to reference for various
65 1) When analyzing C++ virtual tables.
66 C++ virtual tables do have known constructors even
67 when they are keyed to other compilation unit.
68 Those tables can contain pointers to methods and vars
69 in other units. Those methods have both STATIC and EXTERNAL
71 2) In WHOPR mode devirtualization might lead to reference
72 to method that was partitioned elsewhere.
73 In this case we have static VAR_DECL or FUNCTION_DECL
74 that has no corresponding callgraph/varpool node
76 3) COMDAT functions referred by external vtables that
77 we devirtualize only during final compilation stage.
78 At this time we already decided that we will not output
79 the function body and thus we can't reference the symbol
83 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
86 struct cgraph_node *node;
/* Abstract decls (e.g. abstract origins for debug info) are never output
   and therefore never referencable.  */
89 if (DECL_ABSTRACT_P (decl))
92 /* We are concerned only about static/external vars and functions. */
93 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
94 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
97 /* Static objects can be referred only if they were not optimized out yet. */
98 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
100 /* Before we start optimizing unreachable code we can be sure all
101 static objects are defined. */
102 if (symtab->function_flags_ready)
104 snode = symtab_node::get (decl);
105 if (!snode || !snode->definition)
107 node = dyn_cast <cgraph_node *> (snode);
/* A function body fully inlined into another is not output on its own.  */
108 return !node || !node->global.inlined_to;
111 /* We will later output the initializer, so we can refer to it.
112 So we are concerned only when DECL comes from initializer of
113 external var or var that has been optimized out. */
115 || TREE_CODE (from_decl) != VAR_DECL
116 || (!DECL_EXTERNAL (from_decl)
117 && (vnode = varpool_node::get (from_decl)) != NULL
118 && vnode->definition)
120 && (vnode = varpool_node::get (from_decl)) != NULL
121 && vnode->in_other_partition))
123 /* We are folding reference from external vtable. The vtable may refer
124 to a symbol keyed to other compilation unit. The other compilation
125 unit may be in separate DSO and the symbol may be hidden. */
126 if (DECL_VISIBILITY_SPECIFIED (decl)
127 && DECL_EXTERNAL (decl)
128 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
129 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
131 /* When function is public, we always can introduce new reference.
132 Exception are the COMDAT functions where introducing a direct
133 reference imply need to include function body in the current unit. */
134 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
136 /* We have COMDAT. We are going to check if we still have definition
137 or if the definition is going to be output in other partition.
138 Bypass this when gimplifying; all needed functions will be produced.
140 As observed in PR20991 for already optimized out comdat virtual functions
141 it may be tempting to not necessarily give up because the copy will be
142 output elsewhere when corresponding vtable is output.
143 This is however not possible - ABI specify that COMDATs are output in
144 units where they are used and when the other unit was compiled with LTO
145 it is possible that vtable was kept public while the function itself
147 if (!symtab->function_flags_ready)
150 snode = symtab_node::get (decl);
152 || ((!snode->definition || DECL_EXTERNAL (decl))
153 && (!snode->in_other_partition
154 || (!snode->forced_by_abi && !snode->force_output))))
156 node = dyn_cast <cgraph_node *> (snode);
157 return !node || !node->global.inlined_to;
160 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
161 acceptable form for is_gimple_min_invariant.
162 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
165 canonicalize_constructor_val (tree cval, tree from_decl)
167 tree orig_cval = cval;
/* Rewrite invariant PTR p+ CST into &MEM_REF[PTR, CST] so the result is
   a plain ADDR_EXPR that is_gimple_min_invariant can accept.  */
169 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
170 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
172 tree ptr = TREE_OPERAND (cval, 0);
173 if (is_gimple_min_invariant (ptr))
174 cval = build1_loc (EXPR_LOCATION (cval),
175 ADDR_EXPR, TREE_TYPE (ptr),
176 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
178 fold_convert (ptr_type_node,
179 TREE_OPERAND (cval, 1))));
181 if (TREE_CODE (cval) == ADDR_EXPR)
183 tree base = NULL_TREE;
/* Strip a compound literal down to the anonymous decl it creates.  */
184 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
186 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
188 TREE_OPERAND (cval, 0) = base;
191 base = get_base_address (TREE_OPERAND (cval, 0));
/* Give up on addresses of decls the current unit must not refer to.  */
195 if ((TREE_CODE (base) == VAR_DECL
196 || TREE_CODE (base) == FUNCTION_DECL)
197 && !can_refer_decl_in_current_unit_p (base, from_decl))
199 if (TREE_TYPE (base) == error_mark_node)
/* Taking the address forces the variable addressable.  */
201 if (TREE_CODE (base) == VAR_DECL)
202 TREE_ADDRESSABLE (base) = 1;
203 else if (TREE_CODE (base) == FUNCTION_DECL)
205 /* Make sure we create a cgraph node for functions we'll reference.
206 They can be non-existent if the reference comes from an entry
207 of an external vtable for example. */
208 cgraph_node::get_create (base);
210 /* Fixup types in global initializers. */
211 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
212 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
214 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
215 cval = fold_convert (TREE_TYPE (orig_cval), cval);
/* Constants with TREE_OVERFLOW set are not valid GIMPLE invariants;
   return the same value with the flag dropped.  */
218 if (TREE_OVERFLOW_P (cval))
219 return drop_tree_overflow (cval);
223 /* If SYM is a constant variable with known value, return the value.
224 NULL_TREE is returned otherwise. */
227 get_symbol_constant_value (tree sym)
229 tree val = ctor_for_folding (sym);
230 if (val != error_mark_node)
/* Canonicalize the initializer so it is usable as a GIMPLE invariant.  */
234 val = canonicalize_constructor_val (unshare_expr (val), sym);
235 if (val && is_gimple_min_invariant (val))
240 /* Variables declared 'const' without an initializer
241 have zero as the initializer if they may not be
242 overridden at link or run time. */
244 && is_gimple_reg_type (TREE_TYPE (sym)))
245 return build_zero_cst (TREE_TYPE (sym));
253 /* Subroutine of fold_stmt. We perform several simplifications of the
254 memory reference tree EXPR and make sure to re-gimplify them properly
255 after propagation of constant addresses. IS_LHS is true if the
256 reference is supposed to be an lvalue. */
259 maybe_fold_reference (tree expr, bool is_lhs)
/* Unary wrappers around a constant operand fold via fold_unary_loc.  */
263 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
264 || TREE_CODE (expr) == REALPART_EXPR
265 || TREE_CODE (expr) == IMAGPART_EXPR)
266 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
267 return fold_unary_loc (EXPR_LOCATION (expr),
270 TREE_OPERAND (expr, 0));
/* BIT_FIELD_REF of a constant folds via fold_ternary_loc.  */
271 else if (TREE_CODE (expr) == BIT_FIELD_REF
272 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
273 return fold_ternary_loc (EXPR_LOCATION (expr),
276 TREE_OPERAND (expr, 0),
277 TREE_OPERAND (expr, 1),
278 TREE_OPERAND (expr, 2));
/* Otherwise try constant-folding an aggregate load; only accept results
   that are valid GIMPLE invariants.  */
281 && (result = fold_const_aggregate_ref (expr))
282 && is_gimple_min_invariant (result))
289 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
290 replacement rhs for the statement or NULL_TREE if no simplification
291 could be made. It is assumed that the operands have been previously
295 fold_gimple_assign (gimple_stmt_iterator *si)
297 gimple *stmt = gsi_stmt (*si);
298 enum tree_code subcode = gimple_assign_rhs_code (stmt);
299 location_t loc = gimple_location (stmt);
301 tree result = NULL_TREE;
/* Dispatch on the shape of the RHS (single operand, unary, binary, ...).  */
303 switch (get_gimple_rhs_class (subcode))
305 case GIMPLE_SINGLE_RHS:
307 tree rhs = gimple_assign_rhs1 (stmt);
/* Clobbers are kept as-is.  */
309 if (TREE_CLOBBER_P (rhs))
312 if (REFERENCE_CLASS_P (rhs))
313 return maybe_fold_reference (rhs, false);
/* Try to devirtualize an OBJ_TYPE_REF using type analysis.  */
315 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
317 tree val = OBJ_TYPE_REF_EXPR (rhs);
318 if (is_gimple_min_invariant (val))
320 else if (flag_devirtualize && virtual_method_call_p (rhs))
323 vec <cgraph_node *>targets
324 = possible_polymorphic_call_targets (rhs, stmt, &final);
325 if (final && targets.length () <= 1 && dbg_cnt (devirt))
327 if (dump_enabled_p ())
329 location_t loc = gimple_location_safe (stmt);
330 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
331 "resolving virtual function address "
332 "reference to function %s\n",
333 targets.length () == 1
334 ? targets[0]->name ()
337 if (targets.length () == 1)
339 val = fold_convert (TREE_TYPE (val),
340 build_fold_addr_expr_loc
341 (loc, targets[0]->decl));
342 STRIP_USELESS_TYPE_CONVERSION (val);
345 /* We can not use __builtin_unreachable here because it
346 can not have address taken. */
347 val = build_int_cst (TREE_TYPE (val), 0);
/* For &REF fold the inner reference and re-take its address.  */
353 else if (TREE_CODE (rhs) == ADDR_EXPR)
355 tree ref = TREE_OPERAND (rhs, 0);
356 tree tem = maybe_fold_reference (ref, true);
358 && TREE_CODE (tem) == MEM_REF
359 && integer_zerop (TREE_OPERAND (tem, 1)))
360 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0))
362 result = fold_convert (TREE_TYPE (rhs),
363 build_fold_addr_expr_loc (loc, tem));
364 else if (TREE_CODE (ref) == MEM_REF
365 && integer_zerop (TREE_OPERAND (ref, 1)))
366 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
370 /* Strip away useless type conversions. Both the
371 NON_LVALUE_EXPR that may have been added by fold, and
372 "useless" type conversions that might now be apparent
373 due to propagation. */
374 STRIP_USELESS_TYPE_CONVERSION (result);
376 if (result != rhs && valid_gimple_rhs_p (result))
381 else if (TREE_CODE (rhs) == CONSTRUCTOR
382 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
384 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
388 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
389 if (! CONSTANT_CLASS_P (val))
392 return build_vector_from_ctor (TREE_TYPE (rhs),
393 CONSTRUCTOR_ELTS (rhs));
/* A bare decl may have a known constant value.  */
396 else if (DECL_P (rhs))
397 return get_symbol_constant_value (rhs);
401 case GIMPLE_UNARY_RHS:
404 case GIMPLE_BINARY_RHS:
407 case GIMPLE_TERNARY_RHS:
408 result = fold_ternary_loc (loc, subcode,
409 TREE_TYPE (gimple_assign_lhs (stmt)),
410 gimple_assign_rhs1 (stmt),
411 gimple_assign_rhs2 (stmt),
412 gimple_assign_rhs3 (stmt));
416 STRIP_USELESS_TYPE_CONVERSION (result);
417 if (valid_gimple_rhs_p (result))
422 case GIMPLE_INVALID_RHS:
430 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
431 adjusting the replacement stmts location and virtual operands.
432 If the statement has a lhs the last stmt in the sequence is expected
433 to assign to that lhs. */
436 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
438 gimple *stmt = gsi_stmt (*si_p)
/* Propagate the replaced statement's location onto the new sequence.  */
440 if (gimple_has_location (stmt))
441 annotate_all_with_location (stmts, gimple_location (stmt));
443 /* First iterate over the replacement statements backward, assigning
444 virtual operands to their defining statements. */
445 gimple *laststore = NULL;
446 for (gimple_stmt_iterator i = gsi_last (stmts);
447 !gsi_end_p (i); gsi_prev (&i))
449 gimple *new_stmt = gsi_stmt (i);
/* Only statements that may store need a VDEF: aggregate assignments and
   calls that are not NOVOPS/pure/const/noreturn.  */
450 if ((gimple_assign_single_p (new_stmt)
451 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
452 || (is_gimple_call (new_stmt)
453 && (gimple_call_flags (new_stmt)
454 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
/* The last store reuses the original statement's VDEF; earlier stores
   get fresh SSA names.  */
458 vdef = gimple_vdef (stmt);
460 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
461 gimple_set_vdef (new_stmt, vdef);
462 if (vdef && TREE_CODE (vdef) == SSA_NAME)
463 SSA_NAME_DEF_STMT (vdef) = new_stmt;
464 laststore = new_stmt;
468 /* Second iterate over the statements forward, assigning virtual
469 operands to their uses. */
470 tree reaching_vuse = gimple_vuse (stmt);
471 for (gimple_stmt_iterator i = gsi_start (stmts);
472 !gsi_end_p (i); gsi_next (&i))
474 gimple *new_stmt = gsi_stmt (i);
475 /* If the new statement possibly has a VUSE, update it with exact SSA
476 name we know will reach this one. */
477 if (gimple_has_mem_ops (new_stmt))
478 gimple_set_vuse (new_stmt, reaching_vuse);
479 gimple_set_modified (new_stmt, true);
480 if (gimple_vdef (new_stmt))
481 reaching_vuse = gimple_vdef (new_stmt);
484 /* If the new sequence does not do a store release the virtual
485 definition of the original statement. */
487 && reaching_vuse == gimple_vuse (stmt))
489 tree vdef = gimple_vdef (stmt);
491 && TREE_CODE (vdef) == SSA_NAME)
493 unlink_stmt_vdef (stmt);
494 release_ssa_name (vdef);
498 /* Finally replace the original statement with the sequence. */
499 gsi_replace_with_seq (si_p, stmts, false);
502 /* Convert EXPR into a GIMPLE value suitable for substitution on the
503 RHS of an assignment. Insert the necessary statements before
504 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
505 is replaced. If the call is expected to produces a result, then it
506 is replaced by an assignment of the new RHS to the result variable.
507 If the result is to be ignored, then the call is replaced by a
508 GIMPLE_NOP. A proper VDEF chain is retained by making the first
509 VUSE and the last VDEF of the whole sequence be the same as the replaced
510 statement and using new SSA names for stores in between. */
513 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
516 gimple *stmt, *new_stmt;
517 gimple_stmt_iterator i;
518 gimple_seq stmts = NULL;
520 stmt = gsi_stmt (*si_p);
/* This helper is only valid for replacing GIMPLE_CALLs.  */
522 gcc_assert (is_gimple_call (stmt));
524 push_gimplify_context (gimple_in_ssa_p (cfun));
526 lhs = gimple_call_lhs (stmt);
/* No LHS: gimplify EXPR for its side effects only.  */
527 if (lhs == NULL_TREE)
529 gimplify_and_add (expr, &stmts);
530 /* We can end up with folding a memcpy of an empty class assignment
531 which gets optimized away by C++ gimplification. */
532 if (gimple_seq_empty_p (stmts))
534 pop_gimplify_context (NULL);
535 if (gimple_in_ssa_p (cfun))
537 unlink_stmt_vdef (stmt);
540 gsi_replace (si_p, gimple_build_nop (), false);
/* With an LHS, force EXPR into a GIMPLE value and append the final
   assignment to it at the end of the gimplified sequence.  */
546 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
547 new_stmt = gimple_build_assign (lhs, tmp);
548 i = gsi_last (stmts);
549 gsi_insert_after_without_update (&i, new_stmt,
550 GSI_CONTINUE_LINKING);
553 pop_gimplify_context (NULL);
/* Splice the sequence in, preserving the virtual operand chain.  */
555 gsi_replace_with_seq_vops (si_p, stmts);
559 /* Replace the call at *GSI with the gimple value VAL. */
562 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
564 gimple *stmt = gsi_stmt (*gsi);
565 tree lhs = gimple_call_lhs (stmt);
/* With an LHS, replace the call by LHS = VAL (converted if needed);
   otherwise the call degenerates to a no-op.  */
569 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
570 val = fold_convert (TREE_TYPE (lhs), val);
571 repl = gimple_build_assign (lhs, val);
574 repl = gimple_build_nop ();
/* The replacement does not store, so release the call's VDEF.  */
575 tree vdef = gimple_vdef (stmt);
576 if (vdef && TREE_CODE (vdef) == SSA_NAME)
578 unlink_stmt_vdef (stmt);
579 release_ssa_name (vdef);
581 gsi_replace (gsi, repl, false);
584 /* Replace the call at *GSI with the new call REPL and fold that
588 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
590 gimple *stmt = gsi_stmt (*gsi);
/* REPL inherits the original call's LHS, location and virtual operands.  */
591 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
592 gimple_set_location (repl, gimple_location (stmt));
593 if (gimple_vdef (stmt)
594 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
596 gimple_set_vdef (repl, gimple_vdef (stmt));
597 gimple_set_vuse (repl, gimple_vuse (stmt));
598 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
600 gsi_replace (gsi, repl, false);
604 /* Return true if VAR is a VAR_DECL or a component thereof. */
607 var_decl_component_p (tree var)
/* Strip handled components (COMPONENT_REF, ARRAY_REF, ...) down to the
   base object.  NOTE(review): INNER's declaration/initialization is on a
   line not visible here — presumably initialized from VAR; confirm.  */
610 while (handled_component_p (inner))
611 inner = TREE_OPERAND (inner, 0);
612 return SSA_VAR_P (inner);
615 /* Fold function call to builtin mem{{,p}cpy,move}. Return
616 false if no simplification can be made.
617 If ENDP is 0, return DEST (like memcpy).
618 If ENDP is 1, return DEST+LEN (like mempcpy).
619 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
620 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
624 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
625 tree dest, tree src, int endp)
627 gimple *stmt = gsi_stmt (*gsi);
628 tree lhs = gimple_call_lhs (stmt);
629 tree len = gimple_call_arg (stmt, 2);
630 tree destvar, srcvar;
631 location_t loc = gimple_location (stmt);
633 /* If the LEN parameter is zero, return DEST. */
634 if (integer_zerop (len))
637 if (gimple_call_lhs (stmt))
638 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
640 repl = gimple_build_nop ();
/* The zero-length copy stores nothing; drop the call's VDEF.  */
641 tree vdef = gimple_vdef (stmt);
642 if (vdef && TREE_CODE (vdef) == SSA_NAME)
644 unlink_stmt_vdef (stmt);
645 release_ssa_name (vdef);
647 gsi_replace (gsi, repl, false);
651 /* If SRC and DEST are the same (and not volatile), return
652 DEST{,+LEN,+LEN-1}. */
653 if (operand_equal_p (src, dest, 0))
655 unlink_stmt_vdef (stmt);
656 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
657 release_ssa_name (gimple_vdef (stmt));
660 gsi_replace (gsi, gimple_build_nop (), false);
667 tree srctype, desttype;
668 unsigned int src_align, dest_align;
671 /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
672 pointers as wide integer) and also may result in huge function
673 size because of inlined bounds copy. Thus don't inline for
674 functions we want to instrument. */
675 if (flag_check_pointer_bounds
676 && chkp_instrumentable_p (cfun->decl)
677 /* Even if data may contain pointers we can inline if copy
678 less than a pointer size. */
679 && (!tree_fits_uhwi_p (len)
680 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
683 /* Build accesses at offset zero with a ref-all character type. */
684 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
687 /* If we can perform the copy efficiently with first doing all loads
688 and then all stores inline it that way. Currently efficiently
689 means that we can load all the memory into a single integer
690 register which is what MOVE_MAX gives us. */
691 src_align = get_pointer_alignment (src);
692 dest_align = get_pointer_alignment (dest);
693 if (tree_fits_uhwi_p (len)
694 && compare_tree_int (len, MOVE_MAX) <= 0
695 /* ??? Don't transform copies from strings with known length this
696 confuses the tree-ssa-strlen.c. This doesn't handle
697 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
699 && !c_strlen (src, 2))
701 unsigned ilen = tree_to_uhwi (len);
/* Only power-of-two sizes can map onto a single integer mode.  */
702 if (pow2p_hwi (ilen))
704 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
706 && TYPE_MODE (type) != BLKmode
707 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
709 /* If the destination pointer is not aligned we must be able
710 to emit an unaligned store. */
711 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
712 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
713 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
714 != CODE_FOR_nothing)))
717 tree desttype = type;
718 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
719 srctype = build_aligned_type (type, src_align);
720 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
721 tree tem = fold_const_aggregate_ref (srcmem);
/* Give up if the source is misaligned and the target can neither do
   slow-unaligned loads nor movmisalign.  */
724 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
725 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
727 && (optab_handler (movmisalign_optab,
729 == CODE_FOR_nothing))
/* Materialize the load into a register first, then store it.  */
734 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
736 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
737 if (gimple_in_ssa_p (cfun))
738 srcmem = make_ssa_name (TREE_TYPE (srcmem),
741 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
742 gimple_assign_set_lhs (new_stmt, srcmem);
743 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
744 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
746 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
747 desttype = build_aligned_type (type, dest_align);
749 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
752 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
753 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
754 if (gimple_vdef (new_stmt)
755 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
756 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
759 gsi_replace (gsi, new_stmt, false);
762 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
771 /* Both DEST and SRC must be pointer types.
772 ??? This is what old code did. Is the testing for pointer types
775 If either SRC is readonly or length is 1, we can use memcpy. */
776 if (!dest_align || !src_align)
778 if (readonly_data_expr (src)
779 || (tree_fits_uhwi_p (len)
780 && (MIN (src_align, dest_align) / BITS_PER_UNIT
781 >= tree_to_uhwi (len))))
783 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
786 gimple_call_set_fndecl (stmt, fn);
787 gimple_call_set_arg (stmt, 0, dest);
788 gimple_call_set_arg (stmt, 1, src);
793 /* If *src and *dest can't overlap, optimize into memcpy as well. */
794 if (TREE_CODE (src) == ADDR_EXPR
795 && TREE_CODE (dest) == ADDR_EXPR)
797 tree src_base, dest_base, fn;
798 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
799 HOST_WIDE_INT maxsize;
801 srcvar = TREE_OPERAND (src, 0);
802 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
803 if (src_base == NULL)
805 destvar = TREE_OPERAND (dest, 0);
806 dest_base = get_addr_base_and_unit_offset (destvar,
808 if (dest_base == NULL)
810 if (tree_fits_uhwi_p (len))
811 maxsize = tree_to_uhwi (len);
/* Same decl: overlapping byte ranges mean we must keep memmove.  */
814 if (SSA_VAR_P (src_base)
815 && SSA_VAR_P (dest_base))
817 if (operand_equal_p (src_base, dest_base, 0)
818 && ranges_overlap_p (src_offset, maxsize,
819 dest_offset, maxsize))
/* Same base pointer via MEM_REFs: fold the MEM_REF offsets into the
   unit offsets and redo the overlap check.  */
822 else if (TREE_CODE (src_base) == MEM_REF
823 && TREE_CODE (dest_base) == MEM_REF)
825 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
826 TREE_OPERAND (dest_base, 0), 0))
828 offset_int off = mem_ref_offset (src_base) + src_offset;
829 if (!wi::fits_shwi_p (off))
831 src_offset = off.to_shwi ();
833 off = mem_ref_offset (dest_base) + dest_offset;
834 if (!wi::fits_shwi_p (off))
836 dest_offset = off.to_shwi ();
837 if (ranges_overlap_p (src_offset, maxsize,
838 dest_offset, maxsize))
844 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
847 gimple_call_set_fndecl (stmt, fn);
848 gimple_call_set_arg (stmt, 0, dest);
849 gimple_call_set_arg (stmt, 1, src);
854 /* If the destination and source do not alias optimize into
856 if ((is_gimple_min_invariant (dest)
857 || TREE_CODE (dest) == SSA_NAME)
858 && (is_gimple_min_invariant (src)
859 || TREE_CODE (src) == SSA_NAME))
862 ao_ref_init_from_ptr_and_size (&destr, dest, len);
863 ao_ref_init_from_ptr_and_size (&srcr, src, len);
864 if (!refs_may_alias_p_1 (&destr, &srcr, false))
867 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
870 gimple_call_set_fndecl (stmt, fn);
871 gimple_call_set_arg (stmt, 0, dest);
872 gimple_call_set_arg (stmt, 1, src);
881 if (!tree_fits_shwi_p (len))
884 This logic loses for arguments like (type *)malloc (sizeof (type)),
885 since we strip the casts of up to VOID return value from malloc.
886 Perhaps we ought to inherit type from non-VOID argument here? */
889 if (!POINTER_TYPE_P (TREE_TYPE (src))
890 || !POINTER_TYPE_P (TREE_TYPE (dest)))
892 /* In the following try to find a type that is most natural to be
893 used for the memcpy source and destination and that allows
894 the most optimization when memcpy is turned into a plain assignment
895 using that type. In theory we could always use a char[len] type
896 but that only gains us that the destination and source possibly
897 no longer will have their address taken. */
898 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
899 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
901 tree tem = TREE_OPERAND (src, 0);
903 if (tem != TREE_OPERAND (src, 0))
904 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
906 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
908 tree tem = TREE_OPERAND (dest, 0);
910 if (tem != TREE_OPERAND (dest, 0))
911 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* For pointer-to-array with a size different from LEN, look at the
   element type instead.  */
913 srctype = TREE_TYPE (TREE_TYPE (src));
914 if (TREE_CODE (srctype) == ARRAY_TYPE
915 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
917 srctype = TREE_TYPE (srctype);
919 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
921 desttype = TREE_TYPE (TREE_TYPE (dest));
922 if (TREE_CODE (desttype) == ARRAY_TYPE
923 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
925 desttype = TREE_TYPE (desttype);
927 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
929 if (TREE_ADDRESSABLE (srctype)
930 || TREE_ADDRESSABLE (desttype))
933 /* Make sure we are not copying using a floating-point mode or
934 a type whose size possibly does not match its precision. */
935 if (FLOAT_MODE_P (TYPE_MODE (desttype))
936 || TREE_CODE (desttype) == BOOLEAN_TYPE
937 || TREE_CODE (desttype) == ENUMERAL_TYPE)
938 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
939 if (FLOAT_MODE_P (TYPE_MODE (srctype))
940 || TREE_CODE (srctype) == BOOLEAN_TYPE
941 || TREE_CODE (srctype) == ENUMERAL_TYPE)
942 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
950 src_align = get_pointer_alignment (src);
951 dest_align = get_pointer_alignment (dest);
952 if (dest_align < TYPE_ALIGN (desttype)
953 || src_align < TYPE_ALIGN (srctype))
/* Build the destination access as MEM_REF[&var, 0] when DEST is the
   address of a (component of a) variable covering the whole copy.  */
957 STRIP_NOPS (destvar);
958 if (TREE_CODE (destvar) == ADDR_EXPR
959 && var_decl_component_p (TREE_OPERAND (destvar, 0))
960 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
961 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
967 if (TREE_CODE (srcvar) == ADDR_EXPR
968 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
969 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
972 || src_align >= TYPE_ALIGN (desttype))
973 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
975 else if (!STRICT_ALIGNMENT)
977 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
979 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
987 if (srcvar == NULL_TREE && destvar == NULL_TREE)
/* Exactly one side decomposed: synthesize the other as a MEM_REF of the
   raw pointer with matching (possibly under-aligned) type.  */
990 if (srcvar == NULL_TREE)
993 if (src_align >= TYPE_ALIGN (desttype))
994 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
997 if (STRICT_ALIGNMENT)
999 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1001 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1004 else if (destvar == NULL_TREE)
1007 if (dest_align >= TYPE_ALIGN (srctype))
1008 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1011 if (STRICT_ALIGNMENT)
1013 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1015 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
/* Register-type copy: load into an SSA name/temporary first so the final
   statement is a simple store.  */
1020 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1022 tree tem = fold_const_aggregate_ref (srcvar);
1025 if (! is_gimple_min_invariant (srcvar))
1027 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1028 if (gimple_in_ssa_p (cfun))
1029 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1031 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1032 gimple_assign_set_lhs (new_stmt, srcvar);
1033 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1034 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1037 new_stmt = gimple_build_assign (destvar, srcvar);
1038 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1039 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1040 if (gimple_vdef (new_stmt)
1041 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1042 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1045 gsi_replace (gsi, new_stmt, false);
1048 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
/* Compute the value to return per ENDP: DEST (0/3), DEST+LEN (1) or
   DEST+LEN-1 (2), and assign it to the call's LHS.  */
1052 gimple_seq stmts = NULL;
1053 if (endp == 0 || endp == 3)
1056 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1058 if (endp == 2 || endp == 1)
1060 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1061 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1062 TREE_TYPE (dest), dest, len);
1065 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1066 gimple *repl = gimple_build_assign (lhs, dest);
1067 gsi_replace (gsi, repl, false);
1071 /* Fold function call to builtin memset or bzero at *GSI setting the
1072 memory of size LEN to VAL. Return whether a simplification was made. */
1075 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1077 gimple *stmt = gsi_stmt (*gsi);
1079 unsigned HOST_WIDE_INT length, cval;
1081 /* If the LEN parameter is zero, return DEST. */
1082 if (integer_zerop (len))
1084 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
/* Beyond the zero-length case we need a compile-time length and value.  */
1088 if (! tree_fits_uhwi_p (len))
1091 if (TREE_CODE (c) != INTEGER_CST)
1094 tree dest = gimple_call_arg (stmt, 0);
1096 if (TREE_CODE (var) != ADDR_EXPR)
1099 var = TREE_OPERAND (var, 0);
1100 if (TREE_THIS_VOLATILE (var))
1103 etype = TREE_TYPE (var);
1104 if (TREE_CODE (etype) == ARRAY_TYPE)
1105 etype = TREE_TYPE (etype);
/* Only integral/pointer element types are turned into a single store.  */
1107 if (!INTEGRAL_TYPE_P (etype)
1108 && !POINTER_TYPE_P (etype))
1111 if (! var_decl_component_p (var))
/* The store must cover exactly the whole element and be aligned.  */
1114 length = tree_to_uhwi (len);
1115 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1116 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1119 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1122 if (integer_zerop (c))
/* The byte-replication below assumes 8-bit bytes and a HWI of at most
   64 bits.  */
1126 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1129 cval = TREE_INT_CST_LOW (c);
/* Replicate the byte across the whole constant (shift split in two
   steps to avoid an undefined shift by the full width).  */
1133 cval |= (cval << 31) << 1;
1136 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1137 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1138 gimple_set_vuse (store, gimple_vuse (stmt));
/* The new store takes over the call's virtual definition.  */
1139 tree vdef = gimple_vdef (stmt);
1140 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1142 gimple_set_vdef (store, gimple_vdef (stmt));
1143 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1145 gsi_insert_before (gsi, store, GSI_SAME_STMT);
/* memset returns DEST; keep that value if the call's result is used.  */
1146 if (gimple_call_lhs (stmt))
1148 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1149 gsi_replace (gsi, asgn, false);
1153 gimple_stmt_iterator gsi2 = *gsi;
1155 gsi_remove (&gsi2, true);
1162 /* Obtain the minimum and maximum string length or minimum and maximum
1163 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1164 If ARG is an SSA name variable, follow its use-def chains. When
1165 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1166 if we are unable to determine the length or value, return False.
1167 VISITED is a bitmap of visited variables.
1168 TYPE is 0 if string length should be obtained, 1 for maximum string
1169 length and 2 for maximum value ARG can have.
1170 When FUZZY is set and the length of a string cannot be determined,
1171 the function instead considers as the maximum possible length the
1172 size of a character array it may refer to. */
1175 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1181 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1182 but MINLEN may be cleared during the execution of the function. */
1183 tree *minlen = length;
1184 tree *const maxlen = length + 1;
/* Non-SSA-name arguments (addresses, component refs, constants) are
   handled directly, without following use-def chains.  */
1186 if (TREE_CODE (arg) != SSA_NAME)
1188 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1189 if (TREE_CODE (arg) == ADDR_EXPR
1190 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1191 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1193 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1194 if (TREE_CODE (aop0) == INDIRECT_REF
1195 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
/* Strip the &(*p)[0] wrapper and retry on the underlying SSA name.  */
1196 return get_range_strlen (TREE_OPERAND (aop0, 0),
1197 length, visited, type, fuzzy);
/* Reject values unusable as a length/value: non-constants and negative
   constants.  NOTE(review): the computation of VAL preceding this test
   is not visible in this excerpt.  */
1203 if (TREE_CODE (val) != INTEGER_CST
1204 || tree_int_cst_sgn (val) < 0)
1208 val = c_strlen (arg, 1);
1212 if (TREE_CODE (arg) == ADDR_EXPR)
1213 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1214 visited, type, fuzzy)
1216 if (TREE_CODE (arg) == COMPONENT_REF
1217 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1219 /* Use the type of the member array to determine the upper
1220 bound on the length of the array. This may be overly
1221 optimistic if the array itself isn't NUL-terminated and
1222 the caller relies on the subsequent member to contain
1224 arg = TREE_OPERAND (arg, 1);
1225 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1226 if (!val || integer_zerop (val))
/* Subtract one from the array size to allow for the terminating NUL.  */
1228 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1230 /* Avoid using the array size as the minimum. */
1241 && TREE_CODE (*minlen) == INTEGER_CST
1242 && TREE_CODE (val) == INTEGER_CST
1243 && tree_int_cst_lt (val, *minlen))))
/* Merge VAL into the running maximum: keep the larger constant, fail
   (for TYPE == 0) when two distinct non-comparable values are seen.  */
1250 if (TREE_CODE (*maxlen) != INTEGER_CST
1251 || TREE_CODE (val) != INTEGER_CST)
1254 if (tree_int_cst_lt (*maxlen, val))
1258 else if (simple_cst_equal (val, *maxlen) != 1)
1266 /* If ARG is registered for SSA update we cannot look at its defining
1268 if (name_registered_for_update_p (arg))
1271 /* If we were already here, break the infinite cycle. */
1273 *visited = BITMAP_ALLOC (NULL);
1274 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1278 def_stmt = SSA_NAME_DEF_STMT (var);
/* Recurse through the statement defining the SSA name.  */
1280 switch (gimple_code (def_stmt))
1283 /* The RHS of the statement defining VAR must either have a
1284 constant length or come from another SSA_NAME with a constant
1286 if (gimple_assign_single_p (def_stmt)
1287 || gimple_assign_unary_nop_p (def_stmt))
1289 tree rhs = gimple_assign_rhs1 (def_stmt);
1290 return get_range_strlen (rhs, length, visited, type, fuzzy);
1292 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
/* For a COND_EXPR, both arms must yield a usable length.  */
1294 tree op2 = gimple_assign_rhs2 (def_stmt);
1295 tree op3 = gimple_assign_rhs3 (def_stmt);
1296 return get_range_strlen (op2, length, visited, type, fuzzy)
1297 && get_range_strlen (op3, length, visited, type, fuzzy);
1303 /* All the arguments of the PHI node must have the same constant
1307 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1309 tree arg = gimple_phi_arg (def_stmt, i)->def;
1311 /* If this PHI has itself as an argument, we cannot
1312 determine the string length of this argument. However,
1313 if we can find a constant string length for the other
1314 PHI args then we can still be sure that this is a
1315 constant string length. So be optimistic and just
1316 continue with the next argument. */
1317 if (arg == gimple_phi_result (def_stmt))
1320 if (!get_range_strlen (arg, length, visited, type, fuzzy))
/* An all-ones size_type constant denotes an unknown/unbounded maximum.
   NOTE(review): enclosing control flow is elided here; presumably this
   is the FUZZY fallback when a PHI argument's length is unknown.  */
1323 *maxlen = build_all_ones_cst (size_type_node);
1336 /* Determine the minimum and maximum value or string length that ARG
1337 refers to and store each in the first two elements of MINMAXLEN.
1338 For expressions that point to strings of unknown lengths that are
1339 character arrays, use the upper bound of the array as the maximum
1340 length. For example, given an expression like 'x ? array : "xyz"'
1341 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1342 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1346 void get_range_strlen (tree arg, tree minmaxlen[2])
1348 bitmap visited = NULL;
1350 minmaxlen[0] = NULL_TREE;
1351 minmaxlen[1] = NULL_TREE;
/* TYPE == 1 requests the maximum string length; FUZZY is true, so when
   the length is unknown the size of a referenced character array is
   used as the upper bound (see the worker above).  */
1353 get_range_strlen (arg, minmaxlen, &visited, 1, true);
/* Release the visited bitmap the worker may have allocated.  */
1356 BITMAP_FREE (visited);
/* Return the maximum string length (TYPE 0 or 1) or maximum value
   (TYPE 2) of ARG, or NULL_TREE if it cannot be determined.  FUZZY is
   false, so array sizes are not used as a fallback bound.  */
1360 get_maxval_strlen (tree arg, int type)
1362 bitmap visited = NULL;
1363 tree len[2] = { NULL_TREE, NULL_TREE };
1364 if (!get_range_strlen (arg, len, &visited, type, false))
1367 BITMAP_FREE (visited);
1373 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1374 If LEN is not NULL, it represents the length of the string to be
1375 copied. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): the comment above is stale -- there is no LEN
   parameter; the source length is computed below via
   get_maxval_strlen.  */
1378 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1379 tree dest, tree src)
1381 location_t loc = gimple_location (gsi_stmt (*gsi));
1384 /* If SRC and DEST are the same (and not volatile), return DEST. */
1385 if (operand_equal_p (src, dest, 0))
1387 replace_call_with_value (gsi, dest);
/* Skip the strcpy -> memcpy transformation when optimizing for size.  */
1391 if (optimize_function_for_size_p (cfun))
1394 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1398 tree len = get_maxval_strlen (src, 0);
/* Copy LEN + 1 bytes so the terminating NUL is included.  */
1402 len = fold_convert_loc (loc, size_type_node, len);
1403 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1404 len = force_gimple_operand_gsi (gsi, len, true,
1405 NULL_TREE, true, GSI_SAME_STMT);
1406 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1407 replace_call_with_call_and_fold (gsi, repl);
1411 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1412 If SLEN is not NULL, it represents the length of the source string.
1413 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): SLEN in the comment above refers to a local computed
   via get_maxval_strlen, not a parameter.  */
1416 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1417 tree dest, tree src, tree len)
1419 location_t loc = gimple_location (gsi_stmt (*gsi));
1422 /* If the LEN parameter is zero, return DEST. */
1423 if (integer_zerop (len))
1425 replace_call_with_value (gsi, dest);
1429 /* We can't compare slen with len as constants below if len is not a
1431 if (TREE_CODE (len) != INTEGER_CST)
1434 /* Now, we must be passed a constant src ptr parameter. */
1435 tree slen = get_maxval_strlen (src, 0);
1436 if (!slen || TREE_CODE (slen) != INTEGER_CST)
/* SLEN now counts the terminating NUL as well.  */
1439 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1441 /* We do not support simplification of this case, though we do
1442 support it when expanding trees into RTL. */
1443 /* FIXME: generate a call to __builtin_memset. */
/* If the source (incl. NUL) is shorter than LEN, strncpy must zero-pad
   the destination, which memcpy does not do -- so punt.  */
1444 if (tree_int_cst_lt (slen, len))
1447 /* OK transform into builtin memcpy. */
1448 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1452 len = fold_convert_loc (loc, size_type_node, len);
1453 len = force_gimple_operand_gsi (gsi, len, true,
1454 NULL_TREE, true, GSI_SAME_STMT);
1455 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1456 replace_call_with_call_and_fold (gsi, repl);
1460 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1463 Return NULL_TREE if no simplification was possible, otherwise return the
1464 simplified form of the call as a tree.
1466 The simplified form may be a constant or other expression which
1467 computes the same value, but in a more efficient manner (including
1468 calls to other builtin functions).
1470 The call may contain arguments which need to be evaluated, but
1471 which are not useful to determine the result of the call. In
1472 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1473 COMPOUND_EXPR will be an argument which must be evaluated.
1474 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1475 COMPOUND_EXPR in the chain will contain the tree for the simplified
1476 form of the builtin function call. */
1479 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1481 gimple *stmt = gsi_stmt (*gsi);
1482 location_t loc = gimple_location (stmt);
1484 const char *p = c_getstr (src);
1486 /* If the string length is zero, return the dst parameter. */
1487 if (p && *p == '\0')
1489 replace_call_with_value (gsi, dst);
/* The strlen + memcpy split below only pays off when optimizing for
   speed.  */
1493 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1496 /* See if we can store by pieces into (dst + strlen(dst)). */
1498 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1499 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1501 if (!strlen_fn || !memcpy_fn)
1504 /* If the length of the source string isn't computable don't
1505 split strcat into strlen and memcpy. */
1506 tree len = get_maxval_strlen (src, 0);
1510 /* Create strlen (dst). */
1511 gimple_seq stmts = NULL, stmts2;
1512 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1513 gimple_set_location (repl, loc);
1514 if (gimple_in_ssa_p (cfun))
1515 newdst = make_ssa_name (size_type_node);
1517 newdst = create_tmp_reg (size_type_node);
1518 gimple_call_set_lhs (repl, newdst);
1519 gimple_seq_add_stmt_without_update (&stmts, repl);
1521 /* Create (dst p+ strlen (dst)). */
1522 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1523 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1524 gimple_seq_add_seq_without_update (&stmts, stmts2);
/* Copy LEN + 1 bytes so the source's terminating NUL is included.  */
1526 len = fold_convert_loc (loc, size_type_node, len);
1527 len = size_binop_loc (loc, PLUS_EXPR, len,
1528 build_int_cst (size_type_node, 1));
1529 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1530 gimple_seq_add_seq_without_update (&stmts, stmts2);
1532 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1533 gimple_seq_add_stmt_without_update (&stmts, repl);
/* strcat returns DST; materialize that if the call's result is used.  */
1534 if (gimple_call_lhs (stmt))
1536 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1537 gimple_seq_add_stmt_without_update (&stmts, repl);
1538 gsi_replace_with_seq_vops (gsi, stmts);
1539 /* gsi now points at the assignment to the lhs, get a
1540 stmt iterator to the memcpy call.
1541 ??? We can't use gsi_for_stmt as that doesn't work when the
1542 CFG isn't built yet. */
1543 gimple_stmt_iterator gsi2 = *gsi;
1549 gsi_replace_with_seq_vops (gsi, stmts);
1555 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1556 are the arguments to the call. */
1559 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1561 gimple *stmt = gsi_stmt (*gsi);
1562 tree dest = gimple_call_arg (stmt, 0);
1563 tree src = gimple_call_arg (stmt, 1);
1564 tree size = gimple_call_arg (stmt, 2);
1570 /* If the SRC parameter is "", return DEST. */
1571 if (p && *p == '\0')
1573 replace_call_with_value (gsi, dest);
/* Only drop the check when SIZE is all-ones, i.e. the object size is
   unknown and the runtime check could never fire.  */
1577 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1580 /* If __builtin_strcat_chk is used, assume strcat is available. */
1581 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1585 gimple *repl = gimple_build_call (fn, 2, dest, src);
1586 replace_call_with_call_and_fold (gsi, repl);
1590 /* Simplify a call to the strncat builtin. */
1593 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1595 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1596 tree dst = gimple_call_arg (stmt, 0);
1597 tree src = gimple_call_arg (stmt, 1);
1598 tree len = gimple_call_arg (stmt, 2);
/* P is non-NULL only when SRC is a readable string constant.  */
1600 const char *p = c_getstr (src);
1602 /* If the requested length is zero, or the src parameter string
1603 length is zero, return the dst parameter. */
1604 if (integer_zerop (len) || (p && *p == '\0'))
1606 replace_call_with_value (gsi, dst);
1610 /* If the requested len is greater than or equal to the string
1611 length, call strcat. */
1612 if (TREE_CODE (len) == INTEGER_CST && p
1613 && compare_tree_int (len, strlen (p)) >= 0)
1615 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1617 /* If the replacement _DECL isn't initialized, don't do the
1622 gcall *repl = gimple_build_call (fn, 2, dst, src);
1623 replace_call_with_call_and_fold (gsi, repl);
1630 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1634 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1636 gimple *stmt = gsi_stmt (*gsi);
1637 tree dest = gimple_call_arg (stmt, 0);
1638 tree src = gimple_call_arg (stmt, 1);
1639 tree len = gimple_call_arg (stmt, 2);
1640 tree size = gimple_call_arg (stmt, 3);
1645 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1646 if ((p && *p == '\0')
1647 || integer_zerop (len))
1649 replace_call_with_value (gsi, dest);
1653 if (! tree_fits_uhwi_p (size))
/* SIZE is a known object size (not all-ones/unknown): we may only
   weaken the check, not drop it.  */
1656 if (! integer_all_onesp (size))
1658 tree src_len = c_strlen (src, 1);
1660 && tree_fits_uhwi_p (src_len)
1661 && tree_fits_uhwi_p (len)
1662 && ! tree_int_cst_lt (len, src_len))
1664 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1665 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1669 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1670 replace_call_with_call_and_fold (gsi, repl);
1676 /* If __builtin_strncat_chk is used, assume strncat is available. */
1677 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1681 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1682 replace_call_with_call_and_fold (gsi, repl);
1686 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1687 to the call. IGNORE is true if the value returned
1688 by the builtin will be ignored. UNLOCKED is true if this is
1689 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
1690 the known length of the string. Return NULL_TREE if no simplification
/* NOTE(review): LEN in the comment above refers to a local computed
   via get_maxval_strlen below, not a parameter.  */
1694 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1695 tree arg0, tree arg1,
1698 gimple *stmt = gsi_stmt (*gsi);
1700 /* If we're using an unlocked function, assume the other unlocked
1701 functions exist explicitly. */
1702 tree const fn_fputc = (unlocked
1703 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1704 : builtin_decl_implicit (BUILT_IN_FPUTC));
1705 tree const fn_fwrite = (unlocked
1706 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1707 : builtin_decl_implicit (BUILT_IN_FWRITE));
1709 /* If the return value is used, don't do the transformation. */
1710 if (gimple_call_lhs (stmt))
1713 /* Get the length of the string passed to fputs. If the length
1714 can't be determined, punt. */
1715 tree len = get_maxval_strlen (arg0, 0);
1717 || TREE_CODE (len) != INTEGER_CST)
/* Dispatch on the length relative to 1: shorter, equal, or longer.  */
1720 switch (compare_tree_int (len, 1))
1722 case -1: /* length is 0, delete the call entirely. */
1723 replace_call_with_value (gsi, integer_zero_node);
1726 case 0: /* length is 1, call fputc. */
1728 const char *p = c_getstr (arg0);
1734 gimple *repl = gimple_build_call (fn_fputc, 2,
1736 (integer_type_node, p[0]), arg1);
1737 replace_call_with_call_and_fold (gsi, repl);
1742 case 1: /* length is greater than 1, call fwrite. */
1744 /* If optimizing for size keep fputs. */
1745 if (optimize_function_for_size_p (cfun))
1747 /* New argument list transforming fputs(string, stream) to
1748 fwrite(string, 1, len, stream). */
1752 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1753 size_one_node, len, arg1);
1754 replace_call_with_call_and_fold (gsi, repl);
1763 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1764 DEST, SRC, LEN, and SIZE are the arguments to the call.
1765 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1766 code of the builtin. If MAXLEN is not NULL, it is maximum length
1767 passed as third argument. */
1770 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1771 tree dest, tree src, tree len, tree size,
1772 enum built_in_function fcode)
1774 gimple *stmt = gsi_stmt (*gsi);
1775 location_t loc = gimple_location (stmt);
/* The result is ignored iff the call has no LHS.  */
1776 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1779 /* If SRC and DEST are the same (and not volatile), return DEST
1780 (resp. DEST+LEN for __mempcpy_chk). */
1781 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1783 if (fcode != BUILT_IN_MEMPCPY_CHK)
1785 replace_call_with_value (gsi, dest);
/* __mempcpy_chk returns DEST + LEN; build that value explicitly.  */
1790 gimple_seq stmts = NULL;
1791 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1792 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1793 TREE_TYPE (dest), dest, len);
1794 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1795 replace_call_with_value (gsi, temp);
1800 if (! tree_fits_uhwi_p (size))
/* TYPE == 2: bound LEN by the maximum value it can take.  */
1803 tree maxlen = get_maxval_strlen (len, 2);
1804 if (! integer_all_onesp (size))
1806 if (! tree_fits_uhwi_p (len))
1808 /* If LEN is not constant, try MAXLEN too.
1809 For MAXLEN only allow optimizing into non-_ocs function
1810 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1811 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1813 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1815 /* (void) __mempcpy_chk () can be optimized into
1816 (void) __memcpy_chk (). */
1817 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1821 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1822 replace_call_with_call_and_fold (gsi, repl);
/* SIZE < MAXLEN means the check could fire at runtime; keep it.  */
1831 if (tree_int_cst_lt (size, maxlen))
1836 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1837 mem{cpy,pcpy,move,set} is available. */
1840 case BUILT_IN_MEMCPY_CHK:
1841 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1843 case BUILT_IN_MEMPCPY_CHK:
1844 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1846 case BUILT_IN_MEMMOVE_CHK:
1847 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1849 case BUILT_IN_MEMSET_CHK:
1850 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1859 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1860 replace_call_with_call_and_fold (gsi, repl);
1864 /* Fold a call to the __st[rp]cpy_chk builtin.
1865 DEST, SRC, and SIZE are the arguments to the call.
1866 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1867 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1868 strings passed as second argument. */
1871 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1873 tree src, tree size,
1874 enum built_in_function fcode)
1876 gimple *stmt = gsi_stmt (*gsi);
1877 location_t loc = gimple_location (stmt);
1878 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1881 /* If SRC and DEST are the same (and not volatile), return DEST. */
1882 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1884 replace_call_with_value (gsi, dest);
1888 if (! tree_fits_uhwi_p (size))
/* TYPE == 1: maximum possible string length of SRC.  */
1891 tree maxlen = get_maxval_strlen (src, 1);
1892 if (! integer_all_onesp (size))
1894 len = c_strlen (src, 1);
1895 if (! len || ! tree_fits_uhwi_p (len))
1897 /* If LEN is not constant, try MAXLEN too.
1898 For MAXLEN only allow optimizing into non-_ocs function
1899 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1900 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1902 if (fcode == BUILT_IN_STPCPY_CHK)
1907 /* If return value of __stpcpy_chk is ignored,
1908 optimize into __strcpy_chk. */
1909 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1913 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1914 replace_call_with_call_and_fold (gsi, repl);
1918 if (! len || TREE_SIDE_EFFECTS (len))
1921 /* If c_strlen returned something, but not a constant,
1922 transform __strcpy_chk into __memcpy_chk. */
1923 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
/* Copy LEN + 1 bytes to include the terminating NUL.  */
1927 gimple_seq stmts = NULL;
1928 len = gimple_convert (&stmts, loc, size_type_node, len);
1929 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1930 build_int_cst (size_type_node, 1));
1931 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1932 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1933 replace_call_with_call_and_fold (gsi, repl);
/* Only drop the check when SIZE is known to cover MAXLEN.  */
1940 if (! tree_int_cst_lt (maxlen, size))
1944 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1945 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1946 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1950 gimple *repl = gimple_build_call (fn, 2, dest, src);
1951 replace_call_with_call_and_fold (gsi, repl);
1955 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1956 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1957 length passed as third argument. IGNORE is true if return value can be
1958 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1961 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1962 tree dest, tree src,
1963 tree len, tree size,
1964 enum built_in_function fcode)
1966 gimple *stmt = gsi_stmt (*gsi);
1967 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1970 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1972 /* If return value of __stpncpy_chk is ignored,
1973 optimize into __strncpy_chk. */
1974 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1977 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1978 replace_call_with_call_and_fold (gsi, repl);
1983 if (! tree_fits_uhwi_p (size))
/* TYPE == 2: bound LEN by the maximum value it can take.  */
1986 tree maxlen = get_maxval_strlen (len, 2);
1987 if (! integer_all_onesp (size))
1989 if (! tree_fits_uhwi_p (len))
1991 /* If LEN is not constant, try MAXLEN too.
1992 For MAXLEN only allow optimizing into non-_ocs function
1993 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1994 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
/* SIZE < MAXLEN: the check could fire at runtime; keep it.  */
2000 if (tree_int_cst_lt (size, maxlen))
2004 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2005 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2006 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2010 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2011 replace_call_with_call_and_fold (gsi, repl);
2015 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2016 Return NULL_TREE if no simplification can be made. */
2019 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2021 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2022 location_t loc = gimple_location (stmt);
2023 tree dest = gimple_call_arg (stmt, 0);
2024 tree src = gimple_call_arg (stmt, 1);
2025 tree fn, len, lenp1;
2027 /* If the result is unused, replace stpcpy with strcpy. */
2028 if (gimple_call_lhs (stmt) == NULL_TREE)
2030 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2033 gimple_call_set_fndecl (stmt, fn);
2038 len = c_strlen (src, 1);
2040 || TREE_CODE (len) != INTEGER_CST)
2043 if (optimize_function_for_size_p (cfun)
2044 /* If length is zero it's small enough. */
2045 && !integer_zerop (len))
2048 /* If the source has a known length replace stpcpy with memcpy. */
2049 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
/* LENP1 = LEN + 1 so the terminating NUL is copied as well.  */
2053 gimple_seq stmts = NULL;
2054 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2055 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2056 tem, build_int_cst (size_type_node, 1));
2057 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2058 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
/* Transfer the virtual operands from the original call so the memcpy
   takes over its memory side effects.  */
2059 gimple_set_vuse (repl, gimple_vuse (stmt));
2060 gimple_set_vdef (repl, gimple_vdef (stmt));
2061 if (gimple_vdef (repl)
2062 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2063 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2064 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2065 /* Replace the result with dest + len. */
/* stpcpy returns a pointer to the terminating NUL, hence DEST + LEN
   (not LEN + 1).  */
2067 tem = gimple_convert (&stmts, loc, sizetype, len);
2068 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2069 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2070 POINTER_PLUS_EXPR, dest, tem);
2071 gsi_replace (gsi, ret, false);
2072 /* Finally fold the memcpy call. */
2073 gimple_stmt_iterator gsi2 = *gsi;
2079 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2080 NULL_TREE if a normal call should be emitted rather than expanding
2081 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2082 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2083 passed as second argument. */
2086 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2087 enum built_in_function fcode)
2089 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2090 tree dest, size, len, fn, fmt, flag;
2091 const char *fmt_str;
2093 /* Verify the required arguments in the original call. */
2094 if (gimple_call_num_args (stmt) < 5)
/* __snprintf_chk (DEST, LEN, FLAG, SIZE, FMT, ...).  */
2097 dest = gimple_call_arg (stmt, 0);
2098 len = gimple_call_arg (stmt, 1);
2099 flag = gimple_call_arg (stmt, 2);
2100 size = gimple_call_arg (stmt, 3);
2101 fmt = gimple_call_arg (stmt, 4);
2103 if (! tree_fits_uhwi_p (size))
2106 if (! integer_all_onesp (size))
2108 tree maxlen = get_maxval_strlen (len, 2);
2109 if (! tree_fits_uhwi_p (len))
2111 /* If LEN is not constant, try MAXLEN too.
2112 For MAXLEN only allow optimizing into non-_ocs function
2113 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2114 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
/* SIZE < MAXLEN: the runtime check could fire; keep the _chk form.  */
2120 if (tree_int_cst_lt (size, maxlen))
2124 if (!init_target_chars ())
2127 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2128 or if format doesn't contain % chars or is "%s". */
2129 if (! integer_zerop (flag))
2131 fmt_str = c_getstr (fmt);
2132 if (fmt_str == NULL)
2134 if (strchr (fmt_str, target_percent) != NULL
2135 && strcmp (fmt_str, target_percent_s))
2139 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2141 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2142 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2146 /* Replace the called function and the first 5 argument by 3 retaining
2147 trailing varargs. */
2148 gimple_call_set_fndecl (stmt, fn);
2149 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2150 gimple_call_set_arg (stmt, 0, dest);
2151 gimple_call_set_arg (stmt, 1, len);
2152 gimple_call_set_arg (stmt, 2, fmt);
/* Shift trailing varargs down over the dropped FLAG and SIZE slots.  */
2153 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2154 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2155 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2160 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2161 Return NULL_TREE if a normal call should be emitted rather than
2162 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2163 or BUILT_IN_VSPRINTF_CHK. */
2166 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2167 enum built_in_function fcode)
2169 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2170 tree dest, size, len, fn, fmt, flag;
2171 const char *fmt_str;
2172 unsigned nargs = gimple_call_num_args (stmt);
2174 /* Verify the required arguments in the original call. */
/* __sprintf_chk (DEST, FLAG, SIZE, FMT, ...).  */
2177 dest = gimple_call_arg (stmt, 0);
2178 flag = gimple_call_arg (stmt, 1);
2179 size = gimple_call_arg (stmt, 2);
2180 fmt = gimple_call_arg (stmt, 3);
2182 if (! tree_fits_uhwi_p (size))
2187 if (!init_target_chars ())
2190 /* Check whether the format is a literal string constant. */
2191 fmt_str = c_getstr (fmt);
2192 if (fmt_str != NULL)
2194 /* If the format doesn't contain % args or %%, we know the size. */
2195 if (strchr (fmt_str, target_percent) == 0)
2197 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2198 len = build_int_cstu (size_type_node, strlen (fmt_str));
2200 /* If the format is "%s" and first ... argument is a string literal,
2201 we know the size too. */
2202 else if (fcode == BUILT_IN_SPRINTF_CHK
2203 && strcmp (fmt_str, target_percent_s) == 0)
2209 arg = gimple_call_arg (stmt, 4);
2210 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2212 len = c_strlen (arg, 1);
2213 if (! len || ! tree_fits_uhwi_p (len))
/* With a known object size, only fold when the computed output length
   is provably smaller than SIZE.  */
2220 if (! integer_all_onesp (size))
2222 if (! len || ! tree_int_cst_lt (len, size))
2226 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2227 or if format doesn't contain % chars or is "%s". */
2228 if (! integer_zerop (flag))
2230 if (fmt_str == NULL)
2232 if (strchr (fmt_str, target_percent) != NULL
2233 && strcmp (fmt_str, target_percent_s))
2237 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2238 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2239 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2243 /* Replace the called function and the first 4 argument by 2 retaining
2244 trailing varargs. */
2245 gimple_call_set_fndecl (stmt, fn);
2246 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2247 gimple_call_set_arg (stmt, 0, dest);
2248 gimple_call_set_arg (stmt, 1, fmt);
/* Shift trailing varargs down over the dropped FLAG and SIZE slots.  */
2249 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2250 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2251 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2256 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2257 ORIG may be null if this is a 2-argument call. We don't attempt to
2258 simplify calls with more than 3 arguments.
2260 Return NULL_TREE if no simplification was possible, otherwise return the
2261 simplified form of the call as a tree. If IGNORED is true, it means that
2262 the caller does not use the returned value of the function. */
/* NOTE(review): the comment above is stale -- there is no IGNORED
   parameter; whether the result is used is read from the call's LHS.  */
2265 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2267 gimple *stmt = gsi_stmt (*gsi);
2268 tree dest = gimple_call_arg (stmt, 0);
2269 tree fmt = gimple_call_arg (stmt, 1);
2270 tree orig = NULL_TREE;
2271 const char *fmt_str = NULL;
2273 /* Verify the required arguments in the original call. We deal with two
2274 types of sprintf() calls: 'sprintf (str, fmt)' and
2275 'sprintf (dest, "%s", orig)'. */
2276 if (gimple_call_num_args (stmt) > 3)
2279 if (gimple_call_num_args (stmt) == 3)
2280 orig = gimple_call_arg (stmt, 2);
2282 /* Check whether the format is a literal string constant. */
2283 fmt_str = c_getstr (fmt);
2284 if (fmt_str == NULL)
2287 if (!init_target_chars ())
2290 /* If the format doesn't contain % args or %%, use strcpy. */
2291 if (strchr (fmt_str, target_percent) == NULL)
2293 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2298 /* Don't optimize sprintf (buf, "abc", ptr++). */
2302 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2303 'format' is known to contain no % formats. */
2304 gimple_seq stmts = NULL;
2305 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2306 gimple_seq_add_stmt_without_update (&stmts, repl);
/* sprintf returns the number of characters written; materialize it
   from the known format length when the result is used.  */
2307 if (gimple_call_lhs (stmt))
2309 repl = gimple_build_assign (gimple_call_lhs (stmt),
2310 build_int_cst (integer_type_node,
2312 gimple_seq_add_stmt_without_update (&stmts, repl);
2313 gsi_replace_with_seq_vops (gsi, stmts);
2314 /* gsi now points at the assignment to the lhs, get a
2315 stmt iterator to the memcpy call.
2316 ??? We can't use gsi_for_stmt as that doesn't work when the
2317 CFG isn't built yet. */
2318 gimple_stmt_iterator gsi2 = *gsi;
2324 gsi_replace_with_seq_vops (gsi, stmts);
2330 /* If the format is "%s", use strcpy if the result isn't used. */
2331 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2334 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2339 /* Don't crash on sprintf (str1, "%s"). */
2343 tree orig_len = NULL_TREE;
/* When the result is used we also need the length of ORIG to replace
   the return value.  */
2344 if (gimple_call_lhs (stmt))
2346 orig_len = get_maxval_strlen (orig, 0);
2351 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2352 gimple_seq stmts = NULL;
2353 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2354 gimple_seq_add_stmt_without_update (&stmts, repl);
2355 if (gimple_call_lhs (stmt))
2357 if (!useless_type_conversion_p (integer_type_node,
2358 TREE_TYPE (orig_len)))
2359 orig_len = fold_convert (integer_type_node, orig_len);
2360 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2361 gimple_seq_add_stmt_without_update (&stmts, repl);
2362 gsi_replace_with_seq_vops (gsi, stmts);
2363 /* gsi now points at the assignment to the lhs, get a
2364 stmt iterator to the memcpy call.
2365 ??? We can't use gsi_for_stmt as that doesn't work when the
2366 CFG isn't built yet. */
2367 gimple_stmt_iterator gsi2 = *gsi;
2373 gsi_replace_with_seq_vops (gsi, stmts);
2381 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2382 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2383 attempt to simplify calls with more than 4 arguments.
2385 Return NULL_TREE if no simplification was possible, otherwise return the
2386 simplified form of the call as a tree. If IGNORED is true, it means that
2387 the caller does not use the returned value of the function. */
/* Fold snprintf (DEST, DESTSIZE, FMT[, ORIG]) at *GSI into a strcpy call
   when the format is a literal with no '%' (or exactly "%s") and the
   copied string provably fits in DESTSIZE.
   NOTE(review): interior lines are elided in this dump; comments describe
   only the visible statements.  */
2390 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2392 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2393 tree dest = gimple_call_arg (stmt, 0);
2394 tree destsize = gimple_call_arg (stmt, 1);
2395 tree fmt = gimple_call_arg (stmt, 2);
2396 tree orig = NULL_TREE;
2397 const char *fmt_str = NULL;
/* More than 4 arguments means extra format args we do not analyze.  */
2399 if (gimple_call_num_args (stmt) > 4)
2402 if (gimple_call_num_args (stmt) == 4)
2403 orig = gimple_call_arg (stmt, 3);
/* DESTSIZE must be a compile-time constant for the fit check below.  */
2405 if (!tree_fits_uhwi_p (destsize))
2407 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2409 /* Check whether the format is a literal string constant. */
2410 fmt_str = c_getstr (fmt);
2411 if (fmt_str == NULL)
2414 if (!init_target_chars ())
2417 /* If the format doesn't contain % args or %%, use strcpy. */
2418 if (strchr (fmt_str, target_percent) == NULL)
2420 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2424 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2428 /* We could expand this as
2429 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2431 memcpy (str, fmt_with_nul_at_cstm1, cst);
2432 but in the former case that might increase code size
2433 and in the latter case grow .rodata section too much.
2435 size_t len = strlen (fmt_str);
/* Build the replacement strcpy and, if the snprintf result is used,
   a follow-up assignment of the known length to the lhs.  */
2439 gimple_seq stmts = NULL;
2440 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2441 gimple_seq_add_stmt_without_update (&stmts, repl);
2442 if (gimple_call_lhs (stmt))
2444 repl = gimple_build_assign (gimple_call_lhs (stmt),
2445 build_int_cst (integer_type_node, len));
2446 gimple_seq_add_stmt_without_update (&stmts, repl);
2447 gsi_replace_with_seq_vops (gsi, stmts);
2448 /* gsi now points at the assignment to the lhs, get a
2449 stmt iterator to the memcpy call.
2450 ??? We can't use gsi_for_stmt as that doesn't work when the
2451 CFG isn't built yet. */
2452 gimple_stmt_iterator gsi2 = *gsi;
2458 gsi_replace_with_seq_vops (gsi, stmts);
2464 /* If the format is "%s", use strcpy if the result isn't used. */
2465 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2467 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2471 /* Don't crash on snprintf (str1, cst, "%s"). */
/* The %s argument's maximum length must be a known constant so we can
   prove it fits in DESTLEN.  */
2475 tree orig_len = get_maxval_strlen (orig, 0);
2476 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2479 /* We could expand this as
2480 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2482 memcpy (str1, str2_with_nul_at_cstm1, cst);
2483 but in the former case that might increase code size
2484 and in the latter case grow .rodata section too much.
2486 if (compare_tree_int (orig_len, destlen) >= 0)
2489 /* Convert snprintf (str1, cst, "%s", str2) into
2490 strcpy (str1, str2) if strlen (str2) < cst. */
2491 gimple_seq stmts = NULL;
2492 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2493 gimple_seq_add_stmt_without_update (&stmts, repl);
2494 if (gimple_call_lhs (stmt))
/* snprintf returns an int; convert the known length if needed.  */
2496 if (!useless_type_conversion_p (integer_type_node,
2497 TREE_TYPE (orig_len)))
2498 orig_len = fold_convert (integer_type_node, orig_len);
2499 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2500 gimple_seq_add_stmt_without_update (&stmts, repl);
2501 gsi_replace_with_seq_vops (gsi, stmts);
2502 /* gsi now points at the assignment to the lhs, get a
2503 stmt iterator to the memcpy call.
2504 ??? We can't use gsi_for_stmt as that doesn't work when the
2505 CFG isn't built yet. */
2506 gimple_stmt_iterator gsi2 = *gsi;
2512 gsi_replace_with_seq_vops (gsi, stmts);
2520 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2521 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2522 more than 3 arguments, and ARG may be null in the 2-argument case.
2524 Return NULL_TREE if no simplification was possible, otherwise return the
2525 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2526 code of the function to be simplified. */
/* Fold fprintf-family calls at *GSI into fputs/fputc when the format FMT
   is a literal: "" folds away, a %-free format becomes fputs, "%s" becomes
   fputs (ARG, FP), and "%c" becomes fputc.  FCODE selects locked vs
   unlocked replacements.  Only done when the return value is unused.
   NOTE(review): interior lines are elided in this dump.  */
2529 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2530 tree fp, tree fmt, tree arg,
2531 enum built_in_function fcode)
2533 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2534 tree fn_fputc, fn_fputs;
2535 const char *fmt_str = NULL;
2537 /* If the return value is used, don't do the transformation. */
2538 if (gimple_call_lhs (stmt) != NULL_TREE)
2541 /* Check whether the format is a literal string constant. */
2542 fmt_str = c_getstr (fmt);
2543 if (fmt_str == NULL)
2546 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2548 /* If we're using an unlocked function, assume the other
2549 unlocked functions exist explicitly. */
2550 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2551 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2555 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2556 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2559 if (!init_target_chars ())
2562 /* If the format doesn't contain % args or %%, use strcpy. */
2563 if (strchr (fmt_str, target_percent) == NULL)
/* va_list variants cannot take this path (elided condition).  */
2565 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2569 /* If the format specifier was "", fprintf does nothing. */
2570 if (fmt_str[0] == '\0')
2572 replace_call_with_value (gsi, NULL_TREE);
2576 /* When "string" doesn't contain %, replace all cases of
2577 fprintf (fp, string) with fputs (string, fp). The fputs
2578 builtin will take care of special cases like length == 1. */
2581 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2582 replace_call_with_call_and_fold (gsi, repl);
2587 /* The other optimizations can be done only on the non-va_list variants. */
2588 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2591 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2592 else if (strcmp (fmt_str, target_percent_s) == 0)
2594 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2598 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2599 replace_call_with_call_and_fold (gsi, repl);
2604 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2605 else if (strcmp (fmt_str, target_percent_c) == 0)
2608 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2612 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2613 replace_call_with_call_and_fold (gsi, repl);
2621 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2622 FMT and ARG are the arguments to the call; we don't fold cases with
2623 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2625 Return NULL_TREE if no simplification was possible, otherwise return the
2626 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2627 code of the function to be simplified. */
/* Fold printf-family calls at *GSI with literal format FMT into
   putchar/puts: "" folds away, a one-character format becomes putchar,
   "string\n" becomes puts ("string"), "%s\n" becomes puts (ARG), and
   "%c" becomes putchar (ARG).  FCODE selects locked vs unlocked
   replacements.  Only done when the return value is unused.
   NOTE(review): interior lines are elided in this dump.  */
2630 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2631 tree arg, enum built_in_function fcode)
2633 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2634 tree fn_putchar, fn_puts, newarg;
2635 const char *fmt_str = NULL;
2637 /* If the return value is used, don't do the transformation. */
2638 if (gimple_call_lhs (stmt) != NULL_TREE)
2641 /* Check whether the format is a literal string constant. */
2642 fmt_str = c_getstr (fmt);
2643 if (fmt_str == NULL)
2646 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2648 /* If we're using an unlocked function, assume the other
2649 unlocked functions exist explicitly. */
2650 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2651 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2655 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2656 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2659 if (!init_target_chars ())
/* Handle both the literal "%s" format and %-free formats here; in the
   "%s" case the printed string comes from ARG instead of FMT.  */
2662 if (strcmp (fmt_str, target_percent_s) == 0
2663 || strchr (fmt_str, target_percent) == NULL)
2667 if (strcmp (fmt_str, target_percent_s) == 0)
2669 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2672 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2675 str = c_getstr (arg);
2681 /* The format specifier doesn't contain any '%' characters. */
2682 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2688 /* If the string was "", printf does nothing. */
2691 replace_call_with_value (gsi, NULL_TREE);
2695 /* If the string has length of 1, call putchar. */
2698 /* Given printf("c"), (where c is any one character,)
2699 convert "c"[0] to an int and pass that to the replacement
2701 newarg = build_int_cst (integer_type_node, str[0]);
2704 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2705 replace_call_with_call_and_fold (gsi, repl);
2711 /* If the string was "string\n", call puts("string"). */
2712 size_t len = strlen (str);
2713 if ((unsigned char)str[len - 1] == target_newline
2714 && (size_t) (int) len == len
2718 tree offset_node, string_cst;
2720 /* Create a NUL-terminated string that's one char shorter
2721 than the original, stripping off the trailing '\n'. */
2722 newarg = build_string_literal (len, str);
2723 string_cst = string_constant (newarg, &offset_node);
2724 gcc_checking_assert (string_cst
2725 && (TREE_STRING_LENGTH (string_cst)
2727 && integer_zerop (offset_node)
2729 TREE_STRING_POINTER (string_cst)[len - 1]
2731 /* build_string_literal creates a new STRING_CST,
2732 modify it in place to avoid double copying. */
2733 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2734 newstr[len - 1] = '\0';
2737 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2738 replace_call_with_call_and_fold (gsi, repl);
2743 /* We'd like to arrange to call fputs(string,stdout) here,
2744 but we need stdout and don't have a way to get it yet. */
2749 /* The other optimizations can be done only on the non-va_list variants. */
2750 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2753 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2754 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2756 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2760 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2761 replace_call_with_call_and_fold (gsi, repl);
2766 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2767 else if (strcmp (fmt_str, target_percent_c) == 0)
2769 if (!arg || ! useless_type_conversion_p (integer_type_node,
2774 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2775 replace_call_with_call_and_fold (gsi, repl);
2785 /* Fold a call to __builtin_strlen with known length LEN. */
/* Fold strlen at *GSI when the argument's maximal string length is known;
   replace the call with that constant value.
   NOTE(review): the failure path (unknown length) is elided in this dump.  */
2788 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2790 gimple *stmt = gsi_stmt (*gsi);
2791 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
/* Gimplify LEN in case it is not a valid GIMPLE value yet.  */
2794 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2795 replace_call_with_value (gsi, len);
2799 /* Fold a call to __builtin_acc_on_device. */
/* Fold __builtin_acc_on_device (ARG0) at *GSI into
   (ARG0 == val_host) | (ARG0 == val_dev), where the two constants depend
   on whether we are the host or the accelerator compiler.  Deferred until
   EXPANSION so the identity of the compiler is known.  */
2802 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2804 /* Defer folding until we know which compiler we're in. */
2805 if (symtab->state != EXPANSION)
2808 unsigned val_host = GOMP_DEVICE_HOST;
2809 unsigned val_dev = GOMP_DEVICE_NONE;
/* On the accelerator, flip the constants so the comparison answers
   "not host" / "this device".  */
2811 #ifdef ACCEL_COMPILER
2812 val_host = GOMP_DEVICE_NOT_HOST;
2813 val_dev = ACCEL_COMPILER_acc_device;
2816 location_t loc = gimple_location (gsi_stmt (*gsi));
/* host_eq = (arg0 == val_host).  */
2818 tree host_eq = make_ssa_name (boolean_type_node);
2819 gimple *host_ass = gimple_build_assign
2820 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2821 gimple_set_location (host_ass, loc);
2822 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
/* dev_eq = (arg0 == val_dev).  */
2824 tree dev_eq = make_ssa_name (boolean_type_node);
2825 gimple *dev_ass = gimple_build_assign
2826 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2827 gimple_set_location (dev_ass, loc);
2828 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
/* result = host_eq | dev_eq, then replace the builtin call with it.  */
2830 tree result = make_ssa_name (boolean_type_node);
2831 gimple *result_ass = gimple_build_assign
2832 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2833 gimple_set_location (result_ass, loc);
2834 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2836 replace_call_with_value (gsi, result);
2841 /* Fold the non-target builtin at *GSI and return whether any simplification
/* Central dispatcher: fold the normal (non-target) builtin call at *GSI.
   Dispatches on DECL_FUNCTION_CODE to the per-builtin folders above; if
   none applies, falls back to the generic tree-level folder
   fold_call_stmt.  Returns whether any simplification was made.
   NOTE(review): interior lines (switch header, some argument lists,
   default case) are elided in this dump.  */
2845 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2847 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2848 tree callee = gimple_call_fndecl (stmt);
2850 /* Give up for always_inline inline builtins until they are
2852 if (avoid_folding_inline_builtin (callee))
2855 unsigned n = gimple_call_num_args (stmt);
2856 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
/* bzero (p, n) is memset (p, 0, n).  */
2859 case BUILT_IN_BZERO:
2860 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2861 gimple_call_arg (stmt, 1));
2862 case BUILT_IN_MEMSET:
2863 return gimple_fold_builtin_memset (gsi,
2864 gimple_call_arg (stmt, 1),
2865 gimple_call_arg (stmt, 2));
/* bcopy has src/dest swapped relative to memmove; last argument to
   gimple_fold_builtin_memory_op distinguishes memcpy/mempcpy/memmove.  */
2866 case BUILT_IN_BCOPY:
2867 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2868 gimple_call_arg (stmt, 0), 3);
2869 case BUILT_IN_MEMCPY:
2870 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2871 gimple_call_arg (stmt, 1), 0);
2872 case BUILT_IN_MEMPCPY:
2873 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2874 gimple_call_arg (stmt, 1), 1);
2875 case BUILT_IN_MEMMOVE:
2876 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2877 gimple_call_arg (stmt, 1), 3);
2878 case BUILT_IN_SPRINTF_CHK:
2879 case BUILT_IN_VSPRINTF_CHK:
2880 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2881 case BUILT_IN_STRCAT_CHK:
2882 return gimple_fold_builtin_strcat_chk (gsi);
2883 case BUILT_IN_STRNCAT_CHK:
2884 return gimple_fold_builtin_strncat_chk (gsi);
2885 case BUILT_IN_STRLEN:
2886 return gimple_fold_builtin_strlen (gsi);
2887 case BUILT_IN_STRCPY:
2888 return gimple_fold_builtin_strcpy (gsi,
2889 gimple_call_arg (stmt, 0),
2890 gimple_call_arg (stmt, 1));
2891 case BUILT_IN_STRNCPY:
2892 return gimple_fold_builtin_strncpy (gsi,
2893 gimple_call_arg (stmt, 0),
2894 gimple_call_arg (stmt, 1),
2895 gimple_call_arg (stmt, 2));
2896 case BUILT_IN_STRCAT:
2897 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2898 gimple_call_arg (stmt, 1));
2899 case BUILT_IN_STRNCAT:
2900 return gimple_fold_builtin_strncat (gsi);
2901 case BUILT_IN_FPUTS:
2902 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2903 gimple_call_arg (stmt, 1), false);
2904 case BUILT_IN_FPUTS_UNLOCKED:
2905 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2906 gimple_call_arg (stmt, 1), true);
2907 case BUILT_IN_MEMCPY_CHK:
2908 case BUILT_IN_MEMPCPY_CHK:
2909 case BUILT_IN_MEMMOVE_CHK:
2910 case BUILT_IN_MEMSET_CHK:
2911 return gimple_fold_builtin_memory_chk (gsi,
2912 gimple_call_arg (stmt, 0),
2913 gimple_call_arg (stmt, 1),
2914 gimple_call_arg (stmt, 2),
2915 gimple_call_arg (stmt, 3),
2917 case BUILT_IN_STPCPY:
2918 return gimple_fold_builtin_stpcpy (gsi);
2919 case BUILT_IN_STRCPY_CHK:
2920 case BUILT_IN_STPCPY_CHK:
2921 return gimple_fold_builtin_stxcpy_chk (gsi,
2922 gimple_call_arg (stmt, 0),
2923 gimple_call_arg (stmt, 1),
2924 gimple_call_arg (stmt, 2),
2926 case BUILT_IN_STRNCPY_CHK:
2927 case BUILT_IN_STPNCPY_CHK:
2928 return gimple_fold_builtin_stxncpy_chk (gsi,
2929 gimple_call_arg (stmt, 0),
2930 gimple_call_arg (stmt, 1),
2931 gimple_call_arg (stmt, 2),
2932 gimple_call_arg (stmt, 3),
2934 case BUILT_IN_SNPRINTF_CHK:
2935 case BUILT_IN_VSNPRINTF_CHK:
2936 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2937 case BUILT_IN_SNPRINTF:
2938 return gimple_fold_builtin_snprintf (gsi);
2939 case BUILT_IN_SPRINTF:
2940 return gimple_fold_builtin_sprintf (gsi);
/* For the printf/fprintf families, only fold arities where the optional
   value argument is well-defined; N is checked against the arity.  */
2941 case BUILT_IN_FPRINTF:
2942 case BUILT_IN_FPRINTF_UNLOCKED:
2943 case BUILT_IN_VFPRINTF:
2944 if (n == 2 || n == 3)
2945 return gimple_fold_builtin_fprintf (gsi,
2946 gimple_call_arg (stmt, 0),
2947 gimple_call_arg (stmt, 1),
2949 ? gimple_call_arg (stmt, 2)
2953 case BUILT_IN_FPRINTF_CHK:
2954 case BUILT_IN_VFPRINTF_CHK:
2955 if (n == 3 || n == 4)
2956 return gimple_fold_builtin_fprintf (gsi,
2957 gimple_call_arg (stmt, 0),
2958 gimple_call_arg (stmt, 2),
2960 ? gimple_call_arg (stmt, 3)
2964 case BUILT_IN_PRINTF:
2965 case BUILT_IN_PRINTF_UNLOCKED:
2966 case BUILT_IN_VPRINTF:
2967 if (n == 1 || n == 2)
2968 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2970 ? gimple_call_arg (stmt, 1)
2971 : NULL_TREE, fcode);
/* The _chk variants carry an extra flag argument first, so the format
   is argument 1 and the value argument 2.  */
2973 case BUILT_IN_PRINTF_CHK:
2974 case BUILT_IN_VPRINTF_CHK:
2975 if (n == 2 || n == 3)
2976 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2978 ? gimple_call_arg (stmt, 2)
2979 : NULL_TREE, fcode);
2981 case BUILT_IN_ACC_ON_DEVICE:
2982 return gimple_fold_builtin_acc_on_device (gsi,
2983 gimple_call_arg (stmt, 0));
2987 /* Try the generic builtin folder. */
2988 bool ignore = (gimple_call_lhs (stmt) == NULL);
2989 tree result = fold_call_stmt (stmt, ignore);
2993 STRIP_NOPS (result);
2995 result = fold_convert (gimple_call_return_type (stmt), result);
/* Prefer the cheap in-place update; gimplify only if RESULT is not a
   valid GIMPLE replacement as-is.  */
2996 if (!update_call_from_tree (gsi, result))
2997 gimplify_and_update_call_from_tree (gsi, result);
3004 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3005 function calls to constants, where possible. */
/* Fold IFN_GOACC_DIM_SIZE / IFN_GOACC_DIM_POS call CALL to a constant
   where possible: size 1 pins both size and position, and a non-dynamic
   size answers DIM_SIZE directly.  Returns the constant or NULL_TREE.  */
3008 fold_internal_goacc_dim (const gimple *call)
3010 int axis = get_oacc_ifn_dim_arg (call);
3011 int size = get_oacc_fn_dim_size (current_function_decl, axis);
3012 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3013 tree result = NULL_TREE;
3015 /* If the size is 1, or we only want the size and it is not dynamic,
3016 we know the answer. */
3017 if (size == 1 || (!is_pos && size))
3019 tree type = TREE_TYPE (gimple_call_lhs (call));
/* DIM_POS in a size-1 dimension is 0; DIM_SIZE is the size itself.  */
3020 result = build_int_cst (type, size - is_pos);
3026 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3027 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3028 &var where var is only addressable because of such calls. */
/* Return true if STMT is an __atomic_compare_exchange_N call that is safe
   to convert to IFN_ATOMIC_COMPARE_EXCHANGE: the "expected" argument must
   be the address of a plain local scalar, the weak flag a literal 0/1,
   and the target must implement the CAS in hardware.  */
3031 optimize_atomic_compare_exchange_p (gimple *stmt)
3033 if (gimple_call_num_args (stmt) != 6
3034 || !flag_inline_atomics
/* Sanitizers intercept the library call; keep it visible to them.  */
3036 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
3037 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3038 || !gimple_vdef (stmt)
3039 || !gimple_vuse (stmt))
3042 tree fndecl = gimple_call_fndecl (stmt);
3043 switch (DECL_FUNCTION_CODE (fndecl))
3045 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3046 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3047 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3048 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3049 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
/* The second argument must literally be &var of a local variable.  */
3055 tree expected = gimple_call_arg (stmt, 1);
3056 if (TREE_CODE (expected) != ADDR_EXPR
3057 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3060 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3061 if (!is_gimple_reg_type (etype)
3062 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3063 || TREE_THIS_VOLATILE (etype)
3064 || VECTOR_TYPE_P (etype)
3065 || TREE_CODE (etype) == COMPLEX_TYPE
3066 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3067 might not preserve all the bits. See PR71716. */
3068 || SCALAR_FLOAT_TYPE_P (etype)
/* Padding bits would escape the VIEW_CONVERT round-trip.  */
3069 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3072 tree weak = gimple_call_arg (stmt, 3);
3073 if (!integer_zerop (weak) && !integer_onep (weak))
/* ITYPE is the uintN_t the builtin's prototype uses for the value.  */
3076 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3077 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3078 machine_mode mode = TYPE_MODE (itype);
/* Require a hardware CAS (atomic or legacy sync optab).  */
3080 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3082 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3085 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3092 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3094 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3095 i = IMAGPART_EXPR <t>;
3097 e = REALPART_EXPR <t>; */
/* Rewrite the __atomic_compare_exchange_N call at *GSI (already validated
   by optimize_atomic_compare_exchange_p) into IFN_ATOMIC_COMPARE_EXCHANGE
   returning a _Complex uintN_t: IMAGPART is the boolean result, REALPART
   the value stored back into *expected.
   NOTE(review): interior lines are elided in this dump.  */
3100 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3102 gimple *stmt = gsi_stmt (*gsi);
3103 tree fndecl = gimple_call_fndecl (stmt);
3104 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3105 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3106 tree ctype = build_complex_type (itype);
3107 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
/* Load the current value of the "expected" variable into an SSA name.  */
3108 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3110 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3111 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
/* Bit-reinterpret to the builtin's uintN_t if the types differ.  */
3112 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3114 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3115 build1 (VIEW_CONVERT_EXPR, itype,
3116 gimple_assign_lhs (g)));
3117 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* FLAG encodes weak (bit 8) and the access size in bytes.  */
3119 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3120 + int_size_in_bytes (itype);
3121 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3122 gimple_call_arg (stmt, 0),
3123 gimple_assign_lhs (g),
3124 gimple_call_arg (stmt, 2),
3125 build_int_cst (integer_type_node, flag),
3126 gimple_call_arg (stmt, 4),
3127 gimple_call_arg (stmt, 5));
3128 tree lhs = make_ssa_name (ctype);
3129 gimple_call_set_lhs (g, lhs);
/* Transfer the original call's virtual operands to the new call.  */
3130 gimple_set_vdef (g, gimple_vdef (stmt));
3131 gimple_set_vuse (g, gimple_vuse (stmt));
3132 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3133 if (gimple_call_lhs (stmt))
3135 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* The boolean success result lives in the imaginary part.  */
3136 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3137 build1 (IMAGPART_EXPR, itype, lhs));
3138 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3139 g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
3140 gimple_assign_lhs (g));
3142 gsi_replace (gsi, g, true);
/* The witnessed value (stored back to *expected) is the real part.  */
3143 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3144 build1 (REALPART_EXPR, itype, lhs));
3145 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3146 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3148 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3150 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3151 gimple_assign_lhs (g)));
3152 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3154 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3155 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3159 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3160 doesn't fit into TYPE. The test for overflow should be regardless of
3161 -fwrapv, and even for unsigned types. */
/* Return true if ARG0 CODE ARG1, computed in effectively infinite signed
   precision (double-width wide ints), does not fit in TYPE.  Independent
   of -fwrapv and valid for unsigned types too.  */
3164 arith_overflowed_p (enum tree_code code, const_tree type,
3165 const_tree arg0, const_tree arg1)
/* Double-width integers so the exact result can never itself overflow.  */
3167 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3168 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3170 widest2_int warg0 = widest2_int_cst (arg0);
3171 widest2_int warg1 = widest2_int_cst (arg1);
3175 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3176 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3177 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3178 default: gcc_unreachable ();
3180 signop sign = TYPE_SIGN (type);
/* A negative exact result can never be represented in an unsigned TYPE.  */
3181 if (sign == UNSIGNED && wi::neg_p (wres))
3183 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3186 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3187 The statement may be replaced by another statement, e.g., if the call
3188 simplifies to a constant value. Return true if any changes were made.
3189 It is assumed that the operands have been previously folded. */
/* Attempt to fold the call statement at *GSI: fold *& in arguments,
   devirtualize OBJ_TYPE_REF calls (possibly to __builtin_unreachable),
   drop unneeded static chains, then fold builtins (normal, target-MD and
   internal functions).  INPLACE forbids transformations that insert new
   statements.  Returns true if anything changed.
   NOTE(review): interior lines are elided in this dump; comments describe
   only the visible statements.  */
3192 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3194 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3196 bool changed = false;
3199 /* Fold *& in call arguments. */
3200 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3201 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3203 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3206 gimple_call_set_arg (stmt, i, tmp);
3211 /* Check for virtual calls that became direct calls. */
3212 callee = gimple_call_fn (stmt);
3213 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3215 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
/* Diagnose devirtualizations that contradict the type-inheritance
   analysis; dump only, the transformation still proceeds.  */
3217 if (dump_file && virtual_method_call_p (callee)
3218 && !possible_polymorphic_call_target_p
3219 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3220 (OBJ_TYPE_REF_EXPR (callee)))))
3223 "Type inheritance inconsistent devirtualization of ");
3224 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3225 fprintf (dump_file, " to ");
3226 print_generic_expr (dump_file, callee, TDF_SLIM);
3227 fprintf (dump_file, "\n");
/* Strip the OBJ_TYPE_REF wrapper; the target is already direct.  */
3230 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3233 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3236 vec <cgraph_node *>targets
3237 = possible_polymorphic_call_targets (callee, stmt, &final);
/* FINAL with <=1 target: either one known callee, or provably
   unreachable (zero targets).  */
3238 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3240 tree lhs = gimple_call_lhs (stmt);
3241 if (dump_enabled_p ())
3243 location_t loc = gimple_location_safe (stmt);
3244 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3245 "folding virtual function call to %s\n",
3246 targets.length () == 1
3247 ? targets[0]->name ()
3248 : "__builtin_unreachable");
3250 if (targets.length () == 1)
3252 tree fndecl = targets[0]->decl;
3253 gimple_call_set_fndecl (stmt, fndecl);
3255 /* If changing the call to __cxa_pure_virtual
3256 or similar noreturn function, adjust gimple_call_fntype
3258 if (gimple_call_noreturn_p (stmt)
3259 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3260 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3261 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3263 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3264 /* If the call becomes noreturn, remove the lhs. */
3266 && gimple_call_noreturn_p (stmt)
3267 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3268 || should_remove_lhs_p (lhs)))
3270 if (TREE_CODE (lhs) == SSA_NAME)
/* Keep SSA form valid: give the orphaned lhs a default
   definition via a dummy assignment.  */
3272 tree var = create_tmp_var (TREE_TYPE (lhs));
3273 tree def = get_or_create_ssa_default_def (cfun, var);
3274 gimple *new_stmt = gimple_build_assign (lhs, def);
3275 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3277 gimple_call_set_lhs (stmt, NULL_TREE);
3279 maybe_remove_unused_call_args (cfun, stmt);
/* Zero possible targets: the call site is unreachable.  */
3283 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3284 gimple *new_stmt = gimple_build_call (fndecl, 0);
3285 gimple_set_location (new_stmt, gimple_location (stmt));
3286 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3288 tree var = create_tmp_var (TREE_TYPE (lhs));
3289 tree def = get_or_create_ssa_default_def (cfun, var);
3291 /* To satisfy condition for
3292 cgraph_update_edges_for_call_stmt_node,
3293 we need to preserve GIMPLE_CALL statement
3294 at position of GSI iterator. */
3295 update_call_from_tree (gsi, def);
3296 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3300 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3301 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3302 gsi_replace (gsi, new_stmt, false);
3310 /* Check for indirect calls that became direct calls, and then
3311 no longer require a static chain. */
3312 if (gimple_call_chain (stmt))
3314 tree fn = gimple_call_fndecl (stmt);
3315 if (fn && !DECL_STATIC_CHAIN (fn))
3317 gimple_call_set_chain (stmt, NULL);
/* Otherwise still try to fold *& in the chain operand itself.  */
3322 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3325 gimple_call_set_chain (stmt, tmp);
3334 /* Check for builtins that CCP can handle using information not
3335 available in the generic fold routines. */
3336 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3338 if (gimple_fold_builtin (gsi))
3341 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
/* Machine-dependent builtins are delegated to the target hook.  */
3343 changed |= targetm.gimple_fold_builtin (gsi);
3345 else if (gimple_call_internal_p (stmt))
3347 enum tree_code subcode = ERROR_MARK;
3348 tree result = NULL_TREE;
/* CPLX_RESULT marks the *_OVERFLOW ifns whose value is a complex
   {result, overflow-flag} pair (assumed; setters are elided here).  */
3349 bool cplx_result = false;
3350 tree overflow = NULL_TREE;
3351 switch (gimple_call_internal_fn (stmt))
3353 case IFN_BUILTIN_EXPECT:
3354 result = fold_builtin_expect (gimple_location (stmt),
3355 gimple_call_arg (stmt, 0),
3356 gimple_call_arg (stmt, 1),
3357 gimple_call_arg (stmt, 2));
3359 case IFN_UBSAN_OBJECT_SIZE:
/* Drop the check when the size is unknown (all-ones) or the
   offset constant provably fits in the constant object size.  */
3360 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3361 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3362 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3363 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3364 gimple_call_arg (stmt, 2))))
3366 gsi_replace (gsi, gimple_build_nop (), false);
3367 unlink_stmt_vdef (stmt);
3368 release_defs (stmt);
3372 case IFN_GOACC_DIM_SIZE:
3373 case IFN_GOACC_DIM_POS:
3374 result = fold_internal_goacc_dim (stmt);
3376 case IFN_UBSAN_CHECK_ADD:
3377 subcode = PLUS_EXPR;
3379 case IFN_UBSAN_CHECK_SUB:
3380 subcode = MINUS_EXPR;
3382 case IFN_UBSAN_CHECK_MUL:
3383 subcode = MULT_EXPR;
3385 case IFN_ADD_OVERFLOW:
3386 subcode = PLUS_EXPR;
3389 case IFN_SUB_OVERFLOW:
3390 subcode = MINUS_EXPR;
3393 case IFN_MUL_OVERFLOW:
3394 subcode = MULT_EXPR;
/* Shared arithmetic-identity folding for the UBSAN_CHECK_* and
   *_OVERFLOW internal functions selected above.  */
3400 if (subcode != ERROR_MARK)
3402 tree arg0 = gimple_call_arg (stmt, 0);
3403 tree arg1 = gimple_call_arg (stmt, 1);
3404 tree type = TREE_TYPE (arg0);
3407 tree lhs = gimple_call_lhs (stmt);
3408 if (lhs == NULL_TREE)
/* For complex-returning ifns the scalar type is the element
   type of the lhs's complex type.  */
3411 type = TREE_TYPE (TREE_TYPE (lhs));
3413 if (type == NULL_TREE)
3415 /* x = y + 0; x = y - 0; x = y * 0; */
3416 else if (integer_zerop (arg1))
3417 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3418 /* x = 0 + y; x = 0 * y; */
3419 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3420 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3422 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3423 result = integer_zero_node;
3424 /* x = y * 1; x = 1 * y; */
3425 else if (subcode == MULT_EXPR && integer_onep (arg1))
3427 else if (subcode == MULT_EXPR && integer_onep (arg0))
3429 else if (TREE_CODE (arg0) == INTEGER_CST
3430 && TREE_CODE (arg1) == INTEGER_CST)
3433 result = int_const_binop (subcode, fold_convert (type, arg0),
3434 fold_convert (type, arg1));
3436 result = int_const_binop (subcode, arg0, arg1);
3437 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3440 overflow = build_one_cst (type);
/* Normalize RESULT to TYPE and compute the overflow flag for
   the complex-returning variants.  */
3447 if (result == integer_zero_node)
3448 result = build_zero_cst (type);
3449 else if (cplx_result && TREE_TYPE (result) != type)
3451 if (TREE_CODE (result) == INTEGER_CST)
3453 if (arith_overflowed_p (PLUS_EXPR, type, result,
3455 overflow = build_one_cst (type);
/* Conversions that can change the value mean we cannot reuse
   RESULT directly (sign change or precision loss).  */
3457 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3458 && TYPE_UNSIGNED (type))
3459 || (TYPE_PRECISION (type)
3460 < (TYPE_PRECISION (TREE_TYPE (result))
3461 + (TYPE_UNSIGNED (TREE_TYPE (result))
3462 && !TYPE_UNSIGNED (type)))))
3465 result = fold_convert (type, result);
3472 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3473 result = drop_tree_overflow (result);
3476 if (overflow == NULL_TREE)
3477 overflow = build_zero_cst (TREE_TYPE (result));
3478 tree ctype = build_complex_type (TREE_TYPE (result));
3479 if (TREE_CODE (result) == INTEGER_CST
3480 && TREE_CODE (overflow) == INTEGER_CST)
3481 result = build_complex (ctype, result, overflow);
3483 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3484 ctype, result, overflow);
3486 if (!update_call_from_tree (gsi, result))
3487 gimplify_and_update_call_from_tree (gsi, result);
3496 /* Return true whether NAME has a use on STMT. */
/* Return true if SSA name NAME has a use on statement STMT, by scanning
   NAME's immediate uses.  */
3499 has_use_on_stmt (tree name, gimple *stmt)
3501 imm_use_iterator iter;
3502 use_operand_p use_p;
3503 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3504 if (USE_STMT (use_p) == stmt)
3509 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3512 Replaces *GSI with the simplification result in RCODE and OPS
3513 and the associated statements in *SEQ. Does the replacement
3514 according to INPLACE and returns true if the operation succeeded. */
3517 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3518 code_helper rcode, tree *ops,
3519 gimple_seq *seq, bool inplace)
3521 gimple *stmt = gsi_stmt (*gsi);
3523 /* Play safe and do not allow abnormals to be mentioned in
3524 newly created statements. See also maybe_push_res_to_seq.
3525 As an exception allow such uses if there was a use of the
3526 same SSA name on the old stmt. */
/* Reject the simplification if any operand (or an operand of a
   comparison tree in ops[0]) is an SSA name occurring in an abnormal
   PHI that the old statement did not already use.  */
3527 if ((TREE_CODE (ops[0]) == SSA_NAME
3528 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3529 && !has_use_on_stmt (ops[0], stmt))
3531 && TREE_CODE (ops[1]) == SSA_NAME
3532 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3533 && !has_use_on_stmt (ops[1], stmt))
3535 && TREE_CODE (ops[2]) == SSA_NAME
3536 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3537 && !has_use_on_stmt (ops[2], stmt))
3538 || (COMPARISON_CLASS_P (ops[0])
3539 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3540 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3541 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3542 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3543 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3544 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3547 /* Don't insert new statements when INPLACE is true, even if we could
3548 reuse STMT for the final statement. */
3549 if (inplace && !gimple_seq_empty_p (*seq))
/* A GIMPLE_COND can directly take a non-throwing comparison, an SSA
   name or a constant; other results are materialized via
   maybe_push_res_to_seq below.  */
3552 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3554 gcc_assert (rcode.is_tree_code ());
3555 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3556 /* GIMPLE_CONDs condition may not throw. */
3557 && (!flag_exceptions
3558 || !cfun->can_throw_non_call_exceptions
3559 || !operation_could_trap_p (rcode,
3560 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3562 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3563 else if (rcode == SSA_NAME)
3564 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3565 build_zero_cst (TREE_TYPE (ops[0])));
3566 else if (rcode == INTEGER_CST)
3568 if (integer_zerop (ops[0]))
3569 gimple_cond_make_false (cond_stmt);
3571 gimple_cond_make_true (cond_stmt);
/* Otherwise force the condition value into a new SSA name first.  */
3575 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3579 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3580 build_zero_cst (TREE_TYPE (res)));
3584 if (dump_file && (dump_flags & TDF_DETAILS))
3586 fprintf (dump_file, "gimple_simplified to ");
3587 if (!gimple_seq_empty_p (*seq))
3588 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3589 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3592 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
/* For assignments re-use STMT, rewriting its RHS from the new
   tree code and operands.  */
3595 else if (is_gimple_assign (stmt)
3596 && rcode.is_tree_code ())
3599 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3601 maybe_build_generic_op (rcode,
3602 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
3603 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3604 if (dump_file && (dump_flags & TDF_DETAILS))
3606 fprintf (dump_file, "gimple_simplified to ");
3607 if (!gimple_seq_empty_p (*seq))
3608 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3609 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3612 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
/* A call that simplified to the same combined function only needs
   its arguments updated in place.  */
3616 else if (rcode.is_fn_code ()
3617 && gimple_call_combined_fn (stmt) == rcode)
3620 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3622 gcc_assert (ops[i] != NULL_TREE);
3623 gimple_call_set_arg (stmt, i, ops[i]);
3626 gcc_assert (ops[i] == NULL_TREE);
3627 if (dump_file && (dump_flags & TDF_DETAILS))
3629 fprintf (dump_file, "gimple_simplified to ");
3630 if (!gimple_seq_empty_p (*seq))
3631 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3632 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3634 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
/* Fallback: materialize the result into the LHS and replace the
   whole statement with the built sequence.  */
3639 if (gimple_has_lhs (stmt))
3641 tree lhs = gimple_get_lhs (stmt);
3642 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3645 if (dump_file && (dump_flags & TDF_DETAILS))
3647 fprintf (dump_file, "gimple_simplified to ");
3648 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3650 gsi_replace_with_seq_vops (gsi, *seq);
3660 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* Returns nonzero when *T was rewritten -- callers in fold_stmt_1 use
   the result as a "changed" flag.  */
3663 maybe_canonicalize_mem_ref_addr (tree *t)
3667 if (TREE_CODE (*t) == ADDR_EXPR)
3668 t = &TREE_OPERAND (*t, 0);
3670 /* The C and C++ frontends use an ARRAY_REF for indexing with their
3671 generic vector extension. The actual vector referenced is
3672 view-converted to an array type for this purpose. If the index
3673 is constant the canonical representation in the middle-end is a
3674 BIT_FIELD_REF so re-write the former to the latter here. */
3675 if (TREE_CODE (*t) == ARRAY_REF
3676 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
3677 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
3678 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
3680 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
3681 if (VECTOR_TYPE_P (vtype))
3683 tree low = array_ref_low_bound (*t);
3684 if (TREE_CODE (low) == INTEGER_CST)
3686 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* IDX becomes the bit offset of the accessed element; EXT is the
   bit offset one past it, checked against the vector size below.  */
3688 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
3689 wi::to_widest (low));
3690 idx = wi::mul (idx, wi::to_widest
3691 (TYPE_SIZE (TREE_TYPE (*t))));
3693 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
3694 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
3696 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
3698 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
3699 TYPE_SIZE (TREE_TYPE (*t)),
3700 wide_int_to_tree (sizetype, idx));
/* Strip handled components to get at the innermost base address.  */
3708 while (handled_component_p (*t))
3709 t = &TREE_OPERAND (*t, 0);
3711 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3712 of invariant addresses into a SSA name MEM_REF address. */
3713 if (TREE_CODE (*t) == MEM_REF
3714 || TREE_CODE (*t) == TARGET_MEM_REF)
3716 tree addr = TREE_OPERAND (*t, 0);
3717 if (TREE_CODE (addr) == ADDR_EXPR
3718 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3719 || handled_component_p (TREE_OPERAND (addr, 0))))
3722 HOST_WIDE_INT coffset;
3723 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
/* Fold the component's constant offset into the MEM_REF offset.  */
3728 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3729 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3730 TREE_OPERAND (*t, 1),
3731 size_int (coffset));
3734 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3735 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3738 /* Canonicalize back MEM_REFs to plain reference trees if the object
3739 accessed is a decl that has the same access semantics as the MEM_REF. */
3740 if (TREE_CODE (*t) == MEM_REF
3741 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3742 && integer_zerop (TREE_OPERAND (*t, 1))
3743 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3745 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3746 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3747 if (/* Same volatile qualification. */
3748 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3749 /* Same TBAA behavior with -fstrict-aliasing. */
3750 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3751 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3752 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3753 /* Same alignment. */
3754 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3755 /* We have to look out here to not drop a required conversion
3756 from the rhs to the lhs if *t appears on the lhs or vice-versa
3757 if it appears on the rhs. Thus require strict type
3759 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3761 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3766 /* Canonicalize TARGET_MEM_REF in particular with respect to
3767 the indexes becoming constant. */
3768 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3770 tree tem = maybe_fold_tmr (*t);
3781 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3782 distinguishes both cases. */
3785 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3787 bool changed = false;
3788 gimple *stmt = gsi_stmt (*gsi);
3789 bool nowarning = gimple_no_warning_p (stmt);
3791 fold_defer_overflow_warnings ();
3793 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3795 ??? This shouldn't be done in generic folding but in the
3796 propagation helpers which also know whether an address was
3798 Also canonicalize operand order. */
3799 switch (gimple_code (stmt))
3802 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3804 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3805 if ((REFERENCE_CLASS_P (*rhs)
3806 || TREE_CODE (*rhs) == ADDR_EXPR)
3807 && maybe_canonicalize_mem_ref_addr (rhs))
3809 tree *lhs = gimple_assign_lhs_ptr (stmt);
3810 if (REFERENCE_CLASS_P (*lhs)
3811 && maybe_canonicalize_mem_ref_addr (lhs))
3816 /* Canonicalize operand order. */
3817 enum tree_code code = gimple_assign_rhs_code (stmt);
3818 if (TREE_CODE_CLASS (code) == tcc_comparison
3819 || commutative_tree_code (code)
3820 || commutative_ternary_tree_code (code))
3822 tree rhs1 = gimple_assign_rhs1 (stmt);
3823 tree rhs2 = gimple_assign_rhs2 (stmt);
3824 if (tree_swap_operands_p (rhs1, rhs2, false))
3826 gimple_assign_set_rhs1 (stmt, rhs2);
3827 gimple_assign_set_rhs2 (stmt, rhs1);
/* Swapping operands of a comparison requires mirroring its code.  */
3828 if (TREE_CODE_CLASS (code) == tcc_comparison)
3829 gimple_assign_set_rhs_code (stmt,
3830 swap_tree_comparison (code));
3838 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3840 tree *arg = gimple_call_arg_ptr (stmt, i);
3841 if (REFERENCE_CLASS_P (*arg)
3842 && maybe_canonicalize_mem_ref_addr (arg))
3845 tree *lhs = gimple_call_lhs_ptr (stmt);
3847 && REFERENCE_CLASS_P (*lhs)
3848 && maybe_canonicalize_mem_ref_addr (lhs))
3854 gasm *asm_stmt = as_a <gasm *> (stmt);
3855 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3857 tree link = gimple_asm_output_op (asm_stmt, i);
3858 tree op = TREE_VALUE (link);
3859 if (REFERENCE_CLASS_P (op)
3860 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3863 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3865 tree link = gimple_asm_input_op (asm_stmt, i);
3866 tree op = TREE_VALUE (link);
3867 if ((REFERENCE_CLASS_P (op)
3868 || TREE_CODE (op) == ADDR_EXPR)
3869 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3875 if (gimple_debug_bind_p (stmt))
3877 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3879 && (REFERENCE_CLASS_P (*val)
3880 || TREE_CODE (*val) == ADDR_EXPR)
3881 && maybe_canonicalize_mem_ref_addr (val))
3887 /* Canonicalize operand order. */
3888 tree lhs = gimple_cond_lhs (stmt);
3889 tree rhs = gimple_cond_rhs (stmt);
3890 if (tree_swap_operands_p (lhs, rhs, false))
3892 gcond *gc = as_a <gcond *> (stmt);
3893 gimple_cond_set_lhs (gc, rhs);
3894 gimple_cond_set_rhs (gc, lhs);
3895 gimple_cond_set_code (gc,
3896 swap_tree_comparison (gimple_cond_code (gc)));
3903 /* Dispatch to pattern-based folding. */
3905 || is_gimple_assign (stmt)
3906 || gimple_code (stmt) == GIMPLE_COND)
3908 gimple_seq seq = NULL;
/* In INPLACE mode pass a NULL seq so gimple_simplify cannot emit
   new statements.  */
3911 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3912 valueize, valueize))
3914 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3917 gimple_seq_discard (seq);
/* The statement may have been replaced above; re-fetch it.  */
3921 stmt = gsi_stmt (*gsi);
3923 /* Fold the main computation performed by the statement. */
3924 switch (gimple_code (stmt))
3928 /* Try to canonicalize for boolean-typed X the comparisons
3929 X == 0, X == 1, X != 0, and X != 1. */
3930 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3931 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3933 tree lhs = gimple_assign_lhs (stmt);
3934 tree op1 = gimple_assign_rhs1 (stmt);
3935 tree op2 = gimple_assign_rhs2 (stmt);
3936 tree type = TREE_TYPE (op1);
3938 /* Check whether the comparison operands are of the same boolean
3939 type as the result type is.
3940 Check that second operand is an integer-constant with value
3942 if (TREE_CODE (op2) == INTEGER_CST
3943 && (integer_zerop (op2) || integer_onep (op2))
3944 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3946 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3947 bool is_logical_not = false;
3949 /* X == 0 and X != 1 is a logical-not of X
3950 X == 1 and X != 0 is X */
3951 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3952 || (cmp_code == NE_EXPR && integer_onep (op2)))
3953 is_logical_not = true;
3955 if (is_logical_not == false)
3956 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3957 /* Only for one-bit precision typed X the transformation
3958 !X -> ~X is valid. */
3959 else if (TYPE_PRECISION (type) == 1)
3960 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3961 /* Otherwise we use !X -> X ^ 1. */
3963 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3964 build_int_cst (type, 1));
3970 unsigned old_num_ops = gimple_num_ops (stmt);
3971 tree lhs = gimple_assign_lhs (stmt);
3972 tree new_rhs = fold_gimple_assign (gsi);
3974 && !useless_type_conversion_p (TREE_TYPE (lhs),
3975 TREE_TYPE (new_rhs)))
3976 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3979 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3981 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3988 changed |= gimple_fold_call (gsi, inplace);
3992 /* Fold *& in asm operands. */
3994 gasm *asm_stmt = as_a <gasm *> (stmt);
3996 const char **oconstraints;
3997 const char *constraint;
3998 bool allows_mem, allows_reg;
4000 noutputs = gimple_asm_noutputs (asm_stmt);
4001 oconstraints = XALLOCAVEC (const char *, noutputs);
4003 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4005 tree link = gimple_asm_output_op (asm_stmt, i);
4006 tree op = TREE_VALUE (link);
4008 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4009 if (REFERENCE_CLASS_P (op)
4010 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4012 TREE_VALUE (link) = op;
4016 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4018 tree link = gimple_asm_input_op (asm_stmt, i);
4019 tree op = TREE_VALUE (link);
4021 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* Inputs may only be folded to memory when the constraint allows
   memory but not a register.  */
4022 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4023 oconstraints, &allows_mem, &allows_reg);
4024 if (REFERENCE_CLASS_P (op)
4025 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4028 TREE_VALUE (link) = op;
4036 if (gimple_debug_bind_p (stmt))
4038 tree val = gimple_debug_bind_get_value (stmt);
4040 && REFERENCE_CLASS_P (val))
4042 tree tem = maybe_fold_reference (val, false);
4045 gimple_debug_bind_set_value (stmt, tem);
4050 && TREE_CODE (val) == ADDR_EXPR)
4052 tree ref = TREE_OPERAND (val, 0);
4053 tree tem = maybe_fold_reference (ref, false);
4056 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4057 gimple_debug_bind_set_value (stmt, tem);
4067 stmt = gsi_stmt (*gsi);
4069 /* Fold *& on the lhs. */
4070 if (gimple_has_lhs (stmt))
4072 tree lhs = gimple_get_lhs (stmt);
4073 if (lhs && REFERENCE_CLASS_P (lhs))
4075 tree new_lhs = maybe_fold_reference (lhs, true);
4078 gimple_set_lhs (stmt, new_lhs);
/* Emit deferred overflow warnings only when something changed and
   the statement does not carry a no-warning flag.  */
4084 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4088 /* Valueization callback that ends up not following SSA edges. */
4091 no_follow_ssa_edges (tree)
4096 /* Valueization callback that ends up following single-use SSA edges only. */
4099 follow_single_use_edges (tree val)
/* Only single-use SSA names are looked through.  */
4101 if (TREE_CODE (val) == SSA_NAME
4102 && !has_single_use (val))
4107 /* Fold the statement pointed to by GSI. In some cases, this function may
4108 replace the whole statement with a new one. Returns true iff folding
4110 The statement pointed to by GSI should be in valid gimple form but may
4111 be in unfolded state as resulting from for example constant propagation
4112 which can produce *&x = 0. */
/* This variant does not follow SSA use-def edges when valueizing.  */
4115 fold_stmt (gimple_stmt_iterator *gsi)
4117 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
/* Like the fold_stmt overload above, but valueize operands through the
   VALUEIZE callback.  */
4121 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4123 return fold_stmt_1 (gsi, false, valueize);
4126 /* Perform the minimal folding on statement *GSI. Only operations like
4127 *&x created by constant propagation are handled. The statement cannot
4128 be replaced with a new one. Return true if the statement was
4129 changed, false otherwise.
4130 The statement *GSI should be in valid gimple form but may
4131 be in unfolded state as resulting from for example constant propagation
4132 which can produce *&x = 0. */
4135 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4137 gimple *stmt = gsi_stmt (*gsi);
4138 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
/* In-place folding must never have replaced the statement itself.  */
4139 gcc_assert (gsi_stmt (*gsi) == stmt);
4143 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4144 if EXPR is null or we don't know how.
4145 If non-null, the result always has boolean type. */
4148 canonicalize_bool (tree expr, bool invert)
/* Inverted path: constants flip and tests are negated.  */
4154 if (integer_nonzerop (expr))
4155 return boolean_false_node;
4156 else if (integer_zerop (expr))
4157 return boolean_true_node;
4158 else if (TREE_CODE (expr) == SSA_NAME)
4159 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4160 build_int_cst (TREE_TYPE (expr), 0));
4161 else if (COMPARISON_CLASS_P (expr))
4162 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4164 TREE_OPERAND (expr, 0),
4165 TREE_OPERAND (expr, 1));
/* Non-inverted path: keep the truth value, just give it boolean type.  */
4171 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4173 if (integer_nonzerop (expr))
4174 return boolean_true_node;
4175 else if (integer_zerop (expr))
4176 return boolean_false_node;
4177 else if (TREE_CODE (expr) == SSA_NAME)
4178 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4179 build_int_cst (TREE_TYPE (expr), 0));
4180 else if (COMPARISON_CLASS_P (expr))
4181 return fold_build2 (TREE_CODE (expr),
4183 TREE_OPERAND (expr, 0),
4184 TREE_OPERAND (expr, 1));
4190 /* Check to see if a boolean expression EXPR is logically equivalent to the
4191 comparison (OP1 CODE OP2). Check for various identities involving
4195 same_bool_comparison_p (const_tree expr, enum tree_code code,
4196 const_tree op1, const_tree op2)
4200 /* The obvious case. */
4201 if (TREE_CODE (expr) == code
4202 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4203 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4206 /* Check for comparing (name, name != 0) and the case where expr
4207 is an SSA_NAME with a definition matching the comparison. */
4208 if (TREE_CODE (expr) == SSA_NAME
4209 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4211 if (operand_equal_p (expr, op1, 0))
4212 return ((code == NE_EXPR && integer_zerop (op2))
4213 || (code == EQ_EXPR && integer_nonzerop (op2)));
4214 s = SSA_NAME_DEF_STMT (expr);
4215 if (is_gimple_assign (s)
4216 && gimple_assign_rhs_code (s) == code
4217 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4218 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4222 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4223 of name is a comparison, recurse. */
4224 if (TREE_CODE (op1) == SSA_NAME
4225 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4227 s = SSA_NAME_DEF_STMT (op1);
4228 if (is_gimple_assign (s)
4229 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4231 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) / (name == 1): recurse with the comparison as-is.  */
4232 if ((c == NE_EXPR && integer_zerop (op2))
4233 || (c == EQ_EXPR && integer_nonzerop (op2)))
4234 return same_bool_comparison_p (expr, c,
4235 gimple_assign_rhs1 (s),
4236 gimple_assign_rhs2 (s));
/* (name == 0) / (name != 1): recurse with the inverted comparison.  */
4237 if ((c == EQ_EXPR && integer_zerop (op2))
4238 || (c == NE_EXPR && integer_nonzerop (op2)))
4239 return same_bool_comparison_p (expr,
4240 invert_tree_comparison (c, false),
4241 gimple_assign_rhs1 (s),
4242 gimple_assign_rhs2 (s));
4248 /* Check to see if two boolean expressions OP1 and OP2 are logically
4252 same_bool_result_p (const_tree op1, const_tree op2)
4254 /* Simple cases first. */
4255 if (operand_equal_p (op1, op2, 0))
4258 /* Check the cases where at least one of the operands is a comparison.
4259 These are a bit smarter than operand_equal_p in that they apply some
4260 identities on SSA_NAMEs. */
4261 if (COMPARISON_CLASS_P (op2)
4262 && same_bool_comparison_p (op1, TREE_CODE (op2),
4263 TREE_OPERAND (op2, 0),
4264 TREE_OPERAND (op2, 1)))
4266 if (COMPARISON_CLASS_P (op1)
4267 && same_bool_comparison_p (op2, TREE_CODE (op1),
4268 TREE_OPERAND (op1, 0),
4269 TREE_OPERAND (op1, 1)))
4276 /* Forward declarations for some mutually recursive functions. */
4279 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4280 enum tree_code code2, tree op2a, tree op2b);
4282 and_var_with_comparison (tree var, bool invert,
4283 enum tree_code code2, tree op2a, tree op2b);
4285 and_var_with_comparison_1 (gimple *stmt,
4286 enum tree_code code2, tree op2a, tree op2b);
4288 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4289 enum tree_code code2, tree op2a, tree op2b);
4291 or_var_with_comparison (tree var, bool invert,
4292 enum tree_code code2, tree op2a, tree op2b);
4294 or_var_with_comparison_1 (gimple *stmt,
4295 enum tree_code code2, tree op2a, tree op2b);
4297 /* Helper function for and_comparisons_1: try to simplify the AND of the
4298 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4299 If INVERT is true, invert the value of the VAR before doing the AND.
4300 Return NULL_TREE if we can't simplify this to a single expression. */
4303 and_var_with_comparison (tree var, bool invert,
4304 enum tree_code code2, tree op2a, tree op2b)
4307 gimple *stmt = SSA_NAME_DEF_STMT (var);
4309 /* We can only deal with variables whose definitions are assignments. */
4310 if (!is_gimple_assign (stmt))
4313 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4314 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4315 Then we only have to consider the simpler non-inverted cases. */
4317 t = or_var_with_comparison_1 (stmt,
4318 invert_tree_comparison (code2, false),
4321 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool gives the result boolean type and undoes the
   DeMorgan inversion when INVERT was set.  */
4322 return canonicalize_bool (t, invert);
4325 /* Try to simplify the AND of the ssa variable defined by the assignment
4326 STMT with the comparison specified by (OP2A CODE2 OP2B).
4327 Return NULL_TREE if we can't simplify this to a single expression. */
4330 and_var_with_comparison_1 (gimple *stmt,
4331 enum tree_code code2, tree op2a, tree op2b)
4333 tree var = gimple_assign_lhs (stmt);
4334 tree true_test_var = NULL_TREE;
4335 tree false_test_var = NULL_TREE;
4336 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4338 /* Check for identities like (var AND (var == 0)) => false. */
4339 if (TREE_CODE (op2a) == SSA_NAME
4340 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4342 if ((code2 == NE_EXPR && integer_zerop (op2b))
4343 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4345 true_test_var = op2a;
4346 if (var == true_test_var)
4349 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4350 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4352 false_test_var = op2a;
4353 if (var == false_test_var)
4354 return boolean_false_node;
4358 /* If the definition is a comparison, recurse on it. */
4359 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4361 tree t = and_comparisons_1 (innercode,
4362 gimple_assign_rhs1 (stmt),
4363 gimple_assign_rhs2 (stmt),
4371 /* If the definition is an AND or OR expression, we may be able to
4372 simplify by reassociating. */
4373 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4374 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4376 tree inner1 = gimple_assign_rhs1 (stmt);
4377 tree inner2 = gimple_assign_rhs2 (stmt);
4380 tree partial = NULL_TREE;
4381 bool is_and = (innercode == BIT_AND_EXPR);
4383 /* Check for boolean identities that don't require recursive examination
4385 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4386 inner1 AND (inner1 OR inner2) => inner1
4387 !inner1 AND (inner1 AND inner2) => false
4388 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4389 Likewise for similar cases involving inner2. */
4390 if (inner1 == true_test_var)
4391 return (is_and ? var : inner1);
4392 else if (inner2 == true_test_var)
4393 return (is_and ? var : inner2);
4394 else if (inner1 == false_test_var)
4396 ? boolean_false_node
4397 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4398 else if (inner2 == false_test_var)
4400 ? boolean_false_node
4401 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4403 /* Next, redistribute/reassociate the AND across the inner tests.
4404 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4405 if (TREE_CODE (inner1) == SSA_NAME
4406 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4407 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4408 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4409 gimple_assign_rhs1 (s),
4410 gimple_assign_rhs2 (s),
4411 code2, op2a, op2b)))
4413 /* Handle the AND case, where we are reassociating:
4414 (inner1 AND inner2) AND (op2a code2 op2b)
4416 If the partial result t is a constant, we win. Otherwise
4417 continue on to try reassociating with the other inner test. */
4420 if (integer_onep (t))
4422 else if (integer_zerop (t))
4423 return boolean_false_node;
4426 /* Handle the OR case, where we are redistributing:
4427 (inner1 OR inner2) AND (op2a code2 op2b)
4428 => (t OR (inner2 AND (op2a code2 op2b))) */
4429 else if (integer_onep (t))
4430 return boolean_true_node;
4432 /* Save partial result for later. */
4436 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4437 if (TREE_CODE (inner2) == SSA_NAME
4438 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4439 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4440 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4441 gimple_assign_rhs1 (s),
4442 gimple_assign_rhs2 (s),
4443 code2, op2a, op2b)))
4445 /* Handle the AND case, where we are reassociating:
4446 (inner1 AND inner2) AND (op2a code2 op2b)
4447 => (inner1 AND t) */
4450 if (integer_onep (t))
4452 else if (integer_zerop (t))
4453 return boolean_false_node;
4454 /* If both are the same, we can apply the identity
4456 else if (partial && same_bool_result_p (t, partial))
4460 /* Handle the OR case, where we are redistributing:
4461 (inner1 OR inner2) AND (op2a code2 op2b)
4462 => (t OR (inner1 AND (op2a code2 op2b)))
4463 => (t OR partial) */
4466 if (integer_onep (t))
4467 return boolean_true_node;
4470 /* We already got a simplification for the other
4471 operand to the redistributed OR expression. The
4472 interesting case is when at least one is false.
4473 Or, if both are the same, we can apply the identity
4475 if (integer_zerop (partial))
4477 else if (integer_zerop (t))
4479 else if (same_bool_result_p (t, partial))
4488 /* Try to simplify the AND of two comparisons defined by
4489 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4490 If this can be done without constructing an intermediate value,
4491 return the resulting tree; otherwise NULL_TREE is returned.
4492 This function is deliberately asymmetric as it recurses on SSA_DEFs
4493 in the first comparison but not the second. */
4496 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4497 enum tree_code code2, tree op2a, tree op2b)
4499 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4501 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4502 if (operand_equal_p (op1a, op2a, 0)
4503 && operand_equal_p (op1b, op2b, 0))
4505 /* Result will be either NULL_TREE, or a combined comparison. */
4506 tree t = combine_comparisons (UNKNOWN_LOCATION,
4507 TRUTH_ANDIF_EXPR, code1, code2,
4508 truth_type, op1a, op1b);
4513 /* Likewise the swapped case of the above. */
4514 if (operand_equal_p (op1a, op2b, 0)
4515 && operand_equal_p (op1b, op2a, 0))
4517 /* Result will be either NULL_TREE, or a combined comparison. */
4518 tree t = combine_comparisons (UNKNOWN_LOCATION,
4519 TRUTH_ANDIF_EXPR, code1,
4520 swap_tree_comparison (code2),
4521 truth_type, op1a, op1b);
4526 /* If both comparisons are of the same value against constants, we might
4527 be able to merge them. */
4528 if (operand_equal_p (op1a, op2a, 0)
4529 && TREE_CODE (op1b) == INTEGER_CST
4530 && TREE_CODE (op2b) == INTEGER_CST)
4532 int cmp = tree_int_cst_compare (op1b, op2b);
4534 /* If we have (op1a == op1b), we should either be able to
4535 return that or FALSE, depending on whether the constant op1b
4536 also satisfies the other comparison against op2b. */
4537 if (code1 == EQ_EXPR)
4543 case EQ_EXPR: val = (cmp == 0); break;
4544 case NE_EXPR: val = (cmp != 0); break;
4545 case LT_EXPR: val = (cmp < 0); break;
4546 case GT_EXPR: val = (cmp > 0); break;
4547 case LE_EXPR: val = (cmp <= 0); break;
4548 case GE_EXPR: val = (cmp >= 0); break;
4549 default: done = false;
4554 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4556 return boolean_false_node;
4559 /* Likewise if the second comparison is an == comparison. */
4560 else if (code2 == EQ_EXPR)
4566 case EQ_EXPR: val = (cmp == 0); break;
4567 case NE_EXPR: val = (cmp != 0); break;
4568 case LT_EXPR: val = (cmp > 0); break;
4569 case GT_EXPR: val = (cmp < 0); break;
4570 case LE_EXPR: val = (cmp >= 0); break;
4571 case GE_EXPR: val = (cmp <= 0); break;
4572 default: done = false;
4577 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4579 return boolean_false_node;
4583 /* Same business with inequality tests. */
4584 else if (code1 == NE_EXPR)
4589 case EQ_EXPR: val = (cmp != 0); break;
4590 case NE_EXPR: val = (cmp == 0); break;
4591 case LT_EXPR: val = (cmp >= 0); break;
4592 case GT_EXPR: val = (cmp <= 0); break;
4593 case LE_EXPR: val = (cmp > 0); break;
4594 case GE_EXPR: val = (cmp < 0); break;
4599 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4601 else if (code2 == NE_EXPR)
4606 case EQ_EXPR: val = (cmp == 0); break;
4607 case NE_EXPR: val = (cmp != 0); break;
4608 case LT_EXPR: val = (cmp <= 0); break;
4609 case GT_EXPR: val = (cmp >= 0); break;
4610 case LE_EXPR: val = (cmp < 0); break;
4611 case GE_EXPR: val = (cmp > 0); break;
4616 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4619 /* Choose the more restrictive of two < or <= comparisons. */
4620 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4621 && (code2 == LT_EXPR || code2 == LE_EXPR))
4623 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4624 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4626 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4629 /* Likewise choose the more restrictive of two > or >= comparisons. */
4630 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4631 && (code2 == GT_EXPR || code2 == GE_EXPR))
4633 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4634 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4636 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4639 /* Check for singleton ranges. */
4641 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4642 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4643 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4645 /* Check for disjoint ranges. */
4647 && (code1 == LT_EXPR || code1 == LE_EXPR)
4648 && (code2 == GT_EXPR || code2 == GE_EXPR))
4649 return boolean_false_node;
4651 && (code1 == GT_EXPR || code1 == GE_EXPR)
4652 && (code2 == LT_EXPR || code2 == LE_EXPR))
4653 return boolean_false_node;
4656 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4657 NAME's definition is a truth value. See if there are any simplifications
4658 that can be done against the NAME's definition. */
4659 if (TREE_CODE (op1a) == SSA_NAME
4660 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4661 && (integer_zerop (op1b) || integer_onep (op1b)))
4663 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4664 || (code1 == NE_EXPR && integer_onep (op1b)));
4665 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4666 switch (gimple_code (stmt))
4669 /* Try to simplify by copy-propagating the definition. */
4670 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4673 /* If every argument to the PHI produces the same result when
4674 ANDed with the second comparison, we win.
4675 Do not do this unless the type is bool since we need a bool
4676 result here anyway. */
4677 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4679 tree result = NULL_TREE;
4681 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4683 tree arg = gimple_phi_arg_def (stmt, i);
4685 /* If this PHI has itself as an argument, ignore it.
4686 If all the other args produce the same result,
4688 if (arg == gimple_phi_result (stmt))
4690 else if (TREE_CODE (arg) == INTEGER_CST)
4692 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4695 result = boolean_false_node;
4696 else if (!integer_zerop (result))
4700 result = fold_build2 (code2, boolean_type_node,
4702 else if (!same_bool_comparison_p (result,
4706 else if (TREE_CODE (arg) == SSA_NAME
4707 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4710 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4711 /* In simple cases we can look through PHI nodes,
4712 but we have to be careful with loops.
4714 if (! dom_info_available_p (CDI_DOMINATORS)
4715 || gimple_bb (def_stmt) == gimple_bb (stmt)
4716 || dominated_by_p (CDI_DOMINATORS,
4717 gimple_bb (def_stmt),
4720 temp = and_var_with_comparison (arg, invert, code2,
4726 else if (!same_bool_result_p (result, temp))
4742 /* Try to simplify the AND of two comparisons, specified by
4743 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4744 If this can be simplified to a single expression (without requiring
4745 introducing more SSA variables to hold intermediate values),
4746 return the resulting tree. Otherwise return NULL_TREE.
4747 If the result expression is non-null, it has boolean type. */
4750 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4751 enum tree_code code2, tree op2a, tree op2b)
4753 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
/* and_comparisons_1 is asymmetric, so retry with the operands swapped.  */
4757 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4760 /* Helper function for or_comparisons_1: try to simplify the OR of the
4761 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4762 If INVERT is true, invert the value of VAR before doing the OR.
4763 Return NULL_EXPR if we can't simplify this to a single expression. */
4766 or_var_with_comparison (tree var, bool invert,
4767 enum tree_code code2, tree op2a, tree op2b)
4770 gimple *stmt = SSA_NAME_DEF_STMT (var);
4772 /* We can only deal with variables whose definitions are assignments. */
4773 if (!is_gimple_assign (stmt))
4776 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4777 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4778 Then we only have to consider the simpler non-inverted cases. */
4780 t = and_var_with_comparison_1 (stmt,
4781 invert_tree_comparison (code2, false),
4784 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
/* canonicalize_bool is handed INVERT so the final result is re-inverted
   (for the DeMorgan path) and forced into canonical boolean form.  */
4785 return canonicalize_bool (t, invert);
4788 /* Try to simplify the OR of the ssa variable defined by the assignment
4789 STMT with the comparison specified by (OP2A CODE2 OP2B).
4790 Return NULL_EXPR if we can't simplify this to a single expression. */
4793 or_var_with_comparison_1 (gimple *stmt,
4794 enum tree_code code2, tree op2a, tree op2b)
4796 tree var = gimple_assign_lhs (stmt);
4797 tree true_test_var = NULL_TREE;
4798 tree false_test_var = NULL_TREE;
4799 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4801 /* Check for identities like (var OR (var != 0)) => true . */
/* Record whether (op2a code2 op2b) is a test for op2a being true
   (true_test_var) or false (false_test_var); reused further below.  */
4802 if (TREE_CODE (op2a) == SSA_NAME
4803 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4805 if ((code2 == NE_EXPR && integer_zerop (op2b))
4806 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4808 true_test_var = op2a;
4809 if (var == true_test_var)
4812 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4813 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4815 false_test_var = op2a;
/* var OR !var is always true.  */
4816 if (var == false_test_var)
4817 return boolean_true_node;
4821 /* If the definition is a comparison, recurse on it. */
4822 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4824 tree t = or_comparisons_1 (innercode,
4825 gimple_assign_rhs1 (stmt),
4826 gimple_assign_rhs2 (stmt),
4834 /* If the definition is an AND or OR expression, we may be able to
4835 simplify by reassociating. */
4836 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4837 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4839 tree inner1 = gimple_assign_rhs1 (stmt);
4840 tree inner2 = gimple_assign_rhs2 (stmt);
4843 tree partial = NULL_TREE;
4844 bool is_or = (innercode == BIT_IOR_EXPR);
4846 /* Check for boolean identities that don't require recursive examination
4848 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4849 inner1 OR (inner1 AND inner2) => inner1
4850 !inner1 OR (inner1 OR inner2) => true
4851 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4853 if (inner1 == true_test_var)
4854 return (is_or ? var : inner1);
4855 else if (inner2 == true_test_var)
4856 return (is_or ? var : inner2);
4857 else if (inner1 == false_test_var)
4860 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4861 else if (inner2 == false_test_var)
4864 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4866 /* Next, redistribute/reassociate the OR across the inner tests.
4867 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4868 if (TREE_CODE (inner1) == SSA_NAME
4869 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4870 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4871 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4872 gimple_assign_rhs1 (s),
4873 gimple_assign_rhs2 (s),
4874 code2, op2a, op2b)))
4876 /* Handle the OR case, where we are reassociating:
4877 (inner1 OR inner2) OR (op2a code2 op2b)
4879 If the partial result t is a constant, we win. Otherwise
4880 continue on to try reassociating with the other inner test. */
4883 if (integer_onep (t))
4884 return boolean_true_node;
4885 else if (integer_zerop (t))
4889 /* Handle the AND case, where we are redistributing:
4890 (inner1 AND inner2) OR (op2a code2 op2b)
4891 => (t AND (inner2 OR (op2a code op2b))) */
4892 else if (integer_zerop (t))
4893 return boolean_false_node;
4895 /* Save partial result for later. */
4899 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4900 if (TREE_CODE (inner2) == SSA_NAME
4901 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4902 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4903 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4904 gimple_assign_rhs1 (s),
4905 gimple_assign_rhs2 (s),
4906 code2, op2a, op2b)))
4908 /* Handle the OR case, where we are reassociating:
4909 (inner1 OR inner2) OR (op2a code2 op2b)
4911 => (t OR partial) */
4914 if (integer_zerop (t))
4916 else if (integer_onep (t))
4917 return boolean_true_node;
4918 /* If both are the same, we can apply the identity
4920 else if (partial && same_bool_result_p (t, partial))
4924 /* Handle the AND case, where we are redistributing:
4925 (inner1 AND inner2) OR (op2a code2 op2b)
4926 => (t AND (inner1 OR (op2a code2 op2b)))
4927 => (t AND partial) */
4930 if (integer_zerop (t))
4931 return boolean_false_node;
4934 /* We already got a simplification for the other
4935 operand to the redistributed AND expression. The
4936 interesting case is when at least one is true.
4937 Or, if both are the same, we can apply the identity
4939 if (integer_onep (partial))
4941 else if (integer_onep (t))
4943 else if (same_bool_result_p (t, partial))
4952 /* Try to simplify the OR of two comparisons defined by
4953 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4954 If this can be done without constructing an intermediate value,
4955 return the resulting tree; otherwise NULL_TREE is returned.
4956 This function is deliberately asymmetric as it recurses on SSA_DEFs
4957 in the first comparison but not the second. */
4960 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4961 enum tree_code code2, tree op2a, tree op2b)
4963 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4965 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4966 if (operand_equal_p (op1a, op2a, 0)
4967 && operand_equal_p (op1b, op2b, 0))
4969 /* Result will be either NULL_TREE, or a combined comparison. */
4970 tree t = combine_comparisons (UNKNOWN_LOCATION,
4971 TRUTH_ORIF_EXPR, code1, code2,
4972 truth_type, op1a, op1b);
4977 /* Likewise the swapped case of the above. */
4978 if (operand_equal_p (op1a, op2b, 0)
4979 && operand_equal_p (op1b, op2a, 0))
4981 /* Result will be either NULL_TREE, or a combined comparison. */
4982 tree t = combine_comparisons (UNKNOWN_LOCATION,
4983 TRUTH_ORIF_EXPR, code1,
4984 swap_tree_comparison (code2),
4985 truth_type, op1a, op1b);
4990 /* If both comparisons are of the same value against constants, we might
4991 be able to merge them. */
4992 if (operand_equal_p (op1a, op2a, 0)
4993 && TREE_CODE (op1b) == INTEGER_CST
4994 && TREE_CODE (op2b) == INTEGER_CST)
/* CMP orders the two constants; all the case analysis below keys off it.  */
4996 int cmp = tree_int_cst_compare (op1b, op2b);
4998 /* If we have (op1a != op1b), we should either be able to
4999 return that or TRUE, depending on whether the constant op1b
5000 also satisfies the other comparison against op2b. */
5001 if (code1 == NE_EXPR)
5007 case EQ_EXPR: val = (cmp == 0); break;
5008 case NE_EXPR: val = (cmp != 0); break;
5009 case LT_EXPR: val = (cmp < 0); break;
5010 case GT_EXPR: val = (cmp > 0); break;
5011 case LE_EXPR: val = (cmp <= 0); break;
5012 case GE_EXPR: val = (cmp >= 0); break;
5013 default: done = false;
5018 return boolean_true_node;
5020 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5023 /* Likewise if the second comparison is a != comparison. */
5024 else if (code2 == NE_EXPR)
5030 case EQ_EXPR: val = (cmp == 0); break;
5031 case NE_EXPR: val = (cmp != 0); break;
5032 case LT_EXPR: val = (cmp > 0); break;
5033 case GT_EXPR: val = (cmp < 0); break;
5034 case LE_EXPR: val = (cmp >= 0); break;
5035 case GE_EXPR: val = (cmp <= 0); break;
5036 default: done = false;
5041 return boolean_true_node;
5043 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5047 /* See if an equality test is redundant with the other comparison. */
5048 else if (code1 == EQ_EXPR)
5053 case EQ_EXPR: val = (cmp == 0); break;
5054 case NE_EXPR: val = (cmp != 0); break;
5055 case LT_EXPR: val = (cmp < 0); break;
5056 case GT_EXPR: val = (cmp > 0); break;
5057 case LE_EXPR: val = (cmp <= 0); break;
5058 case GE_EXPR: val = (cmp >= 0); break;
5063 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5065 else if (code2 == EQ_EXPR)
5070 case EQ_EXPR: val = (cmp == 0); break;
5071 case NE_EXPR: val = (cmp != 0); break;
5072 case LT_EXPR: val = (cmp > 0); break;
5073 case GT_EXPR: val = (cmp < 0); break;
5074 case LE_EXPR: val = (cmp >= 0); break;
5075 case GE_EXPR: val = (cmp <= 0); break;
5080 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5083 /* Choose the less restrictive of two < or <= comparisons. */
5084 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5085 && (code2 == LT_EXPR || code2 == LE_EXPR))
5087 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5088 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5090 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5093 /* Likewise choose the less restrictive of two > or >= comparisons. */
5094 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5095 && (code2 == GT_EXPR || code2 == GE_EXPR))
5097 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5098 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5100 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5103 /* Check for singleton ranges. */
5105 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5106 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5107 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5109 /* Check for less/greater pairs that don't restrict the range at all. */
5111 && (code1 == LT_EXPR || code1 == LE_EXPR)
5112 && (code2 == GT_EXPR || code2 == GE_EXPR))
5113 return boolean_true_node;
5115 && (code1 == GT_EXPR || code1 == GE_EXPR)
5116 && (code2 == LT_EXPR || code2 == LE_EXPR))
5117 return boolean_true_node;
5120 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5121 NAME's definition is a truth value. See if there are any simplifications
5122 that can be done against the NAME's definition. */
5123 if (TREE_CODE (op1a) == SSA_NAME
5124 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5125 && (integer_zerop (op1b) || integer_onep (op1b)))
/* INVERT is true when the first comparison tests NAME being false.  */
5127 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5128 || (code1 == NE_EXPR && integer_onep (op1b)));
5129 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5130 switch (gimple_code (stmt))
5133 /* Try to simplify by copy-propagating the definition. */
5134 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5137 /* If every argument to the PHI produces the same result when
5138 ORed with the second comparison, we win.
5139 Do not do this unless the type is bool since we need a bool
5140 result here anyway. */
5141 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5143 tree result = NULL_TREE;
5145 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5147 tree arg = gimple_phi_arg_def (stmt, i);
5149 /* If this PHI has itself as an argument, ignore it.
5150 If all the other args produce the same result,
5152 if (arg == gimple_phi_result (stmt))
5154 else if (TREE_CODE (arg) == INTEGER_CST)
5156 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5159 result = boolean_true_node;
5160 else if (!integer_onep (result))
5164 result = fold_build2 (code2, boolean_type_node,
5166 else if (!same_bool_comparison_p (result,
5170 else if (TREE_CODE (arg) == SSA_NAME
5171 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5174 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5175 /* In simple cases we can look through PHI nodes,
5176 but we have to be careful with loops.
5178 if (! dom_info_available_p (CDI_DOMINATORS)
5179 || gimple_bb (def_stmt) == gimple_bb (stmt)
5180 || dominated_by_p (CDI_DOMINATORS,
5181 gimple_bb (def_stmt),
5184 temp = or_var_with_comparison (arg, invert, code2,
5190 else if (!same_bool_result_p (result, temp))
5206 /* Try to simplify the OR of two comparisons, specified by
5207 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5208 If this can be simplified to a single expression (without requiring
5209 introducing more SSA variables to hold intermediate values),
5210 return the resulting tree. Otherwise return NULL_TREE.
5211 If the result expression is non-null, it has boolean type. */
5214 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5215 enum tree_code code2, tree op2a, tree op2b)
/* or_comparisons_1 only follows SSA defs through its first comparison,
   so try both orders, mirroring maybe_fold_and_comparisons.  */
5217 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5221 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5225 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5227 Either NULL_TREE, a simplified but non-constant or a constant
5230 ??? This should go into a gimple-fold-inline.h file to be eventually
5231 privatized with the single valueize function used in the various TUs
5232 to avoid the indirect function call overhead. */
5235 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5236 tree (*gvalueize) (tree))
5240 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5241 edges if there are intermediate VARYING defs. For this reason
5242 do not follow SSA edges here even though SCCVN can technically
5243 just deal fine with that. */
/* First give the generic match-and-simplify machinery a chance.  */
5244 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5246 tree res = NULL_TREE;
5247 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5249 else if (mprts_hook)
5250 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5253 if (dump_file && dump_flags & TDF_DETAILS)
5255 fprintf (dump_file, "Match-and-simplified ");
5256 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5257 fprintf (dump_file, " to ");
5258 print_generic_expr (dump_file, res, 0);
5259 fprintf (dump_file, "\n");
/* Otherwise dispatch on the statement kind and fold by hand.  */
5265 location_t loc = gimple_location (stmt);
5266 switch (gimple_code (stmt))
5270 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5272 switch (get_gimple_rhs_class (subcode))
5274 case GIMPLE_SINGLE_RHS:
5276 tree rhs = gimple_assign_rhs1 (stmt);
5277 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5279 if (TREE_CODE (rhs) == SSA_NAME)
5281 /* If the RHS is an SSA_NAME, return its known constant value,
5283 return (*valueize) (rhs);
5285 /* Handle propagating invariant addresses into address
5287 else if (TREE_CODE (rhs) == ADDR_EXPR
5288 && !is_gimple_min_invariant (rhs))
5290 HOST_WIDE_INT offset = 0;
5292 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5296 && (CONSTANT_CLASS_P (base)
5297 || decl_address_invariant_p (base)))
5298 return build_invariant_address (TREE_TYPE (rhs),
/* A fully-populated vector CONSTRUCTOR may fold to a VECTOR_CST if all
   valueized elements are constants.  */
5301 else if (TREE_CODE (rhs) == CONSTRUCTOR
5302 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5303 && (CONSTRUCTOR_NELTS (rhs)
5304 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5309 vec = XALLOCAVEC (tree,
5310 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5311 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5313 val = (*valueize) (val);
5314 if (TREE_CODE (val) == INTEGER_CST
5315 || TREE_CODE (val) == REAL_CST
5316 || TREE_CODE (val) == FIXED_CST)
5322 return build_vector (TREE_TYPE (rhs), vec);
5324 if (subcode == OBJ_TYPE_REF)
5326 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5327 /* If callee is constant, we can fold away the wrapper. */
5328 if (is_gimple_min_invariant (val))
5332 if (kind == tcc_reference)
5334 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5335 || TREE_CODE (rhs) == REALPART_EXPR
5336 || TREE_CODE (rhs) == IMAGPART_EXPR)
5337 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5339 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5340 return fold_unary_loc (EXPR_LOCATION (rhs),
5342 TREE_TYPE (rhs), val);
5344 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5345 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5347 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5348 return fold_ternary_loc (EXPR_LOCATION (rhs),
5350 TREE_TYPE (rhs), val,
5351 TREE_OPERAND (rhs, 1),
5352 TREE_OPERAND (rhs, 2));
5354 else if (TREE_CODE (rhs) == MEM_REF
5355 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5357 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5358 if (TREE_CODE (val) == ADDR_EXPR
5359 && is_gimple_min_invariant (val))
5361 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5363 TREE_OPERAND (rhs, 1));
/* Fall back to reading from a constant initializer.  */
5368 return fold_const_aggregate_ref_1 (rhs, valueize);
5370 else if (kind == tcc_declaration)
5371 return get_symbol_constant_value (rhs);
5375 case GIMPLE_UNARY_RHS:
5378 case GIMPLE_BINARY_RHS:
5379 /* Translate &x + CST into an invariant form suitable for
5380 further propagation. */
5381 if (subcode == POINTER_PLUS_EXPR)
5383 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5384 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5385 if (TREE_CODE (op0) == ADDR_EXPR
5386 && TREE_CODE (op1) == INTEGER_CST)
5388 tree off = fold_convert (ptr_type_node, op1);
5389 return build_fold_addr_expr_loc
5391 fold_build2 (MEM_REF,
5392 TREE_TYPE (TREE_TYPE (op0)),
5393 unshare_expr (op0), off));
5396 /* Canonicalize bool != 0 and bool == 0 appearing after
5397 valueization. While gimple_simplify handles this
5398 it can get confused by the ~X == 1 -> X == 0 transform
5399 which we can't reduce to a SSA name or a constant
5400 (and we have no way to tell gimple_simplify to not
5401 consider those transforms in the first place). */
5402 else if (subcode == EQ_EXPR
5403 || subcode == NE_EXPR)
5405 tree lhs = gimple_assign_lhs (stmt);
5406 tree op0 = gimple_assign_rhs1 (stmt);
5407 if (useless_type_conversion_p (TREE_TYPE (lhs),
5410 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5411 op0 = (*valueize) (op0);
/* Put a constant (if any) in op1 so the tests below only need to
   look at one side.  */
5412 if (TREE_CODE (op0) == INTEGER_CST)
5413 std::swap (op0, op1);
5414 if (TREE_CODE (op1) == INTEGER_CST
5415 && ((subcode == NE_EXPR && integer_zerop (op1))
5416 || (subcode == EQ_EXPR && integer_onep (op1))))
5422 case GIMPLE_TERNARY_RHS:
5424 /* Handle ternary operators that can appear in GIMPLE form. */
5425 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5426 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5427 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5428 return fold_ternary_loc (loc, subcode,
5429 gimple_expr_type (stmt), op0, op1, op2);
5440 gcall *call_stmt = as_a <gcall *> (stmt);
5442 if (gimple_call_internal_p (stmt))
5444 enum tree_code subcode = ERROR_MARK;
5445 switch (gimple_call_internal_fn (stmt))
5447 case IFN_UBSAN_CHECK_ADD:
5448 subcode = PLUS_EXPR;
5450 case IFN_UBSAN_CHECK_SUB:
5451 subcode = MINUS_EXPR;
5453 case IFN_UBSAN_CHECK_MUL:
5454 subcode = MULT_EXPR;
5456 case IFN_BUILTIN_EXPECT:
5458 tree arg0 = gimple_call_arg (stmt, 0);
5459 tree op0 = (*valueize) (arg0);
5460 if (TREE_CODE (op0) == INTEGER_CST)
5467 tree arg0 = gimple_call_arg (stmt, 0);
5468 tree arg1 = gimple_call_arg (stmt, 1);
5469 tree op0 = (*valueize) (arg0);
5470 tree op1 = (*valueize) (arg1);
5472 if (TREE_CODE (op0) != INTEGER_CST
5473 || TREE_CODE (op1) != INTEGER_CST)
5478 /* x * 0 = 0 * x = 0 without overflow. */
5479 if (integer_zerop (op0) || integer_zerop (op1))
5480 return build_zero_cst (TREE_TYPE (arg0));
5483 /* y - y = 0 without overflow. */
5484 if (operand_equal_p (op0, op1, 0))
5485 return build_zero_cst (TREE_TYPE (arg0));
/* Only accept a folded result that provably did not overflow.  */
5492 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5494 && TREE_CODE (res) == INTEGER_CST
5495 && !TREE_OVERFLOW (res))
5500 fn = (*valueize) (gimple_call_fn (stmt));
5501 if (TREE_CODE (fn) == ADDR_EXPR
5502 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5503 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5504 && gimple_builtin_call_types_compatible_p (stmt,
5505 TREE_OPERAND (fn, 0)))
5507 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5510 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5511 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5512 retval = fold_builtin_call_array (loc,
5513 gimple_call_return_type (call_stmt),
5514 fn, gimple_call_num_args (stmt), args);
5517 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5518 STRIP_NOPS (retval);
5519 retval = fold_convert (gimple_call_return_type (call_stmt),
5532 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5533 Returns NULL_TREE if folding to a constant is not possible, otherwise
5534 returns a constant according to is_gimple_min_invariant. */
5537 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
/* Delegate to the _1 worker, then filter out simplified-but-non-constant
   results so callers only ever see gimple invariants.  */
5539 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5540 if (res && is_gimple_min_invariant (res))
5546 /* The following set of functions are supposed to fold references using
5547 their constant initializers. */
5549 /* See if we can find constructor defining value of BASE.
5550 When we know the constructor with constant offset (such as
5551 base is array[40] and we do know constructor of array), then
5552 BIT_OFFSET is adjusted accordingly.
5554 As a special case, return error_mark_node when constructor
5555 is not explicitly available, but it is known to be zero
5556 such as 'static const int a;'. */
5558 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5559 tree (*valueize)(tree))
5561 HOST_WIDE_INT bit_offset2, size, max_size;
/* Strip a MEM_REF wrapper first, folding its constant offset (if it fits
   in a signed HWI) into *BIT_OFFSET.  */
5564 if (TREE_CODE (base) == MEM_REF)
5566 if (!integer_zerop (TREE_OPERAND (base, 1)))
5568 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5570 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5575 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5576 base = valueize (TREE_OPERAND (base, 0));
5577 if (!base || TREE_CODE (base) != ADDR_EXPR)
5579 base = TREE_OPERAND (base, 0);
5582 && TREE_CODE (base) == SSA_NAME)
5583 base = valueize (base);
5585 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5586 DECL_INITIAL. If BASE is a nested reference into another
5587 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5588 the inner reference. */
5589 switch (TREE_CODE (base))
5594 tree init = ctor_for_folding (base);
5596 /* Our semantic is exact opposite of ctor_for_folding;
5597 NULL means unknown, while error_mark_node is 0. */
5598 if (init == error_mark_node)
5601 return error_mark_node;
5605 case VIEW_CONVERT_EXPR:
5606 return get_base_constructor (TREE_OPERAND (base, 0),
5607 bit_offset, valueize);
/* Nested handled-component references: accumulate the offset and recurse
   on the underlying base, but only for constant-sized exact accesses.  */
5611 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5613 if (max_size == -1 || size != max_size)
5615 *bit_offset += bit_offset2;
5616 return get_base_constructor (base, bit_offset, valueize);
5622 if (CONSTANT_CLASS_P (base))
5629 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5630 SIZE to the memory at bit OFFSET. */
5633 fold_array_ctor_reference (tree type, tree ctor,
5634 unsigned HOST_WIDE_INT offset,
5635 unsigned HOST_WIDE_INT size,
5638 offset_int low_bound;
5639 offset_int elt_size;
5640 offset_int access_index;
5641 tree domain_type = NULL_TREE;
5642 HOST_WIDE_INT inner_offset;
5644 /* Compute low bound and elt size. */
5645 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5646 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5647 if (domain_type && TYPE_MIN_VALUE (domain_type))
5649 /* Static constructors for variably sized objects makes no sense. */
5650 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5651 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5655 /* Static constructors for variably sized objects makes no sense. */
5656 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5658 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5660 /* We can handle only constantly sized accesses that are known to not
5661 be larger than size of array element. */
5662 if (!TYPE_SIZE_UNIT (type)
5663 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5664 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
5668 /* Compute the array index we look for. */
5669 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
/* Indices in the CONSTRUCTOR are domain-relative, so bias by LOW_BOUND.  */
5671 access_index += low_bound;
5673 /* And offset within the access. */
5674 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5676 /* See if the array field is large enough to span whole access. We do not
5677 care to fold accesses spanning multiple array indexes. */
5678 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5680 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5681 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5683 /* When memory is not explicitly mentioned in constructor,
5684 it is 0 (or out of range). */
5685 return build_zero_cst (type);
5688 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5689 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5692 fold_nonarray_ctor_reference (tree type, tree ctor,
5693 unsigned HOST_WIDE_INT offset,
5694 unsigned HOST_WIDE_INT size,
5697 unsigned HOST_WIDE_INT cnt;
/* Scan all initialized fields looking for one that overlaps the access.  */
5700 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5703 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5704 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5705 tree field_size = DECL_SIZE (cfield);
5706 offset_int bitoffset;
5707 offset_int bitoffset_end, access_end;
5709 /* Variable sized objects in static constructors makes no sense,
5710 but field_size can be NULL for flexible array members. */
5711 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5712 && TREE_CODE (byte_offset) == INTEGER_CST
5713 && (field_size != NULL_TREE
5714 ? TREE_CODE (field_size) == INTEGER_CST
5715 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5717 /* Compute bit offset of the field. */
5718 bitoffset = (wi::to_offset (field_offset)
5719 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
5720 /* Compute bit offset where the field ends. */
5721 if (field_size != NULL_TREE)
5722 bitoffset_end = bitoffset + wi::to_offset (field_size);
5726 access_end = offset_int (offset) + size;
5728 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5729 [BITOFFSET, BITOFFSET_END)? */
5730 if (wi::cmps (access_end, bitoffset) > 0
5731 && (field_size == NULL_TREE
5732 || wi::lts_p (offset, bitoffset_end)))
5734 offset_int inner_offset = offset_int (offset) - bitoffset;
5735 /* We do have overlap. Now see if field is large enough to
5736 cover the access. Give up for accesses spanning multiple
5738 if (wi::cmps (access_end, bitoffset_end) > 0)
5740 if (offset < bitoffset)
/* Recurse into the field's own value with a field-relative offset.  */
5742 return fold_ctor_reference (type, cval,
5743 inner_offset.to_uhwi (), size,
5747 /* When memory is not explicitly mentioned in constructor, it is 0. */
5748 return build_zero_cst (type);
5751 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5752 to the memory at bit OFFSET. */
5755 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5756 unsigned HOST_WIDE_INT size, tree from_decl)
5760 /* We found the field with exact match. */
5761 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5763 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5765 /* We are at the end of walk, see if we can view convert the
5767 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5768 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5769 && !compare_tree_int (TYPE_SIZE (type), size)
5770 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5772 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5773 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5775 STRIP_USELESS_TYPE_CONVERSION (ret);
5778 /* For constants and byte-aligned/sized reads try to go through
5779 native_encode/interpret. */
5780 if (CONSTANT_CLASS_P (ctor)
5781 && BITS_PER_UNIT == 8
5782 && offset % BITS_PER_UNIT == 0
5783 && size % BITS_PER_UNIT == 0
5784 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5786 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5787 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5788 offset / BITS_PER_UNIT);
5790 return native_interpret_expr (type, buf, len);
/* Otherwise descend into the CONSTRUCTOR, dispatching on whether the
   outermost type is array/vector (indexed) or record-like (fields).  */
5792 if (TREE_CODE (ctor) == CONSTRUCTOR)
5795 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5796 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5797 return fold_array_ctor_reference (type, ctor, offset, size,
5800 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5807 /* Return the tree representing the element referenced by T if T is an
5808 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
5809 names using VALUEIZE. Return NULL_TREE otherwise. */
5812 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5814 tree ctor, idx, base;
5815 HOST_WIDE_INT offset, size, max_size;
/* Never fold a volatile access away.  */
5819 if (TREE_THIS_VOLATILE (t))
5823 return get_symbol_constant_value (t);
5825 tem = fold_read_from_constant_string (t);
5829 switch (TREE_CODE (t))
5832 case ARRAY_RANGE_REF:
5833 /* Constant indexes are handled well by get_base_constructor.
5834 Only special case variable offsets.
5835 FIXME: This code can't handle nested references with variable indexes
5836 (they will be handled only by iteration of ccp). Perhaps we can bring
5837 get_ref_base_and_extent here and make it use a valueize callback. */
5838 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5840 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5841 && TREE_CODE (idx) == INTEGER_CST)
5843 tree low_bound, unit_size;
5845 /* If the resulting bit-offset is constant, track it. */
5846 if ((low_bound = array_ref_low_bound (t),
5847 TREE_CODE (low_bound) == INTEGER_CST)
5848 && (unit_size = array_ref_element_size (t),
5849 tree_fits_uhwi_p (unit_size)))
5852 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5853 TYPE_PRECISION (TREE_TYPE (idx)));
5855 if (wi::fits_shwi_p (woffset))
5857 offset = woffset.to_shwi ();
5858 /* TODO: This code seems wrong, multiply then check
5859 to see if it fits. */
5860 offset *= tree_to_uhwi (unit_size);
5861 offset *= BITS_PER_UNIT;
5863 base = TREE_OPERAND (t, 0);
5864 ctor = get_base_constructor (base, &offset, valueize);
5865 /* Empty constructor. Always fold to 0. */
5866 if (ctor == error_mark_node)
5867 return build_zero_cst (TREE_TYPE (t));
5868 /* Out of bound array access. Value is undefined,
5872 /* We can not determine ctor. */
5875 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5876 tree_to_uhwi (unit_size)
5886 case TARGET_MEM_REF:
5888 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5889 ctor = get_base_constructor (base, &offset, valueize);
5891 /* Empty constructor. Always fold to 0. */
5892 if (ctor == error_mark_node)
5893 return build_zero_cst (TREE_TYPE (t));
5894 /* We do not know precise address. */
5895 if (max_size == -1 || max_size != size)
5897 /* We can not determine ctor. */
5901 /* Out of bound array access. Value is undefined, but don't fold. */
5905 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
/* REALPART/IMAGPART: fold the inner complex value, then extract.  */
5911 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5912 if (c && TREE_CODE (c) == COMPLEX_CST)
5913 return fold_build1_loc (EXPR_LOCATION (t),
5914 TREE_CODE (t), TREE_TYPE (t), c);
/* Convenience wrapper around fold_const_aggregate_ref_1 with no
   valueization callback.  */
5926 fold_const_aggregate_ref (tree t)
5928 return fold_const_aggregate_ref_1 (t, NULL);
5931 /* Lookup virtual method with index TOKEN in a virtual table V
5933 Set CAN_REFER if non-NULL to false if method
5934 is not referable or if the virtual table is ill-formed (such as rewritten
5935 by non-C++ produced symbol). Otherwise just return NULL in that case. */
5938 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5940 unsigned HOST_WIDE_INT offset,
5943 tree vtable = v, init, fn;
5944 unsigned HOST_WIDE_INT size;
5945 unsigned HOST_WIDE_INT elt_size, access_index;
5951 /* First of all double check we have virtual table. */
5952 if (TREE_CODE (v) != VAR_DECL
5953 || !DECL_VIRTUAL_P (v))
5955 /* Pass down that we lost track of the target. */
5961 init = ctor_for_folding (v);
5963 /* The virtual tables should always be born with constructors
5964 and we always should assume that they are available for
5965 folding. At the moment we do not stream them in all cases,
5966 but it should never happen that the ctor seems unreachable. */
5968 if (init == error_mark_node)
5970 gcc_assert (in_lto_p);
5971 /* Pass down that we lost track of the target. */
5976 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5977 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5978 offset *= BITS_PER_UNIT;
5979 offset += token * size;
5981 /* Lookup the value in the constructor that is assumed to be array.
5982 This is equivalent to
5983 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5984 offset, size, NULL);
5985 but in a constant time. We expect that frontend produced a simple
5986 array without indexed initializers. */
5988 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5989 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5990 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5991 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5993 access_index = offset / BITS_PER_UNIT / elt_size;
5994 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5996 /* This code makes an assumption that there are no
5997 indexed fileds produced by C++ FE, so we can directly index the array. */
5998 if (access_index < CONSTRUCTOR_NELTS (init))
6000 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6001 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6007 /* For type inconsistent program we may end up looking up virtual method
6008 in virtual table that does not contain TOKEN entries. We may overrun
6009 the virtual table and pick up a constant or RTTI info pointer.
6010 In any case the call is undefined. */
6012 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6013 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6014 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6017 fn = TREE_OPERAND (fn, 0);
6019 /* When cgraph node is missing and function is not public, we cannot
6020 devirtualize. This can happen in WHOPR when the actual method
6021 ends up in other partition, because we found devirtualization
6022 possibility too late. */
6023 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6034 /* Make sure we create a cgraph node for functions we'll reference.
6035 They can be non-existent if the reference comes from an entry
6036 of an external vtable for example. */
6037 cgraph_node::get_create (fn);
6042 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6043 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6044 KNOWN_BINFO carries the binfo describing the true type of
6045 OBJ_TYPE_REF_OBJECT(REF).
6046 Set CAN_REFER if non-NULL to false if method
6047 is not referable or if the virtual table is ill-formed (such as rewritten
6048 by non-C++ produced symbol). Otherwise just return NULL in that case. */
6051 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6054 unsigned HOST_WIDE_INT offset;
6057 v = BINFO_VTABLE (known_binfo);
6058 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
/* Decompose the BINFO_VTABLE expression into the vtable VAR_DECL and a
   byte offset into it; bail out if the form is not recognized.  */
6062 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
/* Delegate the actual entry lookup to the vtable-based variant.  */
6068 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6071 /* Given a pointer value OP0, return a simplified version of an
6072 indirection through OP0, or NULL_TREE if no simplification is
6073 possible. Note that the resulting type may be different from
6074 the type pointed to in the sense that it is still compatible
6075 from the langhooks point of view. */
6078 gimple_fold_indirect_ref (tree t)
6080 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6085 subtype = TREE_TYPE (sub);
/* Only pointer-typed operands can be folded here.  */
6086 if (!POINTER_TYPE_P (subtype))
/* Case 1: dereference of an address expression, *&op.  */
6089 if (TREE_CODE (sub) == ADDR_EXPR)
6091 tree op = TREE_OPERAND (sub, 0);
6092 tree optype = TREE_TYPE (op);
/* *&p => p when the types are compatible.  */
6094 if (useless_type_conversion_p (type, optype))
6097 /* *(foo *)&fooarray => fooarray[0] */
6098 if (TREE_CODE (optype) == ARRAY_TYPE
6099 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6100 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6102 tree type_domain = TYPE_DOMAIN (optype);
6103 tree min_val = size_zero_node;
/* Index element zero using the array's declared lower bound.  */
6104 if (type_domain && TYPE_MIN_VALUE (type_domain))
6105 min_val = TYPE_MIN_VALUE (type_domain);
6106 if (TREE_CODE (min_val) == INTEGER_CST)
6107 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6109 /* *(foo *)&complexfoo => __real__ complexfoo */
6110 else if (TREE_CODE (optype) == COMPLEX_TYPE
6111 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6112 return fold_build1 (REALPART_EXPR, type, op);
6113 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6114 else if (TREE_CODE (optype) == VECTOR_TYPE
6115 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6117 tree part_width = TYPE_SIZE (type);
6118 tree index = bitsize_int (0);
6119 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
/* Case 2: dereference of pointer arithmetic with a constant offset.  */
6123 /* *(p + CST) -> ... */
6124 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6125 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6127 tree addr = TREE_OPERAND (sub, 0);
6128 tree off = TREE_OPERAND (sub, 1);
6132 addrtype = TREE_TYPE (addr);
6134 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6135 if (TREE_CODE (addr) == ADDR_EXPR
6136 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6137 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6138 && tree_fits_uhwi_p (off))
6140 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6141 tree part_width = TYPE_SIZE (type);
6142 unsigned HOST_WIDE_INT part_widthi
6143 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6144 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6145 tree index = bitsize_int (indexi);
/* Only fold when the accessed element lies within the vector.  */
6146 if (offset / part_widthi
6147 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6148 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6152 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6153 if (TREE_CODE (addr) == ADDR_EXPR
6154 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6155 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6157 tree size = TYPE_SIZE_UNIT (type);
/* The imaginary part sits exactly one element past the real part, so
   the offset must equal the element size.  */
6158 if (tree_int_cst_equal (size, off))
6159 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6162 /* *(p + CST) -> MEM_REF <p, CST>. */
6163 if (TREE_CODE (addr) != ADDR_EXPR
6164 || DECL_P (TREE_OPERAND (addr, 0)))
6165 return fold_build2 (MEM_REF, type,
6167 wide_int_to_tree (ptype, off));
/* Case 3: dereference of a pointer to an array type.  */
6170 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6171 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6172 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6173 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6176 tree min_val = size_zero_node;
/* Recursively simplify the inner indirection; build an explicit
   INDIRECT_REF when that fails.  */
6178 sub = gimple_fold_indirect_ref (sub);
6180 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6181 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6182 if (type_domain && TYPE_MIN_VALUE (type_domain))
6183 min_val = TYPE_MIN_VALUE (type_domain);
6184 if (TREE_CODE (min_val) == INTEGER_CST)
6185 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6191 /* Return true if CODE is an operation that when operating on signed
6192 integer types involves undefined behavior on overflow and the
6193 operation can be expressed with unsigned arithmetic. */
6196 arith_code_with_undefined_signed_overflow (tree_code code)
/* NOTE(review): the other case labels of this switch are elided in this
   view of the file; only POINTER_PLUS_EXPR is visible here.  */
6204 case POINTER_PLUS_EXPR:
6211 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6212 operation that can be transformed to unsigned arithmetic by converting
6213 its operand, carrying out the operation in the corresponding unsigned
6214 type and converting the result back to the original type.
6216 Returns a sequence of statements that replace STMT and also contain
6217 a modified form of STMT itself. */
6220 rewrite_to_defined_overflow (gimple *stmt)
6222 if (dump_file && (dump_flags & TDF_DETAILS))
6224 fprintf (dump_file, "rewriting stmt with undefined signed "
6226 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
/* Convert every rhs operand to the unsigned counterpart of the LHS
   type, accumulating the conversion statements in STMTS.  */
6229 tree lhs = gimple_assign_lhs (stmt);
6230 tree type = unsigned_type_for (TREE_TYPE (lhs));
6231 gimple_seq stmts = NULL;
6232 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6234 tree op = gimple_op (stmt, i);
6235 op = gimple_convert (&stmts, type, op);
6236 gimple_set_op (stmt, i, op);
/* Give STMT a fresh unsigned LHS; pointer addition becomes plain
   unsigned PLUS_EXPR, which has defined wrap-around.  */
6238 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6239 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6240 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6241 gimple_seq_add_stmt (&stmts, stmt);
/* Finally convert the unsigned result back to the original LHS.  */
6242 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6243 gimple_seq_add_stmt (&stmts, cvt);
6249 /* The valueization hook we use for the gimple_build API simplification.
6250 This makes us match fold_buildN behavior by only combining with
6251 statements in the sequence(s) we are currently building. */
6254 gimple_build_valueize (tree op)
/* A definition with no basic block yet belongs to a sequence still
   being built, so it is safe to combine with.  */
6256 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6261 /* Build the expression CODE OP0 of type TYPE with location LOC,
6262 simplifying it first if possible. Returns the built
6263 expression value and appends statements possibly defining it
6267 gimple_build (gimple_seq *seq, location_t loc,
6268 enum tree_code code, tree type, tree op0)
/* Try to fold first; a statement is only emitted when no
   simplification was possible.  */
6270 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6273 if (gimple_in_ssa_p (cfun))
6274 res = make_ssa_name (type);
6276 res = create_tmp_reg (type);
/* These codes wrap their operand in a unary tree rather than taking it
   as a plain rhs operand.  */
6278 if (code == REALPART_EXPR
6279 || code == IMAGPART_EXPR
6280 || code == VIEW_CONVERT_EXPR)
6281 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6283 stmt = gimple_build_assign (res, code, op0);
6284 gimple_set_location (stmt, loc);
6285 gimple_seq_add_stmt_without_update (seq, stmt);
6290 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6291 simplifying it first if possible. Returns the built
6292 expression value and appends statements possibly defining it
6296 gimple_build (gimple_seq *seq, location_t loc,
6297 enum tree_code code, tree type, tree op0, tree op1)
/* Try to fold first; emit an assignment only when simplification
   fails.  */
6299 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6302 if (gimple_in_ssa_p (cfun))
6303 res = make_ssa_name (type);
6305 res = create_tmp_reg (type);
6306 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6307 gimple_set_location (stmt, loc);
6308 gimple_seq_add_stmt_without_update (seq, stmt);
6313 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6314 simplifying it first if possible. Returns the built
6315 expression value and appends statements possibly defining it
6319 gimple_build (gimple_seq *seq, location_t loc,
6320 enum tree_code code, tree type, tree op0, tree op1, tree op2)
/* Try to fold first; emit an assignment only when simplification
   fails.  */
6322 tree res = gimple_simplify (code, type, op0, op1, op2,
6323 seq, gimple_build_valueize);
6326 if (gimple_in_ssa_p (cfun))
6327 res = make_ssa_name (type);
6329 res = create_tmp_reg (type);
/* BIT_FIELD_REF takes its operands wrapped in a ternary tree rather
   than as separate rhs operands.  */
6331 if (code == BIT_FIELD_REF)
6332 stmt = gimple_build_assign (res, code,
6333 build3 (code, type, op0, op1, op2));
6335 stmt = gimple_build_assign (res, code, op0, op1, op2);
6336 gimple_set_location (stmt, loc);
6337 gimple_seq_add_stmt_without_update (seq, stmt);
6342 /* Build the call FN (ARG0) with a result of type TYPE
6343 (or no result if TYPE is void) with location LOC,
6344 simplifying it first if possible. Returns the built
6345 expression value (or NULL_TREE if TYPE is void) and appends
6346 statements possibly defining it to SEQ. */
6349 gimple_build (gimple_seq *seq, location_t loc,
6350 enum built_in_function fn, tree type, tree arg0)
/* Try to fold the call first; emit a real call only if that fails.  */
6352 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6355 tree decl = builtin_decl_implicit (fn);
6356 gimple *stmt = gimple_build_call (decl, 1, arg0);
/* Void calls get no LHS; RES stays NULL_TREE in that case.  */
6357 if (!VOID_TYPE_P (type))
6359 if (gimple_in_ssa_p (cfun))
6360 res = make_ssa_name (type);
6362 res = create_tmp_reg (type);
6363 gimple_call_set_lhs (stmt, res);
6365 gimple_set_location (stmt, loc);
6366 gimple_seq_add_stmt_without_update (seq, stmt);
6371 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6372 (or no result if TYPE is void) with location LOC,
6373 simplifying it first if possible. Returns the built
6374 expression value (or NULL_TREE if TYPE is void) and appends
6375 statements possibly defining it to SEQ. */
6378 gimple_build (gimple_seq *seq, location_t loc,
6379 enum built_in_function fn, tree type, tree arg0, tree arg1)
/* Try to fold the call first; emit a real call only if that fails.  */
6381 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6384 tree decl = builtin_decl_implicit (fn);
6385 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
/* Void calls get no LHS; RES stays NULL_TREE in that case.  */
6386 if (!VOID_TYPE_P (type))
6388 if (gimple_in_ssa_p (cfun))
6389 res = make_ssa_name (type);
6391 res = create_tmp_reg (type);
6392 gimple_call_set_lhs (stmt, res);
6394 gimple_set_location (stmt, loc);
6395 gimple_seq_add_stmt_without_update (seq, stmt);
6400 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6401 (or no result if TYPE is void) with location LOC,
6402 simplifying it first if possible. Returns the built
6403 expression value (or NULL_TREE if TYPE is void) and appends
6404 statements possibly defining it to SEQ. */
6407 gimple_build (gimple_seq *seq, location_t loc,
6408 enum built_in_function fn, tree type,
6409 tree arg0, tree arg1, tree arg2)
/* Try to fold the call first; emit a real call only if that fails.  */
6411 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6412 seq, gimple_build_valueize);
6415 tree decl = builtin_decl_implicit (fn);
6416 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
/* Void calls get no LHS; RES stays NULL_TREE in that case.  */
6417 if (!VOID_TYPE_P (type))
6419 if (gimple_in_ssa_p (cfun))
6420 res = make_ssa_name (type);
6422 res = create_tmp_reg (type);
6423 gimple_call_set_lhs (stmt, res);
6425 gimple_set_location (stmt, loc);
6426 gimple_seq_add_stmt_without_update (seq, stmt);
6431 /* Build the conversion (TYPE) OP with a result of type TYPE
6432 with location LOC if such conversion is necessary in GIMPLE,
6433 simplifying it first.
6434 Returns the built expression value and appends
6435 statements possibly defining it to SEQ. */
6438 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
/* Nothing to emit when the types are already compatible.  */
6440 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6442 return gimple_build (seq, loc, NOP_EXPR, type, op);
6445 /* Build the conversion (ptrofftype) OP with a result of a type
6446 compatible with ptrofftype with location LOC if such conversion
6447 is necessary in GIMPLE, simplifying it first.
6448 Returns the built expression value and appends
6449 statements possibly defining it to SEQ. */
6452 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
/* Already pointer-offset compatible; no conversion needed.  */
6454 if (ptrofftype_p (TREE_TYPE (op)))
6456 return gimple_convert (seq, loc, sizetype, op);
6459 /* Return true if the result of assignment STMT is known to be non-negative.
6460 If the return value is based on the assumption that signed overflow is
6461 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6462 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6465 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6468 enum tree_code code = gimple_assign_rhs_code (stmt);
/* Dispatch on the shape of the rhs and defer to the matching
   tree-level predicate.  */
6469 switch (get_gimple_rhs_class (code))
6471 case GIMPLE_UNARY_RHS:
6472 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6473 gimple_expr_type (stmt),
6474 gimple_assign_rhs1 (stmt),
6475 strict_overflow_p, depth);
6476 case GIMPLE_BINARY_RHS:
6477 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6478 gimple_expr_type (stmt),
6479 gimple_assign_rhs1 (stmt),
6480 gimple_assign_rhs2 (stmt),
6481 strict_overflow_p, depth);
6482 case GIMPLE_TERNARY_RHS:
6484 case GIMPLE_SINGLE_RHS:
6485 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6486 strict_overflow_p, depth);
6487 case GIMPLE_INVALID_RHS:
6493 /* Return true if return value of call STMT is known to be non-negative.
6494 If the return value is based on the assumption that signed overflow is
6495 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6496 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6499 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
/* Only the first two call arguments are passed to the tree-level
   predicate; absent ones become NULL_TREE.  */
6502 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6503 gimple_call_arg (stmt, 0) : NULL_TREE;
6504 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6505 gimple_call_arg (stmt, 1) : NULL_TREE;
6507 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6508 gimple_call_combined_fn (stmt),
6511 strict_overflow_p, depth);
6514 /* Return true if the result of PHI node STMT is known to be non-negative.
6515 If the return value is based on the assumption that signed overflow is
6516 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6517 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6520 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
/* A PHI result is non-negative only if every incoming argument is;
   recursion depth is bumped for each argument query.  */
6523 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6525 tree arg = gimple_phi_arg_def (stmt, i);
6526 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6532 /* Return true if STMT is known to compute a non-negative value.
6533 If the return value is based on the assumption that signed overflow is
6534 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6535 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6538 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
/* Dispatch on statement kind to the assign/call/phi helpers above.
   NOTE(review): the case labels themselves are elided in this view.  */
6541 switch (gimple_code (stmt))
6544 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6547 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6550 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6557 /* Return true if the floating-point value computed by assignment STMT
6558 is known to have an integer value. We also allow +Inf, -Inf and NaN
6559 to be considered integer values. Return false for signaling NaN.
6561 DEPTH is the current nesting depth of the query. */
6564 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6566 enum tree_code code = gimple_assign_rhs_code (stmt);
/* Dispatch on the shape of the rhs and defer to the matching
   tree-level predicate.  */
6567 switch (get_gimple_rhs_class (code))
6569 case GIMPLE_UNARY_RHS:
6570 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6571 gimple_assign_rhs1 (stmt), depth);
6572 case GIMPLE_BINARY_RHS:
6573 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6574 gimple_assign_rhs1 (stmt),
6575 gimple_assign_rhs2 (stmt), depth);
6576 case GIMPLE_TERNARY_RHS:
6578 case GIMPLE_SINGLE_RHS:
6579 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6580 case GIMPLE_INVALID_RHS:
6586 /* Return true if the floating-point value computed by call STMT is known
6587 to have an integer value. We also allow +Inf, -Inf and NaN to be
6588 considered integer values. Return false for signaling NaN.
6590 DEPTH is the current nesting depth of the query. */
6593 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
/* Only the first two call arguments are passed to the tree-level
   predicate; absent ones are NULL.  */
6595 tree arg0 = (gimple_call_num_args (stmt) > 0
6596 ? gimple_call_arg (stmt, 0)
6598 tree arg1 = (gimple_call_num_args (stmt) > 1
6599 ? gimple_call_arg (stmt, 1)
6601 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6605 /* Return true if the floating-point result of phi STMT is known to have
6606 an integer value. We also allow +Inf, -Inf and NaN to be considered
6607 integer values. Return false for signaling NaN.
6609 DEPTH is the current nesting depth of the query. */
6612 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
/* A PHI result is integer valued only if every incoming argument is;
   recursion depth is bumped for each argument query.  */
6614 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6616 tree arg = gimple_phi_arg_def (stmt, i)
6617 if (!integer_valued_real_single_p (arg, depth + 1))
6623 /* Return true if the floating-point value computed by STMT is known
6624 to have an integer value. We also allow +Inf, -Inf and NaN to be
6625 considered integer values. Return false for signaling NaN.
6627 DEPTH is the current nesting depth of the query. */
6630 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6632 switch (gimple_code (stmt))
6635 return gimple_assign_integer_valued_real_p (stmt, depth);
6637 return gimple_call_integer_valued_real_p (stmt, depth);
6639 return gimple_phi_integer_valued_real_p (stmt, depth);