1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
36 #include "stor-layout.h"
38 #include "gimple-fold.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
59 /* Return true when DECL can be referenced from current unit.
60 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsehwere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
75 3) COMDAT functions referred by external vtables that
76 we devirtualize only during final compilation stage.
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
82 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
85 struct cgraph_node *node;
88 if (DECL_ABSTRACT_P (decl))
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
93 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
96 /* Static objects can be referred only if they was not optimized out yet. */
97 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
101 if (symtab->function_flags_ready)
103 snode = symtab_node::get (decl);
104 if (!snode || !snode->definition)
106 node = dyn_cast <cgraph_node *> (snode);
107 return !node || !node->global.inlined_to;
110 /* We will later output the initializer, so we can refer to it.
111 So we are concerned only when DECL comes from initializer of
112 external var or var that has been optimized out. */
114 || TREE_CODE (from_decl) != VAR_DECL
115 || (!DECL_EXTERNAL (from_decl)
116 && (vnode = varpool_node::get (from_decl)) != NULL
117 && vnode->definition)
119 && (vnode = varpool_node::get (from_decl)) != NULL
120 && vnode->in_other_partition))
122 /* We are folding reference from external vtable. The vtable may reffer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl)
126 && DECL_EXTERNAL (decl)
127 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
128 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference imply need to include function body in the curren tunit. */
133 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
139 As observed in PR20991 for already optimized out comdat virtual functions
140 it may be tempting to not necessarily give up because the copy will be
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
146 if (!symtab->function_flags_ready)
149 snode = symtab_node::get (decl);
151 || ((!snode->definition || DECL_EXTERNAL (decl))
152 && (!snode->in_other_partition
153 || (!snode->forced_by_abi && !snode->force_output))))
155 node = dyn_cast <cgraph_node *> (snode);
156 return !node || !node->global.inlined_to;
159 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
164 canonicalize_constructor_val (tree cval, tree from_decl)
166 tree orig_cval = cval;
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
180 if (TREE_CODE (cval) == ADDR_EXPR)
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
187 TREE_OPERAND (cval, 0) = base;
190 base = get_base_address (TREE_OPERAND (cval, 0));
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
196 && !can_refer_decl_in_current_unit_p (base, from_decl))
198 if (TREE_TYPE (base) == error_mark_node)
200 if (TREE_CODE (base) == VAR_DECL)
201 TREE_ADDRESSABLE (base) = 1;
202 else if (TREE_CODE (base) == FUNCTION_DECL)
204 /* Make sure we create a cgraph node for functions we'll reference.
205 They can be non-existent if the reference comes from an entry
206 of an external vtable for example. */
207 cgraph_node::get_create (base);
209 /* Fixup types in global initializers. */
210 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
211 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
213 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
214 cval = fold_convert (TREE_TYPE (orig_cval), cval);
217 if (TREE_OVERFLOW_P (cval))
218 return drop_tree_overflow (cval);
222 /* If SYM is a constant variable with known value, return the value.
223 NULL_TREE is returned otherwise. */
226 get_symbol_constant_value (tree sym)
228 tree val = ctor_for_folding (sym);
229 if (val != error_mark_node)
233 val = canonicalize_constructor_val (unshare_expr (val), sym);
234 if (val && is_gimple_min_invariant (val))
239 /* Variables declared 'const' without an initializer
240 have zero as the initializer if they may not be
241 overridden at link or run time. */
243 && is_gimple_reg_type (TREE_TYPE (sym)))
244 return build_zero_cst (TREE_TYPE (sym));
252 /* Subroutine of fold_stmt. We perform several simplifications of the
253 memory reference tree EXPR and make sure to re-gimplify them properly
254 after propagation of constant addresses. IS_LHS is true if the
255 reference is supposed to be an lvalue. */
258 maybe_fold_reference (tree expr, bool is_lhs)
262 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
263 || TREE_CODE (expr) == REALPART_EXPR
264 || TREE_CODE (expr) == IMAGPART_EXPR)
265 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
266 return fold_unary_loc (EXPR_LOCATION (expr),
269 TREE_OPERAND (expr, 0));
270 else if (TREE_CODE (expr) == BIT_FIELD_REF
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_ternary_loc (EXPR_LOCATION (expr),
275 TREE_OPERAND (expr, 0),
276 TREE_OPERAND (expr, 1),
277 TREE_OPERAND (expr, 2));
280 && (result = fold_const_aggregate_ref (expr))
281 && is_gimple_min_invariant (result))
288 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
289 replacement rhs for the statement or NULL_TREE if no simplification
290 could be made. It is assumed that the operands have been previously
294 fold_gimple_assign (gimple_stmt_iterator *si)
296 gimple *stmt = gsi_stmt (*si);
297 enum tree_code subcode = gimple_assign_rhs_code (stmt);
298 location_t loc = gimple_location (stmt);
300 tree result = NULL_TREE;
302 switch (get_gimple_rhs_class (subcode))
304 case GIMPLE_SINGLE_RHS:
306 tree rhs = gimple_assign_rhs1 (stmt);
308 if (TREE_CLOBBER_P (rhs))
311 if (REFERENCE_CLASS_P (rhs))
312 return maybe_fold_reference (rhs, false);
314 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
316 tree val = OBJ_TYPE_REF_EXPR (rhs);
317 if (is_gimple_min_invariant (val))
319 else if (flag_devirtualize && virtual_method_call_p (rhs))
322 vec <cgraph_node *>targets
323 = possible_polymorphic_call_targets (rhs, stmt, &final);
324 if (final && targets.length () <= 1 && dbg_cnt (devirt))
326 if (dump_enabled_p ())
328 location_t loc = gimple_location_safe (stmt);
329 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
330 "resolving virtual function address "
331 "reference to function %s\n",
332 targets.length () == 1
333 ? targets[0]->name ()
336 if (targets.length () == 1)
338 val = fold_convert (TREE_TYPE (val),
339 build_fold_addr_expr_loc
340 (loc, targets[0]->decl));
341 STRIP_USELESS_TYPE_CONVERSION (val);
344 /* We can not use __builtin_unreachable here because it
345 can not have address taken. */
346 val = build_int_cst (TREE_TYPE (val), 0);
352 else if (TREE_CODE (rhs) == ADDR_EXPR)
354 tree ref = TREE_OPERAND (rhs, 0);
355 tree tem = maybe_fold_reference (ref, true);
357 && TREE_CODE (tem) == MEM_REF
358 && integer_zerop (TREE_OPERAND (tem, 1)))
359 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
361 result = fold_convert (TREE_TYPE (rhs),
362 build_fold_addr_expr_loc (loc, tem));
363 else if (TREE_CODE (ref) == MEM_REF
364 && integer_zerop (TREE_OPERAND (ref, 1)))
365 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
369 /* Strip away useless type conversions. Both the
370 NON_LVALUE_EXPR that may have been added by fold, and
371 "useless" type conversions that might now be apparent
372 due to propagation. */
373 STRIP_USELESS_TYPE_CONVERSION (result);
375 if (result != rhs && valid_gimple_rhs_p (result))
380 else if (TREE_CODE (rhs) == CONSTRUCTOR
381 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
383 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
387 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
388 if (! CONSTANT_CLASS_P (val))
391 return build_vector_from_ctor (TREE_TYPE (rhs),
392 CONSTRUCTOR_ELTS (rhs));
395 else if (DECL_P (rhs))
396 return get_symbol_constant_value (rhs);
400 case GIMPLE_UNARY_RHS:
403 case GIMPLE_BINARY_RHS:
406 case GIMPLE_TERNARY_RHS:
407 result = fold_ternary_loc (loc, subcode,
408 TREE_TYPE (gimple_assign_lhs (stmt)),
409 gimple_assign_rhs1 (stmt),
410 gimple_assign_rhs2 (stmt),
411 gimple_assign_rhs3 (stmt));
415 STRIP_USELESS_TYPE_CONVERSION (result);
416 if (valid_gimple_rhs_p (result))
421 case GIMPLE_INVALID_RHS:
429 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
430 adjusting the replacement stmts location and virtual operands.
431 If the statement has a lhs the last stmt in the sequence is expected
432 to assign to that lhs. */
435 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
437 gimple *stmt = gsi_stmt (*si_p);
439 if (gimple_has_location (stmt))
440 annotate_all_with_location (stmts, gimple_location (stmt));
442 /* First iterate over the replacement statements backward, assigning
443 virtual operands to their defining statements. */
444 gimple *laststore = NULL;
445 for (gimple_stmt_iterator i = gsi_last (stmts);
446 !gsi_end_p (i); gsi_prev (&i))
448 gimple *new_stmt = gsi_stmt (i);
449 if ((gimple_assign_single_p (new_stmt)
450 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
451 || (is_gimple_call (new_stmt)
452 && (gimple_call_flags (new_stmt)
453 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
457 vdef = gimple_vdef (stmt);
459 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
460 gimple_set_vdef (new_stmt, vdef);
461 if (vdef && TREE_CODE (vdef) == SSA_NAME)
462 SSA_NAME_DEF_STMT (vdef) = new_stmt;
463 laststore = new_stmt;
467 /* Second iterate over the statements forward, assigning virtual
468 operands to their uses. */
469 tree reaching_vuse = gimple_vuse (stmt);
470 for (gimple_stmt_iterator i = gsi_start (stmts);
471 !gsi_end_p (i); gsi_next (&i))
473 gimple *new_stmt = gsi_stmt (i);
474 /* If the new statement possibly has a VUSE, update it with exact SSA
475 name we know will reach this one. */
476 if (gimple_has_mem_ops (new_stmt))
477 gimple_set_vuse (new_stmt, reaching_vuse);
478 gimple_set_modified (new_stmt, true);
479 if (gimple_vdef (new_stmt))
480 reaching_vuse = gimple_vdef (new_stmt);
483 /* If the new sequence does not do a store release the virtual
484 definition of the original statement. */
486 && reaching_vuse == gimple_vuse (stmt))
488 tree vdef = gimple_vdef (stmt);
490 && TREE_CODE (vdef) == SSA_NAME)
492 unlink_stmt_vdef (stmt);
493 release_ssa_name (vdef);
497 /* Finally replace the original statement with the sequence. */
498 gsi_replace_with_seq (si_p, stmts, false);
501 /* Convert EXPR into a GIMPLE value suitable for substitution on the
502 RHS of an assignment. Insert the necessary statements before
503 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
504 is replaced. If the call is expected to produces a result, then it
505 is replaced by an assignment of the new RHS to the result variable.
506 If the result is to be ignored, then the call is replaced by a
507 GIMPLE_NOP. A proper VDEF chain is retained by making the first
508 VUSE and the last VDEF of the whole sequence be the same as the replaced
509 statement and using new SSA names for stores in between. */
512 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
515 gimple *stmt, *new_stmt;
516 gimple_stmt_iterator i;
517 gimple_seq stmts = NULL;
519 stmt = gsi_stmt (*si_p);
521 gcc_assert (is_gimple_call (stmt));
523 push_gimplify_context (gimple_in_ssa_p (cfun));
525 lhs = gimple_call_lhs (stmt);
526 if (lhs == NULL_TREE)
528 gimplify_and_add (expr, &stmts);
529 /* We can end up with folding a memcpy of an empty class assignment
530 which gets optimized away by C++ gimplification. */
531 if (gimple_seq_empty_p (stmts))
533 pop_gimplify_context (NULL);
534 if (gimple_in_ssa_p (cfun))
536 unlink_stmt_vdef (stmt);
539 gsi_replace (si_p, gimple_build_nop (), false);
545 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
546 new_stmt = gimple_build_assign (lhs, tmp);
547 i = gsi_last (stmts);
548 gsi_insert_after_without_update (&i, new_stmt,
549 GSI_CONTINUE_LINKING);
552 pop_gimplify_context (NULL);
554 gsi_replace_with_seq_vops (si_p, stmts);
558 /* Replace the call at *GSI with the gimple value VAL. */
561 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
563 gimple *stmt = gsi_stmt (*gsi);
564 tree lhs = gimple_call_lhs (stmt);
568 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
569 val = fold_convert (TREE_TYPE (lhs), val);
570 repl = gimple_build_assign (lhs, val);
573 repl = gimple_build_nop ();
574 tree vdef = gimple_vdef (stmt);
575 if (vdef && TREE_CODE (vdef) == SSA_NAME)
577 unlink_stmt_vdef (stmt);
578 release_ssa_name (vdef);
580 gsi_replace (gsi, repl, false);
583 /* Replace the call at *GSI with the new call REPL and fold that
587 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
589 gimple *stmt = gsi_stmt (*gsi);
590 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
591 gimple_set_location (repl, gimple_location (stmt));
592 if (gimple_vdef (stmt)
593 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
595 gimple_set_vdef (repl, gimple_vdef (stmt));
596 gimple_set_vuse (repl, gimple_vuse (stmt));
597 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
599 gsi_replace (gsi, repl, false);
603 /* Return true if VAR is a VAR_DECL or a component thereof. */
606 var_decl_component_p (tree var)
609 while (handled_component_p (inner))
610 inner = TREE_OPERAND (inner, 0);
611 return SSA_VAR_P (inner);
614 /* Fold function call to builtin mem{{,p}cpy,move}. Return
615 false if no simplification can be made.
616 If ENDP is 0, return DEST (like memcpy).
617 If ENDP is 1, return DEST+LEN (like mempcpy).
618 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
619 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
623 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
624 tree dest, tree src, int endp)
626 gimple *stmt = gsi_stmt (*gsi);
627 tree lhs = gimple_call_lhs (stmt);
628 tree len = gimple_call_arg (stmt, 2);
629 tree destvar, srcvar;
630 location_t loc = gimple_location (stmt);
632 /* If the LEN parameter is zero, return DEST. */
633 if (integer_zerop (len))
636 if (gimple_call_lhs (stmt))
637 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
639 repl = gimple_build_nop ();
640 tree vdef = gimple_vdef (stmt);
641 if (vdef && TREE_CODE (vdef) == SSA_NAME)
643 unlink_stmt_vdef (stmt);
644 release_ssa_name (vdef);
646 gsi_replace (gsi, repl, false);
650 /* If SRC and DEST are the same (and not volatile), return
651 DEST{,+LEN,+LEN-1}. */
652 if (operand_equal_p (src, dest, 0))
654 unlink_stmt_vdef (stmt);
655 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
656 release_ssa_name (gimple_vdef (stmt));
659 gsi_replace (gsi, gimple_build_nop (), false);
666 tree srctype, desttype;
667 unsigned int src_align, dest_align;
670 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
671 pointers as wide integer) and also may result in huge function
672 size because of inlined bounds copy. Thus don't inline for
673 functions we want to instrument. */
674 if (flag_check_pointer_bounds
675 && chkp_instrumentable_p (cfun->decl)
676 /* Even if data may contain pointers we can inline if copy
677 less than a pointer size. */
678 && (!tree_fits_uhwi_p (len)
679 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
682 /* Build accesses at offset zero with a ref-all character type. */
683 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
686 /* If we can perform the copy efficiently with first doing all loads
687 and then all stores inline it that way. Currently efficiently
688 means that we can load all the memory into a single integer
689 register which is what MOVE_MAX gives us. */
690 src_align = get_pointer_alignment (src);
691 dest_align = get_pointer_alignment (dest);
692 if (tree_fits_uhwi_p (len)
693 && compare_tree_int (len, MOVE_MAX) <= 0
694 /* ??? Don't transform copies from strings with known length this
695 confuses the tree-ssa-strlen.c. This doesn't handle
696 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
698 && !c_strlen (src, 2))
700 unsigned ilen = tree_to_uhwi (len);
701 if (exact_log2 (ilen) != -1)
703 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
705 && TYPE_MODE (type) != BLKmode
706 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
708 /* If the destination pointer is not aligned we must be able
709 to emit an unaligned store. */
710 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
711 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
712 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
713 != CODE_FOR_nothing)))
716 tree desttype = type;
717 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
718 srctype = build_aligned_type (type, src_align);
719 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
720 tree tem = fold_const_aggregate_ref (srcmem);
723 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
724 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
726 && (optab_handler (movmisalign_optab,
728 == CODE_FOR_nothing))
733 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
735 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
736 if (gimple_in_ssa_p (cfun))
737 srcmem = make_ssa_name (TREE_TYPE (srcmem),
740 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
741 gimple_assign_set_lhs (new_stmt, srcmem);
742 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
743 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
745 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
746 desttype = build_aligned_type (type, dest_align);
748 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
751 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
752 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
753 if (gimple_vdef (new_stmt)
754 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
755 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
758 gsi_replace (gsi, new_stmt, false);
761 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
770 /* Both DEST and SRC must be pointer types.
771 ??? This is what old code did. Is the testing for pointer types
774 If either SRC is readonly or length is 1, we can use memcpy. */
775 if (!dest_align || !src_align)
777 if (readonly_data_expr (src)
778 || (tree_fits_uhwi_p (len)
779 && (MIN (src_align, dest_align) / BITS_PER_UNIT
780 >= tree_to_uhwi (len))))
782 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
785 gimple_call_set_fndecl (stmt, fn);
786 gimple_call_set_arg (stmt, 0, dest);
787 gimple_call_set_arg (stmt, 1, src);
792 /* If *src and *dest can't overlap, optimize into memcpy as well. */
793 if (TREE_CODE (src) == ADDR_EXPR
794 && TREE_CODE (dest) == ADDR_EXPR)
796 tree src_base, dest_base, fn;
797 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
798 HOST_WIDE_INT size = -1;
799 HOST_WIDE_INT maxsize = -1;
802 srcvar = TREE_OPERAND (src, 0);
803 src_base = get_ref_base_and_extent (srcvar, &src_offset,
804 &size, &maxsize, &reverse);
805 destvar = TREE_OPERAND (dest, 0);
806 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
807 &size, &maxsize, &reverse);
808 if (tree_fits_uhwi_p (len))
809 maxsize = tree_to_uhwi (len);
812 src_offset /= BITS_PER_UNIT;
813 dest_offset /= BITS_PER_UNIT;
814 if (SSA_VAR_P (src_base)
815 && SSA_VAR_P (dest_base))
817 if (operand_equal_p (src_base, dest_base, 0)
818 && ranges_overlap_p (src_offset, maxsize,
819 dest_offset, maxsize))
822 else if (TREE_CODE (src_base) == MEM_REF
823 && TREE_CODE (dest_base) == MEM_REF)
825 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
826 TREE_OPERAND (dest_base, 0), 0))
828 offset_int off = mem_ref_offset (src_base) + src_offset;
829 if (!wi::fits_shwi_p (off))
831 src_offset = off.to_shwi ();
833 off = mem_ref_offset (dest_base) + dest_offset;
834 if (!wi::fits_shwi_p (off))
836 dest_offset = off.to_shwi ();
837 if (ranges_overlap_p (src_offset, maxsize,
838 dest_offset, maxsize))
844 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
847 gimple_call_set_fndecl (stmt, fn);
848 gimple_call_set_arg (stmt, 0, dest);
849 gimple_call_set_arg (stmt, 1, src);
854 /* If the destination and source do not alias optimize into
856 if ((is_gimple_min_invariant (dest)
857 || TREE_CODE (dest) == SSA_NAME)
858 && (is_gimple_min_invariant (src)
859 || TREE_CODE (src) == SSA_NAME))
862 ao_ref_init_from_ptr_and_size (&destr, dest, len);
863 ao_ref_init_from_ptr_and_size (&srcr, src, len);
864 if (!refs_may_alias_p_1 (&destr, &srcr, false))
867 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
870 gimple_call_set_fndecl (stmt, fn);
871 gimple_call_set_arg (stmt, 0, dest);
872 gimple_call_set_arg (stmt, 1, src);
881 if (!tree_fits_shwi_p (len))
884 This logic lose for arguments like (type *)malloc (sizeof (type)),
885 since we strip the casts of up to VOID return value from malloc.
886 Perhaps we ought to inherit type from non-VOID argument here? */
889 if (!POINTER_TYPE_P (TREE_TYPE (src))
890 || !POINTER_TYPE_P (TREE_TYPE (dest)))
892 /* In the following try to find a type that is most natural to be
893 used for the memcpy source and destination and that allows
894 the most optimization when memcpy is turned into a plain assignment
895 using that type. In theory we could always use a char[len] type
896 but that only gains us that the destination and source possibly
897 no longer will have their address taken. */
898 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
899 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
901 tree tem = TREE_OPERAND (src, 0);
903 if (tem != TREE_OPERAND (src, 0))
904 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
906 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
908 tree tem = TREE_OPERAND (dest, 0);
910 if (tem != TREE_OPERAND (dest, 0))
911 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
913 srctype = TREE_TYPE (TREE_TYPE (src));
914 if (TREE_CODE (srctype) == ARRAY_TYPE
915 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
917 srctype = TREE_TYPE (srctype);
919 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
921 desttype = TREE_TYPE (TREE_TYPE (dest));
922 if (TREE_CODE (desttype) == ARRAY_TYPE
923 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
925 desttype = TREE_TYPE (desttype);
927 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
929 if (TREE_ADDRESSABLE (srctype)
930 || TREE_ADDRESSABLE (desttype))
933 /* Make sure we are not copying using a floating-point mode or
934 a type whose size possibly does not match its precision. */
935 if (FLOAT_MODE_P (TYPE_MODE (desttype))
936 || TREE_CODE (desttype) == BOOLEAN_TYPE
937 || TREE_CODE (desttype) == ENUMERAL_TYPE)
938 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
939 if (FLOAT_MODE_P (TYPE_MODE (srctype))
940 || TREE_CODE (srctype) == BOOLEAN_TYPE
941 || TREE_CODE (srctype) == ENUMERAL_TYPE)
942 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
950 src_align = get_pointer_alignment (src);
951 dest_align = get_pointer_alignment (dest);
952 if (dest_align < TYPE_ALIGN (desttype)
953 || src_align < TYPE_ALIGN (srctype))
957 STRIP_NOPS (destvar);
958 if (TREE_CODE (destvar) == ADDR_EXPR
959 && var_decl_component_p (TREE_OPERAND (destvar, 0))
960 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
961 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
967 if (TREE_CODE (srcvar) == ADDR_EXPR
968 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
969 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
972 || src_align >= TYPE_ALIGN (desttype))
973 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
975 else if (!STRICT_ALIGNMENT)
977 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
979 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
987 if (srcvar == NULL_TREE && destvar == NULL_TREE)
990 if (srcvar == NULL_TREE)
993 if (src_align >= TYPE_ALIGN (desttype))
994 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
997 if (STRICT_ALIGNMENT)
999 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1001 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1004 else if (destvar == NULL_TREE)
1007 if (dest_align >= TYPE_ALIGN (srctype))
1008 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1011 if (STRICT_ALIGNMENT)
1013 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1015 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1020 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1022 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1023 if (gimple_in_ssa_p (cfun))
1024 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1026 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1027 gimple_assign_set_lhs (new_stmt, srcvar);
1028 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1029 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1031 new_stmt = gimple_build_assign (destvar, srcvar);
1032 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1033 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1034 if (gimple_vdef (new_stmt)
1035 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1036 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1039 gsi_replace (gsi, new_stmt, false);
1042 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1046 gimple_seq stmts = NULL;
1047 if (endp == 0 || endp == 3)
1050 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1052 if (endp == 2 || endp == 1)
1054 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1055 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1056 TREE_TYPE (dest), dest, len);
1059 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1060 gimple *repl = gimple_build_assign (lhs, dest);
1061 gsi_replace (gsi, repl, false);
1065 /* Fold function call to builtin memset or bzero at *GSI setting the
1066 memory of size LEN to VAL. Return whether a simplification was made. */
1069 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1071 gimple *stmt = gsi_stmt (*gsi);
1073 unsigned HOST_WIDE_INT length, cval;
1075 /* If the LEN parameter is zero, return DEST. */
1076 if (integer_zerop (len))
1078 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1082 if (! tree_fits_uhwi_p (len))
1085 if (TREE_CODE (c) != INTEGER_CST)
1088 tree dest = gimple_call_arg (stmt, 0);
1090 if (TREE_CODE (var) != ADDR_EXPR)
1093 var = TREE_OPERAND (var, 0);
1094 if (TREE_THIS_VOLATILE (var))
1097 etype = TREE_TYPE (var);
1098 if (TREE_CODE (etype) == ARRAY_TYPE)
1099 etype = TREE_TYPE (etype);
1101 if (!INTEGRAL_TYPE_P (etype)
1102 && !POINTER_TYPE_P (etype))
1105 if (! var_decl_component_p (var))
1108 length = tree_to_uhwi (len);
1109 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1110 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1113 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1116 if (integer_zerop (c))
1120 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1123 cval = TREE_INT_CST_LOW (c);
1127 cval |= (cval << 31) << 1;
1130 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1131 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1132 gimple_set_vuse (store, gimple_vuse (stmt));
1133 tree vdef = gimple_vdef (stmt);
1134 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1136 gimple_set_vdef (store, gimple_vdef (stmt));
1137 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1139 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1140 if (gimple_call_lhs (stmt))
1142 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1143 gsi_replace (gsi, asgn, false);
1147 gimple_stmt_iterator gsi2 = *gsi;
1149 gsi_remove (&gsi2, true);
1156 /* Return the string length, maximum string length or maximum value of
1158 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1159 is not NULL and, for TYPE == 0, its value is not equal to the length
1160 we determine or if we are unable to determine the length or value,
1161 return false. VISITED is a bitmap of visited variables.
1162 TYPE is 0 if string length should be returned, 1 for maximum string
1163 length and 2 for maximum value ARG can have. */
1166 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1171 if (TREE_CODE (arg) != SSA_NAME)
1173 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1174 if (TREE_CODE (arg) == ADDR_EXPR
1175 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1176 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1178 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1179 if (TREE_CODE (aop0) == INDIRECT_REF
1180 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1181 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1182 length, visited, type);
1188 if (TREE_CODE (val) != INTEGER_CST
1189 || tree_int_cst_sgn (val) < 0)
1193 val = c_strlen (arg, 1);
1201 if (TREE_CODE (*length) != INTEGER_CST
1202 || TREE_CODE (val) != INTEGER_CST)
1205 if (tree_int_cst_lt (*length, val))
1209 else if (simple_cst_equal (val, *length) != 1)
1217 /* If ARG is registered for SSA update we cannot look at its defining
1219 if (name_registered_for_update_p (arg))
1222 /* If we were already here, break the infinite cycle. */
1224 *visited = BITMAP_ALLOC (NULL);
1225 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1229 def_stmt = SSA_NAME_DEF_STMT (var);
1231 switch (gimple_code (def_stmt))
1234 /* The RHS of the statement defining VAR must either have a
1235 constant length or come from another SSA_NAME with a constant
1237 if (gimple_assign_single_p (def_stmt)
1238 || gimple_assign_unary_nop_p (def_stmt))
1240 tree rhs = gimple_assign_rhs1 (def_stmt);
1241 return get_maxval_strlen (rhs, length, visited, type);
1243 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1245 tree op2 = gimple_assign_rhs2 (def_stmt);
1246 tree op3 = gimple_assign_rhs3 (def_stmt);
1247 return get_maxval_strlen (op2, length, visited, type)
1248 && get_maxval_strlen (op3, length, visited, type);
1254 /* All the arguments of the PHI node must have the same constant
1258 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1260 tree arg = gimple_phi_arg (def_stmt, i)->def;
1262 /* If this PHI has itself as an argument, we cannot
1263 determine the string length of this argument. However,
1264 if we can find a constant string length for the other
1265 PHI args then we can still be sure that this is a
1266 constant string length. So be optimistic and just
1267 continue with the next argument. */
1268 if (arg == gimple_phi_result (def_stmt))
1271 if (!get_maxval_strlen (arg, length, visited, type))
1283 get_maxval_strlen (tree arg, int type)
1285 bitmap visited = NULL;
1286 tree len = NULL_TREE;
1287 if (!get_maxval_strlen (arg, &len, &visited, type))
1290 BITMAP_FREE (visited);
1296 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1297 If LEN is not NULL, it represents the length of the string to be
1298 copied. Return NULL_TREE if no simplification can be made. */
1301 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1302 tree dest, tree src)
1304 location_t loc = gimple_location (gsi_stmt (*gsi));
1307 /* If SRC and DEST are the same (and not volatile), return DEST. */
1308 if (operand_equal_p (src, dest, 0))
1310 replace_call_with_value (gsi, dest);
1314 if (optimize_function_for_size_p (cfun))
1317 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1321 tree len = get_maxval_strlen (src, 0);
1325 len = fold_convert_loc (loc, size_type_node, len);
1326 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1327 len = force_gimple_operand_gsi (gsi, len, true,
1328 NULL_TREE, true, GSI_SAME_STMT);
1329 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1330 replace_call_with_call_and_fold (gsi, repl);
1334 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1335 If SLEN is not NULL, it represents the length of the source string.
1336 Return NULL_TREE if no simplification can be made. */
1339 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1340 tree dest, tree src, tree len)
1342 location_t loc = gimple_location (gsi_stmt (*gsi));
1345 /* If the LEN parameter is zero, return DEST. */
1346 if (integer_zerop (len))
1348 replace_call_with_value (gsi, dest);
1352 /* We can't compare slen with len as constants below if len is not a
1354 if (TREE_CODE (len) != INTEGER_CST)
1357 /* Now, we must be passed a constant src ptr parameter. */
1358 tree slen = get_maxval_strlen (src, 0);
1359 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1362 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1364 /* We do not support simplification of this case, though we do
1365 support it when expanding trees into RTL. */
1366 /* FIXME: generate a call to __builtin_memset. */
1367 if (tree_int_cst_lt (slen, len))
1370 /* OK transform into builtin memcpy. */
1371 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1375 len = fold_convert_loc (loc, size_type_node, len);
1376 len = force_gimple_operand_gsi (gsi, len, true,
1377 NULL_TREE, true, GSI_SAME_STMT);
1378 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1379 replace_call_with_call_and_fold (gsi, repl);
1383 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1386 Return NULL_TREE if no simplification was possible, otherwise return the
1387 simplified form of the call as a tree.
1389 The simplified form may be a constant or other expression which
1390 computes the same value, but in a more efficient manner (including
1391 calls to other builtin functions).
1393 The call may contain arguments which need to be evaluated, but
1394 which are not useful to determine the result of the call. In
1395 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1396 COMPOUND_EXPR will be an argument which must be evaluated.
1397 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1398 COMPOUND_EXPR in the chain will contain the tree for the simplified
1399 form of the builtin function call. */
1402 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1404 gimple *stmt = gsi_stmt (*gsi);
1405 location_t loc = gimple_location (stmt);
1407 const char *p = c_getstr (src);
1409 /* If the string length is zero, return the dst parameter. */
1410 if (p && *p == '\0')
1412 replace_call_with_value (gsi, dst);
1416 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1419 /* See if we can store by pieces into (dst + strlen(dst)). */
1421 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1422 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1424 if (!strlen_fn || !memcpy_fn)
1427 /* If the length of the source string isn't computable don't
1428 split strcat into strlen and memcpy. */
1429 tree len = get_maxval_strlen (src, 0);
1433 /* Create strlen (dst). */
1434 gimple_seq stmts = NULL, stmts2;
1435 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1436 gimple_set_location (repl, loc);
1437 if (gimple_in_ssa_p (cfun))
1438 newdst = make_ssa_name (size_type_node);
1440 newdst = create_tmp_reg (size_type_node);
1441 gimple_call_set_lhs (repl, newdst);
1442 gimple_seq_add_stmt_without_update (&stmts, repl);
1444 /* Create (dst p+ strlen (dst)). */
1445 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1446 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1447 gimple_seq_add_seq_without_update (&stmts, stmts2);
1449 len = fold_convert_loc (loc, size_type_node, len);
1450 len = size_binop_loc (loc, PLUS_EXPR, len,
1451 build_int_cst (size_type_node, 1));
1452 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1453 gimple_seq_add_seq_without_update (&stmts, stmts2);
1455 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1456 gimple_seq_add_stmt_without_update (&stmts, repl);
1457 if (gimple_call_lhs (stmt))
1459 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1460 gimple_seq_add_stmt_without_update (&stmts, repl);
1461 gsi_replace_with_seq_vops (gsi, stmts);
1462 /* gsi now points at the assignment to the lhs, get a
1463 stmt iterator to the memcpy call.
1464 ??? We can't use gsi_for_stmt as that doesn't work when the
1465 CFG isn't built yet. */
1466 gimple_stmt_iterator gsi2 = *gsi;
1472 gsi_replace_with_seq_vops (gsi, stmts);
1478 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1479 are the arguments to the call. */
1482 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1484 gimple *stmt = gsi_stmt (*gsi);
1485 tree dest = gimple_call_arg (stmt, 0);
1486 tree src = gimple_call_arg (stmt, 1);
1487 tree size = gimple_call_arg (stmt, 2);
1493 /* If the SRC parameter is "", return DEST. */
1494 if (p && *p == '\0')
1496 replace_call_with_value (gsi, dest);
1500 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1503 /* If __builtin_strcat_chk is used, assume strcat is available. */
1504 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1508 gimple *repl = gimple_build_call (fn, 2, dest, src);
1509 replace_call_with_call_and_fold (gsi, repl);
1513 /* Simplify a call to the strncat builtin. */
1516 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1518 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1519 tree dst = gimple_call_arg (stmt, 0);
1520 tree src = gimple_call_arg (stmt, 1);
1521 tree len = gimple_call_arg (stmt, 2);
1523 const char *p = c_getstr (src);
1525 /* If the requested length is zero, or the src parameter string
1526 length is zero, return the dst parameter. */
1527 if (integer_zerop (len) || (p && *p == '\0'))
1529 replace_call_with_value (gsi, dst);
1533 /* If the requested len is greater than or equal to the string
1534 length, call strcat. */
1535 if (TREE_CODE (len) == INTEGER_CST && p
1536 && compare_tree_int (len, strlen (p)) >= 0)
1538 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1540 /* If the replacement _DECL isn't initialized, don't do the
1545 gcall *repl = gimple_build_call (fn, 2, dst, src);
1546 replace_call_with_call_and_fold (gsi, repl);
1553 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1557 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1559 gimple *stmt = gsi_stmt (*gsi);
1560 tree dest = gimple_call_arg (stmt, 0);
1561 tree src = gimple_call_arg (stmt, 1);
1562 tree len = gimple_call_arg (stmt, 2);
1563 tree size = gimple_call_arg (stmt, 3);
1568 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1569 if ((p && *p == '\0')
1570 || integer_zerop (len))
1572 replace_call_with_value (gsi, dest);
1576 if (! tree_fits_uhwi_p (size))
1579 if (! integer_all_onesp (size))
1581 tree src_len = c_strlen (src, 1);
1583 && tree_fits_uhwi_p (src_len)
1584 && tree_fits_uhwi_p (len)
1585 && ! tree_int_cst_lt (len, src_len))
1587 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1588 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1592 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1593 replace_call_with_call_and_fold (gsi, repl);
1599 /* If __builtin_strncat_chk is used, assume strncat is available. */
1600 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1604 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1605 replace_call_with_call_and_fold (gsi, repl);
1609 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1610 to the call. IGNORE is true if the value returned
1611 by the builtin will be ignored. UNLOCKED is true is true if this
1612 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1613 the known length of the string. Return NULL_TREE if no simplification
1617 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1618 tree arg0, tree arg1,
1621 gimple *stmt = gsi_stmt (*gsi);
1623 /* If we're using an unlocked function, assume the other unlocked
1624 functions exist explicitly. */
1625 tree const fn_fputc = (unlocked
1626 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1627 : builtin_decl_implicit (BUILT_IN_FPUTC));
1628 tree const fn_fwrite = (unlocked
1629 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1630 : builtin_decl_implicit (BUILT_IN_FWRITE));
1632 /* If the return value is used, don't do the transformation. */
1633 if (gimple_call_lhs (stmt))
1636 /* Get the length of the string passed to fputs. If the length
1637 can't be determined, punt. */
1638 tree len = get_maxval_strlen (arg0, 0);
1640 || TREE_CODE (len) != INTEGER_CST)
1643 switch (compare_tree_int (len, 1))
1645 case -1: /* length is 0, delete the call entirely . */
1646 replace_call_with_value (gsi, integer_zero_node);
1649 case 0: /* length is 1, call fputc. */
1651 const char *p = c_getstr (arg0);
1657 gimple *repl = gimple_build_call (fn_fputc, 2,
1659 (integer_type_node, p[0]), arg1);
1660 replace_call_with_call_and_fold (gsi, repl);
1665 case 1: /* length is greater than 1, call fwrite. */
1667 /* If optimizing for size keep fputs. */
1668 if (optimize_function_for_size_p (cfun))
1670 /* New argument list transforming fputs(string, stream) to
1671 fwrite(string, 1, len, stream). */
1675 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1676 size_one_node, len, arg1);
1677 replace_call_with_call_and_fold (gsi, repl);
1686 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1687 DEST, SRC, LEN, and SIZE are the arguments to the call.
1688 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1689 code of the builtin. If MAXLEN is not NULL, it is maximum length
1690 passed as third argument. */
1693 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1694 tree dest, tree src, tree len, tree size,
1695 enum built_in_function fcode)
1697 gimple *stmt = gsi_stmt (*gsi);
1698 location_t loc = gimple_location (stmt);
1699 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1702 /* If SRC and DEST are the same (and not volatile), return DEST
1703 (resp. DEST+LEN for __mempcpy_chk). */
1704 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1706 if (fcode != BUILT_IN_MEMPCPY_CHK)
1708 replace_call_with_value (gsi, dest);
1713 gimple_seq stmts = NULL;
1714 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1715 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1716 TREE_TYPE (dest), dest, len);
1717 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1718 replace_call_with_value (gsi, temp);
1723 if (! tree_fits_uhwi_p (size))
1726 tree maxlen = get_maxval_strlen (len, 2);
1727 if (! integer_all_onesp (size))
1729 if (! tree_fits_uhwi_p (len))
1731 /* If LEN is not constant, try MAXLEN too.
1732 For MAXLEN only allow optimizing into non-_ocs function
1733 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1734 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1736 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1738 /* (void) __mempcpy_chk () can be optimized into
1739 (void) __memcpy_chk (). */
1740 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1744 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1745 replace_call_with_call_and_fold (gsi, repl);
1754 if (tree_int_cst_lt (size, maxlen))
1759 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1760 mem{cpy,pcpy,move,set} is available. */
1763 case BUILT_IN_MEMCPY_CHK:
1764 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1766 case BUILT_IN_MEMPCPY_CHK:
1767 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1769 case BUILT_IN_MEMMOVE_CHK:
1770 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1772 case BUILT_IN_MEMSET_CHK:
1773 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1782 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1783 replace_call_with_call_and_fold (gsi, repl);
1787 /* Fold a call to the __st[rp]cpy_chk builtin.
1788 DEST, SRC, and SIZE are the arguments to the call.
1789 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1790 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1791 strings passed as second argument. */
1794 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1796 tree src, tree size,
1797 enum built_in_function fcode)
1799 gimple *stmt = gsi_stmt (*gsi);
1800 location_t loc = gimple_location (stmt);
1801 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1804 /* If SRC and DEST are the same (and not volatile), return DEST. */
1805 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1807 replace_call_with_value (gsi, dest);
1811 if (! tree_fits_uhwi_p (size))
1814 tree maxlen = get_maxval_strlen (src, 1);
1815 if (! integer_all_onesp (size))
1817 len = c_strlen (src, 1);
1818 if (! len || ! tree_fits_uhwi_p (len))
1820 /* If LEN is not constant, try MAXLEN too.
1821 For MAXLEN only allow optimizing into non-_ocs function
1822 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1823 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1825 if (fcode == BUILT_IN_STPCPY_CHK)
1830 /* If return value of __stpcpy_chk is ignored,
1831 optimize into __strcpy_chk. */
1832 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1836 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1837 replace_call_with_call_and_fold (gsi, repl);
1841 if (! len || TREE_SIDE_EFFECTS (len))
1844 /* If c_strlen returned something, but not a constant,
1845 transform __strcpy_chk into __memcpy_chk. */
1846 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1850 gimple_seq stmts = NULL;
1851 len = gimple_convert (&stmts, loc, size_type_node, len);
1852 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1853 build_int_cst (size_type_node, 1));
1854 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1855 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1856 replace_call_with_call_and_fold (gsi, repl);
1863 if (! tree_int_cst_lt (maxlen, size))
1867 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1868 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1869 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1873 gimple *repl = gimple_build_call (fn, 2, dest, src);
1874 replace_call_with_call_and_fold (gsi, repl);
1878 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1879 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1880 length passed as third argument. IGNORE is true if return value can be
1881 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1884 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1885 tree dest, tree src,
1886 tree len, tree size,
1887 enum built_in_function fcode)
1889 gimple *stmt = gsi_stmt (*gsi);
1890 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1893 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1895 /* If return value of __stpncpy_chk is ignored,
1896 optimize into __strncpy_chk. */
1897 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1900 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1901 replace_call_with_call_and_fold (gsi, repl);
1906 if (! tree_fits_uhwi_p (size))
1909 tree maxlen = get_maxval_strlen (len, 2);
1910 if (! integer_all_onesp (size))
1912 if (! tree_fits_uhwi_p (len))
1914 /* If LEN is not constant, try MAXLEN too.
1915 For MAXLEN only allow optimizing into non-_ocs function
1916 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1917 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1923 if (tree_int_cst_lt (size, maxlen))
1927 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1928 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1929 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1933 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1934 replace_call_with_call_and_fold (gsi, repl);
1938 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1939 Return NULL_TREE if no simplification can be made. */
1942 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1944 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1945 location_t loc = gimple_location (stmt);
1946 tree dest = gimple_call_arg (stmt, 0);
1947 tree src = gimple_call_arg (stmt, 1);
1948 tree fn, len, lenp1;
1950 /* If the result is unused, replace stpcpy with strcpy. */
1951 if (gimple_call_lhs (stmt) == NULL_TREE)
1953 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1956 gimple_call_set_fndecl (stmt, fn);
1961 len = c_strlen (src, 1);
1963 || TREE_CODE (len) != INTEGER_CST)
1966 if (optimize_function_for_size_p (cfun)
1967 /* If length is zero it's small enough. */
1968 && !integer_zerop (len))
1971 /* If the source has a known length replace stpcpy with memcpy. */
1972 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1976 gimple_seq stmts = NULL;
1977 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1978 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1979 tem, build_int_cst (size_type_node, 1));
1980 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1981 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1982 gimple_set_vuse (repl, gimple_vuse (stmt));
1983 gimple_set_vdef (repl, gimple_vdef (stmt));
1984 if (gimple_vdef (repl)
1985 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1986 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1987 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1988 /* Replace the result with dest + len. */
1990 tem = gimple_convert (&stmts, loc, sizetype, len);
1991 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1992 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1993 POINTER_PLUS_EXPR, dest, tem);
1994 gsi_replace (gsi, ret, false);
1995 /* Finally fold the memcpy call. */
1996 gimple_stmt_iterator gsi2 = *gsi;
2002 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2003 NULL_TREE if a normal call should be emitted rather than expanding
2004 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2005 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2006 passed as second argument. */
2009 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2010 enum built_in_function fcode)
2012 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2013 tree dest, size, len, fn, fmt, flag;
2014 const char *fmt_str;
2016 /* Verify the required arguments in the original call. */
2017 if (gimple_call_num_args (stmt) < 5)
2020 dest = gimple_call_arg (stmt, 0);
2021 len = gimple_call_arg (stmt, 1);
2022 flag = gimple_call_arg (stmt, 2);
2023 size = gimple_call_arg (stmt, 3);
2024 fmt = gimple_call_arg (stmt, 4);
2026 if (! tree_fits_uhwi_p (size))
2029 if (! integer_all_onesp (size))
2031 tree maxlen = get_maxval_strlen (len, 2);
2032 if (! tree_fits_uhwi_p (len))
2034 /* If LEN is not constant, try MAXLEN too.
2035 For MAXLEN only allow optimizing into non-_ocs function
2036 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2037 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2043 if (tree_int_cst_lt (size, maxlen))
2047 if (!init_target_chars ())
2050 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2051 or if format doesn't contain % chars or is "%s". */
2052 if (! integer_zerop (flag))
2054 fmt_str = c_getstr (fmt);
2055 if (fmt_str == NULL)
2057 if (strchr (fmt_str, target_percent) != NULL
2058 && strcmp (fmt_str, target_percent_s))
2062 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2064 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2065 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2069 /* Replace the called function and the first 5 argument by 3 retaining
2070 trailing varargs. */
2071 gimple_call_set_fndecl (stmt, fn);
2072 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2073 gimple_call_set_arg (stmt, 0, dest);
2074 gimple_call_set_arg (stmt, 1, len);
2075 gimple_call_set_arg (stmt, 2, fmt);
2076 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2077 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2078 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2083 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2084 Return NULL_TREE if a normal call should be emitted rather than
2085 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2086 or BUILT_IN_VSPRINTF_CHK. */
2089 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2090 enum built_in_function fcode)
2092 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2093 tree dest, size, len, fn, fmt, flag;
2094 const char *fmt_str;
2095 unsigned nargs = gimple_call_num_args (stmt);
2097 /* Verify the required arguments in the original call. */
2100 dest = gimple_call_arg (stmt, 0);
2101 flag = gimple_call_arg (stmt, 1);
2102 size = gimple_call_arg (stmt, 2);
2103 fmt = gimple_call_arg (stmt, 3);
2105 if (! tree_fits_uhwi_p (size))
2110 if (!init_target_chars ())
2113 /* Check whether the format is a literal string constant. */
2114 fmt_str = c_getstr (fmt);
2115 if (fmt_str != NULL)
2117 /* If the format doesn't contain % args or %%, we know the size. */
2118 if (strchr (fmt_str, target_percent) == 0)
2120 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2121 len = build_int_cstu (size_type_node, strlen (fmt_str));
2123 /* If the format is "%s" and first ... argument is a string literal,
2124 we know the size too. */
2125 else if (fcode == BUILT_IN_SPRINTF_CHK
2126 && strcmp (fmt_str, target_percent_s) == 0)
2132 arg = gimple_call_arg (stmt, 4);
2133 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2135 len = c_strlen (arg, 1);
2136 if (! len || ! tree_fits_uhwi_p (len))
2143 if (! integer_all_onesp (size))
2145 if (! len || ! tree_int_cst_lt (len, size))
2149 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2150 or if format doesn't contain % chars or is "%s". */
2151 if (! integer_zerop (flag))
2153 if (fmt_str == NULL)
2155 if (strchr (fmt_str, target_percent) != NULL
2156 && strcmp (fmt_str, target_percent_s))
2160 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2161 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2162 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2166 /* Replace the called function and the first 4 argument by 2 retaining
2167 trailing varargs. */
2168 gimple_call_set_fndecl (stmt, fn);
2169 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2170 gimple_call_set_arg (stmt, 0, dest);
2171 gimple_call_set_arg (stmt, 1, fmt);
2172 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2173 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2174 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2179 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2180 ORIG may be null if this is a 2-argument call. We don't attempt to
2181 simplify calls with more than 3 arguments.
2183 Return NULL_TREE if no simplification was possible, otherwise return the
2184 simplified form of the call as a tree. If IGNORED is true, it means that
2185 the caller does not use the returned value of the function. */
2188 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2190 gimple *stmt = gsi_stmt (*gsi);
2191 tree dest = gimple_call_arg (stmt, 0);
2192 tree fmt = gimple_call_arg (stmt, 1);
2193 tree orig = NULL_TREE;
2194 const char *fmt_str = NULL;
2196 /* Verify the required arguments in the original call. We deal with two
2197 types of sprintf() calls: 'sprintf (str, fmt)' and
2198 'sprintf (dest, "%s", orig)'. */
2199 if (gimple_call_num_args (stmt) > 3)
2202 if (gimple_call_num_args (stmt) == 3)
2203 orig = gimple_call_arg (stmt, 2);
2205 /* Check whether the format is a literal string constant. */
2206 fmt_str = c_getstr (fmt);
2207 if (fmt_str == NULL)
2210 if (!init_target_chars ())
2213 /* If the format doesn't contain % args or %%, use strcpy. */
2214 if (strchr (fmt_str, target_percent) == NULL)
2216 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2221 /* Don't optimize sprintf (buf, "abc", ptr++). */
2225 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2226 'format' is known to contain no % formats. */
2227 gimple_seq stmts = NULL;
2228 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2229 gimple_seq_add_stmt_without_update (&stmts, repl);
2230 if (gimple_call_lhs (stmt))
2232 repl = gimple_build_assign (gimple_call_lhs (stmt),
2233 build_int_cst (integer_type_node,
2235 gimple_seq_add_stmt_without_update (&stmts, repl);
2236 gsi_replace_with_seq_vops (gsi, stmts);
2237 /* gsi now points at the assignment to the lhs, get a
2238 stmt iterator to the memcpy call.
2239 ??? We can't use gsi_for_stmt as that doesn't work when the
2240 CFG isn't built yet. */
2241 gimple_stmt_iterator gsi2 = *gsi;
2247 gsi_replace_with_seq_vops (gsi, stmts);
2253 /* If the format is "%s", use strcpy if the result isn't used. */
2254 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2257 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2262 /* Don't crash on sprintf (str1, "%s"). */
2266 tree orig_len = NULL_TREE;
2267 if (gimple_call_lhs (stmt))
2269 orig_len = get_maxval_strlen (orig, 0);
2274 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2275 gimple_seq stmts = NULL;
2276 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2277 gimple_seq_add_stmt_without_update (&stmts, repl);
2278 if (gimple_call_lhs (stmt))
2280 if (!useless_type_conversion_p (integer_type_node,
2281 TREE_TYPE (orig_len)))
2282 orig_len = fold_convert (integer_type_node, orig_len);
2283 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2284 gimple_seq_add_stmt_without_update (&stmts, repl);
2285 gsi_replace_with_seq_vops (gsi, stmts);
2286 /* gsi now points at the assignment to the lhs, get a
2287 stmt iterator to the memcpy call.
2288 ??? We can't use gsi_for_stmt as that doesn't work when the
2289 CFG isn't built yet. */
2290 gimple_stmt_iterator gsi2 = *gsi;
2296 gsi_replace_with_seq_vops (gsi, stmts);
2304 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2305 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2306 attempt to simplify calls with more than 4 arguments.
2308 Return NULL_TREE if no simplification was possible, otherwise return the
2309 simplified form of the call as a tree. If IGNORED is true, it means that
2310 the caller does not use the returned value of the function. */
2313 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2315 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2316 tree dest = gimple_call_arg (stmt, 0);
2317 tree destsize = gimple_call_arg (stmt, 1);
2318 tree fmt = gimple_call_arg (stmt, 2);
2319 tree orig = NULL_TREE;
2320 const char *fmt_str = NULL;
2322 if (gimple_call_num_args (stmt) > 4)
2325 if (gimple_call_num_args (stmt) == 4)
2326 orig = gimple_call_arg (stmt, 3);
2328 if (!tree_fits_uhwi_p (destsize))
2330 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2332 /* Check whether the format is a literal string constant. */
2333 fmt_str = c_getstr (fmt);
2334 if (fmt_str == NULL)
2337 if (!init_target_chars ())
2340 /* If the format doesn't contain % args or %%, use strcpy. */
2341 if (strchr (fmt_str, target_percent) == NULL)
2343 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2347 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2351 /* We could expand this as
2352 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2354 memcpy (str, fmt_with_nul_at_cstm1, cst);
2355 but in the former case that might increase code size
2356 and in the latter case grow .rodata section too much.
2358 size_t len = strlen (fmt_str);
2362 gimple_seq stmts = NULL;
2363 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2364 gimple_seq_add_stmt_without_update (&stmts, repl);
2365 if (gimple_call_lhs (stmt))
2367 repl = gimple_build_assign (gimple_call_lhs (stmt),
2368 build_int_cst (integer_type_node, len));
2369 gimple_seq_add_stmt_without_update (&stmts, repl);
2370 gsi_replace_with_seq_vops (gsi, stmts);
2371 /* gsi now points at the assignment to the lhs, get a
2372 stmt iterator to the memcpy call.
2373 ??? We can't use gsi_for_stmt as that doesn't work when the
2374 CFG isn't built yet. */
2375 gimple_stmt_iterator gsi2 = *gsi;
2381 gsi_replace_with_seq_vops (gsi, stmts);
2387 /* If the format is "%s", use strcpy if the result isn't used. */
2388 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2390 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2394 /* Don't crash on snprintf (str1, cst, "%s"). */
2398 tree orig_len = get_maxval_strlen (orig, 0);
2399 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2402 /* We could expand this as
2403 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2405 memcpy (str1, str2_with_nul_at_cstm1, cst);
2406 but in the former case that might increase code size
2407 and in the latter case grow .rodata section too much.
2409 if (compare_tree_int (orig_len, destlen) >= 0)
2412 /* Convert snprintf (str1, cst, "%s", str2) into
2413 strcpy (str1, str2) if strlen (str2) < cst. */
2414 gimple_seq stmts = NULL;
2415 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2416 gimple_seq_add_stmt_without_update (&stmts, repl);
2417 if (gimple_call_lhs (stmt))
2419 if (!useless_type_conversion_p (integer_type_node,
2420 TREE_TYPE (orig_len)))
2421 orig_len = fold_convert (integer_type_node, orig_len);
2422 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2423 gimple_seq_add_stmt_without_update (&stmts, repl);
2424 gsi_replace_with_seq_vops (gsi, stmts);
2425 /* gsi now points at the assignment to the lhs, get a
2426 stmt iterator to the memcpy call.
2427 ??? We can't use gsi_for_stmt as that doesn't work when the
2428 CFG isn't built yet. */
2429 gimple_stmt_iterator gsi2 = *gsi;
2435 gsi_replace_with_seq_vops (gsi, stmts);
2443 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2444 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2445 more than 3 arguments, and ARG may be null in the 2-argument case.
2447 Return NULL_TREE if no simplification was possible, otherwise return the
2448 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2449 code of the function to be simplified. */
2452 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2453 tree fp, tree fmt, tree arg,
2454 enum built_in_function fcode)
2456 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2457 tree fn_fputc, fn_fputs;
2458 const char *fmt_str = NULL;
2460 /* If the return value is used, don't do the transformation. */
2461 if (gimple_call_lhs (stmt) != NULL_TREE)
2464 /* Check whether the format is a literal string constant. */
2465 fmt_str = c_getstr (fmt);
2466 if (fmt_str == NULL)
2469 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2471 /* If we're using an unlocked function, assume the other
2472 unlocked functions exist explicitly. */
2473 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2474 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2478 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2479 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2482 if (!init_target_chars ())
2485 /* If the format doesn't contain % args or %%, use strcpy. */
2486 if (strchr (fmt_str, target_percent) == NULL)
2488 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2492 /* If the format specifier was "", fprintf does nothing. */
2493 if (fmt_str[0] == '\0')
2495 replace_call_with_value (gsi, NULL_TREE);
2499 /* When "string" doesn't contain %, replace all cases of
2500 fprintf (fp, string) with fputs (string, fp). The fputs
2501 builtin will take care of special cases like length == 1. */
2504 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2505 replace_call_with_call_and_fold (gsi, repl);
2510 /* The other optimizations can be done only on the non-va_list variants. */
2511 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2514 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2515 else if (strcmp (fmt_str, target_percent_s) == 0)
2517 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2521 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2522 replace_call_with_call_and_fold (gsi, repl);
2527 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2528 else if (strcmp (fmt_str, target_percent_c) == 0)
2531 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2535 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2536 replace_call_with_call_and_fold (gsi, repl);
2544 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2545 FMT and ARG are the arguments to the call; we don't fold cases with
2546 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2548 Return NULL_TREE if no simplification was possible, otherwise return the
2549 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2550 code of the function to be simplified. */
2553 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2554 tree arg, enum built_in_function fcode)
2556 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2557 tree fn_putchar, fn_puts, newarg;
2558 const char *fmt_str = NULL;
2560 /* If the return value is used, don't do the transformation. */
2561 if (gimple_call_lhs (stmt) != NULL_TREE)
2564 /* Check whether the format is a literal string constant. */
2565 fmt_str = c_getstr (fmt);
2566 if (fmt_str == NULL)
2569 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2571 /* If we're using an unlocked function, assume the other
2572 unlocked functions exist explicitly. */
2573 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2574 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2578 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2579 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2582 if (!init_target_chars ())
2585 if (strcmp (fmt_str, target_percent_s) == 0
2586 || strchr (fmt_str, target_percent) == NULL)
2590 if (strcmp (fmt_str, target_percent_s) == 0)
2592 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2595 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2598 str = c_getstr (arg);
2604 /* The format specifier doesn't contain any '%' characters. */
2605 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2611 /* If the string was "", printf does nothing. */
2614 replace_call_with_value (gsi, NULL_TREE);
2618 /* If the string has length of 1, call putchar. */
2621 /* Given printf("c"), (where c is any one character,)
2622 convert "c"[0] to an int and pass that to the replacement
2624 newarg = build_int_cst (integer_type_node, str[0]);
2627 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2628 replace_call_with_call_and_fold (gsi, repl);
2634 /* If the string was "string\n", call puts("string"). */
2635 size_t len = strlen (str);
2636 if ((unsigned char)str[len - 1] == target_newline
2637 && (size_t) (int) len == len
2641 tree offset_node, string_cst;
2643 /* Create a NUL-terminated string that's one char shorter
2644 than the original, stripping off the trailing '\n'. */
2645 newarg = build_string_literal (len, str);
2646 string_cst = string_constant (newarg, &offset_node);
2647 gcc_checking_assert (string_cst
2648 && (TREE_STRING_LENGTH (string_cst)
2650 && integer_zerop (offset_node)
2652 TREE_STRING_POINTER (string_cst)[len - 1]
2654 /* build_string_literal creates a new STRING_CST,
2655 modify it in place to avoid double copying. */
2656 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2657 newstr[len - 1] = '\0';
2660 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2661 replace_call_with_call_and_fold (gsi, repl);
2666 /* We'd like to arrange to call fputs(string,stdout) here,
2667 but we need stdout and don't have a way to get it yet. */
2672 /* The other optimizations can be done only on the non-va_list variants. */
2673 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2676 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2677 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2679 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2683 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2684 replace_call_with_call_and_fold (gsi, repl);
2689 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2690 else if (strcmp (fmt_str, target_percent_c) == 0)
2692 if (!arg || ! useless_type_conversion_p (integer_type_node,
2697 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2698 replace_call_with_call_and_fold (gsi, repl);
2708 /* Fold a call to __builtin_strlen with known length LEN. */
2711 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2713 gimple *stmt = gsi_stmt (*gsi);
2714 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2717 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2718 replace_call_with_value (gsi, len);
2722 /* Fold a call to __builtin_acc_on_device. */
2725 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2727 /* Defer folding until we know which compiler we're in. */
2728 if (symtab->state != EXPANSION)
2731 unsigned val_host = GOMP_DEVICE_HOST;
2732 unsigned val_dev = GOMP_DEVICE_NONE;
2734 #ifdef ACCEL_COMPILER
2735 val_host = GOMP_DEVICE_NOT_HOST;
2736 val_dev = ACCEL_COMPILER_acc_device;
2739 location_t loc = gimple_location (gsi_stmt (*gsi));
2741 tree host_eq = make_ssa_name (boolean_type_node);
2742 gimple *host_ass = gimple_build_assign
2743 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2744 gimple_set_location (host_ass, loc);
2745 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2747 tree dev_eq = make_ssa_name (boolean_type_node);
2748 gimple *dev_ass = gimple_build_assign
2749 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2750 gimple_set_location (dev_ass, loc);
2751 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2753 tree result = make_ssa_name (boolean_type_node);
2754 gimple *result_ass = gimple_build_assign
2755 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2756 gimple_set_location (result_ass, loc);
2757 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2759 replace_call_with_value (gsi, result);
2764 /* Fold the non-target builtin at *GSI and return whether any simplification
2768 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2770 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2771 tree callee = gimple_call_fndecl (stmt);
2773 /* Give up for always_inline inline builtins until they are
2775 if (avoid_folding_inline_builtin (callee))
2778 unsigned n = gimple_call_num_args (stmt);
2779 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2782 case BUILT_IN_BZERO:
2783 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2784 gimple_call_arg (stmt, 1));
2785 case BUILT_IN_MEMSET:
2786 return gimple_fold_builtin_memset (gsi,
2787 gimple_call_arg (stmt, 1),
2788 gimple_call_arg (stmt, 2));
2789 case BUILT_IN_BCOPY:
2790 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2791 gimple_call_arg (stmt, 0), 3);
2792 case BUILT_IN_MEMCPY:
2793 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2794 gimple_call_arg (stmt, 1), 0);
2795 case BUILT_IN_MEMPCPY:
2796 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2797 gimple_call_arg (stmt, 1), 1);
2798 case BUILT_IN_MEMMOVE:
2799 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2800 gimple_call_arg (stmt, 1), 3);
2801 case BUILT_IN_SPRINTF_CHK:
2802 case BUILT_IN_VSPRINTF_CHK:
2803 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2804 case BUILT_IN_STRCAT_CHK:
2805 return gimple_fold_builtin_strcat_chk (gsi);
2806 case BUILT_IN_STRNCAT_CHK:
2807 return gimple_fold_builtin_strncat_chk (gsi);
2808 case BUILT_IN_STRLEN:
2809 return gimple_fold_builtin_strlen (gsi);
2810 case BUILT_IN_STRCPY:
2811 return gimple_fold_builtin_strcpy (gsi,
2812 gimple_call_arg (stmt, 0),
2813 gimple_call_arg (stmt, 1));
2814 case BUILT_IN_STRNCPY:
2815 return gimple_fold_builtin_strncpy (gsi,
2816 gimple_call_arg (stmt, 0),
2817 gimple_call_arg (stmt, 1),
2818 gimple_call_arg (stmt, 2));
2819 case BUILT_IN_STRCAT:
2820 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2821 gimple_call_arg (stmt, 1));
2822 case BUILT_IN_STRNCAT:
2823 return gimple_fold_builtin_strncat (gsi);
2824 case BUILT_IN_FPUTS:
2825 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2826 gimple_call_arg (stmt, 1), false);
2827 case BUILT_IN_FPUTS_UNLOCKED:
2828 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2829 gimple_call_arg (stmt, 1), true);
2830 case BUILT_IN_MEMCPY_CHK:
2831 case BUILT_IN_MEMPCPY_CHK:
2832 case BUILT_IN_MEMMOVE_CHK:
2833 case BUILT_IN_MEMSET_CHK:
2834 return gimple_fold_builtin_memory_chk (gsi,
2835 gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1),
2837 gimple_call_arg (stmt, 2),
2838 gimple_call_arg (stmt, 3),
2840 case BUILT_IN_STPCPY:
2841 return gimple_fold_builtin_stpcpy (gsi);
2842 case BUILT_IN_STRCPY_CHK:
2843 case BUILT_IN_STPCPY_CHK:
2844 return gimple_fold_builtin_stxcpy_chk (gsi,
2845 gimple_call_arg (stmt, 0),
2846 gimple_call_arg (stmt, 1),
2847 gimple_call_arg (stmt, 2),
2849 case BUILT_IN_STRNCPY_CHK:
2850 case BUILT_IN_STPNCPY_CHK:
2851 return gimple_fold_builtin_stxncpy_chk (gsi,
2852 gimple_call_arg (stmt, 0),
2853 gimple_call_arg (stmt, 1),
2854 gimple_call_arg (stmt, 2),
2855 gimple_call_arg (stmt, 3),
2857 case BUILT_IN_SNPRINTF_CHK:
2858 case BUILT_IN_VSNPRINTF_CHK:
2859 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2860 case BUILT_IN_SNPRINTF:
2861 return gimple_fold_builtin_snprintf (gsi);
2862 case BUILT_IN_SPRINTF:
2863 return gimple_fold_builtin_sprintf (gsi);
2864 case BUILT_IN_FPRINTF:
2865 case BUILT_IN_FPRINTF_UNLOCKED:
2866 case BUILT_IN_VFPRINTF:
2867 if (n == 2 || n == 3)
2868 return gimple_fold_builtin_fprintf (gsi,
2869 gimple_call_arg (stmt, 0),
2870 gimple_call_arg (stmt, 1),
2872 ? gimple_call_arg (stmt, 2)
2876 case BUILT_IN_FPRINTF_CHK:
2877 case BUILT_IN_VFPRINTF_CHK:
2878 if (n == 3 || n == 4)
2879 return gimple_fold_builtin_fprintf (gsi,
2880 gimple_call_arg (stmt, 0),
2881 gimple_call_arg (stmt, 2),
2883 ? gimple_call_arg (stmt, 3)
2887 case BUILT_IN_PRINTF:
2888 case BUILT_IN_PRINTF_UNLOCKED:
2889 case BUILT_IN_VPRINTF:
2890 if (n == 1 || n == 2)
2891 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2893 ? gimple_call_arg (stmt, 1)
2894 : NULL_TREE, fcode);
2896 case BUILT_IN_PRINTF_CHK:
2897 case BUILT_IN_VPRINTF_CHK:
2898 if (n == 2 || n == 3)
2899 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2901 ? gimple_call_arg (stmt, 2)
2902 : NULL_TREE, fcode);
2904 case BUILT_IN_ACC_ON_DEVICE:
2905 return gimple_fold_builtin_acc_on_device (gsi,
2906 gimple_call_arg (stmt, 0));
2910 /* Try the generic builtin folder. */
2911 bool ignore = (gimple_call_lhs (stmt) == NULL);
2912 tree result = fold_call_stmt (stmt, ignore);
2916 STRIP_NOPS (result);
2918 result = fold_convert (gimple_call_return_type (stmt), result);
2919 if (!update_call_from_tree (gsi, result))
2920 gimplify_and_update_call_from_tree (gsi, result);
2927 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2928 function calls to constants, where possible. */
2931 fold_internal_goacc_dim (const gimple *call)
2933 int axis = get_oacc_ifn_dim_arg (call);
2934 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2935 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2936 tree result = NULL_TREE;
2938 /* If the size is 1, or we only want the size and it is not dynamic,
2939 we know the answer. */
2940 if (size == 1 || (!is_pos && size))
2942 tree type = TREE_TYPE (gimple_call_lhs (call));
2943 result = build_int_cst (type, size - is_pos);
2949 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2950 doesn't fit into TYPE. The test for overflow should be regardless of
2951 -fwrapv, and even for unsigned types. */
2954 arith_overflowed_p (enum tree_code code, const_tree type,
2955 const_tree arg0, const_tree arg1)
2957 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2958 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2960 widest2_int warg0 = widest2_int_cst (arg0);
2961 widest2_int warg1 = widest2_int_cst (arg1);
2965 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2966 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2967 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2968 default: gcc_unreachable ();
2970 signop sign = TYPE_SIGN (type);
2971 if (sign == UNSIGNED && wi::neg_p (wres))
2973 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2976 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2977 The statement may be replaced by another statement, e.g., if the call
2978 simplifies to a constant value. Return true if any changes were made.
2979 It is assumed that the operands have been previously folded. */
2982 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2984 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2986 bool changed = false;
2989 /* Fold *& in call arguments. */
2990 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2991 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2993 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2996 gimple_call_set_arg (stmt, i, tmp);
3001 /* Check for virtual calls that became direct calls. */
3002 callee = gimple_call_fn (stmt);
3003 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3005 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3007 if (dump_file && virtual_method_call_p (callee)
3008 && !possible_polymorphic_call_target_p
3009 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3010 (OBJ_TYPE_REF_EXPR (callee)))))
3013 "Type inheritance inconsistent devirtualization of ");
3014 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3015 fprintf (dump_file, " to ");
3016 print_generic_expr (dump_file, callee, TDF_SLIM);
3017 fprintf (dump_file, "\n");
3020 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3023 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3026 vec <cgraph_node *>targets
3027 = possible_polymorphic_call_targets (callee, stmt, &final);
3028 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3030 tree lhs = gimple_call_lhs (stmt);
3031 if (dump_enabled_p ())
3033 location_t loc = gimple_location_safe (stmt);
3034 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3035 "folding virtual function call to %s\n",
3036 targets.length () == 1
3037 ? targets[0]->name ()
3038 : "__builtin_unreachable");
3040 if (targets.length () == 1)
3042 tree fndecl = targets[0]->decl;
3043 gimple_call_set_fndecl (stmt, fndecl);
3045 /* If changing the call to __cxa_pure_virtual
3046 or similar noreturn function, adjust gimple_call_fntype
3048 if ((gimple_call_flags (stmt) & ECF_NORETURN)
3049 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3050 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3051 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3053 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3054 /* If the call becomes noreturn, remove the lhs. */
3056 && (gimple_call_flags (stmt) & ECF_NORETURN)
3057 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3058 || ((TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))
3060 && !TREE_ADDRESSABLE (TREE_TYPE (lhs)))))
3062 if (TREE_CODE (lhs) == SSA_NAME)
3064 tree var = create_tmp_var (TREE_TYPE (lhs));
3065 tree def = get_or_create_ssa_default_def (cfun, var);
3066 gimple *new_stmt = gimple_build_assign (lhs, def);
3067 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3069 gimple_call_set_lhs (stmt, NULL_TREE);
3071 maybe_remove_unused_call_args (cfun, stmt);
3075 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3076 gimple *new_stmt = gimple_build_call (fndecl, 0);
3077 gimple_set_location (new_stmt, gimple_location (stmt));
3078 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3080 tree var = create_tmp_var (TREE_TYPE (lhs));
3081 tree def = get_or_create_ssa_default_def (cfun, var);
3083 /* To satisfy condition for
3084 cgraph_update_edges_for_call_stmt_node,
3085 we need to preserve GIMPLE_CALL statement
3086 at position of GSI iterator. */
3087 update_call_from_tree (gsi, def);
3088 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3092 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3093 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3094 gsi_replace (gsi, new_stmt, false);
3102 /* Check for indirect calls that became direct calls, and then
3103 no longer require a static chain. */
3104 if (gimple_call_chain (stmt))
3106 tree fn = gimple_call_fndecl (stmt);
3107 if (fn && !DECL_STATIC_CHAIN (fn))
3109 gimple_call_set_chain (stmt, NULL);
3114 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3117 gimple_call_set_chain (stmt, tmp);
3126 /* Check for builtins that CCP can handle using information not
3127 available in the generic fold routines. */
3128 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3130 if (gimple_fold_builtin (gsi))
3133 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3135 changed |= targetm.gimple_fold_builtin (gsi);
3137 else if (gimple_call_internal_p (stmt))
3139 enum tree_code subcode = ERROR_MARK;
3140 tree result = NULL_TREE;
3141 bool cplx_result = false;
3142 tree overflow = NULL_TREE;
3143 switch (gimple_call_internal_fn (stmt))
3145 case IFN_BUILTIN_EXPECT:
3146 result = fold_builtin_expect (gimple_location (stmt),
3147 gimple_call_arg (stmt, 0),
3148 gimple_call_arg (stmt, 1),
3149 gimple_call_arg (stmt, 2));
3151 case IFN_UBSAN_OBJECT_SIZE:
3152 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3153 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3154 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3155 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3156 gimple_call_arg (stmt, 2))))
3158 gsi_replace (gsi, gimple_build_nop (), false);
3159 unlink_stmt_vdef (stmt);
3160 release_defs (stmt);
3164 case IFN_GOACC_DIM_SIZE:
3165 case IFN_GOACC_DIM_POS:
3166 result = fold_internal_goacc_dim (stmt);
3168 case IFN_UBSAN_CHECK_ADD:
3169 subcode = PLUS_EXPR;
3171 case IFN_UBSAN_CHECK_SUB:
3172 subcode = MINUS_EXPR;
3174 case IFN_UBSAN_CHECK_MUL:
3175 subcode = MULT_EXPR;
3177 case IFN_ADD_OVERFLOW:
3178 subcode = PLUS_EXPR;
3181 case IFN_SUB_OVERFLOW:
3182 subcode = MINUS_EXPR;
3185 case IFN_MUL_OVERFLOW:
3186 subcode = MULT_EXPR;
3192 if (subcode != ERROR_MARK)
3194 tree arg0 = gimple_call_arg (stmt, 0);
3195 tree arg1 = gimple_call_arg (stmt, 1);
3196 tree type = TREE_TYPE (arg0);
3199 tree lhs = gimple_call_lhs (stmt);
3200 if (lhs == NULL_TREE)
3203 type = TREE_TYPE (TREE_TYPE (lhs));
3205 if (type == NULL_TREE)
3207 /* x = y + 0; x = y - 0; x = y * 0; */
3208 else if (integer_zerop (arg1))
3209 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3210 /* x = 0 + y; x = 0 * y; */
3211 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3212 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3214 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3215 result = integer_zero_node;
3216 /* x = y * 1; x = 1 * y; */
3217 else if (subcode == MULT_EXPR && integer_onep (arg1))
3219 else if (subcode == MULT_EXPR && integer_onep (arg0))
3221 else if (TREE_CODE (arg0) == INTEGER_CST
3222 && TREE_CODE (arg1) == INTEGER_CST)
3225 result = int_const_binop (subcode, fold_convert (type, arg0),
3226 fold_convert (type, arg1));
3228 result = int_const_binop (subcode, arg0, arg1);
3229 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3232 overflow = build_one_cst (type);
3239 if (result == integer_zero_node)
3240 result = build_zero_cst (type);
3241 else if (cplx_result && TREE_TYPE (result) != type)
3243 if (TREE_CODE (result) == INTEGER_CST)
3245 if (arith_overflowed_p (PLUS_EXPR, type, result,
3247 overflow = build_one_cst (type);
3249 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3250 && TYPE_UNSIGNED (type))
3251 || (TYPE_PRECISION (type)
3252 < (TYPE_PRECISION (TREE_TYPE (result))
3253 + (TYPE_UNSIGNED (TREE_TYPE (result))
3254 && !TYPE_UNSIGNED (type)))))
3257 result = fold_convert (type, result);
3264 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3265 result = drop_tree_overflow (result);
3268 if (overflow == NULL_TREE)
3269 overflow = build_zero_cst (TREE_TYPE (result));
3270 tree ctype = build_complex_type (TREE_TYPE (result));
3271 if (TREE_CODE (result) == INTEGER_CST
3272 && TREE_CODE (overflow) == INTEGER_CST)
3273 result = build_complex (ctype, result, overflow);
3275 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3276 ctype, result, overflow);
3278 if (!update_call_from_tree (gsi, result))
3279 gimplify_and_update_call_from_tree (gsi, result);
3288 /* Return true whether NAME has a use on STMT. */
3291 has_use_on_stmt (tree name, gimple *stmt)
3293 imm_use_iterator iter;
3294 use_operand_p use_p;
3295 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3296 if (USE_STMT (use_p) == stmt)
3301 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3304 Replaces *GSI with the simplification result in RCODE and OPS
3305 and the associated statements in *SEQ. Does the replacement
3306 according to INPLACE and returns true if the operation succeeded. */
3309 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3310 code_helper rcode, tree *ops,
3311 gimple_seq *seq, bool inplace)
3313 gimple *stmt = gsi_stmt (*gsi);
3315 /* Play safe and do not allow abnormals to be mentioned in
3316 newly created statements. See also maybe_push_res_to_seq.
3317 As an exception allow such uses if there was a use of the
3318 same SSA name on the old stmt. */
3319 if ((TREE_CODE (ops[0]) == SSA_NAME
3320 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3321 && !has_use_on_stmt (ops[0], stmt))
3323 && TREE_CODE (ops[1]) == SSA_NAME
3324 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3325 && !has_use_on_stmt (ops[1], stmt))
3327 && TREE_CODE (ops[2]) == SSA_NAME
3328 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3329 && !has_use_on_stmt (ops[2], stmt))
3330 || (COMPARISON_CLASS_P (ops[0])
3331 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3332 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3333 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3334 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3335 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3336 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3339 /* Don't insert new statements when INPLACE is true, even if we could
3340 reuse STMT for the final statement. */
3341 if (inplace && !gimple_seq_empty_p (*seq))
3344 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3346 gcc_assert (rcode.is_tree_code ());
3347 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3348 /* GIMPLE_CONDs condition may not throw. */
3349 && (!flag_exceptions
3350 || !cfun->can_throw_non_call_exceptions
3351 || !operation_could_trap_p (rcode,
3352 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3354 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3355 else if (rcode == SSA_NAME)
3356 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3357 build_zero_cst (TREE_TYPE (ops[0])));
3358 else if (rcode == INTEGER_CST)
3360 if (integer_zerop (ops[0]))
3361 gimple_cond_make_false (cond_stmt);
3363 gimple_cond_make_true (cond_stmt);
3367 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3371 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3372 build_zero_cst (TREE_TYPE (res)));
3376 if (dump_file && (dump_flags & TDF_DETAILS))
3378 fprintf (dump_file, "gimple_simplified to ");
3379 if (!gimple_seq_empty_p (*seq))
3380 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3381 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3384 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3387 else if (is_gimple_assign (stmt)
3388 && rcode.is_tree_code ())
3391 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3393 maybe_build_generic_op (rcode,
3394 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
3395 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3396 if (dump_file && (dump_flags & TDF_DETAILS))
3398 fprintf (dump_file, "gimple_simplified to ");
3399 if (!gimple_seq_empty_p (*seq))
3400 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3401 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3404 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3408 else if (rcode.is_fn_code ()
3409 && gimple_call_combined_fn (stmt) == rcode)
3412 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3414 gcc_assert (ops[i] != NULL_TREE);
3415 gimple_call_set_arg (stmt, i, ops[i]);
3418 gcc_assert (ops[i] == NULL_TREE);
3419 if (dump_file && (dump_flags & TDF_DETAILS))
3421 fprintf (dump_file, "gimple_simplified to ");
3422 if (!gimple_seq_empty_p (*seq))
3423 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3424 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3426 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3431 if (gimple_has_lhs (stmt))
3433 tree lhs = gimple_get_lhs (stmt);
3434 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3437 if (dump_file && (dump_flags & TDF_DETAILS))
3439 fprintf (dump_file, "gimple_simplified to ");
3440 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3442 gsi_replace_with_seq_vops (gsi, *seq);
3452 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3455 maybe_canonicalize_mem_ref_addr (tree *t)
3459 if (TREE_CODE (*t) == ADDR_EXPR)
3460 t = &TREE_OPERAND (*t, 0);
3462 while (handled_component_p (*t))
3463 t = &TREE_OPERAND (*t, 0);
3465 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3466 of invariant addresses into a SSA name MEM_REF address. */
3467 if (TREE_CODE (*t) == MEM_REF
3468 || TREE_CODE (*t) == TARGET_MEM_REF)
3470 tree addr = TREE_OPERAND (*t, 0);
3471 if (TREE_CODE (addr) == ADDR_EXPR
3472 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3473 || handled_component_p (TREE_OPERAND (addr, 0))))
3476 HOST_WIDE_INT coffset;
3477 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3482 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3483 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3484 TREE_OPERAND (*t, 1),
3485 size_int (coffset));
3488 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3489 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3492 /* Canonicalize back MEM_REFs to plain reference trees if the object
3493 accessed is a decl that has the same access semantics as the MEM_REF. */
3494 if (TREE_CODE (*t) == MEM_REF
3495 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3496 && integer_zerop (TREE_OPERAND (*t, 1))
3497 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3499 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3500 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3501 if (/* Same volatile qualification. */
3502 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3503 /* Same TBAA behavior with -fstrict-aliasing. */
3504 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3505 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3506 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3507 /* Same alignment. */
3508 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3509 /* We have to look out here to not drop a required conversion
3510 from the rhs to the lhs if *t appears on the lhs or vice-versa
3511 if it appears on the rhs. Thus require strict type
3513 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3515 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3520 /* Canonicalize TARGET_MEM_REF in particular with respect to
3521 the indexes becoming constant. */
3522 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3524 tree tem = maybe_fold_tmr (*t);
3535 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3536 distinguishes both cases. */
3539 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3541 bool changed = false;
3542 gimple *stmt = gsi_stmt (*gsi);
3543 bool nowarning = gimple_no_warning_p (stmt);
3545 fold_defer_overflow_warnings ();
3547 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3549 ??? This shouldn't be done in generic folding but in the
3550 propagation helpers which also know whether an address was
3552 Also canonicalize operand order. */
3553 switch (gimple_code (stmt))
3556 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3558 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3559 if ((REFERENCE_CLASS_P (*rhs)
3560 || TREE_CODE (*rhs) == ADDR_EXPR)
3561 && maybe_canonicalize_mem_ref_addr (rhs))
3563 tree *lhs = gimple_assign_lhs_ptr (stmt);
3564 if (REFERENCE_CLASS_P (*lhs)
3565 && maybe_canonicalize_mem_ref_addr (lhs))
3570 /* Canonicalize operand order. */
3571 enum tree_code code = gimple_assign_rhs_code (stmt);
3572 if (TREE_CODE_CLASS (code) == tcc_comparison
3573 || commutative_tree_code (code)
3574 || commutative_ternary_tree_code (code))
3576 tree rhs1 = gimple_assign_rhs1 (stmt);
3577 tree rhs2 = gimple_assign_rhs2 (stmt);
3578 if (tree_swap_operands_p (rhs1, rhs2, false))
3580 gimple_assign_set_rhs1 (stmt, rhs2);
3581 gimple_assign_set_rhs2 (stmt, rhs1);
3582 if (TREE_CODE_CLASS (code) == tcc_comparison)
3583 gimple_assign_set_rhs_code (stmt,
3584 swap_tree_comparison (code));
3592 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3594 tree *arg = gimple_call_arg_ptr (stmt, i);
3595 if (REFERENCE_CLASS_P (*arg)
3596 && maybe_canonicalize_mem_ref_addr (arg))
3599 tree *lhs = gimple_call_lhs_ptr (stmt);
3601 && REFERENCE_CLASS_P (*lhs)
3602 && maybe_canonicalize_mem_ref_addr (lhs))
3608 gasm *asm_stmt = as_a <gasm *> (stmt);
3609 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3611 tree link = gimple_asm_output_op (asm_stmt, i);
3612 tree op = TREE_VALUE (link);
3613 if (REFERENCE_CLASS_P (op)
3614 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3617 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3619 tree link = gimple_asm_input_op (asm_stmt, i);
3620 tree op = TREE_VALUE (link);
3621 if ((REFERENCE_CLASS_P (op)
3622 || TREE_CODE (op) == ADDR_EXPR)
3623 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3629 if (gimple_debug_bind_p (stmt))
3631 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3633 && (REFERENCE_CLASS_P (*val)
3634 || TREE_CODE (*val) == ADDR_EXPR)
3635 && maybe_canonicalize_mem_ref_addr (val))
3641 /* Canonicalize operand order. */
3642 tree lhs = gimple_cond_lhs (stmt);
3643 tree rhs = gimple_cond_rhs (stmt);
3644 if (tree_swap_operands_p (lhs, rhs, false))
3646 gcond *gc = as_a <gcond *> (stmt);
3647 gimple_cond_set_lhs (gc, rhs);
3648 gimple_cond_set_rhs (gc, lhs);
3649 gimple_cond_set_code (gc,
3650 swap_tree_comparison (gimple_cond_code (gc)));
3657 /* Dispatch to pattern-based folding. */
3659 || is_gimple_assign (stmt)
3660 || gimple_code (stmt) == GIMPLE_COND)
3662 gimple_seq seq = NULL;
3665 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3666 valueize, valueize))
3668 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3671 gimple_seq_discard (seq);
3675 stmt = gsi_stmt (*gsi);
3677 /* Fold the main computation performed by the statement. */
3678 switch (gimple_code (stmt))
3682 /* Try to canonicalize for boolean-typed X the comparisons
3683 X == 0, X == 1, X != 0, and X != 1. */
3684 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3685 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3687 tree lhs = gimple_assign_lhs (stmt);
3688 tree op1 = gimple_assign_rhs1 (stmt);
3689 tree op2 = gimple_assign_rhs2 (stmt);
3690 tree type = TREE_TYPE (op1);
3692 /* Check whether the comparison operands are of the same boolean
3693 type as the result type is.
3694 Check that second operand is an integer-constant with value
3696 if (TREE_CODE (op2) == INTEGER_CST
3697 && (integer_zerop (op2) || integer_onep (op2))
3698 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3700 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3701 bool is_logical_not = false;
3703 /* X == 0 and X != 1 is a logical-not.of X
3704 X == 1 and X != 0 is X */
3705 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3706 || (cmp_code == NE_EXPR && integer_onep (op2)))
3707 is_logical_not = true;
3709 if (is_logical_not == false)
3710 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3711 /* Only for one-bit precision typed X the transformation
3712 !X -> ~X is valied. */
3713 else if (TYPE_PRECISION (type) == 1)
3714 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3715 /* Otherwise we use !X -> X ^ 1. */
3717 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3718 build_int_cst (type, 1));
3724 unsigned old_num_ops = gimple_num_ops (stmt);
3725 tree lhs = gimple_assign_lhs (stmt);
3726 tree new_rhs = fold_gimple_assign (gsi);
3728 && !useless_type_conversion_p (TREE_TYPE (lhs),
3729 TREE_TYPE (new_rhs)))
3730 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3733 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3735 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3742 changed |= gimple_fold_call (gsi, inplace);
3746 /* Fold *& in asm operands. */
3748 gasm *asm_stmt = as_a <gasm *> (stmt);
3750 const char **oconstraints;
3751 const char *constraint;
3752 bool allows_mem, allows_reg;
3754 noutputs = gimple_asm_noutputs (asm_stmt);
3755 oconstraints = XALLOCAVEC (const char *, noutputs);
3757 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3759 tree link = gimple_asm_output_op (asm_stmt, i);
3760 tree op = TREE_VALUE (link);
3762 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3763 if (REFERENCE_CLASS_P (op)
3764 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3766 TREE_VALUE (link) = op;
3770 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3772 tree link = gimple_asm_input_op (asm_stmt, i);
3773 tree op = TREE_VALUE (link);
3775 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3776 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3777 oconstraints, &allows_mem, &allows_reg);
3778 if (REFERENCE_CLASS_P (op)
3779 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3782 TREE_VALUE (link) = op;
3790 if (gimple_debug_bind_p (stmt))
3792 tree val = gimple_debug_bind_get_value (stmt);
3794 && REFERENCE_CLASS_P (val))
3796 tree tem = maybe_fold_reference (val, false);
3799 gimple_debug_bind_set_value (stmt, tem);
3804 && TREE_CODE (val) == ADDR_EXPR)
3806 tree ref = TREE_OPERAND (val, 0);
3807 tree tem = maybe_fold_reference (ref, false);
3810 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3811 gimple_debug_bind_set_value (stmt, tem);
3821 stmt = gsi_stmt (*gsi);
3823 /* Fold *& on the lhs. */
3824 if (gimple_has_lhs (stmt))
3826 tree lhs = gimple_get_lhs (stmt);
3827 if (lhs && REFERENCE_CLASS_P (lhs))
3829 tree new_lhs = maybe_fold_reference (lhs, true);
3832 gimple_set_lhs (stmt, new_lhs);
3838 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
3842 /* Valueziation callback that ends up not following SSA edges. */
3845 no_follow_ssa_edges (tree)
3850 /* Valueization callback that ends up following single-use SSA edges only. */
3853 follow_single_use_edges (tree val)
3855 if (TREE_CODE (val) == SSA_NAME
3856 && !has_single_use (val))
3861 /* Fold the statement pointed to by GSI. In some cases, this function may
3862 replace the whole statement with a new one. Returns true iff folding
3864 The statement pointed to by GSI should be in valid gimple form but may
3865 be in unfolded state as resulting from for example constant propagation
3866 which can produce *&x = 0. */
3869 fold_stmt (gimple_stmt_iterator *gsi)
3871 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3875 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3877 return fold_stmt_1 (gsi, false, valueize);
3880 /* Perform the minimal folding on statement *GSI. Only operations like
3881 *&x created by constant propagation are handled. The statement cannot
3882 be replaced with a new one. Return true if the statement was
3883 changed, false otherwise.
3884 The statement *GSI should be in valid gimple form but may
3885 be in unfolded state as resulting from for example constant propagation
3886 which can produce *&x = 0. */
3889 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3891 gimple *stmt = gsi_stmt (*gsi);
3892 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3893 gcc_assert (gsi_stmt (*gsi) == stmt);
3897 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3898 if EXPR is null or we don't know how.
3899 If non-null, the result always has boolean type. */
3902 canonicalize_bool (tree expr, bool invert)
3908 if (integer_nonzerop (expr))
3909 return boolean_false_node;
3910 else if (integer_zerop (expr))
3911 return boolean_true_node;
3912 else if (TREE_CODE (expr) == SSA_NAME)
3913 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3914 build_int_cst (TREE_TYPE (expr), 0));
3915 else if (COMPARISON_CLASS_P (expr))
3916 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3918 TREE_OPERAND (expr, 0),
3919 TREE_OPERAND (expr, 1));
3925 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3927 if (integer_nonzerop (expr))
3928 return boolean_true_node;
3929 else if (integer_zerop (expr))
3930 return boolean_false_node;
3931 else if (TREE_CODE (expr) == SSA_NAME)
3932 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3933 build_int_cst (TREE_TYPE (expr), 0));
3934 else if (COMPARISON_CLASS_P (expr))
3935 return fold_build2 (TREE_CODE (expr),
3937 TREE_OPERAND (expr, 0),
3938 TREE_OPERAND (expr, 1));
3944 /* Check to see if a boolean expression EXPR is logically equivalent to the
3945 comparison (OP1 CODE OP2). Check for various identities involving
3949 same_bool_comparison_p (const_tree expr, enum tree_code code,
3950 const_tree op1, const_tree op2)
3954 /* The obvious case. */
3955 if (TREE_CODE (expr) == code
3956 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3957 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3960 /* Check for comparing (name, name != 0) and the case where expr
3961 is an SSA_NAME with a definition matching the comparison. */
3962 if (TREE_CODE (expr) == SSA_NAME
3963 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3965 if (operand_equal_p (expr, op1, 0))
3966 return ((code == NE_EXPR && integer_zerop (op2))
3967 || (code == EQ_EXPR && integer_nonzerop (op2)));
3968 s = SSA_NAME_DEF_STMT (expr);
3969 if (is_gimple_assign (s)
3970 && gimple_assign_rhs_code (s) == code
3971 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3972 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3976 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3977 of name is a comparison, recurse. */
3978 if (TREE_CODE (op1) == SSA_NAME
3979 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3981 s = SSA_NAME_DEF_STMT (op1);
3982 if (is_gimple_assign (s)
3983 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3985 enum tree_code c = gimple_assign_rhs_code (s);
3986 if ((c == NE_EXPR && integer_zerop (op2))
3987 || (c == EQ_EXPR && integer_nonzerop (op2)))
3988 return same_bool_comparison_p (expr, c,
3989 gimple_assign_rhs1 (s),
3990 gimple_assign_rhs2 (s));
3991 if ((c == EQ_EXPR && integer_zerop (op2))
3992 || (c == NE_EXPR && integer_nonzerop (op2)))
3993 return same_bool_comparison_p (expr,
3994 invert_tree_comparison (c, false),
3995 gimple_assign_rhs1 (s),
3996 gimple_assign_rhs2 (s));
4002 /* Check to see if two boolean expressions OP1 and OP2 are logically
4006 same_bool_result_p (const_tree op1, const_tree op2)
4008 /* Simple cases first. */
4009 if (operand_equal_p (op1, op2, 0))
4012 /* Check the cases where at least one of the operands is a comparison.
4013 These are a bit smarter than operand_equal_p in that they apply some
4014 identifies on SSA_NAMEs. */
4015 if (COMPARISON_CLASS_P (op2)
4016 && same_bool_comparison_p (op1, TREE_CODE (op2),
4017 TREE_OPERAND (op2, 0),
4018 TREE_OPERAND (op2, 1)))
4020 if (COMPARISON_CLASS_P (op1)
4021 && same_bool_comparison_p (op2, TREE_CODE (op1),
4022 TREE_OPERAND (op1, 0),
4023 TREE_OPERAND (op1, 1)))
4030 /* Forward declarations for some mutually recursive functions. */
4033 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4034 enum tree_code code2, tree op2a, tree op2b);
4036 and_var_with_comparison (tree var, bool invert,
4037 enum tree_code code2, tree op2a, tree op2b);
4039 and_var_with_comparison_1 (gimple *stmt,
4040 enum tree_code code2, tree op2a, tree op2b);
4042 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4043 enum tree_code code2, tree op2a, tree op2b);
4045 or_var_with_comparison (tree var, bool invert,
4046 enum tree_code code2, tree op2a, tree op2b);
4048 or_var_with_comparison_1 (gimple *stmt,
4049 enum tree_code code2, tree op2a, tree op2b);
4051 /* Helper function for and_comparisons_1: try to simplify the AND of the
4052 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4053 If INVERT is true, invert the value of the VAR before doing the AND.
4054 Return NULL_EXPR if we can't simplify this to a single expression. */
4057 and_var_with_comparison (tree var, bool invert,
4058 enum tree_code code2, tree op2a, tree op2b)
4061 gimple *stmt = SSA_NAME_DEF_STMT (var);
4063 /* We can only deal with variables whose definitions are assignments. */
4064 if (!is_gimple_assign (stmt))
4067 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4068 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4069 Then we only have to consider the simpler non-inverted cases. */
4071 t = or_var_with_comparison_1 (stmt,
4072 invert_tree_comparison (code2, false),
4075 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4076 return canonicalize_bool (t, invert);
4079 /* Try to simplify the AND of the ssa variable defined by the assignment
4080 STMT with the comparison specified by (OP2A CODE2 OP2B).
4081 Return NULL_EXPR if we can't simplify this to a single expression. */
4084 and_var_with_comparison_1 (gimple *stmt,
4085 enum tree_code code2, tree op2a, tree op2b)
4087 tree var = gimple_assign_lhs (stmt);
4088 tree true_test_var = NULL_TREE;
4089 tree false_test_var = NULL_TREE;
4090 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4092 /* Check for identities like (var AND (var == 0)) => false. */
4093 if (TREE_CODE (op2a) == SSA_NAME
4094 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4096 if ((code2 == NE_EXPR && integer_zerop (op2b))
4097 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4099 true_test_var = op2a;
4100 if (var == true_test_var)
4103 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4104 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4106 false_test_var = op2a;
4107 if (var == false_test_var)
4108 return boolean_false_node;
4112 /* If the definition is a comparison, recurse on it. */
4113 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4115 tree t = and_comparisons_1 (innercode,
4116 gimple_assign_rhs1 (stmt),
4117 gimple_assign_rhs2 (stmt),
4125 /* If the definition is an AND or OR expression, we may be able to
4126 simplify by reassociating. */
4127 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4128 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4130 tree inner1 = gimple_assign_rhs1 (stmt);
4131 tree inner2 = gimple_assign_rhs2 (stmt);
4134 tree partial = NULL_TREE;
4135 bool is_and = (innercode == BIT_AND_EXPR);
4137 /* Check for boolean identities that don't require recursive examination
4139 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4140 inner1 AND (inner1 OR inner2) => inner1
4141 !inner1 AND (inner1 AND inner2) => false
4142 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4143 Likewise for similar cases involving inner2. */
4144 if (inner1 == true_test_var)
4145 return (is_and ? var : inner1);
4146 else if (inner2 == true_test_var)
4147 return (is_and ? var : inner2);
4148 else if (inner1 == false_test_var)
4150 ? boolean_false_node
4151 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4152 else if (inner2 == false_test_var)
4154 ? boolean_false_node
4155 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4157 /* Next, redistribute/reassociate the AND across the inner tests.
4158 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4159 if (TREE_CODE (inner1) == SSA_NAME
4160 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4161 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4162 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4163 gimple_assign_rhs1 (s),
4164 gimple_assign_rhs2 (s),
4165 code2, op2a, op2b)))
4167 /* Handle the AND case, where we are reassociating:
4168 (inner1 AND inner2) AND (op2a code2 op2b)
4170 If the partial result t is a constant, we win. Otherwise
4171 continue on to try reassociating with the other inner test. */
4174 if (integer_onep (t))
4176 else if (integer_zerop (t))
4177 return boolean_false_node;
4180 /* Handle the OR case, where we are redistributing:
4181 (inner1 OR inner2) AND (op2a code2 op2b)
4182 => (t OR (inner2 AND (op2a code2 op2b))) */
4183 else if (integer_onep (t))
4184 return boolean_true_node;
4186 /* Save partial result for later. */
4190 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4191 if (TREE_CODE (inner2) == SSA_NAME
4192 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4193 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4194 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4195 gimple_assign_rhs1 (s),
4196 gimple_assign_rhs2 (s),
4197 code2, op2a, op2b)))
4199 /* Handle the AND case, where we are reassociating:
4200 (inner1 AND inner2) AND (op2a code2 op2b)
4201 => (inner1 AND t) */
4204 if (integer_onep (t))
4206 else if (integer_zerop (t))
4207 return boolean_false_node;
4208 /* If both are the same, we can apply the identity
4210 else if (partial && same_bool_result_p (t, partial))
4214 /* Handle the OR case. where we are redistributing:
4215 (inner1 OR inner2) AND (op2a code2 op2b)
4216 => (t OR (inner1 AND (op2a code2 op2b)))
4217 => (t OR partial) */
4220 if (integer_onep (t))
4221 return boolean_true_node;
4224 /* We already got a simplification for the other
4225 operand to the redistributed OR expression. The
4226 interesting case is when at least one is false.
4227 Or, if both are the same, we can apply the identity
4229 if (integer_zerop (partial))
4231 else if (integer_zerop (t))
4233 else if (same_bool_result_p (t, partial))
4242 /* Try to simplify the AND of two comparisons defined by
4243 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4244 If this can be done without constructing an intermediate value,
4245 return the resulting tree; otherwise NULL_TREE is returned.
4246 This function is deliberately asymmetric as it recurses on SSA_DEFs
4247 in the first comparison but not the second. */
4250 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4251 enum tree_code code2, tree op2a, tree op2b)
4253 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4255 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4256 if (operand_equal_p (op1a, op2a, 0)
4257 && operand_equal_p (op1b, op2b, 0))
4259 /* Result will be either NULL_TREE, or a combined comparison. */
4260 tree t = combine_comparisons (UNKNOWN_LOCATION,
4261 TRUTH_ANDIF_EXPR, code1, code2,
4262 truth_type, op1a, op1b);
4267 /* Likewise the swapped case of the above. */
4268 if (operand_equal_p (op1a, op2b, 0)
4269 && operand_equal_p (op1b, op2a, 0))
4271 /* Result will be either NULL_TREE, or a combined comparison. */
4272 tree t = combine_comparisons (UNKNOWN_LOCATION,
4273 TRUTH_ANDIF_EXPR, code1,
4274 swap_tree_comparison (code2),
4275 truth_type, op1a, op1b);
4280 /* If both comparisons are of the same value against constants, we might
4281 be able to merge them. */
4282 if (operand_equal_p (op1a, op2a, 0)
4283 && TREE_CODE (op1b) == INTEGER_CST
4284 && TREE_CODE (op2b) == INTEGER_CST)
4286 int cmp = tree_int_cst_compare (op1b, op2b);
4288 /* If we have (op1a == op1b), we should either be able to
4289 return that or FALSE, depending on whether the constant op1b
4290 also satisfies the other comparison against op2b. */
4291 if (code1 == EQ_EXPR)
4297 case EQ_EXPR: val = (cmp == 0); break;
4298 case NE_EXPR: val = (cmp != 0); break;
4299 case LT_EXPR: val = (cmp < 0); break;
4300 case GT_EXPR: val = (cmp > 0); break;
4301 case LE_EXPR: val = (cmp <= 0); break;
4302 case GE_EXPR: val = (cmp >= 0); break;
4303 default: done = false;
4308 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4310 return boolean_false_node;
4313 /* Likewise if the second comparison is an == comparison. */
4314 else if (code2 == EQ_EXPR)
4320 case EQ_EXPR: val = (cmp == 0); break;
4321 case NE_EXPR: val = (cmp != 0); break;
4322 case LT_EXPR: val = (cmp > 0); break;
4323 case GT_EXPR: val = (cmp < 0); break;
4324 case LE_EXPR: val = (cmp >= 0); break;
4325 case GE_EXPR: val = (cmp <= 0); break;
4326 default: done = false;
4331 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4333 return boolean_false_node;
4337 /* Same business with inequality tests. */
4338 else if (code1 == NE_EXPR)
4343 case EQ_EXPR: val = (cmp != 0); break;
4344 case NE_EXPR: val = (cmp == 0); break;
4345 case LT_EXPR: val = (cmp >= 0); break;
4346 case GT_EXPR: val = (cmp <= 0); break;
4347 case LE_EXPR: val = (cmp > 0); break;
4348 case GE_EXPR: val = (cmp < 0); break;
4353 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4355 else if (code2 == NE_EXPR)
4360 case EQ_EXPR: val = (cmp == 0); break;
4361 case NE_EXPR: val = (cmp != 0); break;
4362 case LT_EXPR: val = (cmp <= 0); break;
4363 case GT_EXPR: val = (cmp >= 0); break;
4364 case LE_EXPR: val = (cmp < 0); break;
4365 case GE_EXPR: val = (cmp > 0); break;
4370 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4373 /* Chose the more restrictive of two < or <= comparisons. */
4374 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4375 && (code2 == LT_EXPR || code2 == LE_EXPR))
4377 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4378 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4380 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4383 /* Likewise chose the more restrictive of two > or >= comparisons. */
4384 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4385 && (code2 == GT_EXPR || code2 == GE_EXPR))
4387 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4388 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4390 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4393 /* Check for singleton ranges. */
4395 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4396 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4397 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4399 /* Check for disjoint ranges. */
4401 && (code1 == LT_EXPR || code1 == LE_EXPR)
4402 && (code2 == GT_EXPR || code2 == GE_EXPR))
4403 return boolean_false_node;
4405 && (code1 == GT_EXPR || code1 == GE_EXPR)
4406 && (code2 == LT_EXPR || code2 == LE_EXPR))
4407 return boolean_false_node;
4410 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4411 NAME's definition is a truth value. See if there are any simplifications
4412 that can be done against the NAME's definition. */
4413 if (TREE_CODE (op1a) == SSA_NAME
4414 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4415 && (integer_zerop (op1b) || integer_onep (op1b)))
4417 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4418 || (code1 == NE_EXPR && integer_onep (op1b)));
4419 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4420 switch (gimple_code (stmt))
4423 /* Try to simplify by copy-propagating the definition. */
4424 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4427 /* If every argument to the PHI produces the same result when
4428 ANDed with the second comparison, we win.
4429 Do not do this unless the type is bool since we need a bool
4430 result here anyway. */
4431 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4433 tree result = NULL_TREE;
4435 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4437 tree arg = gimple_phi_arg_def (stmt, i);
4439 /* If this PHI has itself as an argument, ignore it.
4440 If all the other args produce the same result,
4442 if (arg == gimple_phi_result (stmt))
4444 else if (TREE_CODE (arg) == INTEGER_CST)
4446 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4449 result = boolean_false_node;
4450 else if (!integer_zerop (result))
4454 result = fold_build2 (code2, boolean_type_node,
4456 else if (!same_bool_comparison_p (result,
4460 else if (TREE_CODE (arg) == SSA_NAME
4461 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4464 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4465 /* In simple cases we can look through PHI nodes,
4466 but we have to be careful with loops.
4468 if (! dom_info_available_p (CDI_DOMINATORS)
4469 || gimple_bb (def_stmt) == gimple_bb (stmt)
4470 || dominated_by_p (CDI_DOMINATORS,
4471 gimple_bb (def_stmt),
4474 temp = and_var_with_comparison (arg, invert, code2,
4480 else if (!same_bool_result_p (result, temp))
4496 /* Try to simplify the AND of two comparisons, specified by
4497 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4498 If this can be simplified to a single expression (without requiring
4499 introducing more SSA variables to hold intermediate values),
4500 return the resulting tree. Otherwise return NULL_TREE.
4501 If the result expression is non-null, it has boolean type. */
4504 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4505 enum tree_code code2, tree op2a, tree op2b)
4507 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4511 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4514 /* Helper function for or_comparisons_1: try to simplify the OR of the
4515 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4516 If INVERT is true, invert the value of VAR before doing the OR.
4517 Return NULL_EXPR if we can't simplify this to a single expression. */
4520 or_var_with_comparison (tree var, bool invert,
4521 enum tree_code code2, tree op2a, tree op2b)
4524 gimple *stmt = SSA_NAME_DEF_STMT (var);
4526 /* We can only deal with variables whose definitions are assignments. */
4527 if (!is_gimple_assign (stmt))
4530 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4531 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4532 Then we only have to consider the simpler non-inverted cases. */
4534 t = and_var_with_comparison_1 (stmt,
4535 invert_tree_comparison (code2, false),
4538 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4539 return canonicalize_bool (t, invert);
4542 /* Try to simplify the OR of the ssa variable defined by the assignment
4543 STMT with the comparison specified by (OP2A CODE2 OP2B).
4544 Return NULL_EXPR if we can't simplify this to a single expression. */
4547 or_var_with_comparison_1 (gimple *stmt,
4548 enum tree_code code2, tree op2a, tree op2b)
4550 tree var = gimple_assign_lhs (stmt);
4551 tree true_test_var = NULL_TREE;
4552 tree false_test_var = NULL_TREE;
4553 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4555 /* Check for identities like (var OR (var != 0)) => true . */
4556 if (TREE_CODE (op2a) == SSA_NAME
4557 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4559 if ((code2 == NE_EXPR && integer_zerop (op2b))
4560 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4562 true_test_var = op2a;
4563 if (var == true_test_var)
4566 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4567 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4569 false_test_var = op2a;
4570 if (var == false_test_var)
4571 return boolean_true_node;
4575 /* If the definition is a comparison, recurse on it. */
4576 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4578 tree t = or_comparisons_1 (innercode,
4579 gimple_assign_rhs1 (stmt),
4580 gimple_assign_rhs2 (stmt),
4588 /* If the definition is an AND or OR expression, we may be able to
4589 simplify by reassociating. */
4590 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4591 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4593 tree inner1 = gimple_assign_rhs1 (stmt);
4594 tree inner2 = gimple_assign_rhs2 (stmt);
4597 tree partial = NULL_TREE;
4598 bool is_or = (innercode == BIT_IOR_EXPR);
4600 /* Check for boolean identities that don't require recursive examination
4602 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4603 inner1 OR (inner1 AND inner2) => inner1
4604 !inner1 OR (inner1 OR inner2) => true
4605 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4607 if (inner1 == true_test_var)
4608 return (is_or ? var : inner1);
4609 else if (inner2 == true_test_var)
4610 return (is_or ? var : inner2);
4611 else if (inner1 == false_test_var)
4614 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4615 else if (inner2 == false_test_var)
4618 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4620 /* Next, redistribute/reassociate the OR across the inner tests.
4621 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4622 if (TREE_CODE (inner1) == SSA_NAME
4623 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4624 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4625 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4626 gimple_assign_rhs1 (s),
4627 gimple_assign_rhs2 (s),
4628 code2, op2a, op2b)))
4630 /* Handle the OR case, where we are reassociating:
4631 (inner1 OR inner2) OR (op2a code2 op2b)
4633 If the partial result t is a constant, we win. Otherwise
4634 continue on to try reassociating with the other inner test. */
4637 if (integer_onep (t))
4638 return boolean_true_node;
4639 else if (integer_zerop (t))
4643 /* Handle the AND case, where we are redistributing:
4644 (inner1 AND inner2) OR (op2a code2 op2b)
4645 => (t AND (inner2 OR (op2a code op2b))) */
4646 else if (integer_zerop (t))
4647 return boolean_false_node;
4649 /* Save partial result for later. */
4653 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4654 if (TREE_CODE (inner2) == SSA_NAME
4655 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4656 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4657 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4658 gimple_assign_rhs1 (s),
4659 gimple_assign_rhs2 (s),
4660 code2, op2a, op2b)))
4662 /* Handle the OR case, where we are reassociating:
4663 (inner1 OR inner2) OR (op2a code2 op2b)
4665 => (t OR partial) */
4668 if (integer_zerop (t))
4670 else if (integer_onep (t))
4671 return boolean_true_node;
4672 /* If both are the same, we can apply the identity
4674 else if (partial && same_bool_result_p (t, partial))
4678 /* Handle the AND case, where we are redistributing:
4679 (inner1 AND inner2) OR (op2a code2 op2b)
4680 => (t AND (inner1 OR (op2a code2 op2b)))
4681 => (t AND partial) */
4684 if (integer_zerop (t))
4685 return boolean_false_node;
4688 /* We already got a simplification for the other
4689 operand to the redistributed AND expression. The
4690 interesting case is when at least one is true.
4691 Or, if both are the same, we can apply the identity
4693 if (integer_onep (partial))
4695 else if (integer_onep (t))
4697 else if (same_bool_result_p (t, partial))
4706 /* Try to simplify the OR of two comparisons defined by
4707 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4708 If this can be done without constructing an intermediate value,
4709 return the resulting tree; otherwise NULL_TREE is returned.
4710 This function is deliberately asymmetric as it recurses on SSA_DEFs
4711 in the first comparison but not the second. */
4714 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4715 enum tree_code code2, tree op2a, tree op2b)
4717 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4719 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4720 if (operand_equal_p (op1a, op2a, 0)
4721 && operand_equal_p (op1b, op2b, 0))
4723 /* Result will be either NULL_TREE, or a combined comparison. */
4724 tree t = combine_comparisons (UNKNOWN_LOCATION,
4725 TRUTH_ORIF_EXPR, code1, code2,
4726 truth_type, op1a, op1b);
4731 /* Likewise the swapped case of the above. */
4732 if (operand_equal_p (op1a, op2b, 0)
4733 && operand_equal_p (op1b, op2a, 0))
4735 /* Result will be either NULL_TREE, or a combined comparison. */
4736 tree t = combine_comparisons (UNKNOWN_LOCATION,
4737 TRUTH_ORIF_EXPR, code1,
4738 swap_tree_comparison (code2),
4739 truth_type, op1a, op1b);
4744 /* If both comparisons are of the same value against constants, we might
4745 be able to merge them. */
4746 if (operand_equal_p (op1a, op2a, 0)
4747 && TREE_CODE (op1b) == INTEGER_CST
4748 && TREE_CODE (op2b) == INTEGER_CST)
4750 int cmp = tree_int_cst_compare (op1b, op2b);
4752 /* If we have (op1a != op1b), we should either be able to
4753 return that or TRUE, depending on whether the constant op1b
4754 also satisfies the other comparison against op2b. */
4755 if (code1 == NE_EXPR)
4761 case EQ_EXPR: val = (cmp == 0); break;
4762 case NE_EXPR: val = (cmp != 0); break;
4763 case LT_EXPR: val = (cmp < 0); break;
4764 case GT_EXPR: val = (cmp > 0); break;
4765 case LE_EXPR: val = (cmp <= 0); break;
4766 case GE_EXPR: val = (cmp >= 0); break;
4767 default: done = false;
4772 return boolean_true_node;
4774 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4777 /* Likewise if the second comparison is a != comparison. */
4778 else if (code2 == NE_EXPR)
4784 case EQ_EXPR: val = (cmp == 0); break;
4785 case NE_EXPR: val = (cmp != 0); break;
4786 case LT_EXPR: val = (cmp > 0); break;
4787 case GT_EXPR: val = (cmp < 0); break;
4788 case LE_EXPR: val = (cmp >= 0); break;
4789 case GE_EXPR: val = (cmp <= 0); break;
4790 default: done = false;
4795 return boolean_true_node;
4797 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4801 /* See if an equality test is redundant with the other comparison. */
4802 else if (code1 == EQ_EXPR)
4807 case EQ_EXPR: val = (cmp == 0); break;
4808 case NE_EXPR: val = (cmp != 0); break;
4809 case LT_EXPR: val = (cmp < 0); break;
4810 case GT_EXPR: val = (cmp > 0); break;
4811 case LE_EXPR: val = (cmp <= 0); break;
4812 case GE_EXPR: val = (cmp >= 0); break;
4817 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4819 else if (code2 == EQ_EXPR)
4824 case EQ_EXPR: val = (cmp == 0); break;
4825 case NE_EXPR: val = (cmp != 0); break;
4826 case LT_EXPR: val = (cmp > 0); break;
4827 case GT_EXPR: val = (cmp < 0); break;
4828 case LE_EXPR: val = (cmp >= 0); break;
4829 case GE_EXPR: val = (cmp <= 0); break;
4834 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4837 /* Chose the less restrictive of two < or <= comparisons. */
4838 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4839 && (code2 == LT_EXPR || code2 == LE_EXPR))
4841 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4842 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4844 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4847 /* Likewise chose the less restrictive of two > or >= comparisons. */
4848 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4849 && (code2 == GT_EXPR || code2 == GE_EXPR))
4851 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4852 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4854 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4857 /* Check for singleton ranges. */
4859 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4860 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4861 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4863 /* Check for less/greater pairs that don't restrict the range at all. */
4865 && (code1 == LT_EXPR || code1 == LE_EXPR)
4866 && (code2 == GT_EXPR || code2 == GE_EXPR))
4867 return boolean_true_node;
4869 && (code1 == GT_EXPR || code1 == GE_EXPR)
4870 && (code2 == LT_EXPR || code2 == LE_EXPR))
4871 return boolean_true_node;
4874 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4875 NAME's definition is a truth value. See if there are any simplifications
4876 that can be done against the NAME's definition. */
4877 if (TREE_CODE (op1a) == SSA_NAME
4878 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4879 && (integer_zerop (op1b) || integer_onep (op1b)))
4881 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4882 || (code1 == NE_EXPR && integer_onep (op1b)));
4883 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4884 switch (gimple_code (stmt))
4887 /* Try to simplify by copy-propagating the definition. */
4888 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4891 /* If every argument to the PHI produces the same result when
4892 ORed with the second comparison, we win.
4893 Do not do this unless the type is bool since we need a bool
4894 result here anyway. */
4895 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4897 tree result = NULL_TREE;
4899 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4901 tree arg = gimple_phi_arg_def (stmt, i);
4903 /* If this PHI has itself as an argument, ignore it.
4904 If all the other args produce the same result,
4906 if (arg == gimple_phi_result (stmt))
4908 else if (TREE_CODE (arg) == INTEGER_CST)
4910 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4913 result = boolean_true_node;
4914 else if (!integer_onep (result))
4918 result = fold_build2 (code2, boolean_type_node,
4920 else if (!same_bool_comparison_p (result,
4924 else if (TREE_CODE (arg) == SSA_NAME
4925 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4928 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4929 /* In simple cases we can look through PHI nodes,
4930 but we have to be careful with loops.
4932 if (! dom_info_available_p (CDI_DOMINATORS)
4933 || gimple_bb (def_stmt) == gimple_bb (stmt)
4934 || dominated_by_p (CDI_DOMINATORS,
4935 gimple_bb (def_stmt),
4938 temp = or_var_with_comparison (arg, invert, code2,
4944 else if (!same_bool_result_p (result, temp))
4960 /* Try to simplify the OR of two comparisons, specified by
4961 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4962 If this can be simplified to a single expression (without requiring
4963 introducing more SSA variables to hold intermediate values),
4964 return the resulting tree. Otherwise return NULL_TREE.
4965 If the result expression is non-null, it has boolean type. */
4968 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4969 enum tree_code code2, tree op2a, tree op2b)
4971 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4975 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4979 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4981 Either NULL_TREE, a simplified but non-constant or a constant
4984 ??? This should go into a gimple-fold-inline.h file to be eventually
4985 privatized with the single valueize function used in the various TUs
4986 to avoid the indirect function call overhead. */
4989 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4990 tree (*gvalueize) (tree))
4994 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4995 edges if there are intermediate VARYING defs. For this reason
4996 do not follow SSA edges here even though SCCVN can technically
4997 just deal fine with that. */
4998 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5000 tree res = NULL_TREE;
5001 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5003 else if (mprts_hook)
5004 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5007 if (dump_file && dump_flags & TDF_DETAILS)
5009 fprintf (dump_file, "Match-and-simplified ");
5010 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5011 fprintf (dump_file, " to ");
5012 print_generic_expr (dump_file, res, 0);
5013 fprintf (dump_file, "\n");
5019 location_t loc = gimple_location (stmt);
5020 switch (gimple_code (stmt))
5024 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5026 switch (get_gimple_rhs_class (subcode))
5028 case GIMPLE_SINGLE_RHS:
5030 tree rhs = gimple_assign_rhs1 (stmt);
5031 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5033 if (TREE_CODE (rhs) == SSA_NAME)
5035 /* If the RHS is an SSA_NAME, return its known constant value,
5037 return (*valueize) (rhs);
5039 /* Handle propagating invariant addresses into address
5041 else if (TREE_CODE (rhs) == ADDR_EXPR
5042 && !is_gimple_min_invariant (rhs))
5044 HOST_WIDE_INT offset = 0;
5046 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5050 && (CONSTANT_CLASS_P (base)
5051 || decl_address_invariant_p (base)))
5052 return build_invariant_address (TREE_TYPE (rhs),
5055 else if (TREE_CODE (rhs) == CONSTRUCTOR
5056 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5057 && (CONSTRUCTOR_NELTS (rhs)
5058 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5063 vec = XALLOCAVEC (tree,
5064 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5065 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5067 val = (*valueize) (val);
5068 if (TREE_CODE (val) == INTEGER_CST
5069 || TREE_CODE (val) == REAL_CST
5070 || TREE_CODE (val) == FIXED_CST)
5076 return build_vector (TREE_TYPE (rhs), vec);
5078 if (subcode == OBJ_TYPE_REF)
5080 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5081 /* If callee is constant, we can fold away the wrapper. */
5082 if (is_gimple_min_invariant (val))
5086 if (kind == tcc_reference)
5088 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5089 || TREE_CODE (rhs) == REALPART_EXPR
5090 || TREE_CODE (rhs) == IMAGPART_EXPR)
5091 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5093 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5094 return fold_unary_loc (EXPR_LOCATION (rhs),
5096 TREE_TYPE (rhs), val);
5098 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5099 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5101 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5102 return fold_ternary_loc (EXPR_LOCATION (rhs),
5104 TREE_TYPE (rhs), val,
5105 TREE_OPERAND (rhs, 1),
5106 TREE_OPERAND (rhs, 2));
5108 else if (TREE_CODE (rhs) == MEM_REF
5109 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5111 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5112 if (TREE_CODE (val) == ADDR_EXPR
5113 && is_gimple_min_invariant (val))
5115 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5117 TREE_OPERAND (rhs, 1));
5122 return fold_const_aggregate_ref_1 (rhs, valueize);
5124 else if (kind == tcc_declaration)
5125 return get_symbol_constant_value (rhs);
5129 case GIMPLE_UNARY_RHS:
5132 case GIMPLE_BINARY_RHS:
5133 /* Translate &x + CST into an invariant form suitable for
5134 further propagation. */
5135 if (subcode == POINTER_PLUS_EXPR)
5137 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5138 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5139 if (TREE_CODE (op0) == ADDR_EXPR
5140 && TREE_CODE (op1) == INTEGER_CST)
5142 tree off = fold_convert (ptr_type_node, op1);
5143 return build_fold_addr_expr_loc
5145 fold_build2 (MEM_REF,
5146 TREE_TYPE (TREE_TYPE (op0)),
5147 unshare_expr (op0), off));
5150 /* Canonicalize bool != 0 and bool == 0 appearing after
5151 valueization. While gimple_simplify handles this
5152 it can get confused by the ~X == 1 -> X == 0 transform
5153 which we cant reduce to a SSA name or a constant
5154 (and we have no way to tell gimple_simplify to not
5155 consider those transforms in the first place). */
5156 else if (subcode == EQ_EXPR
5157 || subcode == NE_EXPR)
5159 tree lhs = gimple_assign_lhs (stmt);
5160 tree op0 = gimple_assign_rhs1 (stmt);
5161 if (useless_type_conversion_p (TREE_TYPE (lhs),
5164 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5165 op0 = (*valueize) (op0);
5166 if (TREE_CODE (op0) == INTEGER_CST)
5167 std::swap (op0, op1);
5168 if (TREE_CODE (op1) == INTEGER_CST
5169 && ((subcode == NE_EXPR && integer_zerop (op1))
5170 || (subcode == EQ_EXPR && integer_onep (op1))))
5176 case GIMPLE_TERNARY_RHS:
5178 /* Handle ternary operators that can appear in GIMPLE form. */
5179 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5180 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5181 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5182 return fold_ternary_loc (loc, subcode,
5183 gimple_expr_type (stmt), op0, op1, op2);
5194 gcall *call_stmt = as_a <gcall *> (stmt);
5196 if (gimple_call_internal_p (stmt))
5198 enum tree_code subcode = ERROR_MARK;
5199 switch (gimple_call_internal_fn (stmt))
5201 case IFN_UBSAN_CHECK_ADD:
5202 subcode = PLUS_EXPR;
5204 case IFN_UBSAN_CHECK_SUB:
5205 subcode = MINUS_EXPR;
5207 case IFN_UBSAN_CHECK_MUL:
5208 subcode = MULT_EXPR;
5213 tree arg0 = gimple_call_arg (stmt, 0);
5214 tree arg1 = gimple_call_arg (stmt, 1);
5215 tree op0 = (*valueize) (arg0);
5216 tree op1 = (*valueize) (arg1);
5218 if (TREE_CODE (op0) != INTEGER_CST
5219 || TREE_CODE (op1) != INTEGER_CST)
5224 /* x * 0 = 0 * x = 0 without overflow. */
5225 if (integer_zerop (op0) || integer_zerop (op1))
5226 return build_zero_cst (TREE_TYPE (arg0));
5229 /* y - y = 0 without overflow. */
5230 if (operand_equal_p (op0, op1, 0))
5231 return build_zero_cst (TREE_TYPE (arg0));
5238 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5240 && TREE_CODE (res) == INTEGER_CST
5241 && !TREE_OVERFLOW (res))
5246 fn = (*valueize) (gimple_call_fn (stmt));
5247 if (TREE_CODE (fn) == ADDR_EXPR
5248 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5249 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5250 && gimple_builtin_call_types_compatible_p (stmt,
5251 TREE_OPERAND (fn, 0)))
5253 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5256 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5257 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5258 retval = fold_builtin_call_array (loc,
5259 gimple_call_return_type (call_stmt),
5260 fn, gimple_call_num_args (stmt), args);
5263 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5264 STRIP_NOPS (retval);
5265 retval = fold_convert (gimple_call_return_type (call_stmt),
5278 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5279 Returns NULL_TREE if folding to a constant is not possible, otherwise
5280 returns a constant according to is_gimple_min_invariant. */
5283 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5285 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5286 if (res && is_gimple_min_invariant (res))
5292 /* The following set of functions are supposed to fold references using
5293 their constant initializers. */
5295 /* See if we can find constructor defining value of BASE.
5296 When we know the consructor with constant offset (such as
5297 base is array[40] and we do know constructor of array), then
5298 BIT_OFFSET is adjusted accordingly.
5300 As a special case, return error_mark_node when constructor
5301 is not explicitly available, but it is known to be zero
5302 such as 'static const int a;'. */
5304 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5305 tree (*valueize)(tree))
5307 HOST_WIDE_INT bit_offset2, size, max_size;
5310 if (TREE_CODE (base) == MEM_REF)
5312 if (!integer_zerop (TREE_OPERAND (base, 1)))
5314 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5316 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5321 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5322 base = valueize (TREE_OPERAND (base, 0));
5323 if (!base || TREE_CODE (base) != ADDR_EXPR)
5325 base = TREE_OPERAND (base, 0);
5328 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5329 DECL_INITIAL. If BASE is a nested reference into another
5330 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5331 the inner reference. */
5332 switch (TREE_CODE (base))
5337 tree init = ctor_for_folding (base);
5339 /* Our semantic is exact opposite of ctor_for_folding;
5340 NULL means unknown, while error_mark_node is 0. */
5341 if (init == error_mark_node)
5344 return error_mark_node;
5350 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5352 if (max_size == -1 || size != max_size)
5354 *bit_offset += bit_offset2;
5355 return get_base_constructor (base, bit_offset, valueize);
5366 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5367 SIZE to the memory at bit OFFSET. */
5370 fold_array_ctor_reference (tree type, tree ctor,
5371 unsigned HOST_WIDE_INT offset,
5372 unsigned HOST_WIDE_INT size,
5375 offset_int low_bound;
5376 offset_int elt_size;
5377 offset_int access_index;
5378 tree domain_type = NULL_TREE;
5379 HOST_WIDE_INT inner_offset;
5381 /* Compute low bound and elt size. */
5382 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5383 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5384 if (domain_type && TYPE_MIN_VALUE (domain_type))
5386 /* Static constructors for variably sized objects makes no sense. */
5387 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5388 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5392 /* Static constructors for variably sized objects makes no sense. */
5393 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5395 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5397 /* We can handle only constantly sized accesses that are known to not
5398 be larger than size of array element. */
5399 if (!TYPE_SIZE_UNIT (type)
5400 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5401 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
5405 /* Compute the array index we look for. */
5406 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5408 access_index += low_bound;
5410 /* And offset within the access. */
5411 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5413 /* See if the array field is large enough to span whole access. We do not
5414 care to fold accesses spanning multiple array indexes. */
5415 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5417 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5418 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5420 /* When memory is not explicitely mentioned in constructor,
5421 it is 0 (or out of range). */
5422 return build_zero_cst (type);
5425 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5426 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5429 fold_nonarray_ctor_reference (tree type, tree ctor,
5430 unsigned HOST_WIDE_INT offset,
5431 unsigned HOST_WIDE_INT size,
5434 unsigned HOST_WIDE_INT cnt;
5437 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5440 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5441 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5442 tree field_size = DECL_SIZE (cfield);
5443 offset_int bitoffset;
5444 offset_int bitoffset_end, access_end;
5446 /* Variable sized objects in static constructors makes no sense,
5447 but field_size can be NULL for flexible array members. */
5448 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5449 && TREE_CODE (byte_offset) == INTEGER_CST
5450 && (field_size != NULL_TREE
5451 ? TREE_CODE (field_size) == INTEGER_CST
5452 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5454 /* Compute bit offset of the field. */
5455 bitoffset = (wi::to_offset (field_offset)
5456 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
5457 /* Compute bit offset where the field ends. */
5458 if (field_size != NULL_TREE)
5459 bitoffset_end = bitoffset + wi::to_offset (field_size);
5463 access_end = offset_int (offset) + size;
5465 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5466 [BITOFFSET, BITOFFSET_END)? */
5467 if (wi::cmps (access_end, bitoffset) > 0
5468 && (field_size == NULL_TREE
5469 || wi::lts_p (offset, bitoffset_end)))
5471 offset_int inner_offset = offset_int (offset) - bitoffset;
5472 /* We do have overlap. Now see if field is large enough to
5473 cover the access. Give up for accesses spanning multiple
5475 if (wi::cmps (access_end, bitoffset_end) > 0)
5477 if (offset < bitoffset)
5479 return fold_ctor_reference (type, cval,
5480 inner_offset.to_uhwi (), size,
5484 /* When memory is not explicitely mentioned in constructor, it is 0. */
5485 return build_zero_cst (type);
5488 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5489 to the memory at bit OFFSET. */
5492 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5493 unsigned HOST_WIDE_INT size, tree from_decl)
5497 /* We found the field with exact match. */
5498 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5500 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5502 /* We are at the end of walk, see if we can view convert the
5504 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5505 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5506 && !compare_tree_int (TYPE_SIZE (type), size)
5507 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5509 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5510 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5512 STRIP_USELESS_TYPE_CONVERSION (ret);
5515 /* For constants and byte-aligned/sized reads try to go through
5516 native_encode/interpret. */
5517 if (CONSTANT_CLASS_P (ctor)
5518 && BITS_PER_UNIT == 8
5519 && offset % BITS_PER_UNIT == 0
5520 && size % BITS_PER_UNIT == 0
5521 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5523 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5524 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5525 offset / BITS_PER_UNIT);
5527 return native_interpret_expr (type, buf, len);
5529 if (TREE_CODE (ctor) == CONSTRUCTOR)
5532 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5533 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5534 return fold_array_ctor_reference (type, ctor, offset, size,
5537 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5544 /* Return the tree representing the element referenced by T if T is an
5545 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5546 names using VALUEIZE. Return NULL_TREE otherwise. */
5549 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5551 tree ctor, idx, base;
5552 HOST_WIDE_INT offset, size, max_size;
5556 if (TREE_THIS_VOLATILE (t))
5560 return get_symbol_constant_value (t);
5562 tem = fold_read_from_constant_string (t);
5566 switch (TREE_CODE (t))
5569 case ARRAY_RANGE_REF:
5570 /* Constant indexes are handled well by get_base_constructor.
5571 Only special case variable offsets.
5572 FIXME: This code can't handle nested references with variable indexes
5573 (they will be handled only by iteration of ccp). Perhaps we can bring
5574 get_ref_base_and_extent here and make it use a valueize callback. */
5575 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5577 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5578 && TREE_CODE (idx) == INTEGER_CST)
5580 tree low_bound, unit_size;
5582 /* If the resulting bit-offset is constant, track it. */
5583 if ((low_bound = array_ref_low_bound (t),
5584 TREE_CODE (low_bound) == INTEGER_CST)
5585 && (unit_size = array_ref_element_size (t),
5586 tree_fits_uhwi_p (unit_size)))
5589 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5590 TYPE_PRECISION (TREE_TYPE (idx)));
5592 if (wi::fits_shwi_p (woffset))
5594 offset = woffset.to_shwi ();
5595 /* TODO: This code seems wrong, multiply then check
5596 to see if it fits. */
5597 offset *= tree_to_uhwi (unit_size);
5598 offset *= BITS_PER_UNIT;
5600 base = TREE_OPERAND (t, 0);
5601 ctor = get_base_constructor (base, &offset, valueize);
5602 /* Empty constructor. Always fold to 0. */
5603 if (ctor == error_mark_node)
5604 return build_zero_cst (TREE_TYPE (t));
5605 /* Out of bound array access. Value is undefined,
5609 /* We can not determine ctor. */
5612 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5613 tree_to_uhwi (unit_size)
5623 case TARGET_MEM_REF:
5625 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5626 ctor = get_base_constructor (base, &offset, valueize);
5628 /* Empty constructor. Always fold to 0. */
5629 if (ctor == error_mark_node)
5630 return build_zero_cst (TREE_TYPE (t));
5631 /* We do not know precise address. */
5632 if (max_size == -1 || max_size != size)
5634 /* We can not determine ctor. */
5638 /* Out of bound array access. Value is undefined, but don't fold. */
5642 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5648 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5649 if (c && TREE_CODE (c) == COMPLEX_CST)
5650 return fold_build1_loc (EXPR_LOCATION (t),
5651 TREE_CODE (t), TREE_TYPE (t), c);
5663 fold_const_aggregate_ref (tree t)
5665 return fold_const_aggregate_ref_1 (t, NULL);
5668 /* Lookup virtual method with index TOKEN in a virtual table V
5670 Set CAN_REFER if non-NULL to false if method
5671 is not referable or if the virtual table is ill-formed (such as rewriten
5672 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5675 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5677 unsigned HOST_WIDE_INT offset,
5680 tree vtable = v, init, fn;
5681 unsigned HOST_WIDE_INT size;
5682 unsigned HOST_WIDE_INT elt_size, access_index;
5688 /* First of all double check we have virtual table. */
5689 if (TREE_CODE (v) != VAR_DECL
5690 || !DECL_VIRTUAL_P (v))
5692 /* Pass down that we lost track of the target. */
5698 init = ctor_for_folding (v);
5700 /* The virtual tables should always be born with constructors
5701 and we always should assume that they are avaialble for
5702 folding. At the moment we do not stream them in all cases,
5703 but it should never happen that ctor seem unreachable. */
5705 if (init == error_mark_node)
5707 gcc_assert (in_lto_p);
5708 /* Pass down that we lost track of the target. */
5713 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5714 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5715 offset *= BITS_PER_UNIT;
5716 offset += token * size;
5718 /* Lookup the value in the constructor that is assumed to be array.
5719 This is equivalent to
5720 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5721 offset, size, NULL);
5722 but in a constant time. We expect that frontend produced a simple
5723 array without indexed initializers. */
5725 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5726 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5727 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5728 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5730 access_index = offset / BITS_PER_UNIT / elt_size;
5731 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5733 /* This code makes an assumption that there are no
5734 indexed fileds produced by C++ FE, so we can directly index the array. */
5735 if (access_index < CONSTRUCTOR_NELTS (init))
5737 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5738 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5744 /* For type inconsistent program we may end up looking up virtual method
5745 in virtual table that does not contain TOKEN entries. We may overrun
5746 the virtual table and pick up a constant or RTTI info pointer.
5747 In any case the call is undefined. */
5749 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5750 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5751 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5754 fn = TREE_OPERAND (fn, 0);
5756 /* When cgraph node is missing and function is not public, we cannot
5757 devirtualize. This can happen in WHOPR when the actual method
5758 ends up in other partition, because we found devirtualization
5759 possibility too late. */
5760 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5771 /* Make sure we create a cgraph node for functions we'll reference.
5772 They can be non-existent if the reference comes from an entry
5773 of an external vtable for example. */
5774 cgraph_node::get_create (fn);
5779 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5780 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5781 KNOWN_BINFO carries the binfo describing the true type of
5782 OBJ_TYPE_REF_OBJECT(REF).
5783 Set CAN_REFER if non-NULL to false if method
5784 is not referable or if the virtual table is ill-formed (such as rewriten
5785 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5788 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5791 unsigned HOST_WIDE_INT offset;
5794 v = BINFO_VTABLE (known_binfo);
5795 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5799 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5805 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5808 /* Given a pointer value OP0, return a simplified version of an
5809 indirection through OP0, or NULL_TREE if no simplification is
5810 possible. Note that the resulting type may be different from
5811 the type pointed to in the sense that it is still compatible
5812 from the langhooks point of view. */
5815 gimple_fold_indirect_ref (tree t)
5817 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5822 subtype = TREE_TYPE (sub);
5823 if (!POINTER_TYPE_P (subtype))
5826 if (TREE_CODE (sub) == ADDR_EXPR)
5828 tree op = TREE_OPERAND (sub, 0);
5829 tree optype = TREE_TYPE (op);
5831 if (useless_type_conversion_p (type, optype))
5834 /* *(foo *)&fooarray => fooarray[0] */
5835 if (TREE_CODE (optype) == ARRAY_TYPE
5836 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5837 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5839 tree type_domain = TYPE_DOMAIN (optype);
5840 tree min_val = size_zero_node;
5841 if (type_domain && TYPE_MIN_VALUE (type_domain))
5842 min_val = TYPE_MIN_VALUE (type_domain);
5843 if (TREE_CODE (min_val) == INTEGER_CST)
5844 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5846 /* *(foo *)&complexfoo => __real__ complexfoo */
5847 else if (TREE_CODE (optype) == COMPLEX_TYPE
5848 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5849 return fold_build1 (REALPART_EXPR, type, op);
5850 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5851 else if (TREE_CODE (optype) == VECTOR_TYPE
5852 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5854 tree part_width = TYPE_SIZE (type);
5855 tree index = bitsize_int (0);
5856 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5860 /* *(p + CST) -> ... */
5861 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5862 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5864 tree addr = TREE_OPERAND (sub, 0);
5865 tree off = TREE_OPERAND (sub, 1);
5869 addrtype = TREE_TYPE (addr);
5871 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5872 if (TREE_CODE (addr) == ADDR_EXPR
5873 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5874 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5875 && tree_fits_uhwi_p (off))
5877 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5878 tree part_width = TYPE_SIZE (type);
5879 unsigned HOST_WIDE_INT part_widthi
5880 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5881 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5882 tree index = bitsize_int (indexi);
5883 if (offset / part_widthi
5884 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5885 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5889 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5890 if (TREE_CODE (addr) == ADDR_EXPR
5891 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5892 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5894 tree size = TYPE_SIZE_UNIT (type);
5895 if (tree_int_cst_equal (size, off))
5896 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5899 /* *(p + CST) -> MEM_REF <p, CST>. */
5900 if (TREE_CODE (addr) != ADDR_EXPR
5901 || DECL_P (TREE_OPERAND (addr, 0)))
5902 return fold_build2 (MEM_REF, type,
5904 wide_int_to_tree (ptype, off));
5907 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5908 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5909 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5910 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5913 tree min_val = size_zero_node;
5915 sub = gimple_fold_indirect_ref (sub);
5917 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5918 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5919 if (type_domain && TYPE_MIN_VALUE (type_domain))
5920 min_val = TYPE_MIN_VALUE (type_domain);
5921 if (TREE_CODE (min_val) == INTEGER_CST)
5922 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5928 /* Return true if CODE is an operation that when operating on signed
5929 integer types involves undefined behavior on overflow and the
5930 operation can be expressed with unsigned arithmetic. */
5933 arith_code_with_undefined_signed_overflow (tree_code code)
5941 case POINTER_PLUS_EXPR:
5948 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5949 operation that can be transformed to unsigned arithmetic by converting
5950 its operand, carrying out the operation in the corresponding unsigned
5951 type and converting the result back to the original type.
5953 Returns a sequence of statements that replace STMT and also contain
5954 a modified form of STMT itself. */
5957 rewrite_to_defined_overflow (gimple *stmt)
5959 if (dump_file && (dump_flags & TDF_DETAILS))
5961 fprintf (dump_file, "rewriting stmt with undefined signed "
5963 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5966 tree lhs = gimple_assign_lhs (stmt);
5967 tree type = unsigned_type_for (TREE_TYPE (lhs));
5968 gimple_seq stmts = NULL;
5969 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5971 tree op = gimple_op (stmt, i);
5972 op = gimple_convert (&stmts, type, op);
5973 gimple_set_op (stmt, i, op);
5975 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5976 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5977 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5978 gimple_seq_add_stmt (&stmts, stmt);
5979 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
5980 gimple_seq_add_stmt (&stmts, cvt);
5986 /* The valueization hook we use for the gimple_build API simplification.
5987 This makes us match fold_buildN behavior by only combining with
5988 statements in the sequence(s) we are currently building. */
5991 gimple_build_valueize (tree op)
5993 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5998 /* Build the expression CODE OP0 of type TYPE with location LOC,
5999 simplifying it first if possible. Returns the built
6000 expression value and appends statements possibly defining it
6004 gimple_build (gimple_seq *seq, location_t loc,
6005 enum tree_code code, tree type, tree op0)
6007 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6010 if (gimple_in_ssa_p (cfun))
6011 res = make_ssa_name (type);
6013 res = create_tmp_reg (type);
6015 if (code == REALPART_EXPR
6016 || code == IMAGPART_EXPR
6017 || code == VIEW_CONVERT_EXPR)
6018 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6020 stmt = gimple_build_assign (res, code, op0);
6021 gimple_set_location (stmt, loc);
6022 gimple_seq_add_stmt_without_update (seq, stmt);
6027 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6028 simplifying it first if possible. Returns the built
6029 expression value and appends statements possibly defining it
6033 gimple_build (gimple_seq *seq, location_t loc,
6034 enum tree_code code, tree type, tree op0, tree op1)
6036 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6039 if (gimple_in_ssa_p (cfun))
6040 res = make_ssa_name (type);
6042 res = create_tmp_reg (type);
6043 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6044 gimple_set_location (stmt, loc);
6045 gimple_seq_add_stmt_without_update (seq, stmt);
6050 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6051 simplifying it first if possible. Returns the built
6052 expression value and appends statements possibly defining it
6056 gimple_build (gimple_seq *seq, location_t loc,
6057 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6059 tree res = gimple_simplify (code, type, op0, op1, op2,
6060 seq, gimple_build_valueize);
6063 if (gimple_in_ssa_p (cfun))
6064 res = make_ssa_name (type);
6066 res = create_tmp_reg (type);
6068 if (code == BIT_FIELD_REF)
6069 stmt = gimple_build_assign (res, code,
6070 build3 (code, type, op0, op1, op2));
6072 stmt = gimple_build_assign (res, code, op0, op1, op2);
6073 gimple_set_location (stmt, loc);
6074 gimple_seq_add_stmt_without_update (seq, stmt);
6079 /* Build the call FN (ARG0) with a result of type TYPE
6080 (or no result if TYPE is void) with location LOC,
6081 simplifying it first if possible. Returns the built
6082 expression value (or NULL_TREE if TYPE is void) and appends
6083 statements possibly defining it to SEQ. */
6086 gimple_build (gimple_seq *seq, location_t loc,
6087 enum built_in_function fn, tree type, tree arg0)
6089 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6092 tree decl = builtin_decl_implicit (fn);
6093 gimple *stmt = gimple_build_call (decl, 1, arg0);
6094 if (!VOID_TYPE_P (type))
6096 if (gimple_in_ssa_p (cfun))
6097 res = make_ssa_name (type);
6099 res = create_tmp_reg (type);
6100 gimple_call_set_lhs (stmt, res);
6102 gimple_set_location (stmt, loc);
6103 gimple_seq_add_stmt_without_update (seq, stmt);
6108 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6109 (or no result if TYPE is void) with location LOC,
6110 simplifying it first if possible. Returns the built
6111 expression value (or NULL_TREE if TYPE is void) and appends
6112 statements possibly defining it to SEQ. */
6115 gimple_build (gimple_seq *seq, location_t loc,
6116 enum built_in_function fn, tree type, tree arg0, tree arg1)
6118 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6121 tree decl = builtin_decl_implicit (fn);
6122 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6123 if (!VOID_TYPE_P (type))
6125 if (gimple_in_ssa_p (cfun))
6126 res = make_ssa_name (type);
6128 res = create_tmp_reg (type);
6129 gimple_call_set_lhs (stmt, res);
6131 gimple_set_location (stmt, loc);
6132 gimple_seq_add_stmt_without_update (seq, stmt);
6137 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6138 (or no result if TYPE is void) with location LOC,
6139 simplifying it first if possible. Returns the built
6140 expression value (or NULL_TREE if TYPE is void) and appends
6141 statements possibly defining it to SEQ. */
6144 gimple_build (gimple_seq *seq, location_t loc,
6145 enum built_in_function fn, tree type,
6146 tree arg0, tree arg1, tree arg2)
6148 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6149 seq, gimple_build_valueize);
6152 tree decl = builtin_decl_implicit (fn);
6153 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6154 if (!VOID_TYPE_P (type))
6156 if (gimple_in_ssa_p (cfun))
6157 res = make_ssa_name (type);
6159 res = create_tmp_reg (type);
6160 gimple_call_set_lhs (stmt, res);
6162 gimple_set_location (stmt, loc);
6163 gimple_seq_add_stmt_without_update (seq, stmt);
6168 /* Build the conversion (TYPE) OP with a result of type TYPE
6169 with location LOC if such conversion is neccesary in GIMPLE,
6170 simplifying it first.
6171 Returns the built expression value and appends
6172 statements possibly defining it to SEQ. */
6175 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6177 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6179 return gimple_build (seq, loc, NOP_EXPR, type, op);
6182 /* Build the conversion (ptrofftype) OP with a result of a type
6183 compatible with ptrofftype with location LOC if such conversion
6184 is neccesary in GIMPLE, simplifying it first.
6185 Returns the built expression value and appends
6186 statements possibly defining it to SEQ. */
6189 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6191 if (ptrofftype_p (TREE_TYPE (op)))
6193 return gimple_convert (seq, loc, sizetype, op);
6196 /* Return true if the result of assignment STMT is known to be non-negative.
6197 If the return value is based on the assumption that signed overflow is
6198 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6199 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6202 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6205 enum tree_code code = gimple_assign_rhs_code (stmt);
6206 switch (get_gimple_rhs_class (code))
6208 case GIMPLE_UNARY_RHS:
6209 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6210 gimple_expr_type (stmt),
6211 gimple_assign_rhs1 (stmt),
6212 strict_overflow_p, depth);
6213 case GIMPLE_BINARY_RHS:
6214 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6215 gimple_expr_type (stmt),
6216 gimple_assign_rhs1 (stmt),
6217 gimple_assign_rhs2 (stmt),
6218 strict_overflow_p, depth);
6219 case GIMPLE_TERNARY_RHS:
6221 case GIMPLE_SINGLE_RHS:
6222 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6223 strict_overflow_p, depth);
6224 case GIMPLE_INVALID_RHS:
6230 /* Return true if return value of call STMT is known to be non-negative.
6231 If the return value is based on the assumption that signed overflow is
6232 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6233 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6236 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6239 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6240 gimple_call_arg (stmt, 0) : NULL_TREE;
6241 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6242 gimple_call_arg (stmt, 1) : NULL_TREE;
6244 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6245 gimple_call_combined_fn (stmt),
6248 strict_overflow_p, depth);
6251 /* Return true if return value of call STMT is known to be non-negative.
6252 If the return value is based on the assumption that signed overflow is
6253 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6254 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6257 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6260 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6262 tree arg = gimple_phi_arg_def (stmt, i);
6263 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6269 /* Return true if STMT is known to compute a non-negative value.
6270 If the return value is based on the assumption that signed overflow is
6271 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6272 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6275 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6278 switch (gimple_code (stmt))
6281 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6284 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6287 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6294 /* Return true if the floating-point value computed by assignment STMT
6295 is known to have an integer value. We also allow +Inf, -Inf and NaN
6296 to be considered integer values. Return false for signaling NaN.
6298 DEPTH is the current nesting depth of the query. */
6301 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6303 enum tree_code code = gimple_assign_rhs_code (stmt);
6304 switch (get_gimple_rhs_class (code))
6306 case GIMPLE_UNARY_RHS:
6307 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6308 gimple_assign_rhs1 (stmt), depth);
6309 case GIMPLE_BINARY_RHS:
6310 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6311 gimple_assign_rhs1 (stmt),
6312 gimple_assign_rhs2 (stmt), depth);
6313 case GIMPLE_TERNARY_RHS:
6315 case GIMPLE_SINGLE_RHS:
6316 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6317 case GIMPLE_INVALID_RHS:
6323 /* Return true if the floating-point value computed by call STMT is known
6324 to have an integer value. We also allow +Inf, -Inf and NaN to be
6325 considered integer values. Return false for signaling NaN.
6327 DEPTH is the current nesting depth of the query. */
6330 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6332 tree arg0 = (gimple_call_num_args (stmt) > 0
6333 ? gimple_call_arg (stmt, 0)
6335 tree arg1 = (gimple_call_num_args (stmt) > 1
6336 ? gimple_call_arg (stmt, 1)
6338 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6342 /* Return true if the floating-point result of phi STMT is known to have
6343 an integer value. We also allow +Inf, -Inf and NaN to be considered
6344 integer values. Return false for signaling NaN.
6346 DEPTH is the current nesting depth of the query. */
6349 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6351 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6353 tree arg = gimple_phi_arg_def (stmt, i);
6354 if (!integer_valued_real_single_p (arg, depth + 1))
6360 /* Return true if the floating-point value computed by STMT is known
6361 to have an integer value. We also allow +Inf, -Inf and NaN to be
6362 considered integer values. Return false for signaling NaN.
6364 DEPTH is the current nesting depth of the query. */
6367 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6369 switch (gimple_code (stmt))
6372 return gimple_assign_integer_valued_real_p (stmt, depth);
6374 return gimple_call_integer_valued_real_p (stmt, depth);
6376 return gimple_phi_integer_valued_real_p (stmt, depth);