1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
36 #include "stor-layout.h"
38 #include "gimple-fold.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
59 /* Return true when DECL can be referenced from current unit.
60 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsehwere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
75 3) COMDAT functions referred by external vtables that
76 we devirtualize only during final compilation stage.
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
82 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
85 struct cgraph_node *node;
88 if (DECL_ABSTRACT_P (decl))
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
93 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
96 /* Static objects can be referred only if they was not optimized out yet. */
97 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
101 if (symtab->function_flags_ready)
103 snode = symtab_node::get (decl);
104 if (!snode || !snode->definition)
106 node = dyn_cast <cgraph_node *> (snode);
107 return !node || !node->global.inlined_to;
110 /* We will later output the initializer, so we can refer to it.
111 So we are concerned only when DECL comes from initializer of
112 external var or var that has been optimized out. */
114 || TREE_CODE (from_decl) != VAR_DECL
115 || (!DECL_EXTERNAL (from_decl)
116 && (vnode = varpool_node::get (from_decl)) != NULL
117 && vnode->definition)
119 && (vnode = varpool_node::get (from_decl)) != NULL
120 && vnode->in_other_partition))
122 /* We are folding reference from external vtable. The vtable may reffer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl)
126 && DECL_EXTERNAL (decl)
127 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
128 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference imply need to include function body in the curren tunit. */
133 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
139 As observed in PR20991 for already optimized out comdat virtual functions
140 it may be tempting to not necessarily give up because the copy will be
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
146 if (!symtab->function_flags_ready)
149 snode = symtab_node::get (decl);
151 || ((!snode->definition || DECL_EXTERNAL (decl))
152 && (!snode->in_other_partition
153 || (!snode->forced_by_abi && !snode->force_output))))
155 node = dyn_cast <cgraph_node *> (snode);
156 return !node || !node->global.inlined_to;
159 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
164 canonicalize_constructor_val (tree cval, tree from_decl)
166 tree orig_cval = cval;
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
180 if (TREE_CODE (cval) == ADDR_EXPR)
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
187 TREE_OPERAND (cval, 0) = base;
190 base = get_base_address (TREE_OPERAND (cval, 0));
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
196 && !can_refer_decl_in_current_unit_p (base, from_decl))
198 if (TREE_TYPE (base) == error_mark_node)
200 if (TREE_CODE (base) == VAR_DECL)
201 TREE_ADDRESSABLE (base) = 1;
202 else if (TREE_CODE (base) == FUNCTION_DECL)
204 /* Make sure we create a cgraph node for functions we'll reference.
205 They can be non-existent if the reference comes from an entry
206 of an external vtable for example. */
207 cgraph_node::get_create (base);
209 /* Fixup types in global initializers. */
210 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
211 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
213 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
214 cval = fold_convert (TREE_TYPE (orig_cval), cval);
217 if (TREE_OVERFLOW_P (cval))
218 return drop_tree_overflow (cval);
222 /* If SYM is a constant variable with known value, return the value.
223 NULL_TREE is returned otherwise. */
226 get_symbol_constant_value (tree sym)
228 tree val = ctor_for_folding (sym);
229 if (val != error_mark_node)
233 val = canonicalize_constructor_val (unshare_expr (val), sym);
234 if (val && is_gimple_min_invariant (val))
239 /* Variables declared 'const' without an initializer
240 have zero as the initializer if they may not be
241 overridden at link or run time. */
243 && is_gimple_reg_type (TREE_TYPE (sym)))
244 return build_zero_cst (TREE_TYPE (sym));
252 /* Subroutine of fold_stmt. We perform several simplifications of the
253 memory reference tree EXPR and make sure to re-gimplify them properly
254 after propagation of constant addresses. IS_LHS is true if the
255 reference is supposed to be an lvalue. */
258 maybe_fold_reference (tree expr, bool is_lhs)
262 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
263 || TREE_CODE (expr) == REALPART_EXPR
264 || TREE_CODE (expr) == IMAGPART_EXPR)
265 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
266 return fold_unary_loc (EXPR_LOCATION (expr),
269 TREE_OPERAND (expr, 0));
270 else if (TREE_CODE (expr) == BIT_FIELD_REF
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_ternary_loc (EXPR_LOCATION (expr),
275 TREE_OPERAND (expr, 0),
276 TREE_OPERAND (expr, 1),
277 TREE_OPERAND (expr, 2));
280 && (result = fold_const_aggregate_ref (expr))
281 && is_gimple_min_invariant (result))
288 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
289 replacement rhs for the statement or NULL_TREE if no simplification
290 could be made. It is assumed that the operands have been previously
294 fold_gimple_assign (gimple_stmt_iterator *si)
296 gimple *stmt = gsi_stmt (*si);
297 enum tree_code subcode = gimple_assign_rhs_code (stmt);
298 location_t loc = gimple_location (stmt);
300 tree result = NULL_TREE;
302 switch (get_gimple_rhs_class (subcode))
304 case GIMPLE_SINGLE_RHS:
306 tree rhs = gimple_assign_rhs1 (stmt);
308 if (TREE_CLOBBER_P (rhs))
311 if (REFERENCE_CLASS_P (rhs))
312 return maybe_fold_reference (rhs, false);
314 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
316 tree val = OBJ_TYPE_REF_EXPR (rhs);
317 if (is_gimple_min_invariant (val))
319 else if (flag_devirtualize && virtual_method_call_p (rhs))
322 vec <cgraph_node *>targets
323 = possible_polymorphic_call_targets (rhs, stmt, &final);
324 if (final && targets.length () <= 1 && dbg_cnt (devirt))
326 if (dump_enabled_p ())
328 location_t loc = gimple_location_safe (stmt);
329 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
330 "resolving virtual function address "
331 "reference to function %s\n",
332 targets.length () == 1
333 ? targets[0]->name ()
336 if (targets.length () == 1)
338 val = fold_convert (TREE_TYPE (val),
339 build_fold_addr_expr_loc
340 (loc, targets[0]->decl));
341 STRIP_USELESS_TYPE_CONVERSION (val);
344 /* We can not use __builtin_unreachable here because it
345 can not have address taken. */
346 val = build_int_cst (TREE_TYPE (val), 0);
352 else if (TREE_CODE (rhs) == ADDR_EXPR)
354 tree ref = TREE_OPERAND (rhs, 0);
355 tree tem = maybe_fold_reference (ref, true);
357 && TREE_CODE (tem) == MEM_REF
358 && integer_zerop (TREE_OPERAND (tem, 1)))
359 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
361 result = fold_convert (TREE_TYPE (rhs),
362 build_fold_addr_expr_loc (loc, tem));
363 else if (TREE_CODE (ref) == MEM_REF
364 && integer_zerop (TREE_OPERAND (ref, 1)))
365 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
369 /* Strip away useless type conversions. Both the
370 NON_LVALUE_EXPR that may have been added by fold, and
371 "useless" type conversions that might now be apparent
372 due to propagation. */
373 STRIP_USELESS_TYPE_CONVERSION (result);
375 if (result != rhs && valid_gimple_rhs_p (result))
380 else if (TREE_CODE (rhs) == CONSTRUCTOR
381 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
383 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
387 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
388 if (! CONSTANT_CLASS_P (val))
391 return build_vector_from_ctor (TREE_TYPE (rhs),
392 CONSTRUCTOR_ELTS (rhs));
395 else if (DECL_P (rhs))
396 return get_symbol_constant_value (rhs);
400 case GIMPLE_UNARY_RHS:
403 case GIMPLE_BINARY_RHS:
406 case GIMPLE_TERNARY_RHS:
407 result = fold_ternary_loc (loc, subcode,
408 TREE_TYPE (gimple_assign_lhs (stmt)),
409 gimple_assign_rhs1 (stmt),
410 gimple_assign_rhs2 (stmt),
411 gimple_assign_rhs3 (stmt));
415 STRIP_USELESS_TYPE_CONVERSION (result);
416 if (valid_gimple_rhs_p (result))
421 case GIMPLE_INVALID_RHS:
429 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
430 adjusting the replacement stmts location and virtual operands.
431 If the statement has a lhs the last stmt in the sequence is expected
432 to assign to that lhs. */
435 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
437 gimple *stmt = gsi_stmt (*si_p);
439 if (gimple_has_location (stmt))
440 annotate_all_with_location (stmts, gimple_location (stmt));
442 /* First iterate over the replacement statements backward, assigning
443 virtual operands to their defining statements. */
444 gimple *laststore = NULL;
445 for (gimple_stmt_iterator i = gsi_last (stmts);
446 !gsi_end_p (i); gsi_prev (&i))
448 gimple *new_stmt = gsi_stmt (i);
449 if ((gimple_assign_single_p (new_stmt)
450 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
451 || (is_gimple_call (new_stmt)
452 && (gimple_call_flags (new_stmt)
453 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
457 vdef = gimple_vdef (stmt);
459 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
460 gimple_set_vdef (new_stmt, vdef);
461 if (vdef && TREE_CODE (vdef) == SSA_NAME)
462 SSA_NAME_DEF_STMT (vdef) = new_stmt;
463 laststore = new_stmt;
467 /* Second iterate over the statements forward, assigning virtual
468 operands to their uses. */
469 tree reaching_vuse = gimple_vuse (stmt);
470 for (gimple_stmt_iterator i = gsi_start (stmts);
471 !gsi_end_p (i); gsi_next (&i))
473 gimple *new_stmt = gsi_stmt (i);
474 /* If the new statement possibly has a VUSE, update it with exact SSA
475 name we know will reach this one. */
476 if (gimple_has_mem_ops (new_stmt))
477 gimple_set_vuse (new_stmt, reaching_vuse);
478 gimple_set_modified (new_stmt, true);
479 if (gimple_vdef (new_stmt))
480 reaching_vuse = gimple_vdef (new_stmt);
483 /* If the new sequence does not do a store release the virtual
484 definition of the original statement. */
486 && reaching_vuse == gimple_vuse (stmt))
488 tree vdef = gimple_vdef (stmt);
490 && TREE_CODE (vdef) == SSA_NAME)
492 unlink_stmt_vdef (stmt);
493 release_ssa_name (vdef);
497 /* Finally replace the original statement with the sequence. */
498 gsi_replace_with_seq (si_p, stmts, false);
501 /* Convert EXPR into a GIMPLE value suitable for substitution on the
502 RHS of an assignment. Insert the necessary statements before
503 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
504 is replaced. If the call is expected to produces a result, then it
505 is replaced by an assignment of the new RHS to the result variable.
506 If the result is to be ignored, then the call is replaced by a
507 GIMPLE_NOP. A proper VDEF chain is retained by making the first
508 VUSE and the last VDEF of the whole sequence be the same as the replaced
509 statement and using new SSA names for stores in between. */
512 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
515 gimple *stmt, *new_stmt;
516 gimple_stmt_iterator i;
517 gimple_seq stmts = NULL;
519 stmt = gsi_stmt (*si_p);
521 gcc_assert (is_gimple_call (stmt));
523 push_gimplify_context (gimple_in_ssa_p (cfun));
525 lhs = gimple_call_lhs (stmt);
526 if (lhs == NULL_TREE)
528 gimplify_and_add (expr, &stmts);
529 /* We can end up with folding a memcpy of an empty class assignment
530 which gets optimized away by C++ gimplification. */
531 if (gimple_seq_empty_p (stmts))
533 pop_gimplify_context (NULL);
534 if (gimple_in_ssa_p (cfun))
536 unlink_stmt_vdef (stmt);
539 gsi_replace (si_p, gimple_build_nop (), false);
545 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
546 new_stmt = gimple_build_assign (lhs, tmp);
547 i = gsi_last (stmts);
548 gsi_insert_after_without_update (&i, new_stmt,
549 GSI_CONTINUE_LINKING);
552 pop_gimplify_context (NULL);
554 gsi_replace_with_seq_vops (si_p, stmts);
558 /* Replace the call at *GSI with the gimple value VAL. */
561 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
563 gimple *stmt = gsi_stmt (*gsi);
564 tree lhs = gimple_call_lhs (stmt);
568 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
569 val = fold_convert (TREE_TYPE (lhs), val);
570 repl = gimple_build_assign (lhs, val);
573 repl = gimple_build_nop ();
574 tree vdef = gimple_vdef (stmt);
575 if (vdef && TREE_CODE (vdef) == SSA_NAME)
577 unlink_stmt_vdef (stmt);
578 release_ssa_name (vdef);
580 gsi_replace (gsi, repl, false);
583 /* Replace the call at *GSI with the new call REPL and fold that
587 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
589 gimple *stmt = gsi_stmt (*gsi);
590 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
591 gimple_set_location (repl, gimple_location (stmt));
592 if (gimple_vdef (stmt)
593 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
595 gimple_set_vdef (repl, gimple_vdef (stmt));
596 gimple_set_vuse (repl, gimple_vuse (stmt));
597 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
599 gsi_replace (gsi, repl, false);
603 /* Return true if VAR is a VAR_DECL or a component thereof. */
606 var_decl_component_p (tree var)
609 while (handled_component_p (inner))
610 inner = TREE_OPERAND (inner, 0);
611 return SSA_VAR_P (inner);
614 /* Fold function call to builtin mem{{,p}cpy,move}. Return
615 false if no simplification can be made.
616 If ENDP is 0, return DEST (like memcpy).
617 If ENDP is 1, return DEST+LEN (like mempcpy).
618 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
619 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
623 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
624 tree dest, tree src, int endp)
626 gimple *stmt = gsi_stmt (*gsi);
627 tree lhs = gimple_call_lhs (stmt);
628 tree len = gimple_call_arg (stmt, 2);
629 tree destvar, srcvar;
630 location_t loc = gimple_location (stmt);
632 /* If the LEN parameter is zero, return DEST. */
633 if (integer_zerop (len))
636 if (gimple_call_lhs (stmt))
637 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
639 repl = gimple_build_nop ();
640 tree vdef = gimple_vdef (stmt);
641 if (vdef && TREE_CODE (vdef) == SSA_NAME)
643 unlink_stmt_vdef (stmt);
644 release_ssa_name (vdef);
646 gsi_replace (gsi, repl, false);
650 /* If SRC and DEST are the same (and not volatile), return
651 DEST{,+LEN,+LEN-1}. */
652 if (operand_equal_p (src, dest, 0))
654 unlink_stmt_vdef (stmt);
655 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
656 release_ssa_name (gimple_vdef (stmt));
659 gsi_replace (gsi, gimple_build_nop (), false);
666 tree srctype, desttype;
667 unsigned int src_align, dest_align;
670 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
671 pointers as wide integer) and also may result in huge function
672 size because of inlined bounds copy. Thus don't inline for
673 functions we want to instrument. */
674 if (flag_check_pointer_bounds
675 && chkp_instrumentable_p (cfun->decl)
676 /* Even if data may contain pointers we can inline if copy
677 less than a pointer size. */
678 && (!tree_fits_uhwi_p (len)
679 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
682 /* Build accesses at offset zero with a ref-all character type. */
683 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
686 /* If we can perform the copy efficiently with first doing all loads
687 and then all stores inline it that way. Currently efficiently
688 means that we can load all the memory into a single integer
689 register which is what MOVE_MAX gives us. */
690 src_align = get_pointer_alignment (src);
691 dest_align = get_pointer_alignment (dest);
692 if (tree_fits_uhwi_p (len)
693 && compare_tree_int (len, MOVE_MAX) <= 0
694 /* ??? Don't transform copies from strings with known length this
695 confuses the tree-ssa-strlen.c. This doesn't handle
696 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
698 && !c_strlen (src, 2))
700 unsigned ilen = tree_to_uhwi (len);
701 if (exact_log2 (ilen) != -1)
703 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
705 && TYPE_MODE (type) != BLKmode
706 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
708 /* If the destination pointer is not aligned we must be able
709 to emit an unaligned store. */
710 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
711 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
712 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
713 != CODE_FOR_nothing)))
716 tree desttype = type;
717 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
718 srctype = build_aligned_type (type, src_align);
719 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
720 tree tem = fold_const_aggregate_ref (srcmem);
723 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
724 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
726 && (optab_handler (movmisalign_optab,
728 == CODE_FOR_nothing))
733 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
735 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
736 if (gimple_in_ssa_p (cfun))
737 srcmem = make_ssa_name (TREE_TYPE (srcmem),
740 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
741 gimple_assign_set_lhs (new_stmt, srcmem);
742 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
743 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
745 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
746 desttype = build_aligned_type (type, dest_align);
748 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
751 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
752 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
753 if (gimple_vdef (new_stmt)
754 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
755 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
758 gsi_replace (gsi, new_stmt, false);
761 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
770 /* Both DEST and SRC must be pointer types.
771 ??? This is what old code did. Is the testing for pointer types
774 If either SRC is readonly or length is 1, we can use memcpy. */
775 if (!dest_align || !src_align)
777 if (readonly_data_expr (src)
778 || (tree_fits_uhwi_p (len)
779 && (MIN (src_align, dest_align) / BITS_PER_UNIT
780 >= tree_to_uhwi (len))))
782 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
785 gimple_call_set_fndecl (stmt, fn);
786 gimple_call_set_arg (stmt, 0, dest);
787 gimple_call_set_arg (stmt, 1, src);
792 /* If *src and *dest can't overlap, optimize into memcpy as well. */
793 if (TREE_CODE (src) == ADDR_EXPR
794 && TREE_CODE (dest) == ADDR_EXPR)
796 tree src_base, dest_base, fn;
797 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
798 HOST_WIDE_INT maxsize;
800 srcvar = TREE_OPERAND (src, 0);
801 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
802 if (src_base == NULL)
804 destvar = TREE_OPERAND (dest, 0);
805 dest_base = get_addr_base_and_unit_offset (destvar,
807 if (dest_base == NULL)
809 if (tree_fits_uhwi_p (len))
810 maxsize = tree_to_uhwi (len);
813 if (SSA_VAR_P (src_base)
814 && SSA_VAR_P (dest_base))
816 if (operand_equal_p (src_base, dest_base, 0)
817 && ranges_overlap_p (src_offset, maxsize,
818 dest_offset, maxsize))
821 else if (TREE_CODE (src_base) == MEM_REF
822 && TREE_CODE (dest_base) == MEM_REF)
824 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
825 TREE_OPERAND (dest_base, 0), 0))
827 offset_int off = mem_ref_offset (src_base) + src_offset;
828 if (!wi::fits_shwi_p (off))
830 src_offset = off.to_shwi ();
832 off = mem_ref_offset (dest_base) + dest_offset;
833 if (!wi::fits_shwi_p (off))
835 dest_offset = off.to_shwi ();
836 if (ranges_overlap_p (src_offset, maxsize,
837 dest_offset, maxsize))
843 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
846 gimple_call_set_fndecl (stmt, fn);
847 gimple_call_set_arg (stmt, 0, dest);
848 gimple_call_set_arg (stmt, 1, src);
853 /* If the destination and source do not alias optimize into
855 if ((is_gimple_min_invariant (dest)
856 || TREE_CODE (dest) == SSA_NAME)
857 && (is_gimple_min_invariant (src)
858 || TREE_CODE (src) == SSA_NAME))
861 ao_ref_init_from_ptr_and_size (&destr, dest, len);
862 ao_ref_init_from_ptr_and_size (&srcr, src, len);
863 if (!refs_may_alias_p_1 (&destr, &srcr, false))
866 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
869 gimple_call_set_fndecl (stmt, fn);
870 gimple_call_set_arg (stmt, 0, dest);
871 gimple_call_set_arg (stmt, 1, src);
880 if (!tree_fits_shwi_p (len))
883 This logic lose for arguments like (type *)malloc (sizeof (type)),
884 since we strip the casts of up to VOID return value from malloc.
885 Perhaps we ought to inherit type from non-VOID argument here? */
888 if (!POINTER_TYPE_P (TREE_TYPE (src))
889 || !POINTER_TYPE_P (TREE_TYPE (dest)))
891 /* In the following try to find a type that is most natural to be
892 used for the memcpy source and destination and that allows
893 the most optimization when memcpy is turned into a plain assignment
894 using that type. In theory we could always use a char[len] type
895 but that only gains us that the destination and source possibly
896 no longer will have their address taken. */
897 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
898 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
900 tree tem = TREE_OPERAND (src, 0);
902 if (tem != TREE_OPERAND (src, 0))
903 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
905 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
907 tree tem = TREE_OPERAND (dest, 0);
909 if (tem != TREE_OPERAND (dest, 0))
910 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
912 srctype = TREE_TYPE (TREE_TYPE (src));
913 if (TREE_CODE (srctype) == ARRAY_TYPE
914 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
916 srctype = TREE_TYPE (srctype);
918 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
920 desttype = TREE_TYPE (TREE_TYPE (dest));
921 if (TREE_CODE (desttype) == ARRAY_TYPE
922 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
924 desttype = TREE_TYPE (desttype);
926 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
928 if (TREE_ADDRESSABLE (srctype)
929 || TREE_ADDRESSABLE (desttype))
932 /* Make sure we are not copying using a floating-point mode or
933 a type whose size possibly does not match its precision. */
934 if (FLOAT_MODE_P (TYPE_MODE (desttype))
935 || TREE_CODE (desttype) == BOOLEAN_TYPE
936 || TREE_CODE (desttype) == ENUMERAL_TYPE)
937 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
938 if (FLOAT_MODE_P (TYPE_MODE (srctype))
939 || TREE_CODE (srctype) == BOOLEAN_TYPE
940 || TREE_CODE (srctype) == ENUMERAL_TYPE)
941 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
949 src_align = get_pointer_alignment (src);
950 dest_align = get_pointer_alignment (dest);
951 if (dest_align < TYPE_ALIGN (desttype)
952 || src_align < TYPE_ALIGN (srctype))
956 STRIP_NOPS (destvar);
957 if (TREE_CODE (destvar) == ADDR_EXPR
958 && var_decl_component_p (TREE_OPERAND (destvar, 0))
959 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
960 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
966 if (TREE_CODE (srcvar) == ADDR_EXPR
967 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
968 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
971 || src_align >= TYPE_ALIGN (desttype))
972 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
974 else if (!STRICT_ALIGNMENT)
976 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
978 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
986 if (srcvar == NULL_TREE && destvar == NULL_TREE)
989 if (srcvar == NULL_TREE)
992 if (src_align >= TYPE_ALIGN (desttype))
993 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
996 if (STRICT_ALIGNMENT)
998 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1000 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1003 else if (destvar == NULL_TREE)
1006 if (dest_align >= TYPE_ALIGN (srctype))
1007 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1010 if (STRICT_ALIGNMENT)
1012 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1014 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1019 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1021 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1022 if (gimple_in_ssa_p (cfun))
1023 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1025 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1026 gimple_assign_set_lhs (new_stmt, srcvar);
1027 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1028 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1030 new_stmt = gimple_build_assign (destvar, srcvar);
1031 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1032 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1033 if (gimple_vdef (new_stmt)
1034 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1035 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1038 gsi_replace (gsi, new_stmt, false);
1041 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1045 gimple_seq stmts = NULL;
1046 if (endp == 0 || endp == 3)
1049 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1051 if (endp == 2 || endp == 1)
1053 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1054 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1055 TREE_TYPE (dest), dest, len);
1058 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1059 gimple *repl = gimple_build_assign (lhs, dest);
1060 gsi_replace (gsi, repl, false);
1064 /* Fold function call to builtin memset or bzero at *GSI setting the
1065 memory of size LEN to VAL. Return whether a simplification was made. */
1068 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1070 gimple *stmt = gsi_stmt (*gsi);
1072 unsigned HOST_WIDE_INT length, cval;
1074 /* If the LEN parameter is zero, return DEST. */
1075 if (integer_zerop (len))
1077 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1081 if (! tree_fits_uhwi_p (len))
1084 if (TREE_CODE (c) != INTEGER_CST)
1087 tree dest = gimple_call_arg (stmt, 0);
1089 if (TREE_CODE (var) != ADDR_EXPR)
1092 var = TREE_OPERAND (var, 0);
1093 if (TREE_THIS_VOLATILE (var))
1096 etype = TREE_TYPE (var);
1097 if (TREE_CODE (etype) == ARRAY_TYPE)
1098 etype = TREE_TYPE (etype);
1100 if (!INTEGRAL_TYPE_P (etype)
1101 && !POINTER_TYPE_P (etype))
1104 if (! var_decl_component_p (var))
1107 length = tree_to_uhwi (len);
1108 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1109 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1112 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1115 if (integer_zerop (c))
1119 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1122 cval = TREE_INT_CST_LOW (c);
1126 cval |= (cval << 31) << 1;
1129 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1130 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1131 gimple_set_vuse (store, gimple_vuse (stmt));
1132 tree vdef = gimple_vdef (stmt);
1133 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1135 gimple_set_vdef (store, gimple_vdef (stmt));
1136 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1138 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1139 if (gimple_call_lhs (stmt))
1141 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1142 gsi_replace (gsi, asgn, false);
1146 gimple_stmt_iterator gsi2 = *gsi;
1148 gsi_remove (&gsi2, true);
1155 /* Return the string length, maximum string length or maximum value of
1157 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1158 is not NULL and, for TYPE == 0, its value is not equal to the length
1159 we determine or if we are unable to determine the length or value,
1160 return false. VISITED is a bitmap of visited variables.
1161 TYPE is 0 if string length should be returned, 1 for maximum string
1162 length and 2 for maximum value ARG can have. */
1165 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1170 if (TREE_CODE (arg) != SSA_NAME)
1172 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1173 if (TREE_CODE (arg) == ADDR_EXPR
1174 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1175 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1177 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1178 if (TREE_CODE (aop0) == INDIRECT_REF
1179 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1180 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1181 length, visited, type);
1187 if (TREE_CODE (val) != INTEGER_CST
1188 || tree_int_cst_sgn (val) < 0)
1192 val = c_strlen (arg, 1);
1200 if (TREE_CODE (*length) != INTEGER_CST
1201 || TREE_CODE (val) != INTEGER_CST)
1204 if (tree_int_cst_lt (*length, val))
1208 else if (simple_cst_equal (val, *length) != 1)
1216 /* If ARG is registered for SSA update we cannot look at its defining
1218 if (name_registered_for_update_p (arg))
1221 /* If we were already here, break the infinite cycle. */
1223 *visited = BITMAP_ALLOC (NULL);
1224 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1228 def_stmt = SSA_NAME_DEF_STMT (var);
1230 switch (gimple_code (def_stmt))
1233 /* The RHS of the statement defining VAR must either have a
1234 constant length or come from another SSA_NAME with a constant
1236 if (gimple_assign_single_p (def_stmt)
1237 || gimple_assign_unary_nop_p (def_stmt))
1239 tree rhs = gimple_assign_rhs1 (def_stmt);
1240 return get_maxval_strlen (rhs, length, visited, type);
1242 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1244 tree op2 = gimple_assign_rhs2 (def_stmt);
1245 tree op3 = gimple_assign_rhs3 (def_stmt);
1246 return get_maxval_strlen (op2, length, visited, type)
1247 && get_maxval_strlen (op3, length, visited, type);
1253 /* All the arguments of the PHI node must have the same constant
1257 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1259 tree arg = gimple_phi_arg (def_stmt, i)->def;
1261 /* If this PHI has itself as an argument, we cannot
1262 determine the string length of this argument. However,
1263 if we can find a constant string length for the other
1264 PHI args then we can still be sure that this is a
1265 constant string length. So be optimistic and just
1266 continue with the next argument. */
1267 if (arg == gimple_phi_result (def_stmt))
1270 if (!get_maxval_strlen (arg, length, visited, type))
1282 get_maxval_strlen (tree arg, int type)
1284 bitmap visited = NULL;
1285 tree len = NULL_TREE;
1286 if (!get_maxval_strlen (arg, &len, &visited, type))
1289 BITMAP_FREE (visited);
1295 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1296 If LEN is not NULL, it represents the length of the string to be
1297 copied. Return NULL_TREE if no simplification can be made. */
1300 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1301 tree dest, tree src)
1303 location_t loc = gimple_location (gsi_stmt (*gsi));
1306 /* If SRC and DEST are the same (and not volatile), return DEST. */
1307 if (operand_equal_p (src, dest, 0))
1309 replace_call_with_value (gsi, dest);
1313 if (optimize_function_for_size_p (cfun))
1316 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1320 tree len = get_maxval_strlen (src, 0);
1324 len = fold_convert_loc (loc, size_type_node, len);
1325 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1326 len = force_gimple_operand_gsi (gsi, len, true,
1327 NULL_TREE, true, GSI_SAME_STMT);
1328 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1329 replace_call_with_call_and_fold (gsi, repl);
1333 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1334 If SLEN is not NULL, it represents the length of the source string.
1335 Return NULL_TREE if no simplification can be made. */
1338 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1339 tree dest, tree src, tree len)
1341 location_t loc = gimple_location (gsi_stmt (*gsi));
1344 /* If the LEN parameter is zero, return DEST. */
1345 if (integer_zerop (len))
1347 replace_call_with_value (gsi, dest);
1351 /* We can't compare slen with len as constants below if len is not a
1353 if (TREE_CODE (len) != INTEGER_CST)
1356 /* Now, we must be passed a constant src ptr parameter. */
1357 tree slen = get_maxval_strlen (src, 0);
1358 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1361 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1363 /* We do not support simplification of this case, though we do
1364 support it when expanding trees into RTL. */
1365 /* FIXME: generate a call to __builtin_memset. */
1366 if (tree_int_cst_lt (slen, len))
1369 /* OK transform into builtin memcpy. */
1370 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1374 len = fold_convert_loc (loc, size_type_node, len);
1375 len = force_gimple_operand_gsi (gsi, len, true,
1376 NULL_TREE, true, GSI_SAME_STMT);
1377 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1378 replace_call_with_call_and_fold (gsi, repl);
1382 /* Simplify strchr (str, 0) into str + strlen (str).
1383 In general strlen is significantly faster than strchr
1384 due to being a simpler operation. */
1386 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi)
1388 gimple *stmt = gsi_stmt (*gsi);
1389 tree str = gimple_call_arg (stmt, 0);
1390 tree c = gimple_call_arg (stmt, 1);
1391 location_t loc = gimple_location (stmt);
1393 if (optimize_function_for_size_p (cfun))
1396 if (!integer_zerop (c) || !gimple_call_lhs (stmt))
1400 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1405 /* Create newstr = strlen (str). */
1406 gimple_seq stmts = NULL;
1407 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1408 gimple_set_location (new_stmt, loc);
1409 if (gimple_in_ssa_p (cfun))
1410 len = make_ssa_name (size_type_node);
1412 len = create_tmp_reg (size_type_node);
1413 gimple_call_set_lhs (new_stmt, len);
1414 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1416 /* Create (str p+ strlen (str)). */
1417 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1418 POINTER_PLUS_EXPR, str, len);
1419 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1420 gsi_replace_with_seq_vops (gsi, stmts);
1421 /* gsi now points at the assignment to the lhs, get a
1422 stmt iterator to the strlen.
1423 ??? We can't use gsi_for_stmt as that doesn't work when the
1424 CFG isn't built yet. */
1425 gimple_stmt_iterator gsi2 = *gsi;
1431 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1434 Return NULL_TREE if no simplification was possible, otherwise return the
1435 simplified form of the call as a tree.
1437 The simplified form may be a constant or other expression which
1438 computes the same value, but in a more efficient manner (including
1439 calls to other builtin functions).
1441 The call may contain arguments which need to be evaluated, but
1442 which are not useful to determine the result of the call. In
1443 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1444 COMPOUND_EXPR will be an argument which must be evaluated.
1445 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1446 COMPOUND_EXPR in the chain will contain the tree for the simplified
1447 form of the builtin function call. */
1450 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1452 gimple *stmt = gsi_stmt (*gsi);
1453 location_t loc = gimple_location (stmt);
1455 const char *p = c_getstr (src);
1457 /* If the string length is zero, return the dst parameter. */
1458 if (p && *p == '\0')
1460 replace_call_with_value (gsi, dst);
1464 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1467 /* See if we can store by pieces into (dst + strlen(dst)). */
1469 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1470 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1472 if (!strlen_fn || !memcpy_fn)
1475 /* If the length of the source string isn't computable don't
1476 split strcat into strlen and memcpy. */
1477 tree len = get_maxval_strlen (src, 0);
1481 /* Create strlen (dst). */
1482 gimple_seq stmts = NULL, stmts2;
1483 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1484 gimple_set_location (repl, loc);
1485 if (gimple_in_ssa_p (cfun))
1486 newdst = make_ssa_name (size_type_node);
1488 newdst = create_tmp_reg (size_type_node);
1489 gimple_call_set_lhs (repl, newdst);
1490 gimple_seq_add_stmt_without_update (&stmts, repl);
1492 /* Create (dst p+ strlen (dst)). */
1493 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1494 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1495 gimple_seq_add_seq_without_update (&stmts, stmts2);
1497 len = fold_convert_loc (loc, size_type_node, len);
1498 len = size_binop_loc (loc, PLUS_EXPR, len,
1499 build_int_cst (size_type_node, 1));
1500 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1501 gimple_seq_add_seq_without_update (&stmts, stmts2);
1503 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1504 gimple_seq_add_stmt_without_update (&stmts, repl);
1505 if (gimple_call_lhs (stmt))
1507 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1508 gimple_seq_add_stmt_without_update (&stmts, repl);
1509 gsi_replace_with_seq_vops (gsi, stmts);
1510 /* gsi now points at the assignment to the lhs, get a
1511 stmt iterator to the memcpy call.
1512 ??? We can't use gsi_for_stmt as that doesn't work when the
1513 CFG isn't built yet. */
1514 gimple_stmt_iterator gsi2 = *gsi;
1520 gsi_replace_with_seq_vops (gsi, stmts);
1526 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1527 are the arguments to the call. */
1530 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1532 gimple *stmt = gsi_stmt (*gsi);
1533 tree dest = gimple_call_arg (stmt, 0);
1534 tree src = gimple_call_arg (stmt, 1);
1535 tree size = gimple_call_arg (stmt, 2);
1541 /* If the SRC parameter is "", return DEST. */
1542 if (p && *p == '\0')
1544 replace_call_with_value (gsi, dest);
1548 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1551 /* If __builtin_strcat_chk is used, assume strcat is available. */
1552 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1556 gimple *repl = gimple_build_call (fn, 2, dest, src);
1557 replace_call_with_call_and_fold (gsi, repl);
1561 /* Simplify a call to the strncat builtin. */
1564 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1566 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1567 tree dst = gimple_call_arg (stmt, 0);
1568 tree src = gimple_call_arg (stmt, 1);
1569 tree len = gimple_call_arg (stmt, 2);
1571 const char *p = c_getstr (src);
1573 /* If the requested length is zero, or the src parameter string
1574 length is zero, return the dst parameter. */
1575 if (integer_zerop (len) || (p && *p == '\0'))
1577 replace_call_with_value (gsi, dst);
1581 /* If the requested len is greater than or equal to the string
1582 length, call strcat. */
1583 if (TREE_CODE (len) == INTEGER_CST && p
1584 && compare_tree_int (len, strlen (p)) >= 0)
1586 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1588 /* If the replacement _DECL isn't initialized, don't do the
1593 gcall *repl = gimple_build_call (fn, 2, dst, src);
1594 replace_call_with_call_and_fold (gsi, repl);
1601 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1605 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1607 gimple *stmt = gsi_stmt (*gsi);
1608 tree dest = gimple_call_arg (stmt, 0);
1609 tree src = gimple_call_arg (stmt, 1);
1610 tree len = gimple_call_arg (stmt, 2);
1611 tree size = gimple_call_arg (stmt, 3);
1616 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1617 if ((p && *p == '\0')
1618 || integer_zerop (len))
1620 replace_call_with_value (gsi, dest);
1624 if (! tree_fits_uhwi_p (size))
1627 if (! integer_all_onesp (size))
1629 tree src_len = c_strlen (src, 1);
1631 && tree_fits_uhwi_p (src_len)
1632 && tree_fits_uhwi_p (len)
1633 && ! tree_int_cst_lt (len, src_len))
1635 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1636 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1640 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1641 replace_call_with_call_and_fold (gsi, repl);
1647 /* If __builtin_strncat_chk is used, assume strncat is available. */
1648 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1652 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1653 replace_call_with_call_and_fold (gsi, repl);
1657 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1658 to the call. IGNORE is true if the value returned
1659 by the builtin will be ignored. UNLOCKED is true is true if this
1660 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1661 the known length of the string. Return NULL_TREE if no simplification
1665 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1666 tree arg0, tree arg1,
1669 gimple *stmt = gsi_stmt (*gsi);
1671 /* If we're using an unlocked function, assume the other unlocked
1672 functions exist explicitly. */
1673 tree const fn_fputc = (unlocked
1674 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1675 : builtin_decl_implicit (BUILT_IN_FPUTC));
1676 tree const fn_fwrite = (unlocked
1677 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1678 : builtin_decl_implicit (BUILT_IN_FWRITE));
1680 /* If the return value is used, don't do the transformation. */
1681 if (gimple_call_lhs (stmt))
1684 /* Get the length of the string passed to fputs. If the length
1685 can't be determined, punt. */
1686 tree len = get_maxval_strlen (arg0, 0);
1688 || TREE_CODE (len) != INTEGER_CST)
1691 switch (compare_tree_int (len, 1))
1693 case -1: /* length is 0, delete the call entirely . */
1694 replace_call_with_value (gsi, integer_zero_node);
1697 case 0: /* length is 1, call fputc. */
1699 const char *p = c_getstr (arg0);
1705 gimple *repl = gimple_build_call (fn_fputc, 2,
1707 (integer_type_node, p[0]), arg1);
1708 replace_call_with_call_and_fold (gsi, repl);
1713 case 1: /* length is greater than 1, call fwrite. */
1715 /* If optimizing for size keep fputs. */
1716 if (optimize_function_for_size_p (cfun))
1718 /* New argument list transforming fputs(string, stream) to
1719 fwrite(string, 1, len, stream). */
1723 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1724 size_one_node, len, arg1);
1725 replace_call_with_call_and_fold (gsi, repl);
1734 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1735 DEST, SRC, LEN, and SIZE are the arguments to the call.
1736 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1737 code of the builtin. If MAXLEN is not NULL, it is maximum length
1738 passed as third argument. */
1741 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1742 tree dest, tree src, tree len, tree size,
1743 enum built_in_function fcode)
1745 gimple *stmt = gsi_stmt (*gsi);
1746 location_t loc = gimple_location (stmt);
1747 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1750 /* If SRC and DEST are the same (and not volatile), return DEST
1751 (resp. DEST+LEN for __mempcpy_chk). */
1752 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1754 if (fcode != BUILT_IN_MEMPCPY_CHK)
1756 replace_call_with_value (gsi, dest);
1761 gimple_seq stmts = NULL;
1762 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1763 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1764 TREE_TYPE (dest), dest, len);
1765 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1766 replace_call_with_value (gsi, temp);
1771 if (! tree_fits_uhwi_p (size))
1774 tree maxlen = get_maxval_strlen (len, 2);
1775 if (! integer_all_onesp (size))
1777 if (! tree_fits_uhwi_p (len))
1779 /* If LEN is not constant, try MAXLEN too.
1780 For MAXLEN only allow optimizing into non-_ocs function
1781 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1782 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1784 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1786 /* (void) __mempcpy_chk () can be optimized into
1787 (void) __memcpy_chk (). */
1788 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1792 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1793 replace_call_with_call_and_fold (gsi, repl);
1802 if (tree_int_cst_lt (size, maxlen))
1807 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1808 mem{cpy,pcpy,move,set} is available. */
1811 case BUILT_IN_MEMCPY_CHK:
1812 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1814 case BUILT_IN_MEMPCPY_CHK:
1815 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1817 case BUILT_IN_MEMMOVE_CHK:
1818 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1820 case BUILT_IN_MEMSET_CHK:
1821 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1830 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1831 replace_call_with_call_and_fold (gsi, repl);
1835 /* Fold a call to the __st[rp]cpy_chk builtin.
1836 DEST, SRC, and SIZE are the arguments to the call.
1837 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1838 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1839 strings passed as second argument. */
1842 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1844 tree src, tree size,
1845 enum built_in_function fcode)
1847 gimple *stmt = gsi_stmt (*gsi);
1848 location_t loc = gimple_location (stmt);
1849 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1852 /* If SRC and DEST are the same (and not volatile), return DEST. */
1853 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1855 replace_call_with_value (gsi, dest);
1859 if (! tree_fits_uhwi_p (size))
1862 tree maxlen = get_maxval_strlen (src, 1);
1863 if (! integer_all_onesp (size))
1865 len = c_strlen (src, 1);
1866 if (! len || ! tree_fits_uhwi_p (len))
1868 /* If LEN is not constant, try MAXLEN too.
1869 For MAXLEN only allow optimizing into non-_ocs function
1870 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1871 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1873 if (fcode == BUILT_IN_STPCPY_CHK)
1878 /* If return value of __stpcpy_chk is ignored,
1879 optimize into __strcpy_chk. */
1880 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1884 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1885 replace_call_with_call_and_fold (gsi, repl);
1889 if (! len || TREE_SIDE_EFFECTS (len))
1892 /* If c_strlen returned something, but not a constant,
1893 transform __strcpy_chk into __memcpy_chk. */
1894 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1898 gimple_seq stmts = NULL;
1899 len = gimple_convert (&stmts, loc, size_type_node, len);
1900 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1901 build_int_cst (size_type_node, 1));
1902 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1903 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1904 replace_call_with_call_and_fold (gsi, repl);
1911 if (! tree_int_cst_lt (maxlen, size))
1915 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1916 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1917 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1921 gimple *repl = gimple_build_call (fn, 2, dest, src);
1922 replace_call_with_call_and_fold (gsi, repl);
1926 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1927 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1928 length passed as third argument. IGNORE is true if return value can be
1929 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1932 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1933 tree dest, tree src,
1934 tree len, tree size,
1935 enum built_in_function fcode)
1937 gimple *stmt = gsi_stmt (*gsi);
1938 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1941 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1943 /* If return value of __stpncpy_chk is ignored,
1944 optimize into __strncpy_chk. */
1945 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1948 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1949 replace_call_with_call_and_fold (gsi, repl);
1954 if (! tree_fits_uhwi_p (size))
1957 tree maxlen = get_maxval_strlen (len, 2);
1958 if (! integer_all_onesp (size))
1960 if (! tree_fits_uhwi_p (len))
1962 /* If LEN is not constant, try MAXLEN too.
1963 For MAXLEN only allow optimizing into non-_ocs function
1964 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1965 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1971 if (tree_int_cst_lt (size, maxlen))
1975 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1976 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1977 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1981 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1982 replace_call_with_call_and_fold (gsi, repl);
1986 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1987 Return NULL_TREE if no simplification can be made. */
1990 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1992 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1993 location_t loc = gimple_location (stmt);
1994 tree dest = gimple_call_arg (stmt, 0);
1995 tree src = gimple_call_arg (stmt, 1);
1996 tree fn, len, lenp1;
1998 /* If the result is unused, replace stpcpy with strcpy. */
1999 if (gimple_call_lhs (stmt) == NULL_TREE)
2001 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2004 gimple_call_set_fndecl (stmt, fn);
2009 len = c_strlen (src, 1);
2011 || TREE_CODE (len) != INTEGER_CST)
2014 if (optimize_function_for_size_p (cfun)
2015 /* If length is zero it's small enough. */
2016 && !integer_zerop (len))
2019 /* If the source has a known length replace stpcpy with memcpy. */
2020 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2024 gimple_seq stmts = NULL;
2025 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2026 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2027 tem, build_int_cst (size_type_node, 1));
2028 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2029 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2030 gimple_set_vuse (repl, gimple_vuse (stmt));
2031 gimple_set_vdef (repl, gimple_vdef (stmt));
2032 if (gimple_vdef (repl)
2033 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2034 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2035 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2036 /* Replace the result with dest + len. */
2038 tem = gimple_convert (&stmts, loc, sizetype, len);
2039 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2040 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2041 POINTER_PLUS_EXPR, dest, tem);
2042 gsi_replace (gsi, ret, false);
2043 /* Finally fold the memcpy call. */
2044 gimple_stmt_iterator gsi2 = *gsi;
2050 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2051 NULL_TREE if a normal call should be emitted rather than expanding
2052 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2053 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2054 passed as second argument. */
2057 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2058 enum built_in_function fcode)
2060 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2061 tree dest, size, len, fn, fmt, flag;
2062 const char *fmt_str;
2064 /* Verify the required arguments in the original call. */
2065 if (gimple_call_num_args (stmt) < 5)
2068 dest = gimple_call_arg (stmt, 0);
2069 len = gimple_call_arg (stmt, 1);
2070 flag = gimple_call_arg (stmt, 2);
2071 size = gimple_call_arg (stmt, 3);
2072 fmt = gimple_call_arg (stmt, 4);
2074 if (! tree_fits_uhwi_p (size))
2077 if (! integer_all_onesp (size))
2079 tree maxlen = get_maxval_strlen (len, 2);
2080 if (! tree_fits_uhwi_p (len))
2082 /* If LEN is not constant, try MAXLEN too.
2083 For MAXLEN only allow optimizing into non-_ocs function
2084 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2085 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2091 if (tree_int_cst_lt (size, maxlen))
2095 if (!init_target_chars ())
2098 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2099 or if format doesn't contain % chars or is "%s". */
2100 if (! integer_zerop (flag))
2102 fmt_str = c_getstr (fmt);
2103 if (fmt_str == NULL)
2105 if (strchr (fmt_str, target_percent) != NULL
2106 && strcmp (fmt_str, target_percent_s))
2110 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2112 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2113 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2117 /* Replace the called function and the first 5 argument by 3 retaining
2118 trailing varargs. */
2119 gimple_call_set_fndecl (stmt, fn);
2120 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2121 gimple_call_set_arg (stmt, 0, dest);
2122 gimple_call_set_arg (stmt, 1, len);
2123 gimple_call_set_arg (stmt, 2, fmt);
2124 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2125 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2126 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2131 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2132 Return NULL_TREE if a normal call should be emitted rather than
2133 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2134 or BUILT_IN_VSPRINTF_CHK. */
2137 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2138 enum built_in_function fcode)
2140 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2141 tree dest, size, len, fn, fmt, flag;
2142 const char *fmt_str;
2143 unsigned nargs = gimple_call_num_args (stmt);
2145 /* Verify the required arguments in the original call. */
2148 dest = gimple_call_arg (stmt, 0);
2149 flag = gimple_call_arg (stmt, 1);
2150 size = gimple_call_arg (stmt, 2);
2151 fmt = gimple_call_arg (stmt, 3);
2153 if (! tree_fits_uhwi_p (size))
2158 if (!init_target_chars ())
2161 /* Check whether the format is a literal string constant. */
2162 fmt_str = c_getstr (fmt);
2163 if (fmt_str != NULL)
2165 /* If the format doesn't contain % args or %%, we know the size. */
2166 if (strchr (fmt_str, target_percent) == 0)
2168 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2169 len = build_int_cstu (size_type_node, strlen (fmt_str));
2171 /* If the format is "%s" and first ... argument is a string literal,
2172 we know the size too. */
2173 else if (fcode == BUILT_IN_SPRINTF_CHK
2174 && strcmp (fmt_str, target_percent_s) == 0)
2180 arg = gimple_call_arg (stmt, 4);
2181 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2183 len = c_strlen (arg, 1);
2184 if (! len || ! tree_fits_uhwi_p (len))
2191 if (! integer_all_onesp (size))
2193 if (! len || ! tree_int_cst_lt (len, size))
2197 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2198 or if format doesn't contain % chars or is "%s". */
2199 if (! integer_zerop (flag))
2201 if (fmt_str == NULL)
2203 if (strchr (fmt_str, target_percent) != NULL
2204 && strcmp (fmt_str, target_percent_s))
2208 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2209 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2210 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2214 /* Replace the called function and the first 4 argument by 2 retaining
2215 trailing varargs. */
2216 gimple_call_set_fndecl (stmt, fn);
2217 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2218 gimple_call_set_arg (stmt, 0, dest);
2219 gimple_call_set_arg (stmt, 1, fmt);
2220 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2221 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2222 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2227 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2228 ORIG may be null if this is a 2-argument call. We don't attempt to
2229 simplify calls with more than 3 arguments.
2231 Return NULL_TREE if no simplification was possible, otherwise return the
2232 simplified form of the call as a tree. If IGNORED is true, it means that
2233 the caller does not use the returned value of the function. */
2236 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2238 gimple *stmt = gsi_stmt (*gsi);
2239 tree dest = gimple_call_arg (stmt, 0);
2240 tree fmt = gimple_call_arg (stmt, 1);
2241 tree orig = NULL_TREE;
2242 const char *fmt_str = NULL;
2244 /* Verify the required arguments in the original call. We deal with two
2245 types of sprintf() calls: 'sprintf (str, fmt)' and
2246 'sprintf (dest, "%s", orig)'. */
2247 if (gimple_call_num_args (stmt) > 3)
2250 if (gimple_call_num_args (stmt) == 3)
2251 orig = gimple_call_arg (stmt, 2);
2253 /* Check whether the format is a literal string constant. */
2254 fmt_str = c_getstr (fmt);
2255 if (fmt_str == NULL)
2258 if (!init_target_chars ())
2261 /* If the format doesn't contain % args or %%, use strcpy. */
2262 if (strchr (fmt_str, target_percent) == NULL)
2264 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2269 /* Don't optimize sprintf (buf, "abc", ptr++). */
2273 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2274 'format' is known to contain no % formats. */
2275 gimple_seq stmts = NULL;
2276 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2277 gimple_seq_add_stmt_without_update (&stmts, repl);
2278 if (gimple_call_lhs (stmt))
2280 repl = gimple_build_assign (gimple_call_lhs (stmt),
2281 build_int_cst (integer_type_node,
2283 gimple_seq_add_stmt_without_update (&stmts, repl);
2284 gsi_replace_with_seq_vops (gsi, stmts);
2285 /* gsi now points at the assignment to the lhs, get a
2286 stmt iterator to the memcpy call.
2287 ??? We can't use gsi_for_stmt as that doesn't work when the
2288 CFG isn't built yet. */
2289 gimple_stmt_iterator gsi2 = *gsi;
2295 gsi_replace_with_seq_vops (gsi, stmts);
2301 /* If the format is "%s", use strcpy if the result isn't used. */
2302 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2305 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2310 /* Don't crash on sprintf (str1, "%s"). */
2314 tree orig_len = NULL_TREE;
2315 if (gimple_call_lhs (stmt))
2317 orig_len = get_maxval_strlen (orig, 0);
2322 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2323 gimple_seq stmts = NULL;
2324 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2325 gimple_seq_add_stmt_without_update (&stmts, repl);
2326 if (gimple_call_lhs (stmt))
2328 if (!useless_type_conversion_p (integer_type_node,
2329 TREE_TYPE (orig_len)))
2330 orig_len = fold_convert (integer_type_node, orig_len);
2331 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2332 gimple_seq_add_stmt_without_update (&stmts, repl);
2333 gsi_replace_with_seq_vops (gsi, stmts);
2334 /* gsi now points at the assignment to the lhs, get a
2335 stmt iterator to the memcpy call.
2336 ??? We can't use gsi_for_stmt as that doesn't work when the
2337 CFG isn't built yet. */
2338 gimple_stmt_iterator gsi2 = *gsi;
2344 gsi_replace_with_seq_vops (gsi, stmts);
2352 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2353 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2354 attempt to simplify calls with more than 4 arguments.
2356 Return NULL_TREE if no simplification was possible, otherwise return the
2357 simplified form of the call as a tree. If IGNORED is true, it means that
2358 the caller does not use the returned value of the function. */
2361 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2363 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2364 tree dest = gimple_call_arg (stmt, 0);
2365 tree destsize = gimple_call_arg (stmt, 1);
2366 tree fmt = gimple_call_arg (stmt, 2);
2367 tree orig = NULL_TREE;
2368 const char *fmt_str = NULL;
2370 if (gimple_call_num_args (stmt) > 4)
2373 if (gimple_call_num_args (stmt) == 4)
2374 orig = gimple_call_arg (stmt, 3);
2376 if (!tree_fits_uhwi_p (destsize))
2378 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2380 /* Check whether the format is a literal string constant. */
2381 fmt_str = c_getstr (fmt);
2382 if (fmt_str == NULL)
2385 if (!init_target_chars ())
2388 /* If the format doesn't contain % args or %%, use strcpy. */
2389 if (strchr (fmt_str, target_percent) == NULL)
2391 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2395 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2399 /* We could expand this as
2400 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2402 memcpy (str, fmt_with_nul_at_cstm1, cst);
2403 but in the former case that might increase code size
2404 and in the latter case grow .rodata section too much.
2406 size_t len = strlen (fmt_str);
2410 gimple_seq stmts = NULL;
2411 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2412 gimple_seq_add_stmt_without_update (&stmts, repl);
2413 if (gimple_call_lhs (stmt))
2415 repl = gimple_build_assign (gimple_call_lhs (stmt),
2416 build_int_cst (integer_type_node, len));
2417 gimple_seq_add_stmt_without_update (&stmts, repl);
2418 gsi_replace_with_seq_vops (gsi, stmts);
2419 /* gsi now points at the assignment to the lhs, get a
2420 stmt iterator to the memcpy call.
2421 ??? We can't use gsi_for_stmt as that doesn't work when the
2422 CFG isn't built yet. */
2423 gimple_stmt_iterator gsi2 = *gsi;
2429 gsi_replace_with_seq_vops (gsi, stmts);
2435 /* If the format is "%s", use strcpy if the result isn't used. */
2436 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2438 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2442 /* Don't crash on snprintf (str1, cst, "%s"). */
2446 tree orig_len = get_maxval_strlen (orig, 0);
2447 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2450 /* We could expand this as
2451 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2453 memcpy (str1, str2_with_nul_at_cstm1, cst);
2454 but in the former case that might increase code size
2455 and in the latter case grow .rodata section too much.
2457 if (compare_tree_int (orig_len, destlen) >= 0)
2460 /* Convert snprintf (str1, cst, "%s", str2) into
2461 strcpy (str1, str2) if strlen (str2) < cst. */
2462 gimple_seq stmts = NULL;
2463 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2464 gimple_seq_add_stmt_without_update (&stmts, repl);
2465 if (gimple_call_lhs (stmt))
2467 if (!useless_type_conversion_p (integer_type_node,
2468 TREE_TYPE (orig_len)))
2469 orig_len = fold_convert (integer_type_node, orig_len);
2470 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2471 gimple_seq_add_stmt_without_update (&stmts, repl);
2472 gsi_replace_with_seq_vops (gsi, stmts);
2473 /* gsi now points at the assignment to the lhs, get a
2474 stmt iterator to the memcpy call.
2475 ??? We can't use gsi_for_stmt as that doesn't work when the
2476 CFG isn't built yet. */
2477 gimple_stmt_iterator gsi2 = *gsi;
2483 gsi_replace_with_seq_vops (gsi, stmts);
2491 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2492 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2493 more than 3 arguments, and ARG may be null in the 2-argument case.
2495 Return NULL_TREE if no simplification was possible, otherwise return the
2496 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2497 code of the function to be simplified. */
2500 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2501 tree fp, tree fmt, tree arg,
2502 enum built_in_function fcode)
2504 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2505 tree fn_fputc, fn_fputs;
2506 const char *fmt_str = NULL;
2508 /* If the return value is used, don't do the transformation. */
2509 if (gimple_call_lhs (stmt) != NULL_TREE)
2512 /* Check whether the format is a literal string constant. */
2513 fmt_str = c_getstr (fmt);
2514 if (fmt_str == NULL)
2517 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2519 /* If we're using an unlocked function, assume the other
2520 unlocked functions exist explicitly. */
2521 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2522 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2526 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2527 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2530 if (!init_target_chars ())
2533 /* If the format doesn't contain % args or %%, use strcpy. */
2534 if (strchr (fmt_str, target_percent) == NULL)
2536 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2540 /* If the format specifier was "", fprintf does nothing. */
2541 if (fmt_str[0] == '\0')
2543 replace_call_with_value (gsi, NULL_TREE);
2547 /* When "string" doesn't contain %, replace all cases of
2548 fprintf (fp, string) with fputs (string, fp). The fputs
2549 builtin will take care of special cases like length == 1. */
2552 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2553 replace_call_with_call_and_fold (gsi, repl);
2558 /* The other optimizations can be done only on the non-va_list variants. */
2559 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2562 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2563 else if (strcmp (fmt_str, target_percent_s) == 0)
2565 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2569 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2570 replace_call_with_call_and_fold (gsi, repl);
2575 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2576 else if (strcmp (fmt_str, target_percent_c) == 0)
2579 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2583 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2584 replace_call_with_call_and_fold (gsi, repl);
2592 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2593 FMT and ARG are the arguments to the call; we don't fold cases with
2594 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2596 Return NULL_TREE if no simplification was possible, otherwise return the
2597 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2598 code of the function to be simplified. */
2601 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2602 tree arg, enum built_in_function fcode)
2604 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2605 tree fn_putchar, fn_puts, newarg;
2606 const char *fmt_str = NULL;
2608 /* If the return value is used, don't do the transformation. */
2609 if (gimple_call_lhs (stmt) != NULL_TREE)
2612 /* Check whether the format is a literal string constant. */
2613 fmt_str = c_getstr (fmt);
2614 if (fmt_str == NULL)
2617 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2619 /* If we're using an unlocked function, assume the other
2620 unlocked functions exist explicitly. */
2621 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2622 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2626 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2627 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2630 if (!init_target_chars ())
2633 if (strcmp (fmt_str, target_percent_s) == 0
2634 || strchr (fmt_str, target_percent) == NULL)
2638 if (strcmp (fmt_str, target_percent_s) == 0)
2640 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2643 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2646 str = c_getstr (arg);
2652 /* The format specifier doesn't contain any '%' characters. */
2653 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2659 /* If the string was "", printf does nothing. */
2662 replace_call_with_value (gsi, NULL_TREE);
2666 /* If the string has length of 1, call putchar. */
2669 /* Given printf("c"), (where c is any one character,)
2670 convert "c"[0] to an int and pass that to the replacement
2672 newarg = build_int_cst (integer_type_node, str[0]);
2675 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2676 replace_call_with_call_and_fold (gsi, repl);
2682 /* If the string was "string\n", call puts("string"). */
2683 size_t len = strlen (str);
2684 if ((unsigned char)str[len - 1] == target_newline
2685 && (size_t) (int) len == len
2689 tree offset_node, string_cst;
2691 /* Create a NUL-terminated string that's one char shorter
2692 than the original, stripping off the trailing '\n'. */
2693 newarg = build_string_literal (len, str);
2694 string_cst = string_constant (newarg, &offset_node);
2695 gcc_checking_assert (string_cst
2696 && (TREE_STRING_LENGTH (string_cst)
2698 && integer_zerop (offset_node)
2700 TREE_STRING_POINTER (string_cst)[len - 1]
2702 /* build_string_literal creates a new STRING_CST,
2703 modify it in place to avoid double copying. */
2704 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2705 newstr[len - 1] = '\0';
2708 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2709 replace_call_with_call_and_fold (gsi, repl);
2714 /* We'd like to arrange to call fputs(string,stdout) here,
2715 but we need stdout and don't have a way to get it yet. */
2720 /* The other optimizations can be done only on the non-va_list variants. */
2721 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2724 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2725 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2727 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2731 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2732 replace_call_with_call_and_fold (gsi, repl);
2737 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2738 else if (strcmp (fmt_str, target_percent_c) == 0)
2740 if (!arg || ! useless_type_conversion_p (integer_type_node,
2745 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2746 replace_call_with_call_and_fold (gsi, repl);
2756 /* Fold a call to __builtin_strlen with known length LEN. */
2759 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2761 gimple *stmt = gsi_stmt (*gsi);
2762 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2765 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2766 replace_call_with_value (gsi, len);
2770 /* Fold a call to __builtin_acc_on_device. */
2773 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2775 /* Defer folding until we know which compiler we're in. */
2776 if (symtab->state != EXPANSION)
2779 unsigned val_host = GOMP_DEVICE_HOST;
2780 unsigned val_dev = GOMP_DEVICE_NONE;
2782 #ifdef ACCEL_COMPILER
2783 val_host = GOMP_DEVICE_NOT_HOST;
2784 val_dev = ACCEL_COMPILER_acc_device;
2787 location_t loc = gimple_location (gsi_stmt (*gsi));
2789 tree host_eq = make_ssa_name (boolean_type_node);
2790 gimple *host_ass = gimple_build_assign
2791 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2792 gimple_set_location (host_ass, loc);
2793 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2795 tree dev_eq = make_ssa_name (boolean_type_node);
2796 gimple *dev_ass = gimple_build_assign
2797 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2798 gimple_set_location (dev_ass, loc);
2799 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2801 tree result = make_ssa_name (boolean_type_node);
2802 gimple *result_ass = gimple_build_assign
2803 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2804 gimple_set_location (result_ass, loc);
2805 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2807 replace_call_with_value (gsi, result);
2812 /* Fold the non-target builtin at *GSI and return whether any simplification
2816 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2818 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2819 tree callee = gimple_call_fndecl (stmt);
2821 /* Give up for always_inline inline builtins until they are
2823 if (avoid_folding_inline_builtin (callee))
2826 unsigned n = gimple_call_num_args (stmt);
2827 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2830 case BUILT_IN_BZERO:
2831 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2832 gimple_call_arg (stmt, 1));
2833 case BUILT_IN_MEMSET:
2834 return gimple_fold_builtin_memset (gsi,
2835 gimple_call_arg (stmt, 1),
2836 gimple_call_arg (stmt, 2));
2837 case BUILT_IN_BCOPY:
2838 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2839 gimple_call_arg (stmt, 0), 3);
2840 case BUILT_IN_MEMCPY:
2841 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2842 gimple_call_arg (stmt, 1), 0);
2843 case BUILT_IN_MEMPCPY:
2844 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2845 gimple_call_arg (stmt, 1), 1);
2846 case BUILT_IN_MEMMOVE:
2847 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2848 gimple_call_arg (stmt, 1), 3);
2849 case BUILT_IN_SPRINTF_CHK:
2850 case BUILT_IN_VSPRINTF_CHK:
2851 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2852 case BUILT_IN_STRCAT_CHK:
2853 return gimple_fold_builtin_strcat_chk (gsi);
2854 case BUILT_IN_STRNCAT_CHK:
2855 return gimple_fold_builtin_strncat_chk (gsi);
2856 case BUILT_IN_STRLEN:
2857 return gimple_fold_builtin_strlen (gsi);
2858 case BUILT_IN_STRCPY:
2859 return gimple_fold_builtin_strcpy (gsi,
2860 gimple_call_arg (stmt, 0),
2861 gimple_call_arg (stmt, 1));
2862 case BUILT_IN_STRNCPY:
2863 return gimple_fold_builtin_strncpy (gsi,
2864 gimple_call_arg (stmt, 0),
2865 gimple_call_arg (stmt, 1),
2866 gimple_call_arg (stmt, 2));
2867 case BUILT_IN_STRCAT:
2868 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2869 gimple_call_arg (stmt, 1));
2870 case BUILT_IN_STRNCAT:
2871 return gimple_fold_builtin_strncat (gsi);
2872 case BUILT_IN_STRCHR:
2873 if (gimple_fold_builtin_strchr (gsi))
2875 /* Perform additional folding in builtin.c. */
2877 case BUILT_IN_FPUTS:
2878 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2879 gimple_call_arg (stmt, 1), false);
2880 case BUILT_IN_FPUTS_UNLOCKED:
2881 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2882 gimple_call_arg (stmt, 1), true);
2883 case BUILT_IN_MEMCPY_CHK:
2884 case BUILT_IN_MEMPCPY_CHK:
2885 case BUILT_IN_MEMMOVE_CHK:
2886 case BUILT_IN_MEMSET_CHK:
2887 return gimple_fold_builtin_memory_chk (gsi,
2888 gimple_call_arg (stmt, 0),
2889 gimple_call_arg (stmt, 1),
2890 gimple_call_arg (stmt, 2),
2891 gimple_call_arg (stmt, 3),
2893 case BUILT_IN_STPCPY:
2894 return gimple_fold_builtin_stpcpy (gsi);
2895 case BUILT_IN_STRCPY_CHK:
2896 case BUILT_IN_STPCPY_CHK:
2897 return gimple_fold_builtin_stxcpy_chk (gsi,
2898 gimple_call_arg (stmt, 0),
2899 gimple_call_arg (stmt, 1),
2900 gimple_call_arg (stmt, 2),
2902 case BUILT_IN_STRNCPY_CHK:
2903 case BUILT_IN_STPNCPY_CHK:
2904 return gimple_fold_builtin_stxncpy_chk (gsi,
2905 gimple_call_arg (stmt, 0),
2906 gimple_call_arg (stmt, 1),
2907 gimple_call_arg (stmt, 2),
2908 gimple_call_arg (stmt, 3),
2910 case BUILT_IN_SNPRINTF_CHK:
2911 case BUILT_IN_VSNPRINTF_CHK:
2912 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2913 case BUILT_IN_SNPRINTF:
2914 return gimple_fold_builtin_snprintf (gsi);
2915 case BUILT_IN_SPRINTF:
2916 return gimple_fold_builtin_sprintf (gsi);
2917 case BUILT_IN_FPRINTF:
2918 case BUILT_IN_FPRINTF_UNLOCKED:
2919 case BUILT_IN_VFPRINTF:
2920 if (n == 2 || n == 3)
2921 return gimple_fold_builtin_fprintf (gsi,
2922 gimple_call_arg (stmt, 0),
2923 gimple_call_arg (stmt, 1),
2925 ? gimple_call_arg (stmt, 2)
2929 case BUILT_IN_FPRINTF_CHK:
2930 case BUILT_IN_VFPRINTF_CHK:
2931 if (n == 3 || n == 4)
2932 return gimple_fold_builtin_fprintf (gsi,
2933 gimple_call_arg (stmt, 0),
2934 gimple_call_arg (stmt, 2),
2936 ? gimple_call_arg (stmt, 3)
2940 case BUILT_IN_PRINTF:
2941 case BUILT_IN_PRINTF_UNLOCKED:
2942 case BUILT_IN_VPRINTF:
2943 if (n == 1 || n == 2)
2944 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2946 ? gimple_call_arg (stmt, 1)
2947 : NULL_TREE, fcode);
2949 case BUILT_IN_PRINTF_CHK:
2950 case BUILT_IN_VPRINTF_CHK:
2951 if (n == 2 || n == 3)
2952 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2954 ? gimple_call_arg (stmt, 2)
2955 : NULL_TREE, fcode);
2957 case BUILT_IN_ACC_ON_DEVICE:
2958 return gimple_fold_builtin_acc_on_device (gsi,
2959 gimple_call_arg (stmt, 0));
2963 /* Try the generic builtin folder. */
2964 bool ignore = (gimple_call_lhs (stmt) == NULL);
2965 tree result = fold_call_stmt (stmt, ignore);
2969 STRIP_NOPS (result);
2971 result = fold_convert (gimple_call_return_type (stmt), result);
2972 if (!update_call_from_tree (gsi, result))
2973 gimplify_and_update_call_from_tree (gsi, result);
2980 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2981 function calls to constants, where possible. */
2984 fold_internal_goacc_dim (const gimple *call)
2986 int axis = get_oacc_ifn_dim_arg (call);
2987 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2988 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2989 tree result = NULL_TREE;
2991 /* If the size is 1, or we only want the size and it is not dynamic,
2992 we know the answer. */
2993 if (size == 1 || (!is_pos && size))
2995 tree type = TREE_TYPE (gimple_call_lhs (call));
2996 result = build_int_cst (type, size - is_pos);
3002 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3003 doesn't fit into TYPE. The test for overflow should be regardless of
3004 -fwrapv, and even for unsigned types. */
3007 arith_overflowed_p (enum tree_code code, const_tree type,
3008 const_tree arg0, const_tree arg1)
3010 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3011 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3013 widest2_int warg0 = widest2_int_cst (arg0);
3014 widest2_int warg1 = widest2_int_cst (arg1);
3018 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3019 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3020 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3021 default: gcc_unreachable ();
3023 signop sign = TYPE_SIGN (type);
3024 if (sign == UNSIGNED && wi::neg_p (wres))
3026 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3029 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3030 The statement may be replaced by another statement, e.g., if the call
3031 simplifies to a constant value. Return true if any changes were made.
3032 It is assumed that the operands have been previously folded. */
3035 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3037 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3039 bool changed = false;
3042 /* Fold *& in call arguments. */
3043 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3044 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3046 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3049 gimple_call_set_arg (stmt, i, tmp);
3054 /* Check for virtual calls that became direct calls. */
3055 callee = gimple_call_fn (stmt);
3056 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3058 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3060 if (dump_file && virtual_method_call_p (callee)
3061 && !possible_polymorphic_call_target_p
3062 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3063 (OBJ_TYPE_REF_EXPR (callee)))))
3066 "Type inheritance inconsistent devirtualization of ");
3067 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3068 fprintf (dump_file, " to ");
3069 print_generic_expr (dump_file, callee, TDF_SLIM);
3070 fprintf (dump_file, "\n");
3073 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3076 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3079 vec <cgraph_node *>targets
3080 = possible_polymorphic_call_targets (callee, stmt, &final);
3081 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3083 tree lhs = gimple_call_lhs (stmt);
3084 if (dump_enabled_p ())
3086 location_t loc = gimple_location_safe (stmt);
3087 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3088 "folding virtual function call to %s\n",
3089 targets.length () == 1
3090 ? targets[0]->name ()
3091 : "__builtin_unreachable");
3093 if (targets.length () == 1)
3095 tree fndecl = targets[0]->decl;
3096 gimple_call_set_fndecl (stmt, fndecl);
3098 /* If changing the call to __cxa_pure_virtual
3099 or similar noreturn function, adjust gimple_call_fntype
3101 if ((gimple_call_flags (stmt) & ECF_NORETURN)
3102 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3103 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3104 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3106 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3107 /* If the call becomes noreturn, remove the lhs. */
3109 && (gimple_call_flags (stmt) & ECF_NORETURN)
3110 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3111 || ((TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))
3113 && !TREE_ADDRESSABLE (TREE_TYPE (lhs)))))
3115 if (TREE_CODE (lhs) == SSA_NAME)
3117 tree var = create_tmp_var (TREE_TYPE (lhs));
3118 tree def = get_or_create_ssa_default_def (cfun, var);
3119 gimple *new_stmt = gimple_build_assign (lhs, def);
3120 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3122 gimple_call_set_lhs (stmt, NULL_TREE);
3124 maybe_remove_unused_call_args (cfun, stmt);
3128 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3129 gimple *new_stmt = gimple_build_call (fndecl, 0);
3130 gimple_set_location (new_stmt, gimple_location (stmt));
3131 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3133 tree var = create_tmp_var (TREE_TYPE (lhs));
3134 tree def = get_or_create_ssa_default_def (cfun, var);
3136 /* To satisfy condition for
3137 cgraph_update_edges_for_call_stmt_node,
3138 we need to preserve GIMPLE_CALL statement
3139 at position of GSI iterator. */
3140 update_call_from_tree (gsi, def);
3141 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3145 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3146 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3147 gsi_replace (gsi, new_stmt, false);
3155 /* Check for indirect calls that became direct calls, and then
3156 no longer require a static chain. */
3157 if (gimple_call_chain (stmt))
3159 tree fn = gimple_call_fndecl (stmt);
3160 if (fn && !DECL_STATIC_CHAIN (fn))
3162 gimple_call_set_chain (stmt, NULL);
3167 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3170 gimple_call_set_chain (stmt, tmp);
3179 /* Check for builtins that CCP can handle using information not
3180 available in the generic fold routines. */
3181 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3183 if (gimple_fold_builtin (gsi))
3186 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3188 changed |= targetm.gimple_fold_builtin (gsi);
3190 else if (gimple_call_internal_p (stmt))
3192 enum tree_code subcode = ERROR_MARK;
3193 tree result = NULL_TREE;
3194 bool cplx_result = false;
3195 tree overflow = NULL_TREE;
3196 switch (gimple_call_internal_fn (stmt))
3198 case IFN_BUILTIN_EXPECT:
3199 result = fold_builtin_expect (gimple_location (stmt),
3200 gimple_call_arg (stmt, 0),
3201 gimple_call_arg (stmt, 1),
3202 gimple_call_arg (stmt, 2));
3204 case IFN_UBSAN_OBJECT_SIZE:
3205 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3206 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3207 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3208 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3209 gimple_call_arg (stmt, 2))))
3211 gsi_replace (gsi, gimple_build_nop (), false);
3212 unlink_stmt_vdef (stmt);
3213 release_defs (stmt);
3217 case IFN_GOACC_DIM_SIZE:
3218 case IFN_GOACC_DIM_POS:
3219 result = fold_internal_goacc_dim (stmt);
3221 case IFN_UBSAN_CHECK_ADD:
3222 subcode = PLUS_EXPR;
3224 case IFN_UBSAN_CHECK_SUB:
3225 subcode = MINUS_EXPR;
3227 case IFN_UBSAN_CHECK_MUL:
3228 subcode = MULT_EXPR;
3230 case IFN_ADD_OVERFLOW:
3231 subcode = PLUS_EXPR;
3234 case IFN_SUB_OVERFLOW:
3235 subcode = MINUS_EXPR;
3238 case IFN_MUL_OVERFLOW:
3239 subcode = MULT_EXPR;
3245 if (subcode != ERROR_MARK)
3247 tree arg0 = gimple_call_arg (stmt, 0);
3248 tree arg1 = gimple_call_arg (stmt, 1);
3249 tree type = TREE_TYPE (arg0);
3252 tree lhs = gimple_call_lhs (stmt);
3253 if (lhs == NULL_TREE)
3256 type = TREE_TYPE (TREE_TYPE (lhs));
3258 if (type == NULL_TREE)
3260 /* x = y + 0; x = y - 0; x = y * 0; */
3261 else if (integer_zerop (arg1))
3262 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3263 /* x = 0 + y; x = 0 * y; */
3264 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3265 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3267 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3268 result = integer_zero_node;
3269 /* x = y * 1; x = 1 * y; */
3270 else if (subcode == MULT_EXPR && integer_onep (arg1))
3272 else if (subcode == MULT_EXPR && integer_onep (arg0))
3274 else if (TREE_CODE (arg0) == INTEGER_CST
3275 && TREE_CODE (arg1) == INTEGER_CST)
3278 result = int_const_binop (subcode, fold_convert (type, arg0),
3279 fold_convert (type, arg1));
3281 result = int_const_binop (subcode, arg0, arg1);
3282 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3285 overflow = build_one_cst (type);
3292 if (result == integer_zero_node)
3293 result = build_zero_cst (type);
3294 else if (cplx_result && TREE_TYPE (result) != type)
3296 if (TREE_CODE (result) == INTEGER_CST)
3298 if (arith_overflowed_p (PLUS_EXPR, type, result,
3300 overflow = build_one_cst (type);
3302 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3303 && TYPE_UNSIGNED (type))
3304 || (TYPE_PRECISION (type)
3305 < (TYPE_PRECISION (TREE_TYPE (result))
3306 + (TYPE_UNSIGNED (TREE_TYPE (result))
3307 && !TYPE_UNSIGNED (type)))))
3310 result = fold_convert (type, result);
3317 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3318 result = drop_tree_overflow (result);
3321 if (overflow == NULL_TREE)
3322 overflow = build_zero_cst (TREE_TYPE (result));
3323 tree ctype = build_complex_type (TREE_TYPE (result));
3324 if (TREE_CODE (result) == INTEGER_CST
3325 && TREE_CODE (overflow) == INTEGER_CST)
3326 result = build_complex (ctype, result, overflow);
3328 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3329 ctype, result, overflow);
3331 if (!update_call_from_tree (gsi, result))
3332 gimplify_and_update_call_from_tree (gsi, result);
3341 /* Return true whether NAME has a use on STMT. */
3344 has_use_on_stmt (tree name, gimple *stmt)
3346 imm_use_iterator iter;
3347 use_operand_p use_p;
3348 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3349 if (USE_STMT (use_p) == stmt)
3354 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3357 Replaces *GSI with the simplification result in RCODE and OPS
3358 and the associated statements in *SEQ. Does the replacement
3359 according to INPLACE and returns true if the operation succeeded. */
3362 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3363 code_helper rcode, tree *ops,
3364 gimple_seq *seq, bool inplace)
3366 gimple *stmt = gsi_stmt (*gsi);
3368 /* Play safe and do not allow abnormals to be mentioned in
3369 newly created statements. See also maybe_push_res_to_seq.
3370 As an exception allow such uses if there was a use of the
3371 same SSA name on the old stmt. */
3372 if ((TREE_CODE (ops[0]) == SSA_NAME
3373 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3374 && !has_use_on_stmt (ops[0], stmt))
3376 && TREE_CODE (ops[1]) == SSA_NAME
3377 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3378 && !has_use_on_stmt (ops[1], stmt))
3380 && TREE_CODE (ops[2]) == SSA_NAME
3381 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3382 && !has_use_on_stmt (ops[2], stmt))
3383 || (COMPARISON_CLASS_P (ops[0])
3384 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3385 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3386 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3387 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3388 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3389 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3392 /* Don't insert new statements when INPLACE is true, even if we could
3393 reuse STMT for the final statement. */
3394 if (inplace && !gimple_seq_empty_p (*seq))
3397 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3399 gcc_assert (rcode.is_tree_code ());
3400 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3401 /* GIMPLE_CONDs condition may not throw. */
3402 && (!flag_exceptions
3403 || !cfun->can_throw_non_call_exceptions
3404 || !operation_could_trap_p (rcode,
3405 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3407 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3408 else if (rcode == SSA_NAME)
3409 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3410 build_zero_cst (TREE_TYPE (ops[0])));
3411 else if (rcode == INTEGER_CST)
3413 if (integer_zerop (ops[0]))
3414 gimple_cond_make_false (cond_stmt);
3416 gimple_cond_make_true (cond_stmt);
3420 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3424 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3425 build_zero_cst (TREE_TYPE (res)));
3429 if (dump_file && (dump_flags & TDF_DETAILS))
3431 fprintf (dump_file, "gimple_simplified to ");
3432 if (!gimple_seq_empty_p (*seq))
3433 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3434 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3437 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3440 else if (is_gimple_assign (stmt)
3441 && rcode.is_tree_code ())
3444 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3446 maybe_build_generic_op (rcode,
3447 TREE_TYPE (gimple_assign_lhs (stmt)),
3448 &ops[0], ops[1], ops[2]);
3449 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3450 if (dump_file && (dump_flags & TDF_DETAILS))
3452 fprintf (dump_file, "gimple_simplified to ");
3453 if (!gimple_seq_empty_p (*seq))
3454 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3455 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3458 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3462 else if (rcode.is_fn_code ()
3463 && gimple_call_combined_fn (stmt) == rcode)
3466 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3468 gcc_assert (ops[i] != NULL_TREE);
3469 gimple_call_set_arg (stmt, i, ops[i]);
3472 gcc_assert (ops[i] == NULL_TREE);
3473 if (dump_file && (dump_flags & TDF_DETAILS))
3475 fprintf (dump_file, "gimple_simplified to ");
3476 if (!gimple_seq_empty_p (*seq))
3477 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3478 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3480 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3485 if (gimple_has_lhs (stmt))
3487 tree lhs = gimple_get_lhs (stmt);
3488 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3491 if (dump_file && (dump_flags & TDF_DETAILS))
3493 fprintf (dump_file, "gimple_simplified to ");
3494 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3496 gsi_replace_with_seq_vops (gsi, *seq);
3506 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3509 maybe_canonicalize_mem_ref_addr (tree *t)
3513 if (TREE_CODE (*t) == ADDR_EXPR)
3514 t = &TREE_OPERAND (*t, 0);
3516 while (handled_component_p (*t))
3517 t = &TREE_OPERAND (*t, 0);
3519 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3520 of invariant addresses into a SSA name MEM_REF address. */
3521 if (TREE_CODE (*t) == MEM_REF
3522 || TREE_CODE (*t) == TARGET_MEM_REF)
3524 tree addr = TREE_OPERAND (*t, 0);
3525 if (TREE_CODE (addr) == ADDR_EXPR
3526 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3527 || handled_component_p (TREE_OPERAND (addr, 0))))
3530 HOST_WIDE_INT coffset;
3531 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3536 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3537 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3538 TREE_OPERAND (*t, 1),
3539 size_int (coffset));
3542 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3543 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3546 /* Canonicalize back MEM_REFs to plain reference trees if the object
3547 accessed is a decl that has the same access semantics as the MEM_REF. */
3548 if (TREE_CODE (*t) == MEM_REF
3549 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3550 && integer_zerop (TREE_OPERAND (*t, 1))
3551 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3553 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3554 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3555 if (/* Same volatile qualification. */
3556 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3557 /* Same TBAA behavior with -fstrict-aliasing. */
3558 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3559 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3560 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3561 /* Same alignment. */
3562 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3563 /* We have to look out here to not drop a required conversion
3564 from the rhs to the lhs if *t appears on the lhs or vice-versa
3565 if it appears on the rhs. Thus require strict type
3567 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3569 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3574 /* Canonicalize TARGET_MEM_REF in particular with respect to
3575 the indexes becoming constant. */
3576 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3578 tree tem = maybe_fold_tmr (*t);
3589 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3590 distinguishes both cases. */
3593 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3595 bool changed = false;
3596 gimple *stmt = gsi_stmt (*gsi);
3599 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3601 ??? This shouldn't be done in generic folding but in the
3602 propagation helpers which also know whether an address was
3604 Also canonicalize operand order. */
3605 switch (gimple_code (stmt))
3608 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3610 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3611 if ((REFERENCE_CLASS_P (*rhs)
3612 || TREE_CODE (*rhs) == ADDR_EXPR)
3613 && maybe_canonicalize_mem_ref_addr (rhs))
3615 tree *lhs = gimple_assign_lhs_ptr (stmt);
3616 if (REFERENCE_CLASS_P (*lhs)
3617 && maybe_canonicalize_mem_ref_addr (lhs))
3622 /* Canonicalize operand order. */
3623 enum tree_code code = gimple_assign_rhs_code (stmt);
3624 if (TREE_CODE_CLASS (code) == tcc_comparison
3625 || commutative_tree_code (code)
3626 || commutative_ternary_tree_code (code))
3628 tree rhs1 = gimple_assign_rhs1 (stmt);
3629 tree rhs2 = gimple_assign_rhs2 (stmt);
3630 if (tree_swap_operands_p (rhs1, rhs2, false))
3632 gimple_assign_set_rhs1 (stmt, rhs2);
3633 gimple_assign_set_rhs2 (stmt, rhs1);
3634 if (TREE_CODE_CLASS (code) == tcc_comparison)
3635 gimple_assign_set_rhs_code (stmt,
3636 swap_tree_comparison (code));
3644 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3646 tree *arg = gimple_call_arg_ptr (stmt, i);
3647 if (REFERENCE_CLASS_P (*arg)
3648 && maybe_canonicalize_mem_ref_addr (arg))
3651 tree *lhs = gimple_call_lhs_ptr (stmt);
3653 && REFERENCE_CLASS_P (*lhs)
3654 && maybe_canonicalize_mem_ref_addr (lhs))
3660 gasm *asm_stmt = as_a <gasm *> (stmt);
3661 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3663 tree link = gimple_asm_output_op (asm_stmt, i);
3664 tree op = TREE_VALUE (link);
3665 if (REFERENCE_CLASS_P (op)
3666 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3669 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3671 tree link = gimple_asm_input_op (asm_stmt, i);
3672 tree op = TREE_VALUE (link);
3673 if ((REFERENCE_CLASS_P (op)
3674 || TREE_CODE (op) == ADDR_EXPR)
3675 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3681 if (gimple_debug_bind_p (stmt))
3683 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3685 && (REFERENCE_CLASS_P (*val)
3686 || TREE_CODE (*val) == ADDR_EXPR)
3687 && maybe_canonicalize_mem_ref_addr (val))
3693 /* Canonicalize operand order. */
3694 tree lhs = gimple_cond_lhs (stmt);
3695 tree rhs = gimple_cond_rhs (stmt);
3696 if (tree_swap_operands_p (lhs, rhs, false))
3698 gcond *gc = as_a <gcond *> (stmt);
3699 gimple_cond_set_lhs (gc, rhs);
3700 gimple_cond_set_rhs (gc, lhs);
3701 gimple_cond_set_code (gc,
3702 swap_tree_comparison (gimple_cond_code (gc)));
3709 /* Dispatch to pattern-based folding. */
3711 || is_gimple_assign (stmt)
3712 || gimple_code (stmt) == GIMPLE_COND)
3714 gimple_seq seq = NULL;
3717 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3718 valueize, valueize))
3720 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3723 gimple_seq_discard (seq);
3727 stmt = gsi_stmt (*gsi);
3729 /* Fold the main computation performed by the statement. */
3730 switch (gimple_code (stmt))
3734 /* Try to canonicalize for boolean-typed X the comparisons
3735 X == 0, X == 1, X != 0, and X != 1. */
3736 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3737 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3739 tree lhs = gimple_assign_lhs (stmt);
3740 tree op1 = gimple_assign_rhs1 (stmt);
3741 tree op2 = gimple_assign_rhs2 (stmt);
3742 tree type = TREE_TYPE (op1);
3744 /* Check whether the comparison operands are of the same boolean
3745 type as the result type is.
3746 Check that second operand is an integer-constant with value
3748 if (TREE_CODE (op2) == INTEGER_CST
3749 && (integer_zerop (op2) || integer_onep (op2))
3750 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3752 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3753 bool is_logical_not = false;
3755 /* X == 0 and X != 1 is a logical-not.of X
3756 X == 1 and X != 0 is X */
3757 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3758 || (cmp_code == NE_EXPR && integer_onep (op2)))
3759 is_logical_not = true;
3761 if (is_logical_not == false)
3762 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3763 /* Only for one-bit precision typed X the transformation
3764 !X -> ~X is valied. */
3765 else if (TYPE_PRECISION (type) == 1)
3766 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3767 /* Otherwise we use !X -> X ^ 1. */
3769 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3770 build_int_cst (type, 1));
3776 unsigned old_num_ops = gimple_num_ops (stmt);
3777 tree lhs = gimple_assign_lhs (stmt);
3778 tree new_rhs = fold_gimple_assign (gsi);
3780 && !useless_type_conversion_p (TREE_TYPE (lhs),
3781 TREE_TYPE (new_rhs)))
3782 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3785 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3787 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3794 changed |= gimple_fold_call (gsi, inplace);
3798 /* Fold *& in asm operands. */
3800 gasm *asm_stmt = as_a <gasm *> (stmt);
3802 const char **oconstraints;
3803 const char *constraint;
3804 bool allows_mem, allows_reg;
3806 noutputs = gimple_asm_noutputs (asm_stmt);
3807 oconstraints = XALLOCAVEC (const char *, noutputs);
3809 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3811 tree link = gimple_asm_output_op (asm_stmt, i);
3812 tree op = TREE_VALUE (link);
3814 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3815 if (REFERENCE_CLASS_P (op)
3816 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3818 TREE_VALUE (link) = op;
3822 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3824 tree link = gimple_asm_input_op (asm_stmt, i);
3825 tree op = TREE_VALUE (link);
3827 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3828 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3829 oconstraints, &allows_mem, &allows_reg);
3830 if (REFERENCE_CLASS_P (op)
3831 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3834 TREE_VALUE (link) = op;
3842 if (gimple_debug_bind_p (stmt))
3844 tree val = gimple_debug_bind_get_value (stmt);
3846 && REFERENCE_CLASS_P (val))
3848 tree tem = maybe_fold_reference (val, false);
3851 gimple_debug_bind_set_value (stmt, tem);
3856 && TREE_CODE (val) == ADDR_EXPR)
3858 tree ref = TREE_OPERAND (val, 0);
3859 tree tem = maybe_fold_reference (ref, false);
3862 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3863 gimple_debug_bind_set_value (stmt, tem);
3873 stmt = gsi_stmt (*gsi);
3875 /* Fold *& on the lhs. */
3876 if (gimple_has_lhs (stmt))
3878 tree lhs = gimple_get_lhs (stmt);
3879 if (lhs && REFERENCE_CLASS_P (lhs))
3881 tree new_lhs = maybe_fold_reference (lhs, true);
3884 gimple_set_lhs (stmt, new_lhs);
3893 /* Valueziation callback that ends up not following SSA edges. */
3896 no_follow_ssa_edges (tree)
3901 /* Valueization callback that ends up following single-use SSA edges only. */
3904 follow_single_use_edges (tree val)
3906 if (TREE_CODE (val) == SSA_NAME
3907 && !has_single_use (val))
3912 /* Fold the statement pointed to by GSI. In some cases, this function may
3913 replace the whole statement with a new one. Returns true iff folding
3915 The statement pointed to by GSI should be in valid gimple form but may
3916 be in unfolded state as resulting from for example constant propagation
3917 which can produce *&x = 0. */
3920 fold_stmt (gimple_stmt_iterator *gsi)
3922 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3926 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3928 return fold_stmt_1 (gsi, false, valueize);
3931 /* Perform the minimal folding on statement *GSI. Only operations like
3932 *&x created by constant propagation are handled. The statement cannot
3933 be replaced with a new one. Return true if the statement was
3934 changed, false otherwise.
3935 The statement *GSI should be in valid gimple form but may
3936 be in unfolded state as resulting from for example constant propagation
3937 which can produce *&x = 0. */
3940 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3942 gimple *stmt = gsi_stmt (*gsi);
3943 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3944 gcc_assert (gsi_stmt (*gsi) == stmt);
3948 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3949 if EXPR is null or we don't know how.
3950 If non-null, the result always has boolean type. */
3953 canonicalize_bool (tree expr, bool invert)
3959 if (integer_nonzerop (expr))
3960 return boolean_false_node;
3961 else if (integer_zerop (expr))
3962 return boolean_true_node;
3963 else if (TREE_CODE (expr) == SSA_NAME)
3964 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3965 build_int_cst (TREE_TYPE (expr), 0));
3966 else if (COMPARISON_CLASS_P (expr))
3967 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3969 TREE_OPERAND (expr, 0),
3970 TREE_OPERAND (expr, 1));
3976 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3978 if (integer_nonzerop (expr))
3979 return boolean_true_node;
3980 else if (integer_zerop (expr))
3981 return boolean_false_node;
3982 else if (TREE_CODE (expr) == SSA_NAME)
3983 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3984 build_int_cst (TREE_TYPE (expr), 0));
3985 else if (COMPARISON_CLASS_P (expr))
3986 return fold_build2 (TREE_CODE (expr),
3988 TREE_OPERAND (expr, 0),
3989 TREE_OPERAND (expr, 1));
3995 /* Check to see if a boolean expression EXPR is logically equivalent to the
3996 comparison (OP1 CODE OP2). Check for various identities involving
4000 same_bool_comparison_p (const_tree expr, enum tree_code code,
4001 const_tree op1, const_tree op2)
4005 /* The obvious case. */
4006 if (TREE_CODE (expr) == code
4007 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4008 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4011 /* Check for comparing (name, name != 0) and the case where expr
4012 is an SSA_NAME with a definition matching the comparison. */
4013 if (TREE_CODE (expr) == SSA_NAME
4014 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4016 if (operand_equal_p (expr, op1, 0))
4017 return ((code == NE_EXPR && integer_zerop (op2))
4018 || (code == EQ_EXPR && integer_nonzerop (op2)));
4019 s = SSA_NAME_DEF_STMT (expr);
4020 if (is_gimple_assign (s)
4021 && gimple_assign_rhs_code (s) == code
4022 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4023 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4027 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4028 of name is a comparison, recurse. */
4029 if (TREE_CODE (op1) == SSA_NAME
4030 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4032 s = SSA_NAME_DEF_STMT (op1);
4033 if (is_gimple_assign (s)
4034 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4036 enum tree_code c = gimple_assign_rhs_code (s);
4037 if ((c == NE_EXPR && integer_zerop (op2))
4038 || (c == EQ_EXPR && integer_nonzerop (op2)))
4039 return same_bool_comparison_p (expr, c,
4040 gimple_assign_rhs1 (s),
4041 gimple_assign_rhs2 (s));
4042 if ((c == EQ_EXPR && integer_zerop (op2))
4043 || (c == NE_EXPR && integer_nonzerop (op2)))
4044 return same_bool_comparison_p (expr,
4045 invert_tree_comparison (c, false),
4046 gimple_assign_rhs1 (s),
4047 gimple_assign_rhs2 (s));
4053 /* Check to see if two boolean expressions OP1 and OP2 are logically
4057 same_bool_result_p (const_tree op1, const_tree op2)
4059 /* Simple cases first. */
4060 if (operand_equal_p (op1, op2, 0))
4063 /* Check the cases where at least one of the operands is a comparison.
4064 These are a bit smarter than operand_equal_p in that they apply some
4065 identifies on SSA_NAMEs. */
4066 if (COMPARISON_CLASS_P (op2)
4067 && same_bool_comparison_p (op1, TREE_CODE (op2),
4068 TREE_OPERAND (op2, 0),
4069 TREE_OPERAND (op2, 1)))
4071 if (COMPARISON_CLASS_P (op1)
4072 && same_bool_comparison_p (op2, TREE_CODE (op1),
4073 TREE_OPERAND (op1, 0),
4074 TREE_OPERAND (op1, 1)))
4081 /* Forward declarations for some mutually recursive functions. */
4084 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4085 enum tree_code code2, tree op2a, tree op2b);
4087 and_var_with_comparison (tree var, bool invert,
4088 enum tree_code code2, tree op2a, tree op2b);
4090 and_var_with_comparison_1 (gimple *stmt,
4091 enum tree_code code2, tree op2a, tree op2b);
4093 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4094 enum tree_code code2, tree op2a, tree op2b);
4096 or_var_with_comparison (tree var, bool invert,
4097 enum tree_code code2, tree op2a, tree op2b);
4099 or_var_with_comparison_1 (gimple *stmt,
4100 enum tree_code code2, tree op2a, tree op2b);
4102 /* Helper function for and_comparisons_1: try to simplify the AND of the
4103 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4104 If INVERT is true, invert the value of the VAR before doing the AND.
4105 Return NULL_EXPR if we can't simplify this to a single expression. */
4108 and_var_with_comparison (tree var, bool invert,
4109 enum tree_code code2, tree op2a, tree op2b)
4112 gimple *stmt = SSA_NAME_DEF_STMT (var);
4114 /* We can only deal with variables whose definitions are assignments. */
4115 if (!is_gimple_assign (stmt))
4118 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4119 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4120 Then we only have to consider the simpler non-inverted cases. */
4122 t = or_var_with_comparison_1 (stmt,
4123 invert_tree_comparison (code2, false),
4126 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4127 return canonicalize_bool (t, invert);
4130 /* Try to simplify the AND of the ssa variable defined by the assignment
4131 STMT with the comparison specified by (OP2A CODE2 OP2B).
4132 Return NULL_EXPR if we can't simplify this to a single expression. */
4135 and_var_with_comparison_1 (gimple *stmt,
4136 enum tree_code code2, tree op2a, tree op2b)
4138 tree var = gimple_assign_lhs (stmt);
4139 tree true_test_var = NULL_TREE;
4140 tree false_test_var = NULL_TREE;
4141 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4143 /* Check for identities like (var AND (var == 0)) => false. */
4144 if (TREE_CODE (op2a) == SSA_NAME
4145 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4147 if ((code2 == NE_EXPR && integer_zerop (op2b))
4148 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4150 true_test_var = op2a;
4151 if (var == true_test_var)
4154 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4155 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4157 false_test_var = op2a;
4158 if (var == false_test_var)
4159 return boolean_false_node;
4163 /* If the definition is a comparison, recurse on it. */
4164 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4166 tree t = and_comparisons_1 (innercode,
4167 gimple_assign_rhs1 (stmt),
4168 gimple_assign_rhs2 (stmt),
4176 /* If the definition is an AND or OR expression, we may be able to
4177 simplify by reassociating. */
4178 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4179 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4181 tree inner1 = gimple_assign_rhs1 (stmt);
4182 tree inner2 = gimple_assign_rhs2 (stmt);
4185 tree partial = NULL_TREE;
4186 bool is_and = (innercode == BIT_AND_EXPR);
4188 /* Check for boolean identities that don't require recursive examination
4190 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4191 inner1 AND (inner1 OR inner2) => inner1
4192 !inner1 AND (inner1 AND inner2) => false
4193 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4194 Likewise for similar cases involving inner2. */
4195 if (inner1 == true_test_var)
4196 return (is_and ? var : inner1);
4197 else if (inner2 == true_test_var)
4198 return (is_and ? var : inner2);
4199 else if (inner1 == false_test_var)
4201 ? boolean_false_node
4202 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4203 else if (inner2 == false_test_var)
4205 ? boolean_false_node
4206 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4208 /* Next, redistribute/reassociate the AND across the inner tests.
4209 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4210 if (TREE_CODE (inner1) == SSA_NAME
4211 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4212 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4213 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4214 gimple_assign_rhs1 (s),
4215 gimple_assign_rhs2 (s),
4216 code2, op2a, op2b)))
4218 /* Handle the AND case, where we are reassociating:
4219 (inner1 AND inner2) AND (op2a code2 op2b)
4221 If the partial result t is a constant, we win. Otherwise
4222 continue on to try reassociating with the other inner test. */
4225 if (integer_onep (t))
4227 else if (integer_zerop (t))
4228 return boolean_false_node;
4231 /* Handle the OR case, where we are redistributing:
4232 (inner1 OR inner2) AND (op2a code2 op2b)
4233 => (t OR (inner2 AND (op2a code2 op2b))) */
4234 else if (integer_onep (t))
4235 return boolean_true_node;
4237 /* Save partial result for later. */
4241 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4242 if (TREE_CODE (inner2) == SSA_NAME
4243 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4244 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4245 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4246 gimple_assign_rhs1 (s),
4247 gimple_assign_rhs2 (s),
4248 code2, op2a, op2b)))
4250 /* Handle the AND case, where we are reassociating:
4251 (inner1 AND inner2) AND (op2a code2 op2b)
4252 => (inner1 AND t) */
4255 if (integer_onep (t))
4257 else if (integer_zerop (t))
4258 return boolean_false_node;
4259 /* If both are the same, we can apply the identity
4261 else if (partial && same_bool_result_p (t, partial))
4265 /* Handle the OR case. where we are redistributing:
4266 (inner1 OR inner2) AND (op2a code2 op2b)
4267 => (t OR (inner1 AND (op2a code2 op2b)))
4268 => (t OR partial) */
4271 if (integer_onep (t))
4272 return boolean_true_node;
4275 /* We already got a simplification for the other
4276 operand to the redistributed OR expression. The
4277 interesting case is when at least one is false.
4278 Or, if both are the same, we can apply the identity
4280 if (integer_zerop (partial))
4282 else if (integer_zerop (t))
4284 else if (same_bool_result_p (t, partial))
4293 /* Try to simplify the AND of two comparisons defined by
4294 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4295 If this can be done without constructing an intermediate value,
4296 return the resulting tree; otherwise NULL_TREE is returned.
4297 This function is deliberately asymmetric as it recurses on SSA_DEFs
4298 in the first comparison but not the second. */
4301 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4302 enum tree_code code2, tree op2a, tree op2b)
4304 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4306 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4307 if (operand_equal_p (op1a, op2a, 0)
4308 && operand_equal_p (op1b, op2b, 0))
4310 /* Result will be either NULL_TREE, or a combined comparison. */
4311 tree t = combine_comparisons (UNKNOWN_LOCATION,
4312 TRUTH_ANDIF_EXPR, code1, code2,
4313 truth_type, op1a, op1b);
4318 /* Likewise the swapped case of the above. */
4319 if (operand_equal_p (op1a, op2b, 0)
4320 && operand_equal_p (op1b, op2a, 0))
4322 /* Result will be either NULL_TREE, or a combined comparison. */
4323 tree t = combine_comparisons (UNKNOWN_LOCATION,
4324 TRUTH_ANDIF_EXPR, code1,
4325 swap_tree_comparison (code2),
4326 truth_type, op1a, op1b);
4331 /* If both comparisons are of the same value against constants, we might
4332 be able to merge them. */
4333 if (operand_equal_p (op1a, op2a, 0)
4334 && TREE_CODE (op1b) == INTEGER_CST
4335 && TREE_CODE (op2b) == INTEGER_CST)
4337 int cmp = tree_int_cst_compare (op1b, op2b);
4339 /* If we have (op1a == op1b), we should either be able to
4340 return that or FALSE, depending on whether the constant op1b
4341 also satisfies the other comparison against op2b. */
4342 if (code1 == EQ_EXPR)
4348 case EQ_EXPR: val = (cmp == 0); break;
4349 case NE_EXPR: val = (cmp != 0); break;
4350 case LT_EXPR: val = (cmp < 0); break;
4351 case GT_EXPR: val = (cmp > 0); break;
4352 case LE_EXPR: val = (cmp <= 0); break;
4353 case GE_EXPR: val = (cmp >= 0); break;
4354 default: done = false;
4359 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4361 return boolean_false_node;
4364 /* Likewise if the second comparison is an == comparison. */
4365 else if (code2 == EQ_EXPR)
4371 case EQ_EXPR: val = (cmp == 0); break;
4372 case NE_EXPR: val = (cmp != 0); break;
4373 case LT_EXPR: val = (cmp > 0); break;
4374 case GT_EXPR: val = (cmp < 0); break;
4375 case LE_EXPR: val = (cmp >= 0); break;
4376 case GE_EXPR: val = (cmp <= 0); break;
4377 default: done = false;
4382 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4384 return boolean_false_node;
4388 /* Same business with inequality tests. */
4389 else if (code1 == NE_EXPR)
4394 case EQ_EXPR: val = (cmp != 0); break;
4395 case NE_EXPR: val = (cmp == 0); break;
4396 case LT_EXPR: val = (cmp >= 0); break;
4397 case GT_EXPR: val = (cmp <= 0); break;
4398 case LE_EXPR: val = (cmp > 0); break;
4399 case GE_EXPR: val = (cmp < 0); break;
4404 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4406 else if (code2 == NE_EXPR)
4411 case EQ_EXPR: val = (cmp == 0); break;
4412 case NE_EXPR: val = (cmp != 0); break;
4413 case LT_EXPR: val = (cmp <= 0); break;
4414 case GT_EXPR: val = (cmp >= 0); break;
4415 case LE_EXPR: val = (cmp < 0); break;
4416 case GE_EXPR: val = (cmp > 0); break;
4421 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4424 /* Chose the more restrictive of two < or <= comparisons. */
4425 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4426 && (code2 == LT_EXPR || code2 == LE_EXPR))
4428 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4429 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4431 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4434 /* Likewise chose the more restrictive of two > or >= comparisons. */
4435 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4436 && (code2 == GT_EXPR || code2 == GE_EXPR))
4438 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4439 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4441 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4444 /* Check for singleton ranges. */
4446 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4447 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4448 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4450 /* Check for disjoint ranges. */
4452 && (code1 == LT_EXPR || code1 == LE_EXPR)
4453 && (code2 == GT_EXPR || code2 == GE_EXPR))
4454 return boolean_false_node;
4456 && (code1 == GT_EXPR || code1 == GE_EXPR)
4457 && (code2 == LT_EXPR || code2 == LE_EXPR))
4458 return boolean_false_node;
4461 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4462 NAME's definition is a truth value. See if there are any simplifications
4463 that can be done against the NAME's definition. */
4464 if (TREE_CODE (op1a) == SSA_NAME
4465 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4466 && (integer_zerop (op1b) || integer_onep (op1b)))
4468 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4469 || (code1 == NE_EXPR && integer_onep (op1b)));
4470 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4471 switch (gimple_code (stmt))
4474 /* Try to simplify by copy-propagating the definition. */
4475 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4478 /* If every argument to the PHI produces the same result when
4479 ANDed with the second comparison, we win.
4480 Do not do this unless the type is bool since we need a bool
4481 result here anyway. */
4482 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4484 tree result = NULL_TREE;
4486 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4488 tree arg = gimple_phi_arg_def (stmt, i);
4490 /* If this PHI has itself as an argument, ignore it.
4491 If all the other args produce the same result,
4493 if (arg == gimple_phi_result (stmt))
4495 else if (TREE_CODE (arg) == INTEGER_CST)
4497 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4500 result = boolean_false_node;
4501 else if (!integer_zerop (result))
4505 result = fold_build2 (code2, boolean_type_node,
4507 else if (!same_bool_comparison_p (result,
4511 else if (TREE_CODE (arg) == SSA_NAME
4512 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4515 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4516 /* In simple cases we can look through PHI nodes,
4517 but we have to be careful with loops.
4519 if (! dom_info_available_p (CDI_DOMINATORS)
4520 || gimple_bb (def_stmt) == gimple_bb (stmt)
4521 || dominated_by_p (CDI_DOMINATORS,
4522 gimple_bb (def_stmt),
4525 temp = and_var_with_comparison (arg, invert, code2,
4531 else if (!same_bool_result_p (result, temp))
4547 /* Try to simplify the AND of two comparisons, specified by
4548 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4549 If this can be simplified to a single expression (without requiring
4550 introducing more SSA variables to hold intermediate values),
4551 return the resulting tree. Otherwise return NULL_TREE.
4552 If the result expression is non-null, it has boolean type. */
4555 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4556 enum tree_code code2, tree op2a, tree op2b)
4558 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4562 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4565 /* Helper function for or_comparisons_1: try to simplify the OR of the
4566 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4567 If INVERT is true, invert the value of VAR before doing the OR.
4568 Return NULL_EXPR if we can't simplify this to a single expression. */
4571 or_var_with_comparison (tree var, bool invert,
4572 enum tree_code code2, tree op2a, tree op2b)
4575 gimple *stmt = SSA_NAME_DEF_STMT (var);
4577 /* We can only deal with variables whose definitions are assignments. */
4578 if (!is_gimple_assign (stmt))
4581 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4582 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4583 Then we only have to consider the simpler non-inverted cases. */
4585 t = and_var_with_comparison_1 (stmt,
4586 invert_tree_comparison (code2, false),
4589 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4590 return canonicalize_bool (t, invert);
4593 /* Try to simplify the OR of the ssa variable defined by the assignment
4594 STMT with the comparison specified by (OP2A CODE2 OP2B).
4595 Return NULL_EXPR if we can't simplify this to a single expression. */
4598 or_var_with_comparison_1 (gimple *stmt,
4599 enum tree_code code2, tree op2a, tree op2b)
4601 tree var = gimple_assign_lhs (stmt);
4602 tree true_test_var = NULL_TREE;
4603 tree false_test_var = NULL_TREE;
4604 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4606 /* Check for identities like (var OR (var != 0)) => true . */
4607 if (TREE_CODE (op2a) == SSA_NAME
4608 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4610 if ((code2 == NE_EXPR && integer_zerop (op2b))
4611 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4613 true_test_var = op2a;
4614 if (var == true_test_var)
4617 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4618 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4620 false_test_var = op2a;
4621 if (var == false_test_var)
4622 return boolean_true_node;
4626 /* If the definition is a comparison, recurse on it. */
4627 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4629 tree t = or_comparisons_1 (innercode,
4630 gimple_assign_rhs1 (stmt),
4631 gimple_assign_rhs2 (stmt),
4639 /* If the definition is an AND or OR expression, we may be able to
4640 simplify by reassociating. */
4641 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4642 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4644 tree inner1 = gimple_assign_rhs1 (stmt);
4645 tree inner2 = gimple_assign_rhs2 (stmt);
4648 tree partial = NULL_TREE;
4649 bool is_or = (innercode == BIT_IOR_EXPR);
4651 /* Check for boolean identities that don't require recursive examination
4653 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4654 inner1 OR (inner1 AND inner2) => inner1
4655 !inner1 OR (inner1 OR inner2) => true
4656 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4658 if (inner1 == true_test_var)
4659 return (is_or ? var : inner1);
4660 else if (inner2 == true_test_var)
4661 return (is_or ? var : inner2);
4662 else if (inner1 == false_test_var)
4665 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4666 else if (inner2 == false_test_var)
4669 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4671 /* Next, redistribute/reassociate the OR across the inner tests.
4672 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4673 if (TREE_CODE (inner1) == SSA_NAME
4674 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4675 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4676 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4677 gimple_assign_rhs1 (s),
4678 gimple_assign_rhs2 (s),
4679 code2, op2a, op2b)))
4681 /* Handle the OR case, where we are reassociating:
4682 (inner1 OR inner2) OR (op2a code2 op2b)
4684 If the partial result t is a constant, we win. Otherwise
4685 continue on to try reassociating with the other inner test. */
4688 if (integer_onep (t))
4689 return boolean_true_node;
4690 else if (integer_zerop (t))
4694 /* Handle the AND case, where we are redistributing:
4695 (inner1 AND inner2) OR (op2a code2 op2b)
4696 => (t AND (inner2 OR (op2a code op2b))) */
4697 else if (integer_zerop (t))
4698 return boolean_false_node;
4700 /* Save partial result for later. */
4704 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4705 if (TREE_CODE (inner2) == SSA_NAME
4706 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4707 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4708 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4709 gimple_assign_rhs1 (s),
4710 gimple_assign_rhs2 (s),
4711 code2, op2a, op2b)))
4713 /* Handle the OR case, where we are reassociating:
4714 (inner1 OR inner2) OR (op2a code2 op2b)
4716 => (t OR partial) */
4719 if (integer_zerop (t))
4721 else if (integer_onep (t))
4722 return boolean_true_node;
4723 /* If both are the same, we can apply the identity
4725 else if (partial && same_bool_result_p (t, partial))
4729 /* Handle the AND case, where we are redistributing:
4730 (inner1 AND inner2) OR (op2a code2 op2b)
4731 => (t AND (inner1 OR (op2a code2 op2b)))
4732 => (t AND partial) */
4735 if (integer_zerop (t))
4736 return boolean_false_node;
4739 /* We already got a simplification for the other
4740 operand to the redistributed AND expression. The
4741 interesting case is when at least one is true.
4742 Or, if both are the same, we can apply the identity
4744 if (integer_onep (partial))
4746 else if (integer_onep (t))
4748 else if (same_bool_result_p (t, partial))
4757 /* Try to simplify the OR of two comparisons defined by
4758 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4759 If this can be done without constructing an intermediate value,
4760 return the resulting tree; otherwise NULL_TREE is returned.
4761 This function is deliberately asymmetric as it recurses on SSA_DEFs
4762 in the first comparison but not the second. */
4765 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4766 enum tree_code code2, tree op2a, tree op2b)
4768 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4770 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4771 if (operand_equal_p (op1a, op2a, 0)
4772 && operand_equal_p (op1b, op2b, 0))
4774 /* Result will be either NULL_TREE, or a combined comparison. */
4775 tree t = combine_comparisons (UNKNOWN_LOCATION,
4776 TRUTH_ORIF_EXPR, code1, code2,
4777 truth_type, op1a, op1b);
4782 /* Likewise the swapped case of the above. */
4783 if (operand_equal_p (op1a, op2b, 0)
4784 && operand_equal_p (op1b, op2a, 0))
4786 /* Result will be either NULL_TREE, or a combined comparison. */
4787 tree t = combine_comparisons (UNKNOWN_LOCATION,
4788 TRUTH_ORIF_EXPR, code1,
4789 swap_tree_comparison (code2),
4790 truth_type, op1a, op1b);
4795 /* If both comparisons are of the same value against constants, we might
4796 be able to merge them. */
4797 if (operand_equal_p (op1a, op2a, 0)
4798 && TREE_CODE (op1b) == INTEGER_CST
4799 && TREE_CODE (op2b) == INTEGER_CST)
4801 int cmp = tree_int_cst_compare (op1b, op2b);
4803 /* If we have (op1a != op1b), we should either be able to
4804 return that or TRUE, depending on whether the constant op1b
4805 also satisfies the other comparison against op2b. */
4806 if (code1 == NE_EXPR)
4812 case EQ_EXPR: val = (cmp == 0); break;
4813 case NE_EXPR: val = (cmp != 0); break;
4814 case LT_EXPR: val = (cmp < 0); break;
4815 case GT_EXPR: val = (cmp > 0); break;
4816 case LE_EXPR: val = (cmp <= 0); break;
4817 case GE_EXPR: val = (cmp >= 0); break;
4818 default: done = false;
4823 return boolean_true_node;
4825 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4828 /* Likewise if the second comparison is a != comparison. */
4829 else if (code2 == NE_EXPR)
4835 case EQ_EXPR: val = (cmp == 0); break;
4836 case NE_EXPR: val = (cmp != 0); break;
4837 case LT_EXPR: val = (cmp > 0); break;
4838 case GT_EXPR: val = (cmp < 0); break;
4839 case LE_EXPR: val = (cmp >= 0); break;
4840 case GE_EXPR: val = (cmp <= 0); break;
4841 default: done = false;
4846 return boolean_true_node;
4848 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4852 /* See if an equality test is redundant with the other comparison. */
4853 else if (code1 == EQ_EXPR)
4858 case EQ_EXPR: val = (cmp == 0); break;
4859 case NE_EXPR: val = (cmp != 0); break;
4860 case LT_EXPR: val = (cmp < 0); break;
4861 case GT_EXPR: val = (cmp > 0); break;
4862 case LE_EXPR: val = (cmp <= 0); break;
4863 case GE_EXPR: val = (cmp >= 0); break;
4868 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4870 else if (code2 == EQ_EXPR)
4875 case EQ_EXPR: val = (cmp == 0); break;
4876 case NE_EXPR: val = (cmp != 0); break;
4877 case LT_EXPR: val = (cmp > 0); break;
4878 case GT_EXPR: val = (cmp < 0); break;
4879 case LE_EXPR: val = (cmp >= 0); break;
4880 case GE_EXPR: val = (cmp <= 0); break;
4885 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4888 /* Chose the less restrictive of two < or <= comparisons. */
4889 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4890 && (code2 == LT_EXPR || code2 == LE_EXPR))
4892 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4893 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4895 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4898 /* Likewise chose the less restrictive of two > or >= comparisons. */
4899 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4900 && (code2 == GT_EXPR || code2 == GE_EXPR))
4902 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4903 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4905 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4908 /* Check for singleton ranges. */
4910 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4911 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4912 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4914 /* Check for less/greater pairs that don't restrict the range at all. */
4916 && (code1 == LT_EXPR || code1 == LE_EXPR)
4917 && (code2 == GT_EXPR || code2 == GE_EXPR))
4918 return boolean_true_node;
4920 && (code1 == GT_EXPR || code1 == GE_EXPR)
4921 && (code2 == LT_EXPR || code2 == LE_EXPR))
4922 return boolean_true_node;
4925 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4926 NAME's definition is a truth value. See if there are any simplifications
4927 that can be done against the NAME's definition. */
4928 if (TREE_CODE (op1a) == SSA_NAME
4929 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4930 && (integer_zerop (op1b) || integer_onep (op1b)))
4932 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4933 || (code1 == NE_EXPR && integer_onep (op1b)));
4934 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4935 switch (gimple_code (stmt))
4938 /* Try to simplify by copy-propagating the definition. */
4939 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4942 /* If every argument to the PHI produces the same result when
4943 ORed with the second comparison, we win.
4944 Do not do this unless the type is bool since we need a bool
4945 result here anyway. */
4946 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4948 tree result = NULL_TREE;
4950 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4952 tree arg = gimple_phi_arg_def (stmt, i);
4954 /* If this PHI has itself as an argument, ignore it.
4955 If all the other args produce the same result,
4957 if (arg == gimple_phi_result (stmt))
4959 else if (TREE_CODE (arg) == INTEGER_CST)
4961 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4964 result = boolean_true_node;
4965 else if (!integer_onep (result))
4969 result = fold_build2 (code2, boolean_type_node,
4971 else if (!same_bool_comparison_p (result,
4975 else if (TREE_CODE (arg) == SSA_NAME
4976 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4979 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4980 /* In simple cases we can look through PHI nodes,
4981 but we have to be careful with loops.
4983 if (! dom_info_available_p (CDI_DOMINATORS)
4984 || gimple_bb (def_stmt) == gimple_bb (stmt)
4985 || dominated_by_p (CDI_DOMINATORS,
4986 gimple_bb (def_stmt),
4989 temp = or_var_with_comparison (arg, invert, code2,
4995 else if (!same_bool_result_p (result, temp))
5011 /* Try to simplify the OR of two comparisons, specified by
5012 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5013 If this can be simplified to a single expression (without requiring
5014 introducing more SSA variables to hold intermediate values),
5015 return the resulting tree. Otherwise return NULL_TREE.
5016 If the result expression is non-null, it has boolean type. */
5019 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5020 enum tree_code code2, tree op2a, tree op2b)
5022 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5026 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5030 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5032 Either NULL_TREE, a simplified but non-constant or a constant
5035 ??? This should go into a gimple-fold-inline.h file to be eventually
5036 privatized with the single valueize function used in the various TUs
5037 to avoid the indirect function call overhead. */
5040 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5041 tree (*gvalueize) (tree))
5045 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5046 edges if there are intermediate VARYING defs. For this reason
5047 do not follow SSA edges here even though SCCVN can technically
5048 just deal fine with that. */
5049 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5051 tree res = NULL_TREE;
5052 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5054 else if (mprts_hook)
5055 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5058 if (dump_file && dump_flags & TDF_DETAILS)
5060 fprintf (dump_file, "Match-and-simplified ");
5061 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5062 fprintf (dump_file, " to ");
5063 print_generic_expr (dump_file, res, 0);
5064 fprintf (dump_file, "\n");
5070 location_t loc = gimple_location (stmt);
5071 switch (gimple_code (stmt))
5075 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5077 switch (get_gimple_rhs_class (subcode))
5079 case GIMPLE_SINGLE_RHS:
5081 tree rhs = gimple_assign_rhs1 (stmt);
5082 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5084 if (TREE_CODE (rhs) == SSA_NAME)
5086 /* If the RHS is an SSA_NAME, return its known constant value,
5088 return (*valueize) (rhs);
5090 /* Handle propagating invariant addresses into address
5092 else if (TREE_CODE (rhs) == ADDR_EXPR
5093 && !is_gimple_min_invariant (rhs))
5095 HOST_WIDE_INT offset = 0;
5097 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5101 && (CONSTANT_CLASS_P (base)
5102 || decl_address_invariant_p (base)))
5103 return build_invariant_address (TREE_TYPE (rhs),
5106 else if (TREE_CODE (rhs) == CONSTRUCTOR
5107 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5108 && (CONSTRUCTOR_NELTS (rhs)
5109 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5114 vec = XALLOCAVEC (tree,
5115 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5116 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5118 val = (*valueize) (val);
5119 if (TREE_CODE (val) == INTEGER_CST
5120 || TREE_CODE (val) == REAL_CST
5121 || TREE_CODE (val) == FIXED_CST)
5127 return build_vector (TREE_TYPE (rhs), vec);
5129 if (subcode == OBJ_TYPE_REF)
5131 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5132 /* If callee is constant, we can fold away the wrapper. */
5133 if (is_gimple_min_invariant (val))
5137 if (kind == tcc_reference)
5139 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5140 || TREE_CODE (rhs) == REALPART_EXPR
5141 || TREE_CODE (rhs) == IMAGPART_EXPR)
5142 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5144 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5145 return fold_unary_loc (EXPR_LOCATION (rhs),
5147 TREE_TYPE (rhs), val);
5149 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5150 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5152 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5153 return fold_ternary_loc (EXPR_LOCATION (rhs),
5155 TREE_TYPE (rhs), val,
5156 TREE_OPERAND (rhs, 1),
5157 TREE_OPERAND (rhs, 2));
5159 else if (TREE_CODE (rhs) == MEM_REF
5160 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5162 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5163 if (TREE_CODE (val) == ADDR_EXPR
5164 && is_gimple_min_invariant (val))
5166 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5168 TREE_OPERAND (rhs, 1));
5173 return fold_const_aggregate_ref_1 (rhs, valueize);
5175 else if (kind == tcc_declaration)
5176 return get_symbol_constant_value (rhs);
5180 case GIMPLE_UNARY_RHS:
5183 case GIMPLE_BINARY_RHS:
5184 /* Translate &x + CST into an invariant form suitable for
5185 further propagation. */
5186 if (subcode == POINTER_PLUS_EXPR)
5188 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5189 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5190 if (TREE_CODE (op0) == ADDR_EXPR
5191 && TREE_CODE (op1) == INTEGER_CST)
5193 tree off = fold_convert (ptr_type_node, op1);
5194 return build_fold_addr_expr_loc
5196 fold_build2 (MEM_REF,
5197 TREE_TYPE (TREE_TYPE (op0)),
5198 unshare_expr (op0), off));
5201 /* Canonicalize bool != 0 and bool == 0 appearing after
5202 valueization. While gimple_simplify handles this
5203 it can get confused by the ~X == 1 -> X == 0 transform
5204 which we cant reduce to a SSA name or a constant
5205 (and we have no way to tell gimple_simplify to not
5206 consider those transforms in the first place). */
5207 else if (subcode == EQ_EXPR
5208 || subcode == NE_EXPR)
5210 tree lhs = gimple_assign_lhs (stmt);
5211 tree op0 = gimple_assign_rhs1 (stmt);
5212 if (useless_type_conversion_p (TREE_TYPE (lhs),
5215 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5216 op0 = (*valueize) (op0);
5217 if (TREE_CODE (op0) == INTEGER_CST)
5218 std::swap (op0, op1);
5219 if (TREE_CODE (op1) == INTEGER_CST
5220 && ((subcode == NE_EXPR && integer_zerop (op1))
5221 || (subcode == EQ_EXPR && integer_onep (op1))))
5227 case GIMPLE_TERNARY_RHS:
5229 /* Handle ternary operators that can appear in GIMPLE form. */
5230 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5231 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5232 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5233 return fold_ternary_loc (loc, subcode,
5234 gimple_expr_type (stmt), op0, op1, op2);
5245 gcall *call_stmt = as_a <gcall *> (stmt);
5247 if (gimple_call_internal_p (stmt))
5249 enum tree_code subcode = ERROR_MARK;
5250 switch (gimple_call_internal_fn (stmt))
5252 case IFN_UBSAN_CHECK_ADD:
5253 subcode = PLUS_EXPR;
5255 case IFN_UBSAN_CHECK_SUB:
5256 subcode = MINUS_EXPR;
5258 case IFN_UBSAN_CHECK_MUL:
5259 subcode = MULT_EXPR;
5264 tree arg0 = gimple_call_arg (stmt, 0);
5265 tree arg1 = gimple_call_arg (stmt, 1);
5266 tree op0 = (*valueize) (arg0);
5267 tree op1 = (*valueize) (arg1);
5269 if (TREE_CODE (op0) != INTEGER_CST
5270 || TREE_CODE (op1) != INTEGER_CST)
5275 /* x * 0 = 0 * x = 0 without overflow. */
5276 if (integer_zerop (op0) || integer_zerop (op1))
5277 return build_zero_cst (TREE_TYPE (arg0));
5280 /* y - y = 0 without overflow. */
5281 if (operand_equal_p (op0, op1, 0))
5282 return build_zero_cst (TREE_TYPE (arg0));
5289 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5291 && TREE_CODE (res) == INTEGER_CST
5292 && !TREE_OVERFLOW (res))
5297 fn = (*valueize) (gimple_call_fn (stmt));
5298 if (TREE_CODE (fn) == ADDR_EXPR
5299 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5300 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5301 && gimple_builtin_call_types_compatible_p (stmt,
5302 TREE_OPERAND (fn, 0)))
5304 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5307 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5308 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5309 retval = fold_builtin_call_array (loc,
5310 gimple_call_return_type (call_stmt),
5311 fn, gimple_call_num_args (stmt), args);
5314 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5315 STRIP_NOPS (retval);
5316 retval = fold_convert (gimple_call_return_type (call_stmt),
5329 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5330 Returns NULL_TREE if folding to a constant is not possible, otherwise
5331 returns a constant according to is_gimple_min_invariant. */
5334 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5336 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5337 if (res && is_gimple_min_invariant (res))
5343 /* The following set of functions are supposed to fold references using
5344 their constant initializers. */
5346 /* See if we can find constructor defining value of BASE.
5347 When we know the consructor with constant offset (such as
5348 base is array[40] and we do know constructor of array), then
5349 BIT_OFFSET is adjusted accordingly.
5351 As a special case, return error_mark_node when constructor
5352 is not explicitly available, but it is known to be zero
5353 such as 'static const int a;'. */
5355 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5356 tree (*valueize)(tree))
5358 HOST_WIDE_INT bit_offset2, size, max_size;
5361 if (TREE_CODE (base) == MEM_REF)
5363 if (!integer_zerop (TREE_OPERAND (base, 1)))
5365 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5367 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5372 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5373 base = valueize (TREE_OPERAND (base, 0));
5374 if (!base || TREE_CODE (base) != ADDR_EXPR)
5376 base = TREE_OPERAND (base, 0);
5379 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5380 DECL_INITIAL. If BASE is a nested reference into another
5381 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5382 the inner reference. */
5383 switch (TREE_CODE (base))
5388 tree init = ctor_for_folding (base);
5390 /* Our semantic is exact opposite of ctor_for_folding;
5391 NULL means unknown, while error_mark_node is 0. */
5392 if (init == error_mark_node)
5395 return error_mark_node;
5401 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5403 if (max_size == -1 || size != max_size)
5405 *bit_offset += bit_offset2;
5406 return get_base_constructor (base, bit_offset, valueize);
5417 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5418 SIZE to the memory at bit OFFSET. */
5421 fold_array_ctor_reference (tree type, tree ctor,
5422 unsigned HOST_WIDE_INT offset,
5423 unsigned HOST_WIDE_INT size,
5426 offset_int low_bound;
5427 offset_int elt_size;
5428 offset_int access_index;
5429 tree domain_type = NULL_TREE;
5430 HOST_WIDE_INT inner_offset;
5432 /* Compute low bound and elt size. */
5433 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5434 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5435 if (domain_type && TYPE_MIN_VALUE (domain_type))
5437 /* Static constructors for variably sized objects makes no sense. */
5438 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
5440 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5444 /* Static constructors for variably sized objects makes no sense. */
5445 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
5447 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5449 /* We can handle only constantly sized accesses that are known to not
5450 be larger than size of array element. */
5451 if (!TYPE_SIZE_UNIT (type)
5452 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5453 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5457 /* Compute the array index we look for. */
5458 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5460 access_index += low_bound;
5462 /* And offset within the access. */
5463 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5465 /* See if the array field is large enough to span whole access. We do not
5466 care to fold accesses spanning multiple array indexes. */
5467 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5469 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5470 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5472 /* When memory is not explicitely mentioned in constructor,
5473 it is 0 (or out of range). */
5474 return build_zero_cst (type);
5477 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5478 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5481 fold_nonarray_ctor_reference (tree type, tree ctor,
5482 unsigned HOST_WIDE_INT offset,
5483 unsigned HOST_WIDE_INT size,
5486 unsigned HOST_WIDE_INT cnt;
5489 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5492 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5493 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5494 tree field_size = DECL_SIZE (cfield);
5495 offset_int bitoffset;
5496 offset_int bitoffset_end, access_end;
5498 /* Variable sized objects in static constructors makes no sense,
5499 but field_size can be NULL for flexible array members. */
5500 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5501 && TREE_CODE (byte_offset) == INTEGER_CST
5502 && (field_size != NULL_TREE
5503 ? TREE_CODE (field_size) == INTEGER_CST
5504 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5506 /* Compute bit offset of the field. */
5507 bitoffset = (wi::to_offset (field_offset)
5508 + wi::lshift (wi::to_offset (byte_offset),
5509 LOG2_BITS_PER_UNIT));
5510 /* Compute bit offset where the field ends. */
5511 if (field_size != NULL_TREE)
5512 bitoffset_end = bitoffset + wi::to_offset (field_size);
5516 access_end = offset_int (offset) + size;
5518 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5519 [BITOFFSET, BITOFFSET_END)? */
5520 if (wi::cmps (access_end, bitoffset) > 0
5521 && (field_size == NULL_TREE
5522 || wi::lts_p (offset, bitoffset_end)))
5524 offset_int inner_offset = offset_int (offset) - bitoffset;
5525 /* We do have overlap. Now see if field is large enough to
5526 cover the access. Give up for accesses spanning multiple
5528 if (wi::cmps (access_end, bitoffset_end) > 0)
5530 if (wi::lts_p (offset, bitoffset))
5532 return fold_ctor_reference (type, cval,
5533 inner_offset.to_uhwi (), size,
5537 /* When memory is not explicitely mentioned in constructor, it is 0. */
5538 return build_zero_cst (type);
5541 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5542 to the memory at bit OFFSET. */
5545 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5546 unsigned HOST_WIDE_INT size, tree from_decl)
5550 /* We found the field with exact match. */
5551 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5553 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5555 /* We are at the end of walk, see if we can view convert the
5557 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5558 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5559 && !compare_tree_int (TYPE_SIZE (type), size)
5560 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5562 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5563 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5565 STRIP_USELESS_TYPE_CONVERSION (ret);
5568 /* For constants and byte-aligned/sized reads try to go through
5569 native_encode/interpret. */
5570 if (CONSTANT_CLASS_P (ctor)
5571 && BITS_PER_UNIT == 8
5572 && offset % BITS_PER_UNIT == 0
5573 && size % BITS_PER_UNIT == 0
5574 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5576 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5577 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5578 offset / BITS_PER_UNIT);
5580 return native_interpret_expr (type, buf, len);
5582 if (TREE_CODE (ctor) == CONSTRUCTOR)
5585 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5586 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5587 return fold_array_ctor_reference (type, ctor, offset, size,
5590 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5597 /* Return the tree representing the element referenced by T if T is an
5598 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5599 names using VALUEIZE. Return NULL_TREE otherwise. */
5602 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5604 tree ctor, idx, base;
5605 HOST_WIDE_INT offset, size, max_size;
5609 if (TREE_THIS_VOLATILE (t))
5613 return get_symbol_constant_value (t);
5615 tem = fold_read_from_constant_string (t);
5619 switch (TREE_CODE (t))
5622 case ARRAY_RANGE_REF:
5623 /* Constant indexes are handled well by get_base_constructor.
5624 Only special case variable offsets.
5625 FIXME: This code can't handle nested references with variable indexes
5626 (they will be handled only by iteration of ccp). Perhaps we can bring
5627 get_ref_base_and_extent here and make it use a valueize callback. */
5628 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5630 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5631 && TREE_CODE (idx) == INTEGER_CST)
5633 tree low_bound, unit_size;
5635 /* If the resulting bit-offset is constant, track it. */
5636 if ((low_bound = array_ref_low_bound (t),
5637 TREE_CODE (low_bound) == INTEGER_CST)
5638 && (unit_size = array_ref_element_size (t),
5639 tree_fits_uhwi_p (unit_size)))
5642 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5643 TYPE_PRECISION (TREE_TYPE (idx)));
5645 if (wi::fits_shwi_p (woffset))
5647 offset = woffset.to_shwi ();
5648 /* TODO: This code seems wrong, multiply then check
5649 to see if it fits. */
5650 offset *= tree_to_uhwi (unit_size);
5651 offset *= BITS_PER_UNIT;
5653 base = TREE_OPERAND (t, 0);
5654 ctor = get_base_constructor (base, &offset, valueize);
5655 /* Empty constructor. Always fold to 0. */
5656 if (ctor == error_mark_node)
5657 return build_zero_cst (TREE_TYPE (t));
5658 /* Out of bound array access. Value is undefined,
5662 /* We can not determine ctor. */
5665 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5666 tree_to_uhwi (unit_size)
5676 case TARGET_MEM_REF:
5678 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
5679 ctor = get_base_constructor (base, &offset, valueize);
5681 /* Empty constructor. Always fold to 0. */
5682 if (ctor == error_mark_node)
5683 return build_zero_cst (TREE_TYPE (t));
5684 /* We do not know precise address. */
5685 if (max_size == -1 || max_size != size)
5687 /* We can not determine ctor. */
5691 /* Out of bound array access. Value is undefined, but don't fold. */
5695 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5701 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5702 if (c && TREE_CODE (c) == COMPLEX_CST)
5703 return fold_build1_loc (EXPR_LOCATION (t),
5704 TREE_CODE (t), TREE_TYPE (t), c);
5716 fold_const_aggregate_ref (tree t)
5718 return fold_const_aggregate_ref_1 (t, NULL);
5721 /* Lookup virtual method with index TOKEN in a virtual table V
5723 Set CAN_REFER if non-NULL to false if method
5724 is not referable or if the virtual table is ill-formed (such as rewriten
5725 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5728 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5730 unsigned HOST_WIDE_INT offset,
5733 tree vtable = v, init, fn;
5734 unsigned HOST_WIDE_INT size;
5735 unsigned HOST_WIDE_INT elt_size, access_index;
5741 /* First of all double check we have virtual table. */
5742 if (TREE_CODE (v) != VAR_DECL
5743 || !DECL_VIRTUAL_P (v))
5745 /* Pass down that we lost track of the target. */
5751 init = ctor_for_folding (v);
5753 /* The virtual tables should always be born with constructors
5754 and we always should assume that they are avaialble for
5755 folding. At the moment we do not stream them in all cases,
5756 but it should never happen that ctor seem unreachable. */
5758 if (init == error_mark_node)
5760 gcc_assert (in_lto_p);
5761 /* Pass down that we lost track of the target. */
5766 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5767 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5768 offset *= BITS_PER_UNIT;
5769 offset += token * size;
5771 /* Lookup the value in the constructor that is assumed to be array.
5772 This is equivalent to
5773 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5774 offset, size, NULL);
5775 but in a constant time. We expect that frontend produced a simple
5776 array without indexed initializers. */
5778 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5779 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5780 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5781 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5783 access_index = offset / BITS_PER_UNIT / elt_size;
5784 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5786 /* This code makes an assumption that there are no
5787 indexed fileds produced by C++ FE, so we can directly index the array. */
5788 if (access_index < CONSTRUCTOR_NELTS (init))
5790 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5791 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5797 /* For type inconsistent program we may end up looking up virtual method
5798 in virtual table that does not contain TOKEN entries. We may overrun
5799 the virtual table and pick up a constant or RTTI info pointer.
5800 In any case the call is undefined. */
5802 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5803 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5804 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5807 fn = TREE_OPERAND (fn, 0);
5809 /* When cgraph node is missing and function is not public, we cannot
5810 devirtualize. This can happen in WHOPR when the actual method
5811 ends up in other partition, because we found devirtualization
5812 possibility too late. */
5813 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5824 /* Make sure we create a cgraph node for functions we'll reference.
5825 They can be non-existent if the reference comes from an entry
5826 of an external vtable for example. */
5827 cgraph_node::get_create (fn);
5832 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5833 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5834 KNOWN_BINFO carries the binfo describing the true type of
5835 OBJ_TYPE_REF_OBJECT(REF).
5836 Set CAN_REFER if non-NULL to false if method
5837 is not referable or if the virtual table is ill-formed (such as rewriten
5838 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5841 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5844 unsigned HOST_WIDE_INT offset;
5847 v = BINFO_VTABLE (known_binfo);
5848 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5852 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5858 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5861 /* Given a pointer value OP0, return a simplified version of an
5862 indirection through OP0, or NULL_TREE if no simplification is
5863 possible. Note that the resulting type may be different from
5864 the type pointed to in the sense that it is still compatible
5865 from the langhooks point of view. */
5868 gimple_fold_indirect_ref (tree t)
5870 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5875 subtype = TREE_TYPE (sub);
5876 if (!POINTER_TYPE_P (subtype))
5879 if (TREE_CODE (sub) == ADDR_EXPR)
5881 tree op = TREE_OPERAND (sub, 0);
5882 tree optype = TREE_TYPE (op);
5884 if (useless_type_conversion_p (type, optype))
5887 /* *(foo *)&fooarray => fooarray[0] */
5888 if (TREE_CODE (optype) == ARRAY_TYPE
5889 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5890 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5892 tree type_domain = TYPE_DOMAIN (optype);
5893 tree min_val = size_zero_node;
5894 if (type_domain && TYPE_MIN_VALUE (type_domain))
5895 min_val = TYPE_MIN_VALUE (type_domain);
5896 if (TREE_CODE (min_val) == INTEGER_CST)
5897 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5899 /* *(foo *)&complexfoo => __real__ complexfoo */
5900 else if (TREE_CODE (optype) == COMPLEX_TYPE
5901 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5902 return fold_build1 (REALPART_EXPR, type, op);
5903 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5904 else if (TREE_CODE (optype) == VECTOR_TYPE
5905 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5907 tree part_width = TYPE_SIZE (type);
5908 tree index = bitsize_int (0);
5909 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5913 /* *(p + CST) -> ... */
5914 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5915 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5917 tree addr = TREE_OPERAND (sub, 0);
5918 tree off = TREE_OPERAND (sub, 1);
5922 addrtype = TREE_TYPE (addr);
5924 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5925 if (TREE_CODE (addr) == ADDR_EXPR
5926 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5927 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5928 && tree_fits_uhwi_p (off))
5930 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5931 tree part_width = TYPE_SIZE (type);
5932 unsigned HOST_WIDE_INT part_widthi
5933 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5934 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5935 tree index = bitsize_int (indexi);
5936 if (offset / part_widthi
5937 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5938 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5942 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5943 if (TREE_CODE (addr) == ADDR_EXPR
5944 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5945 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5947 tree size = TYPE_SIZE_UNIT (type);
5948 if (tree_int_cst_equal (size, off))
5949 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5952 /* *(p + CST) -> MEM_REF <p, CST>. */
5953 if (TREE_CODE (addr) != ADDR_EXPR
5954 || DECL_P (TREE_OPERAND (addr, 0)))
5955 return fold_build2 (MEM_REF, type,
5957 wide_int_to_tree (ptype, off));
5960 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5961 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5962 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5963 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5966 tree min_val = size_zero_node;
5968 sub = gimple_fold_indirect_ref (sub);
5970 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5971 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5972 if (type_domain && TYPE_MIN_VALUE (type_domain))
5973 min_val = TYPE_MIN_VALUE (type_domain);
5974 if (TREE_CODE (min_val) == INTEGER_CST)
5975 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5981 /* Return true if CODE is an operation that when operating on signed
5982 integer types involves undefined behavior on overflow and the
5983 operation can be expressed with unsigned arithmetic. */
5986 arith_code_with_undefined_signed_overflow (tree_code code)
5994 case POINTER_PLUS_EXPR:
6001 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6002 operation that can be transformed to unsigned arithmetic by converting
6003 its operand, carrying out the operation in the corresponding unsigned
6004 type and converting the result back to the original type.
6006 Returns a sequence of statements that replace STMT and also contain
6007 a modified form of STMT itself. */
6010 rewrite_to_defined_overflow (gimple *stmt)
6012 if (dump_file && (dump_flags & TDF_DETAILS))
6014 fprintf (dump_file, "rewriting stmt with undefined signed "
6016 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6019 tree lhs = gimple_assign_lhs (stmt);
6020 tree type = unsigned_type_for (TREE_TYPE (lhs));
6021 gimple_seq stmts = NULL;
6022 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6024 tree op = gimple_op (stmt, i);
6025 op = gimple_convert (&stmts, type, op);
6026 gimple_set_op (stmt, i, op);
6028 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6029 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6030 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6031 gimple_seq_add_stmt (&stmts, stmt);
6032 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6033 gimple_seq_add_stmt (&stmts, cvt);
6039 /* The valueization hook we use for the gimple_build API simplification.
6040 This makes us match fold_buildN behavior by only combining with
6041 statements in the sequence(s) we are currently building. */
6044 gimple_build_valueize (tree op)
6046 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6051 /* Build the expression CODE OP0 of type TYPE with location LOC,
6052 simplifying it first if possible. Returns the built
6053 expression value and appends statements possibly defining it
6057 gimple_build (gimple_seq *seq, location_t loc,
6058 enum tree_code code, tree type, tree op0)
6060 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6063 if (gimple_in_ssa_p (cfun))
6064 res = make_ssa_name (type);
6066 res = create_tmp_reg (type);
6068 if (code == REALPART_EXPR
6069 || code == IMAGPART_EXPR
6070 || code == VIEW_CONVERT_EXPR)
6071 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6073 stmt = gimple_build_assign (res, code, op0);
6074 gimple_set_location (stmt, loc);
6075 gimple_seq_add_stmt_without_update (seq, stmt);
6080 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6081 simplifying it first if possible. Returns the built
6082 expression value and appends statements possibly defining it
6086 gimple_build (gimple_seq *seq, location_t loc,
6087 enum tree_code code, tree type, tree op0, tree op1)
6089 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6092 if (gimple_in_ssa_p (cfun))
6093 res = make_ssa_name (type);
6095 res = create_tmp_reg (type);
6096 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6097 gimple_set_location (stmt, loc);
6098 gimple_seq_add_stmt_without_update (seq, stmt);
6103 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6104 simplifying it first if possible. Returns the built
6105 expression value and appends statements possibly defining it
6109 gimple_build (gimple_seq *seq, location_t loc,
6110 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6112 tree res = gimple_simplify (code, type, op0, op1, op2,
6113 seq, gimple_build_valueize);
6116 if (gimple_in_ssa_p (cfun))
6117 res = make_ssa_name (type);
6119 res = create_tmp_reg (type);
6121 if (code == BIT_FIELD_REF)
6122 stmt = gimple_build_assign (res, code,
6123 build3 (code, type, op0, op1, op2));
6125 stmt = gimple_build_assign (res, code, op0, op1, op2);
6126 gimple_set_location (stmt, loc);
6127 gimple_seq_add_stmt_without_update (seq, stmt);
6132 /* Build the call FN (ARG0) with a result of type TYPE
6133 (or no result if TYPE is void) with location LOC,
6134 simplifying it first if possible. Returns the built
6135 expression value (or NULL_TREE if TYPE is void) and appends
6136 statements possibly defining it to SEQ. */
6139 gimple_build (gimple_seq *seq, location_t loc,
6140 enum built_in_function fn, tree type, tree arg0)
6142 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6145 tree decl = builtin_decl_implicit (fn);
6146 gimple *stmt = gimple_build_call (decl, 1, arg0);
6147 if (!VOID_TYPE_P (type))
6149 if (gimple_in_ssa_p (cfun))
6150 res = make_ssa_name (type);
6152 res = create_tmp_reg (type);
6153 gimple_call_set_lhs (stmt, res);
6155 gimple_set_location (stmt, loc);
6156 gimple_seq_add_stmt_without_update (seq, stmt);
6161 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6162 (or no result if TYPE is void) with location LOC,
6163 simplifying it first if possible. Returns the built
6164 expression value (or NULL_TREE if TYPE is void) and appends
6165 statements possibly defining it to SEQ. */
6168 gimple_build (gimple_seq *seq, location_t loc,
6169 enum built_in_function fn, tree type, tree arg0, tree arg1)
6171 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6174 tree decl = builtin_decl_implicit (fn);
6175 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6176 if (!VOID_TYPE_P (type))
6178 if (gimple_in_ssa_p (cfun))
6179 res = make_ssa_name (type);
6181 res = create_tmp_reg (type);
6182 gimple_call_set_lhs (stmt, res);
6184 gimple_set_location (stmt, loc);
6185 gimple_seq_add_stmt_without_update (seq, stmt);
6190 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6191 (or no result if TYPE is void) with location LOC,
6192 simplifying it first if possible. Returns the built
6193 expression value (or NULL_TREE if TYPE is void) and appends
6194 statements possibly defining it to SEQ. */
6197 gimple_build (gimple_seq *seq, location_t loc,
6198 enum built_in_function fn, tree type,
6199 tree arg0, tree arg1, tree arg2)
6201 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6202 seq, gimple_build_valueize);
6205 tree decl = builtin_decl_implicit (fn);
6206 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6207 if (!VOID_TYPE_P (type))
6209 if (gimple_in_ssa_p (cfun))
6210 res = make_ssa_name (type);
6212 res = create_tmp_reg (type);
6213 gimple_call_set_lhs (stmt, res);
6215 gimple_set_location (stmt, loc);
6216 gimple_seq_add_stmt_without_update (seq, stmt);
6221 /* Build the conversion (TYPE) OP with a result of type TYPE
6222 with location LOC if such conversion is neccesary in GIMPLE,
6223 simplifying it first.
6224 Returns the built expression value and appends
6225 statements possibly defining it to SEQ. */
6228 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6230 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6232 return gimple_build (seq, loc, NOP_EXPR, type, op);
6235 /* Build the conversion (ptrofftype) OP with a result of a type
6236 compatible with ptrofftype with location LOC if such conversion
6237 is neccesary in GIMPLE, simplifying it first.
6238 Returns the built expression value and appends
6239 statements possibly defining it to SEQ. */
6242 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6244 if (ptrofftype_p (TREE_TYPE (op)))
6246 return gimple_convert (seq, loc, sizetype, op);
6249 /* Return true if the result of assignment STMT is known to be non-negative.
6250 If the return value is based on the assumption that signed overflow is
6251 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6252 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6255 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6258 enum tree_code code = gimple_assign_rhs_code (stmt);
6259 switch (get_gimple_rhs_class (code))
6261 case GIMPLE_UNARY_RHS:
6262 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6263 gimple_expr_type (stmt),
6264 gimple_assign_rhs1 (stmt),
6265 strict_overflow_p, depth);
6266 case GIMPLE_BINARY_RHS:
6267 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6268 gimple_expr_type (stmt),
6269 gimple_assign_rhs1 (stmt),
6270 gimple_assign_rhs2 (stmt),
6271 strict_overflow_p, depth);
6272 case GIMPLE_TERNARY_RHS:
6274 case GIMPLE_SINGLE_RHS:
6275 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6276 strict_overflow_p, depth);
6277 case GIMPLE_INVALID_RHS:
6283 /* Return true if return value of call STMT is known to be non-negative.
6284 If the return value is based on the assumption that signed overflow is
6285 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6286 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6289 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6292 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6293 gimple_call_arg (stmt, 0) : NULL_TREE;
6294 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6295 gimple_call_arg (stmt, 1) : NULL_TREE;
6297 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6298 gimple_call_combined_fn (stmt),
6301 strict_overflow_p, depth);
6304 /* Return true if return value of call STMT is known to be non-negative.
6305 If the return value is based on the assumption that signed overflow is
6306 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6307 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6310 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6313 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6315 tree arg = gimple_phi_arg_def (stmt, i);
6316 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6322 /* Return true if STMT is known to compute a non-negative value.
6323 If the return value is based on the assumption that signed overflow is
6324 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6325 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6328 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6331 switch (gimple_code (stmt))
6334 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6337 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6340 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6347 /* Return true if the floating-point value computed by assignment STMT
6348 is known to have an integer value. We also allow +Inf, -Inf and NaN
6349 to be considered integer values. Return false for signaling NaN.
6351 DEPTH is the current nesting depth of the query. */
6354 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6356 enum tree_code code = gimple_assign_rhs_code (stmt);
6357 switch (get_gimple_rhs_class (code))
6359 case GIMPLE_UNARY_RHS:
6360 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6361 gimple_assign_rhs1 (stmt), depth);
6362 case GIMPLE_BINARY_RHS:
6363 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6364 gimple_assign_rhs1 (stmt),
6365 gimple_assign_rhs2 (stmt), depth);
6366 case GIMPLE_TERNARY_RHS:
6368 case GIMPLE_SINGLE_RHS:
6369 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6370 case GIMPLE_INVALID_RHS:
6376 /* Return true if the floating-point value computed by call STMT is known
6377 to have an integer value. We also allow +Inf, -Inf and NaN to be
6378 considered integer values. Return false for signaling NaN.
6380 DEPTH is the current nesting depth of the query. */
6383 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6385 tree arg0 = (gimple_call_num_args (stmt) > 0
6386 ? gimple_call_arg (stmt, 0)
6388 tree arg1 = (gimple_call_num_args (stmt) > 1
6389 ? gimple_call_arg (stmt, 1)
6391 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6395 /* Return true if the floating-point result of phi STMT is known to have
6396 an integer value. We also allow +Inf, -Inf and NaN to be considered
6397 integer values. Return false for signaling NaN.
6399 DEPTH is the current nesting depth of the query. */
6402 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6404 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6406 tree arg = gimple_phi_arg_def (stmt, i);
6407 if (!integer_valued_real_single_p (arg, depth + 1))
6413 /* Return true if the floating-point value computed by STMT is known
6414 to have an integer value. We also allow +Inf, -Inf and NaN to be
6415 considered integer values. Return false for signaling NaN.
6417 DEPTH is the current nesting depth of the query. */
6420 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6422 switch (gimple_code (stmt))
6425 return gimple_assign_integer_valued_real_p (stmt, depth);
6427 return gimple_call_integer_valued_real_p (stmt, depth);
6429 return gimple_phi_integer_valued_real_p (stmt, depth);