1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
36 #include "stor-layout.h"
38 #include "gimple-fold.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
57 /* Return true when DECL can be referenced from current unit.
58 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
59 We can get declarations that are not possible to reference for various
62 1) When analyzing C++ virtual tables.
63 C++ virtual tables do have known constructors even
64 when they are keyed to other compilation unit.
65 Those tables can contain pointers to methods and vars
66 in other units. Those methods have both STATIC and EXTERNAL
68 2) In WHOPR mode devirtualization might lead to reference
69 to method that was partitioned elsehwere.
70 In this case we have static VAR_DECL or FUNCTION_DECL
71 that has no corresponding callgraph/varpool node
73 3) COMDAT functions referred by external vtables that
74 we devirtualize only during final compilation stage.
75 At this time we already decided that we will not output
76 the function body and thus we can't reference the symbol
80 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
83 struct cgraph_node *node;
86 if (DECL_ABSTRACT_P (decl))
89 /* We are concerned only about static/external vars and functions. */
90 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
91 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
94 /* Static objects can be referred only if they was not optimized out yet. */
95 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
97 /* Before we start optimizing unreachable code we can be sure all
98 static objects are defined. */
99 if (symtab->function_flags_ready)
101 snode = symtab_node::get (decl);
102 if (!snode || !snode->definition)
104 node = dyn_cast <cgraph_node *> (snode);
105 return !node || !node->global.inlined_to;
108 /* We will later output the initializer, so we can refer to it.
109 So we are concerned only when DECL comes from initializer of
110 external var or var that has been optimized out. */
112 || TREE_CODE (from_decl) != VAR_DECL
113 || (!DECL_EXTERNAL (from_decl)
114 && (vnode = varpool_node::get (from_decl)) != NULL
115 && vnode->definition)
117 && (vnode = varpool_node::get (from_decl)) != NULL
118 && vnode->in_other_partition))
120 /* We are folding reference from external vtable. The vtable may reffer
121 to a symbol keyed to other compilation unit. The other compilation
122 unit may be in separate DSO and the symbol may be hidden. */
123 if (DECL_VISIBILITY_SPECIFIED (decl)
124 && DECL_EXTERNAL (decl)
125 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
126 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
128 /* When function is public, we always can introduce new reference.
129 Exception are the COMDAT functions where introducing a direct
130 reference imply need to include function body in the curren tunit. */
131 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
133 /* We have COMDAT. We are going to check if we still have definition
134 or if the definition is going to be output in other partition.
135 Bypass this when gimplifying; all needed functions will be produced.
137 As observed in PR20991 for already optimized out comdat virtual functions
138 it may be tempting to not necessarily give up because the copy will be
139 output elsewhere when corresponding vtable is output.
140 This is however not possible - ABI specify that COMDATs are output in
141 units where they are used and when the other unit was compiled with LTO
142 it is possible that vtable was kept public while the function itself
144 if (!symtab->function_flags_ready)
147 snode = symtab_node::get (decl);
149 || ((!snode->definition || DECL_EXTERNAL (decl))
150 && (!snode->in_other_partition
151 || (!snode->forced_by_abi && !snode->force_output))))
153 node = dyn_cast <cgraph_node *> (snode);
154 return !node || !node->global.inlined_to;
157 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
158 acceptable form for is_gimple_min_invariant.
159 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
162 canonicalize_constructor_val (tree cval, tree from_decl)
164 tree orig_cval = cval;
166 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
167 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
169 tree ptr = TREE_OPERAND (cval, 0);
170 if (is_gimple_min_invariant (ptr))
171 cval = build1_loc (EXPR_LOCATION (cval),
172 ADDR_EXPR, TREE_TYPE (ptr),
173 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
175 fold_convert (ptr_type_node,
176 TREE_OPERAND (cval, 1))));
178 if (TREE_CODE (cval) == ADDR_EXPR)
180 tree base = NULL_TREE;
181 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
183 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
185 TREE_OPERAND (cval, 0) = base;
188 base = get_base_address (TREE_OPERAND (cval, 0));
192 if ((TREE_CODE (base) == VAR_DECL
193 || TREE_CODE (base) == FUNCTION_DECL)
194 && !can_refer_decl_in_current_unit_p (base, from_decl))
196 if (TREE_CODE (base) == VAR_DECL)
197 TREE_ADDRESSABLE (base) = 1;
198 else if (TREE_CODE (base) == FUNCTION_DECL)
200 /* Make sure we create a cgraph node for functions we'll reference.
201 They can be non-existent if the reference comes from an entry
202 of an external vtable for example. */
203 cgraph_node::get_create (base);
205 /* Fixup types in global initializers. */
206 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
207 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
209 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
210 cval = fold_convert (TREE_TYPE (orig_cval), cval);
213 if (TREE_OVERFLOW_P (cval))
214 return drop_tree_overflow (cval);
218 /* If SYM is a constant variable with known value, return the value.
219 NULL_TREE is returned otherwise. */
222 get_symbol_constant_value (tree sym)
224 tree val = ctor_for_folding (sym);
225 if (val != error_mark_node)
229 val = canonicalize_constructor_val (unshare_expr (val), sym);
230 if (val && is_gimple_min_invariant (val))
235 /* Variables declared 'const' without an initializer
236 have zero as the initializer if they may not be
237 overridden at link or run time. */
239 && is_gimple_reg_type (TREE_TYPE (sym)))
240 return build_zero_cst (TREE_TYPE (sym));
248 /* Subroutine of fold_stmt. We perform several simplifications of the
249 memory reference tree EXPR and make sure to re-gimplify them properly
250 after propagation of constant addresses. IS_LHS is true if the
251 reference is supposed to be an lvalue. */
254 maybe_fold_reference (tree expr, bool is_lhs)
258 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
259 || TREE_CODE (expr) == REALPART_EXPR
260 || TREE_CODE (expr) == IMAGPART_EXPR)
261 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
262 return fold_unary_loc (EXPR_LOCATION (expr),
265 TREE_OPERAND (expr, 0));
266 else if (TREE_CODE (expr) == BIT_FIELD_REF
267 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
268 return fold_ternary_loc (EXPR_LOCATION (expr),
271 TREE_OPERAND (expr, 0),
272 TREE_OPERAND (expr, 1),
273 TREE_OPERAND (expr, 2));
276 && (result = fold_const_aggregate_ref (expr))
277 && is_gimple_min_invariant (result))
284 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
285 replacement rhs for the statement or NULL_TREE if no simplification
286 could be made. It is assumed that the operands have been previously
290 fold_gimple_assign (gimple_stmt_iterator *si)
292 gimple *stmt = gsi_stmt (*si);
293 enum tree_code subcode = gimple_assign_rhs_code (stmt);
294 location_t loc = gimple_location (stmt);
296 tree result = NULL_TREE;
298 switch (get_gimple_rhs_class (subcode))
300 case GIMPLE_SINGLE_RHS:
302 tree rhs = gimple_assign_rhs1 (stmt);
304 if (TREE_CLOBBER_P (rhs))
307 if (REFERENCE_CLASS_P (rhs))
308 return maybe_fold_reference (rhs, false);
310 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
312 tree val = OBJ_TYPE_REF_EXPR (rhs);
313 if (is_gimple_min_invariant (val))
315 else if (flag_devirtualize && virtual_method_call_p (rhs))
318 vec <cgraph_node *>targets
319 = possible_polymorphic_call_targets (rhs, stmt, &final);
320 if (final && targets.length () <= 1 && dbg_cnt (devirt))
322 if (dump_enabled_p ())
324 location_t loc = gimple_location_safe (stmt);
325 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
326 "resolving virtual function address "
327 "reference to function %s\n",
328 targets.length () == 1
329 ? targets[0]->name ()
332 if (targets.length () == 1)
334 val = fold_convert (TREE_TYPE (val),
335 build_fold_addr_expr_loc
336 (loc, targets[0]->decl));
337 STRIP_USELESS_TYPE_CONVERSION (val);
340 /* We can not use __builtin_unreachable here because it
341 can not have address taken. */
342 val = build_int_cst (TREE_TYPE (val), 0);
348 else if (TREE_CODE (rhs) == ADDR_EXPR)
350 tree ref = TREE_OPERAND (rhs, 0);
351 tree tem = maybe_fold_reference (ref, true);
353 && TREE_CODE (tem) == MEM_REF
354 && integer_zerop (TREE_OPERAND (tem, 1)))
355 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
357 result = fold_convert (TREE_TYPE (rhs),
358 build_fold_addr_expr_loc (loc, tem));
359 else if (TREE_CODE (ref) == MEM_REF
360 && integer_zerop (TREE_OPERAND (ref, 1)))
361 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
365 /* Strip away useless type conversions. Both the
366 NON_LVALUE_EXPR that may have been added by fold, and
367 "useless" type conversions that might now be apparent
368 due to propagation. */
369 STRIP_USELESS_TYPE_CONVERSION (result);
371 if (result != rhs && valid_gimple_rhs_p (result))
376 else if (TREE_CODE (rhs) == CONSTRUCTOR
377 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
379 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
383 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
384 if (! CONSTANT_CLASS_P (val))
387 return build_vector_from_ctor (TREE_TYPE (rhs),
388 CONSTRUCTOR_ELTS (rhs));
391 else if (DECL_P (rhs))
392 return get_symbol_constant_value (rhs);
396 case GIMPLE_UNARY_RHS:
399 case GIMPLE_BINARY_RHS:
402 case GIMPLE_TERNARY_RHS:
403 result = fold_ternary_loc (loc, subcode,
404 TREE_TYPE (gimple_assign_lhs (stmt)),
405 gimple_assign_rhs1 (stmt),
406 gimple_assign_rhs2 (stmt),
407 gimple_assign_rhs3 (stmt));
411 STRIP_USELESS_TYPE_CONVERSION (result);
412 if (valid_gimple_rhs_p (result))
417 case GIMPLE_INVALID_RHS:
425 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
426 adjusting the replacement stmts location and virtual operands.
427 If the statement has a lhs the last stmt in the sequence is expected
428 to assign to that lhs. */
431 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
433 gimple *stmt = gsi_stmt (*si_p);
435 if (gimple_has_location (stmt))
436 annotate_all_with_location (stmts, gimple_location (stmt));
438 /* First iterate over the replacement statements backward, assigning
439 virtual operands to their defining statements. */
440 gimple *laststore = NULL;
441 for (gimple_stmt_iterator i = gsi_last (stmts);
442 !gsi_end_p (i); gsi_prev (&i))
444 gimple *new_stmt = gsi_stmt (i);
445 if ((gimple_assign_single_p (new_stmt)
446 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
447 || (is_gimple_call (new_stmt)
448 && (gimple_call_flags (new_stmt)
449 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
453 vdef = gimple_vdef (stmt);
455 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
456 gimple_set_vdef (new_stmt, vdef);
457 if (vdef && TREE_CODE (vdef) == SSA_NAME)
458 SSA_NAME_DEF_STMT (vdef) = new_stmt;
459 laststore = new_stmt;
463 /* Second iterate over the statements forward, assigning virtual
464 operands to their uses. */
465 tree reaching_vuse = gimple_vuse (stmt);
466 for (gimple_stmt_iterator i = gsi_start (stmts);
467 !gsi_end_p (i); gsi_next (&i))
469 gimple *new_stmt = gsi_stmt (i);
470 /* If the new statement possibly has a VUSE, update it with exact SSA
471 name we know will reach this one. */
472 if (gimple_has_mem_ops (new_stmt))
473 gimple_set_vuse (new_stmt, reaching_vuse);
474 gimple_set_modified (new_stmt, true);
475 if (gimple_vdef (new_stmt))
476 reaching_vuse = gimple_vdef (new_stmt);
479 /* If the new sequence does not do a store release the virtual
480 definition of the original statement. */
482 && reaching_vuse == gimple_vuse (stmt))
484 tree vdef = gimple_vdef (stmt);
486 && TREE_CODE (vdef) == SSA_NAME)
488 unlink_stmt_vdef (stmt);
489 release_ssa_name (vdef);
493 /* Finally replace the original statement with the sequence. */
494 gsi_replace_with_seq (si_p, stmts, false);
497 /* Convert EXPR into a GIMPLE value suitable for substitution on the
498 RHS of an assignment. Insert the necessary statements before
499 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
500 is replaced. If the call is expected to produces a result, then it
501 is replaced by an assignment of the new RHS to the result variable.
502 If the result is to be ignored, then the call is replaced by a
503 GIMPLE_NOP. A proper VDEF chain is retained by making the first
504 VUSE and the last VDEF of the whole sequence be the same as the replaced
505 statement and using new SSA names for stores in between. */
508 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
511 gimple *stmt, *new_stmt;
512 gimple_stmt_iterator i;
513 gimple_seq stmts = NULL;
515 stmt = gsi_stmt (*si_p);
517 gcc_assert (is_gimple_call (stmt));
519 push_gimplify_context (gimple_in_ssa_p (cfun));
521 lhs = gimple_call_lhs (stmt);
522 if (lhs == NULL_TREE)
524 gimplify_and_add (expr, &stmts);
525 /* We can end up with folding a memcpy of an empty class assignment
526 which gets optimized away by C++ gimplification. */
527 if (gimple_seq_empty_p (stmts))
529 pop_gimplify_context (NULL);
530 if (gimple_in_ssa_p (cfun))
532 unlink_stmt_vdef (stmt);
535 gsi_replace (si_p, gimple_build_nop (), false);
541 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
542 new_stmt = gimple_build_assign (lhs, tmp);
543 i = gsi_last (stmts);
544 gsi_insert_after_without_update (&i, new_stmt,
545 GSI_CONTINUE_LINKING);
548 pop_gimplify_context (NULL);
550 gsi_replace_with_seq_vops (si_p, stmts);
554 /* Replace the call at *GSI with the gimple value VAL. */
557 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
559 gimple *stmt = gsi_stmt (*gsi);
560 tree lhs = gimple_call_lhs (stmt);
564 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
565 val = fold_convert (TREE_TYPE (lhs), val);
566 repl = gimple_build_assign (lhs, val);
569 repl = gimple_build_nop ();
570 tree vdef = gimple_vdef (stmt);
571 if (vdef && TREE_CODE (vdef) == SSA_NAME)
573 unlink_stmt_vdef (stmt);
574 release_ssa_name (vdef);
576 gsi_replace (gsi, repl, false);
579 /* Replace the call at *GSI with the new call REPL and fold that
583 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
585 gimple *stmt = gsi_stmt (*gsi);
586 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
587 gimple_set_location (repl, gimple_location (stmt));
588 if (gimple_vdef (stmt)
589 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
591 gimple_set_vdef (repl, gimple_vdef (stmt));
592 gimple_set_vuse (repl, gimple_vuse (stmt));
593 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
595 gsi_replace (gsi, repl, false);
599 /* Return true if VAR is a VAR_DECL or a component thereof. */
602 var_decl_component_p (tree var)
605 while (handled_component_p (inner))
606 inner = TREE_OPERAND (inner, 0);
607 return SSA_VAR_P (inner);
610 /* Fold function call to builtin mem{{,p}cpy,move}. Return
611 false if no simplification can be made.
612 If ENDP is 0, return DEST (like memcpy).
613 If ENDP is 1, return DEST+LEN (like mempcpy).
614 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
615 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
619 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
620 tree dest, tree src, int endp)
622 gimple *stmt = gsi_stmt (*gsi);
623 tree lhs = gimple_call_lhs (stmt);
624 tree len = gimple_call_arg (stmt, 2);
625 tree destvar, srcvar;
626 location_t loc = gimple_location (stmt);
628 /* If the LEN parameter is zero, return DEST. */
629 if (integer_zerop (len))
632 if (gimple_call_lhs (stmt))
633 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
635 repl = gimple_build_nop ();
636 tree vdef = gimple_vdef (stmt);
637 if (vdef && TREE_CODE (vdef) == SSA_NAME)
639 unlink_stmt_vdef (stmt);
640 release_ssa_name (vdef);
642 gsi_replace (gsi, repl, false);
646 /* If SRC and DEST are the same (and not volatile), return
647 DEST{,+LEN,+LEN-1}. */
648 if (operand_equal_p (src, dest, 0))
650 unlink_stmt_vdef (stmt);
651 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
652 release_ssa_name (gimple_vdef (stmt));
655 gsi_replace (gsi, gimple_build_nop (), false);
662 tree srctype, desttype;
663 unsigned int src_align, dest_align;
666 /* Build accesses at offset zero with a ref-all character type. */
667 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
670 /* If we can perform the copy efficiently with first doing all loads
671 and then all stores inline it that way. Currently efficiently
672 means that we can load all the memory into a single integer
673 register which is what MOVE_MAX gives us. */
674 src_align = get_pointer_alignment (src);
675 dest_align = get_pointer_alignment (dest);
676 if (tree_fits_uhwi_p (len)
677 && compare_tree_int (len, MOVE_MAX) <= 0
678 /* ??? Don't transform copies from strings with known length this
679 confuses the tree-ssa-strlen.c. This doesn't handle
680 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
682 && !c_strlen (src, 2))
684 unsigned ilen = tree_to_uhwi (len);
685 if (exact_log2 (ilen) != -1)
687 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
689 && TYPE_MODE (type) != BLKmode
690 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
692 /* If the destination pointer is not aligned we must be able
693 to emit an unaligned store. */
694 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
695 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
696 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
697 != CODE_FOR_nothing)))
700 tree desttype = type;
701 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
702 srctype = build_aligned_type (type, src_align);
703 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
704 tree tem = fold_const_aggregate_ref (srcmem);
707 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
708 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
710 && (optab_handler (movmisalign_optab,
712 == CODE_FOR_nothing))
717 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
719 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
720 if (gimple_in_ssa_p (cfun))
721 srcmem = make_ssa_name (TREE_TYPE (srcmem),
724 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
725 gimple_assign_set_lhs (new_stmt, srcmem);
726 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
727 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
729 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
730 desttype = build_aligned_type (type, dest_align);
732 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
735 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
736 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
737 if (gimple_vdef (new_stmt)
738 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
739 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
742 gsi_replace (gsi, new_stmt, false);
745 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
754 /* Both DEST and SRC must be pointer types.
755 ??? This is what old code did. Is the testing for pointer types
758 If either SRC is readonly or length is 1, we can use memcpy. */
759 if (!dest_align || !src_align)
761 if (readonly_data_expr (src)
762 || (tree_fits_uhwi_p (len)
763 && (MIN (src_align, dest_align) / BITS_PER_UNIT
764 >= tree_to_uhwi (len))))
766 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
769 gimple_call_set_fndecl (stmt, fn);
770 gimple_call_set_arg (stmt, 0, dest);
771 gimple_call_set_arg (stmt, 1, src);
776 /* If *src and *dest can't overlap, optimize into memcpy as well. */
777 if (TREE_CODE (src) == ADDR_EXPR
778 && TREE_CODE (dest) == ADDR_EXPR)
780 tree src_base, dest_base, fn;
781 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
782 HOST_WIDE_INT size = -1;
783 HOST_WIDE_INT maxsize = -1;
785 srcvar = TREE_OPERAND (src, 0);
786 src_base = get_ref_base_and_extent (srcvar, &src_offset,
788 destvar = TREE_OPERAND (dest, 0);
789 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
791 if (tree_fits_uhwi_p (len))
792 maxsize = tree_to_uhwi (len);
795 src_offset /= BITS_PER_UNIT;
796 dest_offset /= BITS_PER_UNIT;
797 if (SSA_VAR_P (src_base)
798 && SSA_VAR_P (dest_base))
800 if (operand_equal_p (src_base, dest_base, 0)
801 && ranges_overlap_p (src_offset, maxsize,
802 dest_offset, maxsize))
805 else if (TREE_CODE (src_base) == MEM_REF
806 && TREE_CODE (dest_base) == MEM_REF)
808 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
809 TREE_OPERAND (dest_base, 0), 0))
811 offset_int off = mem_ref_offset (src_base) + src_offset;
812 if (!wi::fits_shwi_p (off))
814 src_offset = off.to_shwi ();
816 off = mem_ref_offset (dest_base) + dest_offset;
817 if (!wi::fits_shwi_p (off))
819 dest_offset = off.to_shwi ();
820 if (ranges_overlap_p (src_offset, maxsize,
821 dest_offset, maxsize))
827 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
830 gimple_call_set_fndecl (stmt, fn);
831 gimple_call_set_arg (stmt, 0, dest);
832 gimple_call_set_arg (stmt, 1, src);
837 /* If the destination and source do not alias optimize into
839 if ((is_gimple_min_invariant (dest)
840 || TREE_CODE (dest) == SSA_NAME)
841 && (is_gimple_min_invariant (src)
842 || TREE_CODE (src) == SSA_NAME))
845 ao_ref_init_from_ptr_and_size (&destr, dest, len);
846 ao_ref_init_from_ptr_and_size (&srcr, src, len);
847 if (!refs_may_alias_p_1 (&destr, &srcr, false))
850 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
853 gimple_call_set_fndecl (stmt, fn);
854 gimple_call_set_arg (stmt, 0, dest);
855 gimple_call_set_arg (stmt, 1, src);
864 if (!tree_fits_shwi_p (len))
867 This logic lose for arguments like (type *)malloc (sizeof (type)),
868 since we strip the casts of up to VOID return value from malloc.
869 Perhaps we ought to inherit type from non-VOID argument here? */
872 if (!POINTER_TYPE_P (TREE_TYPE (src))
873 || !POINTER_TYPE_P (TREE_TYPE (dest)))
875 /* In the following try to find a type that is most natural to be
876 used for the memcpy source and destination and that allows
877 the most optimization when memcpy is turned into a plain assignment
878 using that type. In theory we could always use a char[len] type
879 but that only gains us that the destination and source possibly
880 no longer will have their address taken. */
881 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
882 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
884 tree tem = TREE_OPERAND (src, 0);
886 if (tem != TREE_OPERAND (src, 0))
887 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
889 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
891 tree tem = TREE_OPERAND (dest, 0);
893 if (tem != TREE_OPERAND (dest, 0))
894 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
896 srctype = TREE_TYPE (TREE_TYPE (src));
897 if (TREE_CODE (srctype) == ARRAY_TYPE
898 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
900 srctype = TREE_TYPE (srctype);
902 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
904 desttype = TREE_TYPE (TREE_TYPE (dest));
905 if (TREE_CODE (desttype) == ARRAY_TYPE
906 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
908 desttype = TREE_TYPE (desttype);
910 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
912 if (TREE_ADDRESSABLE (srctype)
913 || TREE_ADDRESSABLE (desttype))
916 /* Make sure we are not copying using a floating-point mode or
917 a type whose size possibly does not match its precision. */
918 if (FLOAT_MODE_P (TYPE_MODE (desttype))
919 || TREE_CODE (desttype) == BOOLEAN_TYPE
920 || TREE_CODE (desttype) == ENUMERAL_TYPE)
921 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
922 if (FLOAT_MODE_P (TYPE_MODE (srctype))
923 || TREE_CODE (srctype) == BOOLEAN_TYPE
924 || TREE_CODE (srctype) == ENUMERAL_TYPE)
925 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
933 src_align = get_pointer_alignment (src);
934 dest_align = get_pointer_alignment (dest);
935 if (dest_align < TYPE_ALIGN (desttype)
936 || src_align < TYPE_ALIGN (srctype))
940 STRIP_NOPS (destvar);
941 if (TREE_CODE (destvar) == ADDR_EXPR
942 && var_decl_component_p (TREE_OPERAND (destvar, 0))
943 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
944 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
950 if (TREE_CODE (srcvar) == ADDR_EXPR
951 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
952 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
955 || src_align >= TYPE_ALIGN (desttype))
956 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
958 else if (!STRICT_ALIGNMENT)
960 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
962 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
970 if (srcvar == NULL_TREE && destvar == NULL_TREE)
973 if (srcvar == NULL_TREE)
976 if (src_align >= TYPE_ALIGN (desttype))
977 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
980 if (STRICT_ALIGNMENT)
982 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
984 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
987 else if (destvar == NULL_TREE)
990 if (dest_align >= TYPE_ALIGN (srctype))
991 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
994 if (STRICT_ALIGNMENT)
996 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
998 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1003 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1005 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1006 if (gimple_in_ssa_p (cfun))
1007 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1009 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1010 gimple_assign_set_lhs (new_stmt, srcvar);
1011 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1012 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1014 new_stmt = gimple_build_assign (destvar, srcvar);
1015 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1016 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1017 if (gimple_vdef (new_stmt)
1018 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1019 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1022 gsi_replace (gsi, new_stmt, false);
1025 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1029 gimple_seq stmts = NULL;
1030 if (endp == 0 || endp == 3)
1033 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1035 if (endp == 2 || endp == 1)
1037 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1038 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1039 TREE_TYPE (dest), dest, len);
1042 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1043 gimple *repl = gimple_build_assign (lhs, dest);
1044 gsi_replace (gsi, repl, false);
1048 /* Fold function call to builtin memset or bzero at *GSI setting the
1049 memory of size LEN to VAL. Return whether a simplification was made. */
1052 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1054 gimple *stmt = gsi_stmt (*gsi);
1056 unsigned HOST_WIDE_INT length, cval;
1058 /* If the LEN parameter is zero, return DEST. */
1059 if (integer_zerop (len))
1061 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1065 if (! tree_fits_uhwi_p (len))
1068 if (TREE_CODE (c) != INTEGER_CST)
1071 tree dest = gimple_call_arg (stmt, 0);
1073 if (TREE_CODE (var) != ADDR_EXPR)
1076 var = TREE_OPERAND (var, 0);
1077 if (TREE_THIS_VOLATILE (var))
1080 etype = TREE_TYPE (var);
1081 if (TREE_CODE (etype) == ARRAY_TYPE)
1082 etype = TREE_TYPE (etype);
1084 if (!INTEGRAL_TYPE_P (etype)
1085 && !POINTER_TYPE_P (etype))
1088 if (! var_decl_component_p (var))
1091 length = tree_to_uhwi (len);
1092 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1093 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1096 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1099 if (integer_zerop (c))
1103 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1106 cval = TREE_INT_CST_LOW (c);
1110 cval |= (cval << 31) << 1;
1113 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1114 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1115 gimple_set_vuse (store, gimple_vuse (stmt));
1116 tree vdef = gimple_vdef (stmt);
1117 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1119 gimple_set_vdef (store, gimple_vdef (stmt));
1120 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1122 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1123 if (gimple_call_lhs (stmt))
1125 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1126 gsi_replace (gsi, asgn, false);
1130 gimple_stmt_iterator gsi2 = *gsi;
1132 gsi_remove (&gsi2, true);
1139 /* Return the string length, maximum string length or maximum value of
1141 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1142 is not NULL and, for TYPE == 0, its value is not equal to the length
1143 we determine or if we are unable to determine the length or value,
1144 return false. VISITED is a bitmap of visited variables.
1145 TYPE is 0 if string length should be returned, 1 for maximum string
1146 length and 2 for maximum value ARG can have. */
1149 get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
1154 if (TREE_CODE (arg) != SSA_NAME)
1156 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1157 if (TREE_CODE (arg) == ADDR_EXPR
1158 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1159 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1161 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1162 if (TREE_CODE (aop0) == INDIRECT_REF
1163 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1164 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1165 length, visited, type);
1171 if (TREE_CODE (val) != INTEGER_CST
1172 || tree_int_cst_sgn (val) < 0)
1176 val = c_strlen (arg, 1);
1184 if (TREE_CODE (*length) != INTEGER_CST
1185 || TREE_CODE (val) != INTEGER_CST)
1188 if (tree_int_cst_lt (*length, val))
1192 else if (simple_cst_equal (val, *length) != 1)
1200 /* If ARG is registered for SSA update we cannot look at its defining
1202 if (name_registered_for_update_p (arg))
1205 /* If we were already here, break the infinite cycle. */
1207 *visited = BITMAP_ALLOC (NULL);
1208 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1212 def_stmt = SSA_NAME_DEF_STMT (var);
1214 switch (gimple_code (def_stmt))
1217 /* The RHS of the statement defining VAR must either have a
1218 constant length or come from another SSA_NAME with a constant
1220 if (gimple_assign_single_p (def_stmt)
1221 || gimple_assign_unary_nop_p (def_stmt))
1223 tree rhs = gimple_assign_rhs1 (def_stmt);
1224 return get_maxval_strlen (rhs, length, visited, type);
1226 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1228 tree op2 = gimple_assign_rhs2 (def_stmt);
1229 tree op3 = gimple_assign_rhs3 (def_stmt);
1230 return get_maxval_strlen (op2, length, visited, type)
1231 && get_maxval_strlen (op3, length, visited, type);
1237 /* All the arguments of the PHI node must have the same constant
1241 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1243 tree arg = gimple_phi_arg (def_stmt, i)->def;
1245 /* If this PHI has itself as an argument, we cannot
1246 determine the string length of this argument. However,
1247 if we can find a constant string length for the other
1248 PHI args then we can still be sure that this is a
1249 constant string length. So be optimistic and just
1250 continue with the next argument. */
1251 if (arg == gimple_phi_result (def_stmt))
1254 if (!get_maxval_strlen (arg, length, visited, type))
1266 get_maxval_strlen (tree arg, int type)
1268 bitmap visited = NULL;
1269 tree len = NULL_TREE;
1270 if (!get_maxval_strlen (arg, &len, &visited, type))
1273 BITMAP_FREE (visited);
1279 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1280 If LEN is not NULL, it represents the length of the string to be
1281 copied. Return NULL_TREE if no simplification can be made. */
1284 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1285 tree dest, tree src)
1287 location_t loc = gimple_location (gsi_stmt (*gsi));
1290 /* If SRC and DEST are the same (and not volatile), return DEST. */
1291 if (operand_equal_p (src, dest, 0))
1293 replace_call_with_value (gsi, dest);
1297 if (optimize_function_for_size_p (cfun))
1300 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1304 tree len = get_maxval_strlen (src, 0);
1308 len = fold_convert_loc (loc, size_type_node, len);
1309 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1310 len = force_gimple_operand_gsi (gsi, len, true,
1311 NULL_TREE, true, GSI_SAME_STMT);
1312 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1313 replace_call_with_call_and_fold (gsi, repl);
1317 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1318 If SLEN is not NULL, it represents the length of the source string.
1319 Return NULL_TREE if no simplification can be made. */
1322 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1323 tree dest, tree src, tree len)
1325 location_t loc = gimple_location (gsi_stmt (*gsi));
1328 /* If the LEN parameter is zero, return DEST. */
1329 if (integer_zerop (len))
1331 replace_call_with_value (gsi, dest);
1335 /* We can't compare slen with len as constants below if len is not a
1337 if (TREE_CODE (len) != INTEGER_CST)
1340 /* Now, we must be passed a constant src ptr parameter. */
1341 tree slen = get_maxval_strlen (src, 0);
1342 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1345 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1347 /* We do not support simplification of this case, though we do
1348 support it when expanding trees into RTL. */
1349 /* FIXME: generate a call to __builtin_memset. */
1350 if (tree_int_cst_lt (slen, len))
1353 /* OK transform into builtin memcpy. */
1354 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1358 len = fold_convert_loc (loc, size_type_node, len);
1359 len = force_gimple_operand_gsi (gsi, len, true,
1360 NULL_TREE, true, GSI_SAME_STMT);
1361 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1362 replace_call_with_call_and_fold (gsi, repl);
1366 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1369 Return NULL_TREE if no simplification was possible, otherwise return the
1370 simplified form of the call as a tree.
1372 The simplified form may be a constant or other expression which
1373 computes the same value, but in a more efficient manner (including
1374 calls to other builtin functions).
1376 The call may contain arguments which need to be evaluated, but
1377 which are not useful to determine the result of the call. In
1378 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1379 COMPOUND_EXPR will be an argument which must be evaluated.
1380 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1381 COMPOUND_EXPR in the chain will contain the tree for the simplified
1382 form of the builtin function call. */
1385 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1387 gimple *stmt = gsi_stmt (*gsi);
1388 location_t loc = gimple_location (stmt);
1390 const char *p = c_getstr (src);
1392 /* If the string length is zero, return the dst parameter. */
1393 if (p && *p == '\0')
1395 replace_call_with_value (gsi, dst);
1399 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1402 /* See if we can store by pieces into (dst + strlen(dst)). */
1404 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1405 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1407 if (!strlen_fn || !memcpy_fn)
1410 /* If the length of the source string isn't computable don't
1411 split strcat into strlen and memcpy. */
1412 tree len = get_maxval_strlen (src, 0);
1416 /* Create strlen (dst). */
1417 gimple_seq stmts = NULL, stmts2;
1418 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1419 gimple_set_location (repl, loc);
1420 if (gimple_in_ssa_p (cfun))
1421 newdst = make_ssa_name (size_type_node);
1423 newdst = create_tmp_reg (size_type_node);
1424 gimple_call_set_lhs (repl, newdst);
1425 gimple_seq_add_stmt_without_update (&stmts, repl);
1427 /* Create (dst p+ strlen (dst)). */
1428 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1429 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1430 gimple_seq_add_seq_without_update (&stmts, stmts2);
1432 len = fold_convert_loc (loc, size_type_node, len);
1433 len = size_binop_loc (loc, PLUS_EXPR, len,
1434 build_int_cst (size_type_node, 1));
1435 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1436 gimple_seq_add_seq_without_update (&stmts, stmts2);
1438 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1439 gimple_seq_add_stmt_without_update (&stmts, repl);
1440 if (gimple_call_lhs (stmt))
1442 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1443 gimple_seq_add_stmt_without_update (&stmts, repl);
1444 gsi_replace_with_seq_vops (gsi, stmts);
1445 /* gsi now points at the assignment to the lhs, get a
1446 stmt iterator to the memcpy call.
1447 ??? We can't use gsi_for_stmt as that doesn't work when the
1448 CFG isn't built yet. */
1449 gimple_stmt_iterator gsi2 = *gsi;
1455 gsi_replace_with_seq_vops (gsi, stmts);
1461 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1462 are the arguments to the call. */
1465 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1467 gimple *stmt = gsi_stmt (*gsi);
1468 tree dest = gimple_call_arg (stmt, 0);
1469 tree src = gimple_call_arg (stmt, 1);
1470 tree size = gimple_call_arg (stmt, 2);
1476 /* If the SRC parameter is "", return DEST. */
1477 if (p && *p == '\0')
1479 replace_call_with_value (gsi, dest);
1483 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1486 /* If __builtin_strcat_chk is used, assume strcat is available. */
1487 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1491 gimple *repl = gimple_build_call (fn, 2, dest, src);
1492 replace_call_with_call_and_fold (gsi, repl);
1496 /* Simplify a call to the strncat builtin. */
1499 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1501 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1502 tree dst = gimple_call_arg (stmt, 0);
1503 tree src = gimple_call_arg (stmt, 1);
1504 tree len = gimple_call_arg (stmt, 2);
1506 const char *p = c_getstr (src);
1508 /* If the requested length is zero, or the src parameter string
1509 length is zero, return the dst parameter. */
1510 if (integer_zerop (len) || (p && *p == '\0'))
1512 replace_call_with_value (gsi, dst);
1516 /* If the requested len is greater than or equal to the string
1517 length, call strcat. */
1518 if (TREE_CODE (len) == INTEGER_CST && p
1519 && compare_tree_int (len, strlen (p)) >= 0)
1521 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1523 /* If the replacement _DECL isn't initialized, don't do the
1528 gcall *repl = gimple_build_call (fn, 2, dst, src);
1529 replace_call_with_call_and_fold (gsi, repl);
1536 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1540 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1542 gimple *stmt = gsi_stmt (*gsi);
1543 tree dest = gimple_call_arg (stmt, 0);
1544 tree src = gimple_call_arg (stmt, 1);
1545 tree len = gimple_call_arg (stmt, 2);
1546 tree size = gimple_call_arg (stmt, 3);
1551 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1552 if ((p && *p == '\0')
1553 || integer_zerop (len))
1555 replace_call_with_value (gsi, dest);
1559 if (! tree_fits_uhwi_p (size))
1562 if (! integer_all_onesp (size))
1564 tree src_len = c_strlen (src, 1);
1566 && tree_fits_uhwi_p (src_len)
1567 && tree_fits_uhwi_p (len)
1568 && ! tree_int_cst_lt (len, src_len))
1570 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1571 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1575 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1576 replace_call_with_call_and_fold (gsi, repl);
1582 /* If __builtin_strncat_chk is used, assume strncat is available. */
1583 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1587 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1588 replace_call_with_call_and_fold (gsi, repl);
1592 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1593 to the call. IGNORE is true if the value returned
1594 by the builtin will be ignored. UNLOCKED is true is true if this
1595 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1596 the known length of the string. Return NULL_TREE if no simplification
1600 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1601 tree arg0, tree arg1,
1604 gimple *stmt = gsi_stmt (*gsi);
1606 /* If we're using an unlocked function, assume the other unlocked
1607 functions exist explicitly. */
1608 tree const fn_fputc = (unlocked
1609 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1610 : builtin_decl_implicit (BUILT_IN_FPUTC));
1611 tree const fn_fwrite = (unlocked
1612 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1613 : builtin_decl_implicit (BUILT_IN_FWRITE));
1615 /* If the return value is used, don't do the transformation. */
1616 if (gimple_call_lhs (stmt))
1619 /* Get the length of the string passed to fputs. If the length
1620 can't be determined, punt. */
1621 tree len = get_maxval_strlen (arg0, 0);
1623 || TREE_CODE (len) != INTEGER_CST)
1626 switch (compare_tree_int (len, 1))
1628 case -1: /* length is 0, delete the call entirely . */
1629 replace_call_with_value (gsi, integer_zero_node);
1632 case 0: /* length is 1, call fputc. */
1634 const char *p = c_getstr (arg0);
1640 gimple *repl = gimple_build_call (fn_fputc, 2,
1642 (integer_type_node, p[0]), arg1);
1643 replace_call_with_call_and_fold (gsi, repl);
1648 case 1: /* length is greater than 1, call fwrite. */
1650 /* If optimizing for size keep fputs. */
1651 if (optimize_function_for_size_p (cfun))
1653 /* New argument list transforming fputs(string, stream) to
1654 fwrite(string, 1, len, stream). */
1658 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
1659 size_one_node, len, arg1);
1660 replace_call_with_call_and_fold (gsi, repl);
1669 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1670 DEST, SRC, LEN, and SIZE are the arguments to the call.
1671 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1672 code of the builtin. If MAXLEN is not NULL, it is maximum length
1673 passed as third argument. */
1676 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1677 tree dest, tree src, tree len, tree size,
1678 enum built_in_function fcode)
1680 gimple *stmt = gsi_stmt (*gsi);
1681 location_t loc = gimple_location (stmt);
1682 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1685 /* If SRC and DEST are the same (and not volatile), return DEST
1686 (resp. DEST+LEN for __mempcpy_chk). */
1687 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1689 if (fcode != BUILT_IN_MEMPCPY_CHK)
1691 replace_call_with_value (gsi, dest);
1696 gimple_seq stmts = NULL;
1697 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1698 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, dest, len);
1699 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1700 replace_call_with_value (gsi, temp);
1705 if (! tree_fits_uhwi_p (size))
1708 tree maxlen = get_maxval_strlen (len, 2);
1709 if (! integer_all_onesp (size))
1711 if (! tree_fits_uhwi_p (len))
1713 /* If LEN is not constant, try MAXLEN too.
1714 For MAXLEN only allow optimizing into non-_ocs function
1715 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1716 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1718 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1720 /* (void) __mempcpy_chk () can be optimized into
1721 (void) __memcpy_chk (). */
1722 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1726 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1727 replace_call_with_call_and_fold (gsi, repl);
1736 if (tree_int_cst_lt (size, maxlen))
1741 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1742 mem{cpy,pcpy,move,set} is available. */
1745 case BUILT_IN_MEMCPY_CHK:
1746 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1748 case BUILT_IN_MEMPCPY_CHK:
1749 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1751 case BUILT_IN_MEMMOVE_CHK:
1752 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1754 case BUILT_IN_MEMSET_CHK:
1755 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1764 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1765 replace_call_with_call_and_fold (gsi, repl);
1769 /* Fold a call to the __st[rp]cpy_chk builtin.
1770 DEST, SRC, and SIZE are the arguments to the call.
1771 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1772 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1773 strings passed as second argument. */
1776 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1778 tree src, tree size,
1779 enum built_in_function fcode)
1781 gimple *stmt = gsi_stmt (*gsi);
1782 location_t loc = gimple_location (stmt);
1783 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1786 /* If SRC and DEST are the same (and not volatile), return DEST. */
1787 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1789 replace_call_with_value (gsi, dest);
1793 if (! tree_fits_uhwi_p (size))
1796 tree maxlen = get_maxval_strlen (src, 1);
1797 if (! integer_all_onesp (size))
1799 len = c_strlen (src, 1);
1800 if (! len || ! tree_fits_uhwi_p (len))
1802 /* If LEN is not constant, try MAXLEN too.
1803 For MAXLEN only allow optimizing into non-_ocs function
1804 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1805 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1807 if (fcode == BUILT_IN_STPCPY_CHK)
1812 /* If return value of __stpcpy_chk is ignored,
1813 optimize into __strcpy_chk. */
1814 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1818 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1819 replace_call_with_call_and_fold (gsi, repl);
1823 if (! len || TREE_SIDE_EFFECTS (len))
1826 /* If c_strlen returned something, but not a constant,
1827 transform __strcpy_chk into __memcpy_chk. */
1828 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1832 gimple_seq stmts = NULL;
1833 len = gimple_convert (&stmts, loc, size_type_node, len);
1834 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1835 build_int_cst (size_type_node, 1));
1836 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1837 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1838 replace_call_with_call_and_fold (gsi, repl);
1845 if (! tree_int_cst_lt (maxlen, size))
1849 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1850 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1851 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1855 gimple *repl = gimple_build_call (fn, 2, dest, src);
1856 replace_call_with_call_and_fold (gsi, repl);
1860 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1861 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1862 length passed as third argument. IGNORE is true if return value can be
1863 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1866 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1867 tree dest, tree src,
1868 tree len, tree size,
1869 enum built_in_function fcode)
1871 gimple *stmt = gsi_stmt (*gsi);
1872 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1875 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
1877 /* If return value of __stpncpy_chk is ignored,
1878 optimize into __strncpy_chk. */
1879 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1882 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
1883 replace_call_with_call_and_fold (gsi, repl);
1888 if (! tree_fits_uhwi_p (size))
1891 tree maxlen = get_maxval_strlen (len, 2);
1892 if (! integer_all_onesp (size))
1894 if (! tree_fits_uhwi_p (len))
1896 /* If LEN is not constant, try MAXLEN too.
1897 For MAXLEN only allow optimizing into non-_ocs function
1898 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1899 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1905 if (tree_int_cst_lt (size, maxlen))
1909 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1910 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1911 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1915 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1916 replace_call_with_call_and_fold (gsi, repl);
1920 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1921 Return NULL_TREE if no simplification can be made. */
1924 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1926 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1927 location_t loc = gimple_location (stmt);
1928 tree dest = gimple_call_arg (stmt, 0);
1929 tree src = gimple_call_arg (stmt, 1);
1930 tree fn, len, lenp1;
1932 /* If the result is unused, replace stpcpy with strcpy. */
1933 if (gimple_call_lhs (stmt) == NULL_TREE)
1935 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1938 gimple_call_set_fndecl (stmt, fn);
1943 len = c_strlen (src, 1);
1945 || TREE_CODE (len) != INTEGER_CST)
1948 if (optimize_function_for_size_p (cfun)
1949 /* If length is zero it's small enough. */
1950 && !integer_zerop (len))
1953 /* If the source has a known length replace stpcpy with memcpy. */
1954 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1958 gimple_seq stmts = NULL;
1959 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1960 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1961 tem, build_int_cst (size_type_node, 1));
1962 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1963 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1964 gimple_set_vuse (repl, gimple_vuse (stmt));
1965 gimple_set_vdef (repl, gimple_vdef (stmt));
1966 if (gimple_vdef (repl)
1967 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1968 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1969 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1970 /* Replace the result with dest + len. */
1972 tem = gimple_convert (&stmts, loc, sizetype, len);
1973 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1974 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1975 POINTER_PLUS_EXPR, dest, tem);
1976 gsi_replace (gsi, ret, false);
1977 /* Finally fold the memcpy call. */
1978 gimple_stmt_iterator gsi2 = *gsi;
1984 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
1985 NULL_TREE if a normal call should be emitted rather than expanding
1986 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
1987 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
1988 passed as second argument. */
1991 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
1992 enum built_in_function fcode)
1994 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1995 tree dest, size, len, fn, fmt, flag;
1996 const char *fmt_str;
1998 /* Verify the required arguments in the original call. */
1999 if (gimple_call_num_args (stmt) < 5)
2002 dest = gimple_call_arg (stmt, 0);
2003 len = gimple_call_arg (stmt, 1);
2004 flag = gimple_call_arg (stmt, 2);
2005 size = gimple_call_arg (stmt, 3);
2006 fmt = gimple_call_arg (stmt, 4);
2008 if (! tree_fits_uhwi_p (size))
2011 if (! integer_all_onesp (size))
2013 tree maxlen = get_maxval_strlen (len, 2);
2014 if (! tree_fits_uhwi_p (len))
2016 /* If LEN is not constant, try MAXLEN too.
2017 For MAXLEN only allow optimizing into non-_ocs function
2018 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2019 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2025 if (tree_int_cst_lt (size, maxlen))
2029 if (!init_target_chars ())
2032 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2033 or if format doesn't contain % chars or is "%s". */
2034 if (! integer_zerop (flag))
2036 fmt_str = c_getstr (fmt);
2037 if (fmt_str == NULL)
2039 if (strchr (fmt_str, target_percent) != NULL
2040 && strcmp (fmt_str, target_percent_s))
2044 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2046 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2047 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2051 /* Replace the called function and the first 5 argument by 3 retaining
2052 trailing varargs. */
2053 gimple_call_set_fndecl (stmt, fn);
2054 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2055 gimple_call_set_arg (stmt, 0, dest);
2056 gimple_call_set_arg (stmt, 1, len);
2057 gimple_call_set_arg (stmt, 2, fmt);
2058 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2059 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2060 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2065 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2066 Return NULL_TREE if a normal call should be emitted rather than
2067 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2068 or BUILT_IN_VSPRINTF_CHK. */
2071 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2072 enum built_in_function fcode)
2074 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2075 tree dest, size, len, fn, fmt, flag;
2076 const char *fmt_str;
2077 unsigned nargs = gimple_call_num_args (stmt);
2079 /* Verify the required arguments in the original call. */
2082 dest = gimple_call_arg (stmt, 0);
2083 flag = gimple_call_arg (stmt, 1);
2084 size = gimple_call_arg (stmt, 2);
2085 fmt = gimple_call_arg (stmt, 3);
2087 if (! tree_fits_uhwi_p (size))
2092 if (!init_target_chars ())
2095 /* Check whether the format is a literal string constant. */
2096 fmt_str = c_getstr (fmt);
2097 if (fmt_str != NULL)
2099 /* If the format doesn't contain % args or %%, we know the size. */
2100 if (strchr (fmt_str, target_percent) == 0)
2102 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2103 len = build_int_cstu (size_type_node, strlen (fmt_str));
2105 /* If the format is "%s" and first ... argument is a string literal,
2106 we know the size too. */
2107 else if (fcode == BUILT_IN_SPRINTF_CHK
2108 && strcmp (fmt_str, target_percent_s) == 0)
2114 arg = gimple_call_arg (stmt, 4);
2115 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2117 len = c_strlen (arg, 1);
2118 if (! len || ! tree_fits_uhwi_p (len))
2125 if (! integer_all_onesp (size))
2127 if (! len || ! tree_int_cst_lt (len, size))
2131 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2132 or if format doesn't contain % chars or is "%s". */
2133 if (! integer_zerop (flag))
2135 if (fmt_str == NULL)
2137 if (strchr (fmt_str, target_percent) != NULL
2138 && strcmp (fmt_str, target_percent_s))
2142 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2143 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2144 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2148 /* Replace the called function and the first 4 argument by 2 retaining
2149 trailing varargs. */
2150 gimple_call_set_fndecl (stmt, fn);
2151 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2152 gimple_call_set_arg (stmt, 0, dest);
2153 gimple_call_set_arg (stmt, 1, fmt);
2154 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2155 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2156 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2161 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2162 ORIG may be null if this is a 2-argument call. We don't attempt to
2163 simplify calls with more than 3 arguments.
2165 Return NULL_TREE if no simplification was possible, otherwise return the
2166 simplified form of the call as a tree. If IGNORED is true, it means that
2167 the caller does not use the returned value of the function. */
2170 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2172 gimple *stmt = gsi_stmt (*gsi);
2173 tree dest = gimple_call_arg (stmt, 0);
2174 tree fmt = gimple_call_arg (stmt, 1);
2175 tree orig = NULL_TREE;
2176 const char *fmt_str = NULL;
2178 /* Verify the required arguments in the original call. We deal with two
2179 types of sprintf() calls: 'sprintf (str, fmt)' and
2180 'sprintf (dest, "%s", orig)'. */
2181 if (gimple_call_num_args (stmt) > 3)
2184 if (gimple_call_num_args (stmt) == 3)
2185 orig = gimple_call_arg (stmt, 2);
2187 /* Check whether the format is a literal string constant. */
2188 fmt_str = c_getstr (fmt);
2189 if (fmt_str == NULL)
2192 if (!init_target_chars ())
2195 /* If the format doesn't contain % args or %%, use strcpy. */
2196 if (strchr (fmt_str, target_percent) == NULL)
2198 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2203 /* Don't optimize sprintf (buf, "abc", ptr++). */
2207 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2208 'format' is known to contain no % formats. */
2209 gimple_seq stmts = NULL;
2210 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2211 gimple_seq_add_stmt_without_update (&stmts, repl);
2212 if (gimple_call_lhs (stmt))
2214 repl = gimple_build_assign (gimple_call_lhs (stmt),
2215 build_int_cst (integer_type_node,
2217 gimple_seq_add_stmt_without_update (&stmts, repl);
2218 gsi_replace_with_seq_vops (gsi, stmts);
2219 /* gsi now points at the assignment to the lhs, get a
2220 stmt iterator to the memcpy call.
2221 ??? We can't use gsi_for_stmt as that doesn't work when the
2222 CFG isn't built yet. */
2223 gimple_stmt_iterator gsi2 = *gsi;
2229 gsi_replace_with_seq_vops (gsi, stmts);
2235 /* If the format is "%s", use strcpy if the result isn't used. */
2236 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2239 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2244 /* Don't crash on sprintf (str1, "%s"). */
2248 tree orig_len = NULL_TREE;
2249 if (gimple_call_lhs (stmt))
2251 orig_len = get_maxval_strlen (orig, 0);
2256 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2257 gimple_seq stmts = NULL;
2258 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2259 gimple_seq_add_stmt_without_update (&stmts, repl);
2260 if (gimple_call_lhs (stmt))
2262 if (!useless_type_conversion_p (integer_type_node,
2263 TREE_TYPE (orig_len)))
2264 orig_len = fold_convert (integer_type_node, orig_len);
2265 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2266 gimple_seq_add_stmt_without_update (&stmts, repl);
2267 gsi_replace_with_seq_vops (gsi, stmts);
2268 /* gsi now points at the assignment to the lhs, get a
2269 stmt iterator to the memcpy call.
2270 ??? We can't use gsi_for_stmt as that doesn't work when the
2271 CFG isn't built yet. */
2272 gimple_stmt_iterator gsi2 = *gsi;
2278 gsi_replace_with_seq_vops (gsi, stmts);
2286 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2287 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2288 attempt to simplify calls with more than 4 arguments.
2290 Return NULL_TREE if no simplification was possible, otherwise return the
2291 simplified form of the call as a tree. If IGNORED is true, it means that
2292 the caller does not use the returned value of the function. */
2295 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2297 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2298 tree dest = gimple_call_arg (stmt, 0);
2299 tree destsize = gimple_call_arg (stmt, 1);
2300 tree fmt = gimple_call_arg (stmt, 2);
2301 tree orig = NULL_TREE;
2302 const char *fmt_str = NULL;
2304 if (gimple_call_num_args (stmt) > 4)
2307 if (gimple_call_num_args (stmt) == 4)
2308 orig = gimple_call_arg (stmt, 3);
2310 if (!tree_fits_uhwi_p (destsize))
2312 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2314 /* Check whether the format is a literal string constant. */
2315 fmt_str = c_getstr (fmt);
2316 if (fmt_str == NULL)
2319 if (!init_target_chars ())
2322 /* If the format doesn't contain % args or %%, use strcpy. */
2323 if (strchr (fmt_str, target_percent) == NULL)
2325 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2329 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2333 /* We could expand this as
2334 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2336 memcpy (str, fmt_with_nul_at_cstm1, cst);
2337 but in the former case that might increase code size
2338 and in the latter case grow .rodata section too much.
2340 size_t len = strlen (fmt_str);
2344 gimple_seq stmts = NULL;
2345 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2346 gimple_seq_add_stmt_without_update (&stmts, repl);
2347 if (gimple_call_lhs (stmt))
2349 repl = gimple_build_assign (gimple_call_lhs (stmt),
2350 build_int_cst (integer_type_node, len));
2351 gimple_seq_add_stmt_without_update (&stmts, repl);
2352 gsi_replace_with_seq_vops (gsi, stmts);
2353 /* gsi now points at the assignment to the lhs, get a
2354 stmt iterator to the memcpy call.
2355 ??? We can't use gsi_for_stmt as that doesn't work when the
2356 CFG isn't built yet. */
2357 gimple_stmt_iterator gsi2 = *gsi;
2363 gsi_replace_with_seq_vops (gsi, stmts);
2369 /* If the format is "%s", use strcpy if the result isn't used. */
2370 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2372 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2376 /* Don't crash on snprintf (str1, cst, "%s"). */
2380 tree orig_len = get_maxval_strlen (orig, 0);
2381 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2384 /* We could expand this as
2385 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2387 memcpy (str1, str2_with_nul_at_cstm1, cst);
2388 but in the former case that might increase code size
2389 and in the latter case grow .rodata section too much.
2391 if (compare_tree_int (orig_len, destlen) >= 0)
2394 /* Convert snprintf (str1, cst, "%s", str2) into
2395 strcpy (str1, str2) if strlen (str2) < cst. */
2396 gimple_seq stmts = NULL;
2397 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2398 gimple_seq_add_stmt_without_update (&stmts, repl);
2399 if (gimple_call_lhs (stmt))
2401 if (!useless_type_conversion_p (integer_type_node,
2402 TREE_TYPE (orig_len)))
2403 orig_len = fold_convert (integer_type_node, orig_len);
2404 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2405 gimple_seq_add_stmt_without_update (&stmts, repl);
2406 gsi_replace_with_seq_vops (gsi, stmts);
2407 /* gsi now points at the assignment to the lhs, get a
2408 stmt iterator to the memcpy call.
2409 ??? We can't use gsi_for_stmt as that doesn't work when the
2410 CFG isn't built yet. */
2411 gimple_stmt_iterator gsi2 = *gsi;
2417 gsi_replace_with_seq_vops (gsi, stmts);
2425 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2426 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2427 more than 3 arguments, and ARG may be null in the 2-argument case.
2429 Return NULL_TREE if no simplification was possible, otherwise return the
2430 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2431 code of the function to be simplified. */
2434 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2435 tree fp, tree fmt, tree arg,
2436 enum built_in_function fcode)
2438 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2439 tree fn_fputc, fn_fputs;
2440 const char *fmt_str = NULL;
2442 /* If the return value is used, don't do the transformation. */
2443 if (gimple_call_lhs (stmt) != NULL_TREE)
2446 /* Check whether the format is a literal string constant. */
2447 fmt_str = c_getstr (fmt);
2448 if (fmt_str == NULL)
2451 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2453 /* If we're using an unlocked function, assume the other
2454 unlocked functions exist explicitly. */
2455 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2456 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2460 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2461 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2464 if (!init_target_chars ())
2467 /* If the format doesn't contain % args or %%, use strcpy. */
2468 if (strchr (fmt_str, target_percent) == NULL)
2470 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2474 /* If the format specifier was "", fprintf does nothing. */
2475 if (fmt_str[0] == '\0')
2477 replace_call_with_value (gsi, NULL_TREE);
2481 /* When "string" doesn't contain %, replace all cases of
2482 fprintf (fp, string) with fputs (string, fp). The fputs
2483 builtin will take care of special cases like length == 1. */
2486 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2487 replace_call_with_call_and_fold (gsi, repl);
2492 /* The other optimizations can be done only on the non-va_list variants. */
2493 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2496 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2497 else if (strcmp (fmt_str, target_percent_s) == 0)
2499 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2503 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2504 replace_call_with_call_and_fold (gsi, repl);
2509 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2510 else if (strcmp (fmt_str, target_percent_c) == 0)
2513 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2517 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2518 replace_call_with_call_and_fold (gsi, repl);
2526 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2527 FMT and ARG are the arguments to the call; we don't fold cases with
2528 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2530 Return NULL_TREE if no simplification was possible, otherwise return the
2531 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2532 code of the function to be simplified. */
2535 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2536 tree arg, enum built_in_function fcode)
2538 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2539 tree fn_putchar, fn_puts, newarg;
2540 const char *fmt_str = NULL;
2542 /* If the return value is used, don't do the transformation. */
2543 if (gimple_call_lhs (stmt) != NULL_TREE)
2546 /* Check whether the format is a literal string constant. */
2547 fmt_str = c_getstr (fmt);
2548 if (fmt_str == NULL)
2551 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2553 /* If we're using an unlocked function, assume the other
2554 unlocked functions exist explicitly. */
2555 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2556 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2560 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2561 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2564 if (!init_target_chars ())
2567 if (strcmp (fmt_str, target_percent_s) == 0
2568 || strchr (fmt_str, target_percent) == NULL)
2572 if (strcmp (fmt_str, target_percent_s) == 0)
2574 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2577 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2580 str = c_getstr (arg);
2586 /* The format specifier doesn't contain any '%' characters. */
2587 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2593 /* If the string was "", printf does nothing. */
2596 replace_call_with_value (gsi, NULL_TREE);
2600 /* If the string has length of 1, call putchar. */
2603 /* Given printf("c"), (where c is any one character,)
2604 convert "c"[0] to an int and pass that to the replacement
2606 newarg = build_int_cst (integer_type_node, str[0]);
2609 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2610 replace_call_with_call_and_fold (gsi, repl);
2616 /* If the string was "string\n", call puts("string"). */
2617 size_t len = strlen (str);
2618 if ((unsigned char)str[len - 1] == target_newline
2619 && (size_t) (int) len == len
2623 tree offset_node, string_cst;
2625 /* Create a NUL-terminated string that's one char shorter
2626 than the original, stripping off the trailing '\n'. */
2627 newarg = build_string_literal (len, str);
2628 string_cst = string_constant (newarg, &offset_node);
2629 gcc_checking_assert (string_cst
2630 && (TREE_STRING_LENGTH (string_cst)
2632 && integer_zerop (offset_node)
2634 TREE_STRING_POINTER (string_cst)[len - 1]
2636 /* build_string_literal creates a new STRING_CST,
2637 modify it in place to avoid double copying. */
2638 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2639 newstr[len - 1] = '\0';
2642 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2643 replace_call_with_call_and_fold (gsi, repl);
2648 /* We'd like to arrange to call fputs(string,stdout) here,
2649 but we need stdout and don't have a way to get it yet. */
2654 /* The other optimizations can be done only on the non-va_list variants. */
2655 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2658 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2659 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2661 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2665 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2666 replace_call_with_call_and_fold (gsi, repl);
2671 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2672 else if (strcmp (fmt_str, target_percent_c) == 0)
2674 if (!arg || ! useless_type_conversion_p (integer_type_node,
2679 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2680 replace_call_with_call_and_fold (gsi, repl);
2690 /* Fold a call to __builtin_strlen with known length LEN. */
2693 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2695 gimple *stmt = gsi_stmt (*gsi);
2696 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2699 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2700 replace_call_with_value (gsi, len);
2704 /* Fold a call to __builtin_acc_on_device. */
2707 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2709 /* Defer folding until we know which compiler we're in. */
2710 if (symtab->state != EXPANSION)
2713 unsigned val_host = GOMP_DEVICE_HOST;
2714 unsigned val_dev = GOMP_DEVICE_NONE;
2716 #ifdef ACCEL_COMPILER
2717 val_host = GOMP_DEVICE_NOT_HOST;
2718 val_dev = ACCEL_COMPILER_acc_device;
2721 location_t loc = gimple_location (gsi_stmt (*gsi));
2723 tree host_eq = make_ssa_name (boolean_type_node);
2724 gimple *host_ass = gimple_build_assign
2725 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2726 gimple_set_location (host_ass, loc);
2727 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2729 tree dev_eq = make_ssa_name (boolean_type_node);
2730 gimple *dev_ass = gimple_build_assign
2731 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2732 gimple_set_location (dev_ass, loc);
2733 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2735 tree result = make_ssa_name (boolean_type_node);
2736 gimple *result_ass = gimple_build_assign
2737 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2738 gimple_set_location (result_ass, loc);
2739 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2741 replace_call_with_value (gsi, result);
2746 /* Fold the non-target builtin at *GSI and return whether any simplification
2750 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2752 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2753 tree callee = gimple_call_fndecl (stmt);
2755 /* Give up for always_inline inline builtins until they are
2757 if (avoid_folding_inline_builtin (callee))
2760 unsigned n = gimple_call_num_args (stmt);
2761 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2764 case BUILT_IN_BZERO:
2765 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2766 gimple_call_arg (stmt, 1));
2767 case BUILT_IN_MEMSET:
2768 return gimple_fold_builtin_memset (gsi,
2769 gimple_call_arg (stmt, 1),
2770 gimple_call_arg (stmt, 2));
2771 case BUILT_IN_BCOPY:
2772 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2773 gimple_call_arg (stmt, 0), 3);
2774 case BUILT_IN_MEMCPY:
2775 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2776 gimple_call_arg (stmt, 1), 0);
2777 case BUILT_IN_MEMPCPY:
2778 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2779 gimple_call_arg (stmt, 1), 1);
2780 case BUILT_IN_MEMMOVE:
2781 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2782 gimple_call_arg (stmt, 1), 3);
2783 case BUILT_IN_SPRINTF_CHK:
2784 case BUILT_IN_VSPRINTF_CHK:
2785 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2786 case BUILT_IN_STRCAT_CHK:
2787 return gimple_fold_builtin_strcat_chk (gsi);
2788 case BUILT_IN_STRNCAT_CHK:
2789 return gimple_fold_builtin_strncat_chk (gsi);
2790 case BUILT_IN_STRLEN:
2791 return gimple_fold_builtin_strlen (gsi);
2792 case BUILT_IN_STRCPY:
2793 return gimple_fold_builtin_strcpy (gsi,
2794 gimple_call_arg (stmt, 0),
2795 gimple_call_arg (stmt, 1));
2796 case BUILT_IN_STRNCPY:
2797 return gimple_fold_builtin_strncpy (gsi,
2798 gimple_call_arg (stmt, 0),
2799 gimple_call_arg (stmt, 1),
2800 gimple_call_arg (stmt, 2));
2801 case BUILT_IN_STRCAT:
2802 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2803 gimple_call_arg (stmt, 1));
2804 case BUILT_IN_STRNCAT:
2805 return gimple_fold_builtin_strncat (gsi);
2806 case BUILT_IN_FPUTS:
2807 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2808 gimple_call_arg (stmt, 1), false);
2809 case BUILT_IN_FPUTS_UNLOCKED:
2810 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2811 gimple_call_arg (stmt, 1), true);
2812 case BUILT_IN_MEMCPY_CHK:
2813 case BUILT_IN_MEMPCPY_CHK:
2814 case BUILT_IN_MEMMOVE_CHK:
2815 case BUILT_IN_MEMSET_CHK:
2816 return gimple_fold_builtin_memory_chk (gsi,
2817 gimple_call_arg (stmt, 0),
2818 gimple_call_arg (stmt, 1),
2819 gimple_call_arg (stmt, 2),
2820 gimple_call_arg (stmt, 3),
2822 case BUILT_IN_STPCPY:
2823 return gimple_fold_builtin_stpcpy (gsi);
2824 case BUILT_IN_STRCPY_CHK:
2825 case BUILT_IN_STPCPY_CHK:
2826 return gimple_fold_builtin_stxcpy_chk (gsi,
2827 gimple_call_arg (stmt, 0),
2828 gimple_call_arg (stmt, 1),
2829 gimple_call_arg (stmt, 2),
2831 case BUILT_IN_STRNCPY_CHK:
2832 case BUILT_IN_STPNCPY_CHK:
2833 return gimple_fold_builtin_stxncpy_chk (gsi,
2834 gimple_call_arg (stmt, 0),
2835 gimple_call_arg (stmt, 1),
2836 gimple_call_arg (stmt, 2),
2837 gimple_call_arg (stmt, 3),
2839 case BUILT_IN_SNPRINTF_CHK:
2840 case BUILT_IN_VSNPRINTF_CHK:
2841 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2842 case BUILT_IN_SNPRINTF:
2843 return gimple_fold_builtin_snprintf (gsi);
2844 case BUILT_IN_SPRINTF:
2845 return gimple_fold_builtin_sprintf (gsi);
2846 case BUILT_IN_FPRINTF:
2847 case BUILT_IN_FPRINTF_UNLOCKED:
2848 case BUILT_IN_VFPRINTF:
2849 if (n == 2 || n == 3)
2850 return gimple_fold_builtin_fprintf (gsi,
2851 gimple_call_arg (stmt, 0),
2852 gimple_call_arg (stmt, 1),
2854 ? gimple_call_arg (stmt, 2)
2858 case BUILT_IN_FPRINTF_CHK:
2859 case BUILT_IN_VFPRINTF_CHK:
2860 if (n == 3 || n == 4)
2861 return gimple_fold_builtin_fprintf (gsi,
2862 gimple_call_arg (stmt, 0),
2863 gimple_call_arg (stmt, 2),
2865 ? gimple_call_arg (stmt, 3)
2869 case BUILT_IN_PRINTF:
2870 case BUILT_IN_PRINTF_UNLOCKED:
2871 case BUILT_IN_VPRINTF:
2872 if (n == 1 || n == 2)
2873 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2875 ? gimple_call_arg (stmt, 1)
2876 : NULL_TREE, fcode);
2878 case BUILT_IN_PRINTF_CHK:
2879 case BUILT_IN_VPRINTF_CHK:
2880 if (n == 2 || n == 3)
2881 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2883 ? gimple_call_arg (stmt, 2)
2884 : NULL_TREE, fcode);
2886 case BUILT_IN_ACC_ON_DEVICE:
2887 return gimple_fold_builtin_acc_on_device (gsi,
2888 gimple_call_arg (stmt, 0));
2892 /* Try the generic builtin folder. */
2893 bool ignore = (gimple_call_lhs (stmt) == NULL);
2894 tree result = fold_call_stmt (stmt, ignore);
2898 STRIP_NOPS (result);
2900 result = fold_convert (gimple_call_return_type (stmt), result);
2901 if (!update_call_from_tree (gsi, result))
2902 gimplify_and_update_call_from_tree (gsi, result);
2909 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2910 doesn't fit into TYPE. The test for overflow should be regardless of
2911 -fwrapv, and even for unsigned types. */
2914 arith_overflowed_p (enum tree_code code, const_tree type,
2915 const_tree arg0, const_tree arg1)
2917 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2918 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2920 widest2_int warg0 = widest2_int_cst (arg0);
2921 widest2_int warg1 = widest2_int_cst (arg1);
2925 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2926 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2927 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2928 default: gcc_unreachable ();
2930 signop sign = TYPE_SIGN (type);
2931 if (sign == UNSIGNED && wi::neg_p (wres))
2933 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2936 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2937 The statement may be replaced by another statement, e.g., if the call
2938 simplifies to a constant value. Return true if any changes were made.
2939 It is assumed that the operands have been previously folded. */
2942 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2944 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2946 bool changed = false;
2949 /* Fold *& in call arguments. */
2950 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2951 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2953 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2956 gimple_call_set_arg (stmt, i, tmp);
2961 /* Check for virtual calls that became direct calls. */
2962 callee = gimple_call_fn (stmt);
2963 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
2965 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2967 if (dump_file && virtual_method_call_p (callee)
2968 && !possible_polymorphic_call_target_p
2969 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2970 (OBJ_TYPE_REF_EXPR (callee)))))
2973 "Type inheritance inconsistent devirtualization of ");
2974 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2975 fprintf (dump_file, " to ");
2976 print_generic_expr (dump_file, callee, TDF_SLIM);
2977 fprintf (dump_file, "\n");
2980 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
2983 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
2986 vec <cgraph_node *>targets
2987 = possible_polymorphic_call_targets (callee, stmt, &final);
2988 if (final && targets.length () <= 1 && dbg_cnt (devirt))
2990 tree lhs = gimple_call_lhs (stmt);
2991 if (dump_enabled_p ())
2993 location_t loc = gimple_location_safe (stmt);
2994 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2995 "folding virtual function call to %s\n",
2996 targets.length () == 1
2997 ? targets[0]->name ()
2998 : "__builtin_unreachable");
3000 if (targets.length () == 1)
3002 gimple_call_set_fndecl (stmt, targets[0]->decl);
3004 /* If the call becomes noreturn, remove the lhs. */
3005 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3007 if (TREE_CODE (lhs) == SSA_NAME)
3009 tree var = create_tmp_var (TREE_TYPE (lhs));
3010 tree def = get_or_create_ssa_default_def (cfun, var);
3011 gimple *new_stmt = gimple_build_assign (lhs, def);
3012 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3014 gimple_call_set_lhs (stmt, NULL_TREE);
3016 maybe_remove_unused_call_args (cfun, stmt);
3020 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3021 gimple *new_stmt = gimple_build_call (fndecl, 0);
3022 gimple_set_location (new_stmt, gimple_location (stmt));
3023 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3025 tree var = create_tmp_var (TREE_TYPE (lhs));
3026 tree def = get_or_create_ssa_default_def (cfun, var);
3028 /* To satisfy condition for
3029 cgraph_update_edges_for_call_stmt_node,
3030 we need to preserve GIMPLE_CALL statement
3031 at position of GSI iterator. */
3032 update_call_from_tree (gsi, def);
3033 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3037 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3038 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3039 gsi_replace (gsi, new_stmt, false);
3047 /* Check for indirect calls that became direct calls, and then
3048 no longer require a static chain. */
3049 if (gimple_call_chain (stmt))
3051 tree fn = gimple_call_fndecl (stmt);
3052 if (fn && !DECL_STATIC_CHAIN (fn))
3054 gimple_call_set_chain (stmt, NULL);
3059 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3062 gimple_call_set_chain (stmt, tmp);
3071 /* Check for builtins that CCP can handle using information not
3072 available in the generic fold routines. */
3073 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3075 if (gimple_fold_builtin (gsi))
3078 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3080 changed |= targetm.gimple_fold_builtin (gsi);
3082 else if (gimple_call_internal_p (stmt))
3084 enum tree_code subcode = ERROR_MARK;
3085 tree result = NULL_TREE;
3086 bool cplx_result = false;
3087 tree overflow = NULL_TREE;
3088 switch (gimple_call_internal_fn (stmt))
3090 case IFN_BUILTIN_EXPECT:
3091 result = fold_builtin_expect (gimple_location (stmt),
3092 gimple_call_arg (stmt, 0),
3093 gimple_call_arg (stmt, 1),
3094 gimple_call_arg (stmt, 2));
3096 case IFN_UBSAN_OBJECT_SIZE:
3097 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3098 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3099 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3100 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3101 gimple_call_arg (stmt, 2))))
3103 gsi_replace (gsi, gimple_build_nop (), false);
3104 unlink_stmt_vdef (stmt);
3105 release_defs (stmt);
3109 case IFN_UBSAN_CHECK_ADD:
3110 subcode = PLUS_EXPR;
3112 case IFN_UBSAN_CHECK_SUB:
3113 subcode = MINUS_EXPR;
3115 case IFN_UBSAN_CHECK_MUL:
3116 subcode = MULT_EXPR;
3118 case IFN_ADD_OVERFLOW:
3119 subcode = PLUS_EXPR;
3122 case IFN_SUB_OVERFLOW:
3123 subcode = MINUS_EXPR;
3126 case IFN_MUL_OVERFLOW:
3127 subcode = MULT_EXPR;
3133 if (subcode != ERROR_MARK)
3135 tree arg0 = gimple_call_arg (stmt, 0);
3136 tree arg1 = gimple_call_arg (stmt, 1);
3137 tree type = TREE_TYPE (arg0);
3140 tree lhs = gimple_call_lhs (stmt);
3141 if (lhs == NULL_TREE)
3144 type = TREE_TYPE (TREE_TYPE (lhs));
3146 if (type == NULL_TREE)
3148 /* x = y + 0; x = y - 0; x = y * 0; */
3149 else if (integer_zerop (arg1))
3150 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3151 /* x = 0 + y; x = 0 * y; */
3152 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3153 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3155 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3156 result = integer_zero_node;
3157 /* x = y * 1; x = 1 * y; */
3158 else if (subcode == MULT_EXPR && integer_onep (arg1))
3160 else if (subcode == MULT_EXPR && integer_onep (arg0))
3162 else if (TREE_CODE (arg0) == INTEGER_CST
3163 && TREE_CODE (arg1) == INTEGER_CST)
3166 result = int_const_binop (subcode, fold_convert (type, arg0),
3167 fold_convert (type, arg1));
3169 result = int_const_binop (subcode, arg0, arg1);
3170 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3173 overflow = build_one_cst (type);
3180 if (result == integer_zero_node)
3181 result = build_zero_cst (type);
3182 else if (cplx_result && TREE_TYPE (result) != type)
3184 if (TREE_CODE (result) == INTEGER_CST)
3186 if (arith_overflowed_p (PLUS_EXPR, type, result,
3188 overflow = build_one_cst (type);
3190 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3191 && TYPE_UNSIGNED (type))
3192 || (TYPE_PRECISION (type)
3193 < (TYPE_PRECISION (TREE_TYPE (result))
3194 + (TYPE_UNSIGNED (TREE_TYPE (result))
3195 && !TYPE_UNSIGNED (type)))))
3198 result = fold_convert (type, result);
3205 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3206 result = drop_tree_overflow (result);
3209 if (overflow == NULL_TREE)
3210 overflow = build_zero_cst (TREE_TYPE (result));
3211 tree ctype = build_complex_type (TREE_TYPE (result));
3212 if (TREE_CODE (result) == INTEGER_CST
3213 && TREE_CODE (overflow) == INTEGER_CST)
3214 result = build_complex (ctype, result, overflow);
3216 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3217 ctype, result, overflow);
3219 if (!update_call_from_tree (gsi, result))
3220 gimplify_and_update_call_from_tree (gsi, result);
3229 /* Return true whether NAME has a use on STMT. */
3232 has_use_on_stmt (tree name, gimple *stmt)
3234 imm_use_iterator iter;
3235 use_operand_p use_p;
3236 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3237 if (USE_STMT (use_p) == stmt)
3242 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3245 Replaces *GSI with the simplification result in RCODE and OPS
3246 and the associated statements in *SEQ. Does the replacement
3247 according to INPLACE and returns true if the operation succeeded. */
3250 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3251 code_helper rcode, tree *ops,
3252 gimple_seq *seq, bool inplace)
3254 gimple *stmt = gsi_stmt (*gsi);
3256 /* Play safe and do not allow abnormals to be mentioned in
3257 newly created statements. See also maybe_push_res_to_seq.
3258 As an exception allow such uses if there was a use of the
3259 same SSA name on the old stmt. */
3260 if ((TREE_CODE (ops[0]) == SSA_NAME
3261 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3262 && !has_use_on_stmt (ops[0], stmt))
3264 && TREE_CODE (ops[1]) == SSA_NAME
3265 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3266 && !has_use_on_stmt (ops[1], stmt))
3268 && TREE_CODE (ops[2]) == SSA_NAME
3269 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3270 && !has_use_on_stmt (ops[2], stmt)))
3273 /* Don't insert new statements when INPLACE is true, even if we could
3274 reuse STMT for the final statement. */
3275 if (inplace && !gimple_seq_empty_p (*seq))
3278 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3280 gcc_assert (rcode.is_tree_code ());
3281 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3282 /* GIMPLE_CONDs condition may not throw. */
3283 && (!flag_exceptions
3284 || !cfun->can_throw_non_call_exceptions
3285 || !operation_could_trap_p (rcode,
3286 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3288 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3289 else if (rcode == SSA_NAME)
3290 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3291 build_zero_cst (TREE_TYPE (ops[0])));
3292 else if (rcode == INTEGER_CST)
3294 if (integer_zerop (ops[0]))
3295 gimple_cond_make_false (cond_stmt);
3297 gimple_cond_make_true (cond_stmt);
3301 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3305 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3306 build_zero_cst (TREE_TYPE (res)));
3310 if (dump_file && (dump_flags & TDF_DETAILS))
3312 fprintf (dump_file, "gimple_simplified to ");
3313 if (!gimple_seq_empty_p (*seq))
3314 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3315 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3318 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3321 else if (is_gimple_assign (stmt)
3322 && rcode.is_tree_code ())
3325 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3327 maybe_build_generic_op (rcode,
3328 TREE_TYPE (gimple_assign_lhs (stmt)),
3329 &ops[0], ops[1], ops[2]);
3330 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3331 if (dump_file && (dump_flags & TDF_DETAILS))
3333 fprintf (dump_file, "gimple_simplified to ");
3334 if (!gimple_seq_empty_p (*seq))
3335 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3336 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3339 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3343 else if (rcode.is_fn_code ()
3344 && gimple_call_builtin_p (stmt, rcode))
3347 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3349 gcc_assert (ops[i] != NULL_TREE);
3350 gimple_call_set_arg (stmt, i, ops[i]);
3353 gcc_assert (ops[i] == NULL_TREE);
3354 if (dump_file && (dump_flags & TDF_DETAILS))
3356 fprintf (dump_file, "gimple_simplified to ");
3357 if (!gimple_seq_empty_p (*seq))
3358 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3359 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3361 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3366 if (gimple_has_lhs (stmt))
3368 tree lhs = gimple_get_lhs (stmt);
3369 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3372 if (dump_file && (dump_flags & TDF_DETAILS))
3374 fprintf (dump_file, "gimple_simplified to ");
3375 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3377 gsi_replace_with_seq_vops (gsi, *seq);
3387 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3390 maybe_canonicalize_mem_ref_addr (tree *t)
3394 if (TREE_CODE (*t) == ADDR_EXPR)
3395 t = &TREE_OPERAND (*t, 0);
3397 while (handled_component_p (*t))
3398 t = &TREE_OPERAND (*t, 0);
3400 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3401 of invariant addresses into a SSA name MEM_REF address. */
3402 if (TREE_CODE (*t) == MEM_REF
3403 || TREE_CODE (*t) == TARGET_MEM_REF)
3405 tree addr = TREE_OPERAND (*t, 0);
3406 if (TREE_CODE (addr) == ADDR_EXPR
3407 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3408 || handled_component_p (TREE_OPERAND (addr, 0))))
3411 HOST_WIDE_INT coffset;
3412 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3417 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3418 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3419 TREE_OPERAND (*t, 1),
3420 size_int (coffset));
3423 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3424 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3427 /* Canonicalize back MEM_REFs to plain reference trees if the object
3428 accessed is a decl that has the same access semantics as the MEM_REF. */
3429 if (TREE_CODE (*t) == MEM_REF
3430 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3431 && integer_zerop (TREE_OPERAND (*t, 1))
3432 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3434 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3435 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3436 if (/* Same volatile qualification. */
3437 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3438 /* Same TBAA behavior with -fstrict-aliasing. */
3439 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3440 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3441 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3442 /* Same alignment. */
3443 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3444 /* We have to look out here to not drop a required conversion
3445 from the rhs to the lhs if *t appears on the lhs or vice-versa
3446 if it appears on the rhs. Thus require strict type
3448 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3450 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3455 /* Canonicalize TARGET_MEM_REF in particular with respect to
3456 the indexes becoming constant. */
3457 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3459 tree tem = maybe_fold_tmr (*t);
3470 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3471 distinguishes both cases. */
3474 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3476 bool changed = false;
3477 gimple *stmt = gsi_stmt (*gsi);
3480 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3482 ??? This shouldn't be done in generic folding but in the
3483 propagation helpers which also know whether an address was
3485 Also canonicalize operand order. */
3486 switch (gimple_code (stmt))
3489 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3491 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3492 if ((REFERENCE_CLASS_P (*rhs)
3493 || TREE_CODE (*rhs) == ADDR_EXPR)
3494 && maybe_canonicalize_mem_ref_addr (rhs))
3496 tree *lhs = gimple_assign_lhs_ptr (stmt);
3497 if (REFERENCE_CLASS_P (*lhs)
3498 && maybe_canonicalize_mem_ref_addr (lhs))
3503 /* Canonicalize operand order. */
3504 enum tree_code code = gimple_assign_rhs_code (stmt);
3505 if (TREE_CODE_CLASS (code) == tcc_comparison
3506 || commutative_tree_code (code)
3507 || commutative_ternary_tree_code (code))
3509 tree rhs1 = gimple_assign_rhs1 (stmt);
3510 tree rhs2 = gimple_assign_rhs2 (stmt);
3511 if (tree_swap_operands_p (rhs1, rhs2, false))
3513 gimple_assign_set_rhs1 (stmt, rhs2);
3514 gimple_assign_set_rhs2 (stmt, rhs1);
3515 if (TREE_CODE_CLASS (code) == tcc_comparison)
3516 gimple_assign_set_rhs_code (stmt,
3517 swap_tree_comparison (code));
3525 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3527 tree *arg = gimple_call_arg_ptr (stmt, i);
3528 if (REFERENCE_CLASS_P (*arg)
3529 && maybe_canonicalize_mem_ref_addr (arg))
3532 tree *lhs = gimple_call_lhs_ptr (stmt);
3534 && REFERENCE_CLASS_P (*lhs)
3535 && maybe_canonicalize_mem_ref_addr (lhs))
3541 gasm *asm_stmt = as_a <gasm *> (stmt);
3542 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3544 tree link = gimple_asm_output_op (asm_stmt, i);
3545 tree op = TREE_VALUE (link);
3546 if (REFERENCE_CLASS_P (op)
3547 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3550 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3552 tree link = gimple_asm_input_op (asm_stmt, i);
3553 tree op = TREE_VALUE (link);
3554 if ((REFERENCE_CLASS_P (op)
3555 || TREE_CODE (op) == ADDR_EXPR)
3556 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3562 if (gimple_debug_bind_p (stmt))
3564 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3566 && (REFERENCE_CLASS_P (*val)
3567 || TREE_CODE (*val) == ADDR_EXPR)
3568 && maybe_canonicalize_mem_ref_addr (val))
3574 /* Canonicalize operand order. */
3575 tree lhs = gimple_cond_lhs (stmt);
3576 tree rhs = gimple_cond_rhs (stmt);
3577 if (tree_swap_operands_p (lhs, rhs, false))
3579 gcond *gc = as_a <gcond *> (stmt);
3580 gimple_cond_set_lhs (gc, rhs);
3581 gimple_cond_set_rhs (gc, lhs);
3582 gimple_cond_set_code (gc,
3583 swap_tree_comparison (gimple_cond_code (gc)));
3590 /* Dispatch to pattern-based folding. */
3592 || is_gimple_assign (stmt)
3593 || gimple_code (stmt) == GIMPLE_COND)
3595 gimple_seq seq = NULL;
3598 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3599 valueize, valueize))
3601 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3604 gimple_seq_discard (seq);
3608 stmt = gsi_stmt (*gsi);
3610 /* Fold the main computation performed by the statement. */
3611 switch (gimple_code (stmt))
3615 /* Try to canonicalize for boolean-typed X the comparisons
3616 X == 0, X == 1, X != 0, and X != 1. */
3617 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3618 || gimple_assign_rhs_code (stmt) == NE_EXPR)
3620 tree lhs = gimple_assign_lhs (stmt);
3621 tree op1 = gimple_assign_rhs1 (stmt);
3622 tree op2 = gimple_assign_rhs2 (stmt);
3623 tree type = TREE_TYPE (op1);
3625 /* Check whether the comparison operands are of the same boolean
3626 type as the result type is.
3627 Check that second operand is an integer-constant with value
3629 if (TREE_CODE (op2) == INTEGER_CST
3630 && (integer_zerop (op2) || integer_onep (op2))
3631 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3633 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3634 bool is_logical_not = false;
3636 /* X == 0 and X != 1 is a logical-not.of X
3637 X == 1 and X != 0 is X */
3638 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3639 || (cmp_code == NE_EXPR && integer_onep (op2)))
3640 is_logical_not = true;
3642 if (is_logical_not == false)
3643 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3644 /* Only for one-bit precision typed X the transformation
3645 !X -> ~X is valied. */
3646 else if (TYPE_PRECISION (type) == 1)
3647 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3648 /* Otherwise we use !X -> X ^ 1. */
3650 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3651 build_int_cst (type, 1));
3657 unsigned old_num_ops = gimple_num_ops (stmt);
3658 tree lhs = gimple_assign_lhs (stmt);
3659 tree new_rhs = fold_gimple_assign (gsi);
3661 && !useless_type_conversion_p (TREE_TYPE (lhs),
3662 TREE_TYPE (new_rhs)))
3663 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3666 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3668 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3675 changed |= gimple_fold_call (gsi, inplace);
3679 /* Fold *& in asm operands. */
3681 gasm *asm_stmt = as_a <gasm *> (stmt);
3683 const char **oconstraints;
3684 const char *constraint;
3685 bool allows_mem, allows_reg;
3687 noutputs = gimple_asm_noutputs (asm_stmt);
3688 oconstraints = XALLOCAVEC (const char *, noutputs);
3690 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3692 tree link = gimple_asm_output_op (asm_stmt, i);
3693 tree op = TREE_VALUE (link);
3695 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3696 if (REFERENCE_CLASS_P (op)
3697 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3699 TREE_VALUE (link) = op;
3703 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3705 tree link = gimple_asm_input_op (asm_stmt, i);
3706 tree op = TREE_VALUE (link);
3708 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3709 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3710 oconstraints, &allows_mem, &allows_reg);
3711 if (REFERENCE_CLASS_P (op)
3712 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3715 TREE_VALUE (link) = op;
3723 if (gimple_debug_bind_p (stmt))
3725 tree val = gimple_debug_bind_get_value (stmt);
3727 && REFERENCE_CLASS_P (val))
3729 tree tem = maybe_fold_reference (val, false);
3732 gimple_debug_bind_set_value (stmt, tem);
3737 && TREE_CODE (val) == ADDR_EXPR)
3739 tree ref = TREE_OPERAND (val, 0);
3740 tree tem = maybe_fold_reference (ref, false);
3743 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3744 gimple_debug_bind_set_value (stmt, tem);
3754 stmt = gsi_stmt (*gsi);
3756 /* Fold *& on the lhs. */
3757 if (gimple_has_lhs (stmt))
3759 tree lhs = gimple_get_lhs (stmt);
3760 if (lhs && REFERENCE_CLASS_P (lhs))
3762 tree new_lhs = maybe_fold_reference (lhs, true);
3765 gimple_set_lhs (stmt, new_lhs);
3774 /* Valueziation callback that ends up not following SSA edges. */
3777 no_follow_ssa_edges (tree)
3782 /* Valueization callback that ends up following single-use SSA edges only. */
3785 follow_single_use_edges (tree val)
3787 if (TREE_CODE (val) == SSA_NAME
3788 && !has_single_use (val))
3793 /* Fold the statement pointed to by GSI. In some cases, this function may
3794 replace the whole statement with a new one. Returns true iff folding
3796 The statement pointed to by GSI should be in valid gimple form but may
3797 be in unfolded state as resulting from for example constant propagation
3798 which can produce *&x = 0. */
3801 fold_stmt (gimple_stmt_iterator *gsi)
3803 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3807 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3809 return fold_stmt_1 (gsi, false, valueize);
3812 /* Perform the minimal folding on statement *GSI. Only operations like
3813 *&x created by constant propagation are handled. The statement cannot
3814 be replaced with a new one. Return true if the statement was
3815 changed, false otherwise.
3816 The statement *GSI should be in valid gimple form but may
3817 be in unfolded state as resulting from for example constant propagation
3818 which can produce *&x = 0. */
3821 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3823 gimple *stmt = gsi_stmt (*gsi);
3824 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3825 gcc_assert (gsi_stmt (*gsi) == stmt);
3829 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3830 if EXPR is null or we don't know how.
3831 If non-null, the result always has boolean type. */
3834 canonicalize_bool (tree expr, bool invert)
3840 if (integer_nonzerop (expr))
3841 return boolean_false_node;
3842 else if (integer_zerop (expr))
3843 return boolean_true_node;
3844 else if (TREE_CODE (expr) == SSA_NAME)
3845 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3846 build_int_cst (TREE_TYPE (expr), 0));
3847 else if (COMPARISON_CLASS_P (expr))
3848 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3850 TREE_OPERAND (expr, 0),
3851 TREE_OPERAND (expr, 1));
3857 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3859 if (integer_nonzerop (expr))
3860 return boolean_true_node;
3861 else if (integer_zerop (expr))
3862 return boolean_false_node;
3863 else if (TREE_CODE (expr) == SSA_NAME)
3864 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3865 build_int_cst (TREE_TYPE (expr), 0));
3866 else if (COMPARISON_CLASS_P (expr))
3867 return fold_build2 (TREE_CODE (expr),
3869 TREE_OPERAND (expr, 0),
3870 TREE_OPERAND (expr, 1));
3876 /* Check to see if a boolean expression EXPR is logically equivalent to the
3877 comparison (OP1 CODE OP2). Check for various identities involving
3881 same_bool_comparison_p (const_tree expr, enum tree_code code,
3882 const_tree op1, const_tree op2)
3886 /* The obvious case. */
3887 if (TREE_CODE (expr) == code
3888 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3889 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3892 /* Check for comparing (name, name != 0) and the case where expr
3893 is an SSA_NAME with a definition matching the comparison. */
3894 if (TREE_CODE (expr) == SSA_NAME
3895 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3897 if (operand_equal_p (expr, op1, 0))
3898 return ((code == NE_EXPR && integer_zerop (op2))
3899 || (code == EQ_EXPR && integer_nonzerop (op2)));
3900 s = SSA_NAME_DEF_STMT (expr);
3901 if (is_gimple_assign (s)
3902 && gimple_assign_rhs_code (s) == code
3903 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3904 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3908 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3909 of name is a comparison, recurse. */
3910 if (TREE_CODE (op1) == SSA_NAME
3911 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3913 s = SSA_NAME_DEF_STMT (op1);
3914 if (is_gimple_assign (s)
3915 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3917 enum tree_code c = gimple_assign_rhs_code (s);
3918 if ((c == NE_EXPR && integer_zerop (op2))
3919 || (c == EQ_EXPR && integer_nonzerop (op2)))
3920 return same_bool_comparison_p (expr, c,
3921 gimple_assign_rhs1 (s),
3922 gimple_assign_rhs2 (s));
3923 if ((c == EQ_EXPR && integer_zerop (op2))
3924 || (c == NE_EXPR && integer_nonzerop (op2)))
3925 return same_bool_comparison_p (expr,
3926 invert_tree_comparison (c, false),
3927 gimple_assign_rhs1 (s),
3928 gimple_assign_rhs2 (s));
3934 /* Check to see if two boolean expressions OP1 and OP2 are logically
3938 same_bool_result_p (const_tree op1, const_tree op2)
3940 /* Simple cases first. */
3941 if (operand_equal_p (op1, op2, 0))
3944 /* Check the cases where at least one of the operands is a comparison.
3945 These are a bit smarter than operand_equal_p in that they apply some
3946 identifies on SSA_NAMEs. */
3947 if (COMPARISON_CLASS_P (op2)
3948 && same_bool_comparison_p (op1, TREE_CODE (op2),
3949 TREE_OPERAND (op2, 0),
3950 TREE_OPERAND (op2, 1)))
3952 if (COMPARISON_CLASS_P (op1)
3953 && same_bool_comparison_p (op2, TREE_CODE (op1),
3954 TREE_OPERAND (op1, 0),
3955 TREE_OPERAND (op1, 1)))
3962 /* Forward declarations for some mutually recursive functions. */
3965 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3966 enum tree_code code2, tree op2a, tree op2b);
3968 and_var_with_comparison (tree var, bool invert,
3969 enum tree_code code2, tree op2a, tree op2b);
3971 and_var_with_comparison_1 (gimple *stmt,
3972 enum tree_code code2, tree op2a, tree op2b);
3974 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3975 enum tree_code code2, tree op2a, tree op2b);
3977 or_var_with_comparison (tree var, bool invert,
3978 enum tree_code code2, tree op2a, tree op2b);
3980 or_var_with_comparison_1 (gimple *stmt,
3981 enum tree_code code2, tree op2a, tree op2b);
3983 /* Helper function for and_comparisons_1: try to simplify the AND of the
3984 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3985 If INVERT is true, invert the value of the VAR before doing the AND.
3986 Return NULL_EXPR if we can't simplify this to a single expression. */
3989 and_var_with_comparison (tree var, bool invert,
3990 enum tree_code code2, tree op2a, tree op2b)
3993 gimple *stmt = SSA_NAME_DEF_STMT (var);
3995 /* We can only deal with variables whose definitions are assignments. */
3996 if (!is_gimple_assign (stmt))
3999 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4000 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4001 Then we only have to consider the simpler non-inverted cases. */
4003 t = or_var_with_comparison_1 (stmt,
4004 invert_tree_comparison (code2, false),
4007 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4008 return canonicalize_bool (t, invert);
4011 /* Try to simplify the AND of the ssa variable defined by the assignment
4012 STMT with the comparison specified by (OP2A CODE2 OP2B).
4013 Return NULL_EXPR if we can't simplify this to a single expression. */
4016 and_var_with_comparison_1 (gimple *stmt,
4017 enum tree_code code2, tree op2a, tree op2b)
4019 tree var = gimple_assign_lhs (stmt);
4020 tree true_test_var = NULL_TREE;
4021 tree false_test_var = NULL_TREE;
4022 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4024 /* Check for identities like (var AND (var == 0)) => false. */
4025 if (TREE_CODE (op2a) == SSA_NAME
4026 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4028 if ((code2 == NE_EXPR && integer_zerop (op2b))
4029 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4031 true_test_var = op2a;
4032 if (var == true_test_var)
4035 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4036 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4038 false_test_var = op2a;
4039 if (var == false_test_var)
4040 return boolean_false_node;
4044 /* If the definition is a comparison, recurse on it. */
4045 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4047 tree t = and_comparisons_1 (innercode,
4048 gimple_assign_rhs1 (stmt),
4049 gimple_assign_rhs2 (stmt),
4057 /* If the definition is an AND or OR expression, we may be able to
4058 simplify by reassociating. */
4059 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4060 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4062 tree inner1 = gimple_assign_rhs1 (stmt);
4063 tree inner2 = gimple_assign_rhs2 (stmt);
4066 tree partial = NULL_TREE;
4067 bool is_and = (innercode == BIT_AND_EXPR);
4069 /* Check for boolean identities that don't require recursive examination
4071 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4072 inner1 AND (inner1 OR inner2) => inner1
4073 !inner1 AND (inner1 AND inner2) => false
4074 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4075 Likewise for similar cases involving inner2. */
4076 if (inner1 == true_test_var)
4077 return (is_and ? var : inner1);
4078 else if (inner2 == true_test_var)
4079 return (is_and ? var : inner2);
4080 else if (inner1 == false_test_var)
4082 ? boolean_false_node
4083 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4084 else if (inner2 == false_test_var)
4086 ? boolean_false_node
4087 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4089 /* Next, redistribute/reassociate the AND across the inner tests.
4090 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4091 if (TREE_CODE (inner1) == SSA_NAME
4092 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4093 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4094 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4095 gimple_assign_rhs1 (s),
4096 gimple_assign_rhs2 (s),
4097 code2, op2a, op2b)))
4099 /* Handle the AND case, where we are reassociating:
4100 (inner1 AND inner2) AND (op2a code2 op2b)
4102 If the partial result t is a constant, we win. Otherwise
4103 continue on to try reassociating with the other inner test. */
4106 if (integer_onep (t))
4108 else if (integer_zerop (t))
4109 return boolean_false_node;
4112 /* Handle the OR case, where we are redistributing:
4113 (inner1 OR inner2) AND (op2a code2 op2b)
4114 => (t OR (inner2 AND (op2a code2 op2b))) */
4115 else if (integer_onep (t))
4116 return boolean_true_node;
4118 /* Save partial result for later. */
4122 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4123 if (TREE_CODE (inner2) == SSA_NAME
4124 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4125 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4126 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4127 gimple_assign_rhs1 (s),
4128 gimple_assign_rhs2 (s),
4129 code2, op2a, op2b)))
4131 /* Handle the AND case, where we are reassociating:
4132 (inner1 AND inner2) AND (op2a code2 op2b)
4133 => (inner1 AND t) */
4136 if (integer_onep (t))
4138 else if (integer_zerop (t))
4139 return boolean_false_node;
4140 /* If both are the same, we can apply the identity
4142 else if (partial && same_bool_result_p (t, partial))
4146 /* Handle the OR case. where we are redistributing:
4147 (inner1 OR inner2) AND (op2a code2 op2b)
4148 => (t OR (inner1 AND (op2a code2 op2b)))
4149 => (t OR partial) */
4152 if (integer_onep (t))
4153 return boolean_true_node;
4156 /* We already got a simplification for the other
4157 operand to the redistributed OR expression. The
4158 interesting case is when at least one is false.
4159 Or, if both are the same, we can apply the identity
4161 if (integer_zerop (partial))
4163 else if (integer_zerop (t))
4165 else if (same_bool_result_p (t, partial))
4174 /* Try to simplify the AND of two comparisons defined by
4175 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4176 If this can be done without constructing an intermediate value,
4177 return the resulting tree; otherwise NULL_TREE is returned.
4178 This function is deliberately asymmetric as it recurses on SSA_DEFs
4179 in the first comparison but not the second. */
4182 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4183 enum tree_code code2, tree op2a, tree op2b)
4185 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4187 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4188 if (operand_equal_p (op1a, op2a, 0)
4189 && operand_equal_p (op1b, op2b, 0))
4191 /* Result will be either NULL_TREE, or a combined comparison. */
4192 tree t = combine_comparisons (UNKNOWN_LOCATION,
4193 TRUTH_ANDIF_EXPR, code1, code2,
4194 truth_type, op1a, op1b);
4199 /* Likewise the swapped case of the above. */
4200 if (operand_equal_p (op1a, op2b, 0)
4201 && operand_equal_p (op1b, op2a, 0))
4203 /* Result will be either NULL_TREE, or a combined comparison. */
4204 tree t = combine_comparisons (UNKNOWN_LOCATION,
4205 TRUTH_ANDIF_EXPR, code1,
4206 swap_tree_comparison (code2),
4207 truth_type, op1a, op1b);
4212 /* If both comparisons are of the same value against constants, we might
4213 be able to merge them. */
4214 if (operand_equal_p (op1a, op2a, 0)
4215 && TREE_CODE (op1b) == INTEGER_CST
4216 && TREE_CODE (op2b) == INTEGER_CST)
4218 int cmp = tree_int_cst_compare (op1b, op2b);
4220 /* If we have (op1a == op1b), we should either be able to
4221 return that or FALSE, depending on whether the constant op1b
4222 also satisfies the other comparison against op2b. */
4223 if (code1 == EQ_EXPR)
4229 case EQ_EXPR: val = (cmp == 0); break;
4230 case NE_EXPR: val = (cmp != 0); break;
4231 case LT_EXPR: val = (cmp < 0); break;
4232 case GT_EXPR: val = (cmp > 0); break;
4233 case LE_EXPR: val = (cmp <= 0); break;
4234 case GE_EXPR: val = (cmp >= 0); break;
4235 default: done = false;
4240 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4242 return boolean_false_node;
4245 /* Likewise if the second comparison is an == comparison. */
4246 else if (code2 == EQ_EXPR)
4252 case EQ_EXPR: val = (cmp == 0); break;
4253 case NE_EXPR: val = (cmp != 0); break;
4254 case LT_EXPR: val = (cmp > 0); break;
4255 case GT_EXPR: val = (cmp < 0); break;
4256 case LE_EXPR: val = (cmp >= 0); break;
4257 case GE_EXPR: val = (cmp <= 0); break;
4258 default: done = false;
4263 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4265 return boolean_false_node;
4269 /* Same business with inequality tests. */
4270 else if (code1 == NE_EXPR)
4275 case EQ_EXPR: val = (cmp != 0); break;
4276 case NE_EXPR: val = (cmp == 0); break;
4277 case LT_EXPR: val = (cmp >= 0); break;
4278 case GT_EXPR: val = (cmp <= 0); break;
4279 case LE_EXPR: val = (cmp > 0); break;
4280 case GE_EXPR: val = (cmp < 0); break;
4285 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4287 else if (code2 == NE_EXPR)
4292 case EQ_EXPR: val = (cmp == 0); break;
4293 case NE_EXPR: val = (cmp != 0); break;
4294 case LT_EXPR: val = (cmp <= 0); break;
4295 case GT_EXPR: val = (cmp >= 0); break;
4296 case LE_EXPR: val = (cmp < 0); break;
4297 case GE_EXPR: val = (cmp > 0); break;
4302 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4305 /* Chose the more restrictive of two < or <= comparisons. */
4306 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4307 && (code2 == LT_EXPR || code2 == LE_EXPR))
4309 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4310 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4312 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4315 /* Likewise chose the more restrictive of two > or >= comparisons. */
4316 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4317 && (code2 == GT_EXPR || code2 == GE_EXPR))
4319 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4320 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4322 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4325 /* Check for singleton ranges. */
4327 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4328 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4329 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4331 /* Check for disjoint ranges. */
4333 && (code1 == LT_EXPR || code1 == LE_EXPR)
4334 && (code2 == GT_EXPR || code2 == GE_EXPR))
4335 return boolean_false_node;
4337 && (code1 == GT_EXPR || code1 == GE_EXPR)
4338 && (code2 == LT_EXPR || code2 == LE_EXPR))
4339 return boolean_false_node;
4342 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4343 NAME's definition is a truth value. See if there are any simplifications
4344 that can be done against the NAME's definition. */
4345 if (TREE_CODE (op1a) == SSA_NAME
4346 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4347 && (integer_zerop (op1b) || integer_onep (op1b)))
4349 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4350 || (code1 == NE_EXPR && integer_onep (op1b)));
4351 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4352 switch (gimple_code (stmt))
4355 /* Try to simplify by copy-propagating the definition. */
4356 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4359 /* If every argument to the PHI produces the same result when
4360 ANDed with the second comparison, we win.
4361 Do not do this unless the type is bool since we need a bool
4362 result here anyway. */
4363 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4365 tree result = NULL_TREE;
4367 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4369 tree arg = gimple_phi_arg_def (stmt, i);
4371 /* If this PHI has itself as an argument, ignore it.
4372 If all the other args produce the same result,
4374 if (arg == gimple_phi_result (stmt))
4376 else if (TREE_CODE (arg) == INTEGER_CST)
4378 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4381 result = boolean_false_node;
4382 else if (!integer_zerop (result))
4386 result = fold_build2 (code2, boolean_type_node,
4388 else if (!same_bool_comparison_p (result,
4392 else if (TREE_CODE (arg) == SSA_NAME
4393 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4396 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4397 /* In simple cases we can look through PHI nodes,
4398 but we have to be careful with loops.
4400 if (! dom_info_available_p (CDI_DOMINATORS)
4401 || gimple_bb (def_stmt) == gimple_bb (stmt)
4402 || dominated_by_p (CDI_DOMINATORS,
4403 gimple_bb (def_stmt),
4406 temp = and_var_with_comparison (arg, invert, code2,
4412 else if (!same_bool_result_p (result, temp))
4428 /* Try to simplify the AND of two comparisons, specified by
4429 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4430 If this can be simplified to a single expression (without requiring
4431 introducing more SSA variables to hold intermediate values),
4432 return the resulting tree. Otherwise return NULL_TREE.
4433 If the result expression is non-null, it has boolean type. */
4436 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4437 enum tree_code code2, tree op2a, tree op2b)
4439 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4443 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4446 /* Helper function for or_comparisons_1: try to simplify the OR of the
4447 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4448 If INVERT is true, invert the value of VAR before doing the OR.
4449 Return NULL_EXPR if we can't simplify this to a single expression. */
4452 or_var_with_comparison (tree var, bool invert,
4453 enum tree_code code2, tree op2a, tree op2b)
4456 gimple *stmt = SSA_NAME_DEF_STMT (var);
4458 /* We can only deal with variables whose definitions are assignments. */
4459 if (!is_gimple_assign (stmt))
4462 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4463 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4464 Then we only have to consider the simpler non-inverted cases. */
4466 t = and_var_with_comparison_1 (stmt,
4467 invert_tree_comparison (code2, false),
4470 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4471 return canonicalize_bool (t, invert);
4474 /* Try to simplify the OR of the ssa variable defined by the assignment
4475 STMT with the comparison specified by (OP2A CODE2 OP2B).
4476 Return NULL_EXPR if we can't simplify this to a single expression. */
4479 or_var_with_comparison_1 (gimple *stmt,
4480 enum tree_code code2, tree op2a, tree op2b)
4482 tree var = gimple_assign_lhs (stmt);
4483 tree true_test_var = NULL_TREE;
4484 tree false_test_var = NULL_TREE;
4485 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4487 /* Check for identities like (var OR (var != 0)) => true . */
4488 if (TREE_CODE (op2a) == SSA_NAME
4489 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4491 if ((code2 == NE_EXPR && integer_zerop (op2b))
4492 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4494 true_test_var = op2a;
4495 if (var == true_test_var)
4498 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4499 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4501 false_test_var = op2a;
4502 if (var == false_test_var)
4503 return boolean_true_node;
4507 /* If the definition is a comparison, recurse on it. */
4508 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4510 tree t = or_comparisons_1 (innercode,
4511 gimple_assign_rhs1 (stmt),
4512 gimple_assign_rhs2 (stmt),
4520 /* If the definition is an AND or OR expression, we may be able to
4521 simplify by reassociating. */
4522 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4523 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4525 tree inner1 = gimple_assign_rhs1 (stmt);
4526 tree inner2 = gimple_assign_rhs2 (stmt);
4529 tree partial = NULL_TREE;
4530 bool is_or = (innercode == BIT_IOR_EXPR);
4532 /* Check for boolean identities that don't require recursive examination
4534 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4535 inner1 OR (inner1 AND inner2) => inner1
4536 !inner1 OR (inner1 OR inner2) => true
4537 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4539 if (inner1 == true_test_var)
4540 return (is_or ? var : inner1);
4541 else if (inner2 == true_test_var)
4542 return (is_or ? var : inner2);
4543 else if (inner1 == false_test_var)
4546 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4547 else if (inner2 == false_test_var)
4550 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4552 /* Next, redistribute/reassociate the OR across the inner tests.
4553 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4554 if (TREE_CODE (inner1) == SSA_NAME
4555 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4556 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4557 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4558 gimple_assign_rhs1 (s),
4559 gimple_assign_rhs2 (s),
4560 code2, op2a, op2b)))
4562 /* Handle the OR case, where we are reassociating:
4563 (inner1 OR inner2) OR (op2a code2 op2b)
4565 If the partial result t is a constant, we win. Otherwise
4566 continue on to try reassociating with the other inner test. */
4569 if (integer_onep (t))
4570 return boolean_true_node;
4571 else if (integer_zerop (t))
4575 /* Handle the AND case, where we are redistributing:
4576 (inner1 AND inner2) OR (op2a code2 op2b)
4577 => (t AND (inner2 OR (op2a code op2b))) */
4578 else if (integer_zerop (t))
4579 return boolean_false_node;
4581 /* Save partial result for later. */
4585 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4586 if (TREE_CODE (inner2) == SSA_NAME
4587 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4588 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4589 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4590 gimple_assign_rhs1 (s),
4591 gimple_assign_rhs2 (s),
4592 code2, op2a, op2b)))
4594 /* Handle the OR case, where we are reassociating:
4595 (inner1 OR inner2) OR (op2a code2 op2b)
4597 => (t OR partial) */
4600 if (integer_zerop (t))
4602 else if (integer_onep (t))
4603 return boolean_true_node;
4604 /* If both are the same, we can apply the identity
4606 else if (partial && same_bool_result_p (t, partial))
4610 /* Handle the AND case, where we are redistributing:
4611 (inner1 AND inner2) OR (op2a code2 op2b)
4612 => (t AND (inner1 OR (op2a code2 op2b)))
4613 => (t AND partial) */
4616 if (integer_zerop (t))
4617 return boolean_false_node;
4620 /* We already got a simplification for the other
4621 operand to the redistributed AND expression. The
4622 interesting case is when at least one is true.
4623 Or, if both are the same, we can apply the identity
4625 if (integer_onep (partial))
4627 else if (integer_onep (t))
4629 else if (same_bool_result_p (t, partial))
4638 /* Try to simplify the OR of two comparisons defined by
4639 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4640 If this can be done without constructing an intermediate value,
4641 return the resulting tree; otherwise NULL_TREE is returned.
4642 This function is deliberately asymmetric as it recurses on SSA_DEFs
4643 in the first comparison but not the second. */
4646 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4647 enum tree_code code2, tree op2a, tree op2b)
4649 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4651 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4652 if (operand_equal_p (op1a, op2a, 0)
4653 && operand_equal_p (op1b, op2b, 0))
4655 /* Result will be either NULL_TREE, or a combined comparison. */
4656 tree t = combine_comparisons (UNKNOWN_LOCATION,
4657 TRUTH_ORIF_EXPR, code1, code2,
4658 truth_type, op1a, op1b);
4663 /* Likewise the swapped case of the above. */
4664 if (operand_equal_p (op1a, op2b, 0)
4665 && operand_equal_p (op1b, op2a, 0))
4667 /* Result will be either NULL_TREE, or a combined comparison. */
4668 tree t = combine_comparisons (UNKNOWN_LOCATION,
4669 TRUTH_ORIF_EXPR, code1,
4670 swap_tree_comparison (code2),
4671 truth_type, op1a, op1b);
4676 /* If both comparisons are of the same value against constants, we might
4677 be able to merge them. */
4678 if (operand_equal_p (op1a, op2a, 0)
4679 && TREE_CODE (op1b) == INTEGER_CST
4680 && TREE_CODE (op2b) == INTEGER_CST)
4682 int cmp = tree_int_cst_compare (op1b, op2b);
4684 /* If we have (op1a != op1b), we should either be able to
4685 return that or TRUE, depending on whether the constant op1b
4686 also satisfies the other comparison against op2b. */
4687 if (code1 == NE_EXPR)
4693 case EQ_EXPR: val = (cmp == 0); break;
4694 case NE_EXPR: val = (cmp != 0); break;
4695 case LT_EXPR: val = (cmp < 0); break;
4696 case GT_EXPR: val = (cmp > 0); break;
4697 case LE_EXPR: val = (cmp <= 0); break;
4698 case GE_EXPR: val = (cmp >= 0); break;
4699 default: done = false;
4704 return boolean_true_node;
4706 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4709 /* Likewise if the second comparison is a != comparison. */
4710 else if (code2 == NE_EXPR)
4716 case EQ_EXPR: val = (cmp == 0); break;
4717 case NE_EXPR: val = (cmp != 0); break;
4718 case LT_EXPR: val = (cmp > 0); break;
4719 case GT_EXPR: val = (cmp < 0); break;
4720 case LE_EXPR: val = (cmp >= 0); break;
4721 case GE_EXPR: val = (cmp <= 0); break;
4722 default: done = false;
4727 return boolean_true_node;
4729 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4733 /* See if an equality test is redundant with the other comparison. */
4734 else if (code1 == EQ_EXPR)
4739 case EQ_EXPR: val = (cmp == 0); break;
4740 case NE_EXPR: val = (cmp != 0); break;
4741 case LT_EXPR: val = (cmp < 0); break;
4742 case GT_EXPR: val = (cmp > 0); break;
4743 case LE_EXPR: val = (cmp <= 0); break;
4744 case GE_EXPR: val = (cmp >= 0); break;
4749 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4751 else if (code2 == EQ_EXPR)
4756 case EQ_EXPR: val = (cmp == 0); break;
4757 case NE_EXPR: val = (cmp != 0); break;
4758 case LT_EXPR: val = (cmp > 0); break;
4759 case GT_EXPR: val = (cmp < 0); break;
4760 case LE_EXPR: val = (cmp >= 0); break;
4761 case GE_EXPR: val = (cmp <= 0); break;
4766 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4769 /* Chose the less restrictive of two < or <= comparisons. */
4770 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4771 && (code2 == LT_EXPR || code2 == LE_EXPR))
4773 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4774 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4776 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4779 /* Likewise chose the less restrictive of two > or >= comparisons. */
4780 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4781 && (code2 == GT_EXPR || code2 == GE_EXPR))
4783 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4784 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4786 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4789 /* Check for singleton ranges. */
4791 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4792 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4793 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4795 /* Check for less/greater pairs that don't restrict the range at all. */
4797 && (code1 == LT_EXPR || code1 == LE_EXPR)
4798 && (code2 == GT_EXPR || code2 == GE_EXPR))
4799 return boolean_true_node;
4801 && (code1 == GT_EXPR || code1 == GE_EXPR)
4802 && (code2 == LT_EXPR || code2 == LE_EXPR))
4803 return boolean_true_node;
4806 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4807 NAME's definition is a truth value. See if there are any simplifications
4808 that can be done against the NAME's definition. */
4809 if (TREE_CODE (op1a) == SSA_NAME
4810 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4811 && (integer_zerop (op1b) || integer_onep (op1b)))
4813 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4814 || (code1 == NE_EXPR && integer_onep (op1b)));
4815 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4816 switch (gimple_code (stmt))
4819 /* Try to simplify by copy-propagating the definition. */
4820 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4823 /* If every argument to the PHI produces the same result when
4824 ORed with the second comparison, we win.
4825 Do not do this unless the type is bool since we need a bool
4826 result here anyway. */
4827 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4829 tree result = NULL_TREE;
4831 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4833 tree arg = gimple_phi_arg_def (stmt, i);
4835 /* If this PHI has itself as an argument, ignore it.
4836 If all the other args produce the same result,
4838 if (arg == gimple_phi_result (stmt))
4840 else if (TREE_CODE (arg) == INTEGER_CST)
4842 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4845 result = boolean_true_node;
4846 else if (!integer_onep (result))
4850 result = fold_build2 (code2, boolean_type_node,
4852 else if (!same_bool_comparison_p (result,
4856 else if (TREE_CODE (arg) == SSA_NAME
4857 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4860 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
4861 /* In simple cases we can look through PHI nodes,
4862 but we have to be careful with loops.
4864 if (! dom_info_available_p (CDI_DOMINATORS)
4865 || gimple_bb (def_stmt) == gimple_bb (stmt)
4866 || dominated_by_p (CDI_DOMINATORS,
4867 gimple_bb (def_stmt),
4870 temp = or_var_with_comparison (arg, invert, code2,
4876 else if (!same_bool_result_p (result, temp))
4892 /* Try to simplify the OR of two comparisons, specified by
4893 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4894 If this can be simplified to a single expression (without requiring
4895 introducing more SSA variables to hold intermediate values),
4896 return the resulting tree. Otherwise return NULL_TREE.
4897 If the result expression is non-null, it has boolean type. */
4900 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4901 enum tree_code code2, tree op2a, tree op2b)
4903 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4907 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4911 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4913 Either NULL_TREE, a simplified but non-constant or a constant
4916 ??? This should go into a gimple-fold-inline.h file to be eventually
4917 privatized with the single valueize function used in the various TUs
4918 to avoid the indirect function call overhead. */
4921 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
4922 tree (*gvalueize) (tree))
4926 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4927 edges if there are intermediate VARYING defs. For this reason
4928 do not follow SSA edges here even though SCCVN can technically
4929 just deal fine with that. */
4930 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
4932 tree res = NULL_TREE;
4933 if (gimple_simplified_result_is_gimple_val (rcode, ops))
4935 else if (mprts_hook)
4936 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4939 if (dump_file && dump_flags & TDF_DETAILS)
4941 fprintf (dump_file, "Match-and-simplified ");
4942 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4943 fprintf (dump_file, " to ");
4944 print_generic_expr (dump_file, res, 0);
4945 fprintf (dump_file, "\n");
4951 location_t loc = gimple_location (stmt);
4952 switch (gimple_code (stmt))
4956 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4958 switch (get_gimple_rhs_class (subcode))
4960 case GIMPLE_SINGLE_RHS:
4962 tree rhs = gimple_assign_rhs1 (stmt);
4963 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4965 if (TREE_CODE (rhs) == SSA_NAME)
4967 /* If the RHS is an SSA_NAME, return its known constant value,
4969 return (*valueize) (rhs);
4971 /* Handle propagating invariant addresses into address
4973 else if (TREE_CODE (rhs) == ADDR_EXPR
4974 && !is_gimple_min_invariant (rhs))
4976 HOST_WIDE_INT offset = 0;
4978 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4982 && (CONSTANT_CLASS_P (base)
4983 || decl_address_invariant_p (base)))
4984 return build_invariant_address (TREE_TYPE (rhs),
4987 else if (TREE_CODE (rhs) == CONSTRUCTOR
4988 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4989 && (CONSTRUCTOR_NELTS (rhs)
4990 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4995 vec = XALLOCAVEC (tree,
4996 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4997 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4999 val = (*valueize) (val);
5000 if (TREE_CODE (val) == INTEGER_CST
5001 || TREE_CODE (val) == REAL_CST
5002 || TREE_CODE (val) == FIXED_CST)
5008 return build_vector (TREE_TYPE (rhs), vec);
5010 if (subcode == OBJ_TYPE_REF)
5012 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5013 /* If callee is constant, we can fold away the wrapper. */
5014 if (is_gimple_min_invariant (val))
5018 if (kind == tcc_reference)
5020 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5021 || TREE_CODE (rhs) == REALPART_EXPR
5022 || TREE_CODE (rhs) == IMAGPART_EXPR)
5023 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5025 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5026 return fold_unary_loc (EXPR_LOCATION (rhs),
5028 TREE_TYPE (rhs), val);
5030 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5031 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5033 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5034 return fold_ternary_loc (EXPR_LOCATION (rhs),
5036 TREE_TYPE (rhs), val,
5037 TREE_OPERAND (rhs, 1),
5038 TREE_OPERAND (rhs, 2));
5040 else if (TREE_CODE (rhs) == MEM_REF
5041 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5043 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5044 if (TREE_CODE (val) == ADDR_EXPR
5045 && is_gimple_min_invariant (val))
5047 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5049 TREE_OPERAND (rhs, 1));
5054 return fold_const_aggregate_ref_1 (rhs, valueize);
5056 else if (kind == tcc_declaration)
5057 return get_symbol_constant_value (rhs);
5061 case GIMPLE_UNARY_RHS:
5064 case GIMPLE_BINARY_RHS:
5065 /* Translate &x + CST into an invariant form suitable for
5066 further propagation. */
5067 if (subcode == POINTER_PLUS_EXPR)
5069 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5070 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5071 if (TREE_CODE (op0) == ADDR_EXPR
5072 && TREE_CODE (op1) == INTEGER_CST)
5074 tree off = fold_convert (ptr_type_node, op1);
5075 return build_fold_addr_expr_loc
5077 fold_build2 (MEM_REF,
5078 TREE_TYPE (TREE_TYPE (op0)),
5079 unshare_expr (op0), off));
5082 /* Canonicalize bool != 0 and bool == 0 appearing after
5083 valueization. While gimple_simplify handles this
5084 it can get confused by the ~X == 1 -> X == 0 transform
5085 which we cant reduce to a SSA name or a constant
5086 (and we have no way to tell gimple_simplify to not
5087 consider those transforms in the first place). */
5088 else if (subcode == EQ_EXPR
5089 || subcode == NE_EXPR)
5091 tree lhs = gimple_assign_lhs (stmt);
5092 tree op0 = gimple_assign_rhs1 (stmt);
5093 if (useless_type_conversion_p (TREE_TYPE (lhs),
5096 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5097 op0 = (*valueize) (op0);
5098 if (TREE_CODE (op0) == INTEGER_CST)
5099 std::swap (op0, op1);
5100 if (TREE_CODE (op1) == INTEGER_CST
5101 && ((subcode == NE_EXPR && integer_zerop (op1))
5102 || (subcode == EQ_EXPR && integer_onep (op1))))
5108 case GIMPLE_TERNARY_RHS:
5110 /* Handle ternary operators that can appear in GIMPLE form. */
5111 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5112 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5113 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5114 return fold_ternary_loc (loc, subcode,
5115 gimple_expr_type (stmt), op0, op1, op2);
5126 gcall *call_stmt = as_a <gcall *> (stmt);
5128 if (gimple_call_internal_p (stmt))
5130 enum tree_code subcode = ERROR_MARK;
5131 switch (gimple_call_internal_fn (stmt))
5133 case IFN_UBSAN_CHECK_ADD:
5134 subcode = PLUS_EXPR;
5136 case IFN_UBSAN_CHECK_SUB:
5137 subcode = MINUS_EXPR;
5139 case IFN_UBSAN_CHECK_MUL:
5140 subcode = MULT_EXPR;
5145 tree arg0 = gimple_call_arg (stmt, 0);
5146 tree arg1 = gimple_call_arg (stmt, 1);
5147 tree op0 = (*valueize) (arg0);
5148 tree op1 = (*valueize) (arg1);
5150 if (TREE_CODE (op0) != INTEGER_CST
5151 || TREE_CODE (op1) != INTEGER_CST)
5156 /* x * 0 = 0 * x = 0 without overflow. */
5157 if (integer_zerop (op0) || integer_zerop (op1))
5158 return build_zero_cst (TREE_TYPE (arg0));
5161 /* y - y = 0 without overflow. */
5162 if (operand_equal_p (op0, op1, 0))
5163 return build_zero_cst (TREE_TYPE (arg0));
5170 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5172 && TREE_CODE (res) == INTEGER_CST
5173 && !TREE_OVERFLOW (res))
5178 fn = (*valueize) (gimple_call_fn (stmt));
5179 if (TREE_CODE (fn) == ADDR_EXPR
5180 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5181 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5182 && gimple_builtin_call_types_compatible_p (stmt,
5183 TREE_OPERAND (fn, 0)))
5185 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5188 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5189 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5190 retval = fold_builtin_call_array (loc,
5191 gimple_call_return_type (call_stmt),
5192 fn, gimple_call_num_args (stmt), args);
5195 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5196 STRIP_NOPS (retval);
5197 retval = fold_convert (gimple_call_return_type (call_stmt),
5210 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5211 Returns NULL_TREE if folding to a constant is not possible, otherwise
5212 returns a constant according to is_gimple_min_invariant. */
5215 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5217 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5218 if (res && is_gimple_min_invariant (res))
5224 /* The following set of functions are supposed to fold references using
5225 their constant initializers. */
5227 /* See if we can find constructor defining value of BASE.
5228 When we know the consructor with constant offset (such as
5229 base is array[40] and we do know constructor of array), then
5230 BIT_OFFSET is adjusted accordingly.
5232 As a special case, return error_mark_node when constructor
5233 is not explicitly available, but it is known to be zero
5234 such as 'static const int a;'. */
5236 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5237 tree (*valueize)(tree))
5239 HOST_WIDE_INT bit_offset2, size, max_size;
5240 if (TREE_CODE (base) == MEM_REF)
5242 if (!integer_zerop (TREE_OPERAND (base, 1)))
5244 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5246 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5251 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5252 base = valueize (TREE_OPERAND (base, 0));
5253 if (!base || TREE_CODE (base) != ADDR_EXPR)
5255 base = TREE_OPERAND (base, 0);
5258 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5259 DECL_INITIAL. If BASE is a nested reference into another
5260 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5261 the inner reference. */
5262 switch (TREE_CODE (base))
5267 tree init = ctor_for_folding (base);
5269 /* Our semantic is exact opposite of ctor_for_folding;
5270 NULL means unknown, while error_mark_node is 0. */
5271 if (init == error_mark_node)
5274 return error_mark_node;
5280 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5281 if (max_size == -1 || size != max_size)
5283 *bit_offset += bit_offset2;
5284 return get_base_constructor (base, bit_offset, valueize);
5295 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5296 SIZE to the memory at bit OFFSET. */
5299 fold_array_ctor_reference (tree type, tree ctor,
5300 unsigned HOST_WIDE_INT offset,
5301 unsigned HOST_WIDE_INT size,
5304 offset_int low_bound;
5305 offset_int elt_size;
5306 offset_int access_index;
5307 tree domain_type = NULL_TREE;
5308 HOST_WIDE_INT inner_offset;
5310 /* Compute low bound and elt size. */
5311 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5312 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5313 if (domain_type && TYPE_MIN_VALUE (domain_type))
5315 /* Static constructors for variably sized objects makes no sense. */
5316 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5317 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5321 /* Static constructors for variably sized objects makes no sense. */
5322 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5324 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5326 /* We can handle only constantly sized accesses that are known to not
5327 be larger than size of array element. */
5328 if (!TYPE_SIZE_UNIT (type)
5329 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5330 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5334 /* Compute the array index we look for. */
5335 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5337 access_index += low_bound;
5339 /* And offset within the access. */
5340 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5342 /* See if the array field is large enough to span whole access. We do not
5343 care to fold accesses spanning multiple array indexes. */
5344 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5346 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5347 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5349 /* When memory is not explicitely mentioned in constructor,
5350 it is 0 (or out of range). */
5351 return build_zero_cst (type);
5354 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5355 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5358 fold_nonarray_ctor_reference (tree type, tree ctor,
5359 unsigned HOST_WIDE_INT offset,
5360 unsigned HOST_WIDE_INT size,
5363 unsigned HOST_WIDE_INT cnt;
5366 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5369 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5370 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5371 tree field_size = DECL_SIZE (cfield);
5372 offset_int bitoffset;
5373 offset_int bitoffset_end, access_end;
5375 /* Variable sized objects in static constructors makes no sense,
5376 but field_size can be NULL for flexible array members. */
5377 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5378 && TREE_CODE (byte_offset) == INTEGER_CST
5379 && (field_size != NULL_TREE
5380 ? TREE_CODE (field_size) == INTEGER_CST
5381 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5383 /* Compute bit offset of the field. */
5384 bitoffset = (wi::to_offset (field_offset)
5385 + wi::lshift (wi::to_offset (byte_offset),
5386 LOG2_BITS_PER_UNIT));
5387 /* Compute bit offset where the field ends. */
5388 if (field_size != NULL_TREE)
5389 bitoffset_end = bitoffset + wi::to_offset (field_size);
5393 access_end = offset_int (offset) + size;
5395 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5396 [BITOFFSET, BITOFFSET_END)? */
5397 if (wi::cmps (access_end, bitoffset) > 0
5398 && (field_size == NULL_TREE
5399 || wi::lts_p (offset, bitoffset_end)))
5401 offset_int inner_offset = offset_int (offset) - bitoffset;
5402 /* We do have overlap. Now see if field is large enough to
5403 cover the access. Give up for accesses spanning multiple
5405 if (wi::cmps (access_end, bitoffset_end) > 0)
5407 if (wi::lts_p (offset, bitoffset))
5409 return fold_ctor_reference (type, cval,
5410 inner_offset.to_uhwi (), size,
5414 /* When memory is not explicitely mentioned in constructor, it is 0. */
5415 return build_zero_cst (type);
5418 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5419 to the memory at bit OFFSET. */
5422 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5423 unsigned HOST_WIDE_INT size, tree from_decl)
5427 /* We found the field with exact match. */
5428 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5430 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5432 /* We are at the end of walk, see if we can view convert the
5434 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5435 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5436 && !compare_tree_int (TYPE_SIZE (type), size)
5437 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5439 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5440 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5442 STRIP_USELESS_TYPE_CONVERSION (ret);
5445 /* For constants and byte-aligned/sized reads try to go through
5446 native_encode/interpret. */
5447 if (CONSTANT_CLASS_P (ctor)
5448 && BITS_PER_UNIT == 8
5449 && offset % BITS_PER_UNIT == 0
5450 && size % BITS_PER_UNIT == 0
5451 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5453 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5454 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5455 offset / BITS_PER_UNIT) > 0)
5456 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5458 if (TREE_CODE (ctor) == CONSTRUCTOR)
5461 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5462 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5463 return fold_array_ctor_reference (type, ctor, offset, size,
5466 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5473 /* Return the tree representing the element referenced by T if T is an
5474 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5475 names using VALUEIZE. Return NULL_TREE otherwise. */
5478 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5480 tree ctor, idx, base;
5481 HOST_WIDE_INT offset, size, max_size;
5484 if (TREE_THIS_VOLATILE (t))
5488 return get_symbol_constant_value (t);
5490 tem = fold_read_from_constant_string (t);
5494 switch (TREE_CODE (t))
5497 case ARRAY_RANGE_REF:
5498 /* Constant indexes are handled well by get_base_constructor.
5499 Only special case variable offsets.
5500 FIXME: This code can't handle nested references with variable indexes
5501 (they will be handled only by iteration of ccp). Perhaps we can bring
5502 get_ref_base_and_extent here and make it use a valueize callback. */
5503 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5505 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5506 && TREE_CODE (idx) == INTEGER_CST)
5508 tree low_bound, unit_size;
5510 /* If the resulting bit-offset is constant, track it. */
5511 if ((low_bound = array_ref_low_bound (t),
5512 TREE_CODE (low_bound) == INTEGER_CST)
5513 && (unit_size = array_ref_element_size (t),
5514 tree_fits_uhwi_p (unit_size)))
5517 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5518 TYPE_PRECISION (TREE_TYPE (idx)));
5520 if (wi::fits_shwi_p (woffset))
5522 offset = woffset.to_shwi ();
5523 /* TODO: This code seems wrong, multiply then check
5524 to see if it fits. */
5525 offset *= tree_to_uhwi (unit_size);
5526 offset *= BITS_PER_UNIT;
5528 base = TREE_OPERAND (t, 0);
5529 ctor = get_base_constructor (base, &offset, valueize);
5530 /* Empty constructor. Always fold to 0. */
5531 if (ctor == error_mark_node)
5532 return build_zero_cst (TREE_TYPE (t));
5533 /* Out of bound array access. Value is undefined,
5537 /* We can not determine ctor. */
5540 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5541 tree_to_uhwi (unit_size)
5551 case TARGET_MEM_REF:
5553 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5554 ctor = get_base_constructor (base, &offset, valueize);
5556 /* Empty constructor. Always fold to 0. */
5557 if (ctor == error_mark_node)
5558 return build_zero_cst (TREE_TYPE (t));
5559 /* We do not know precise address. */
5560 if (max_size == -1 || max_size != size)
5562 /* We can not determine ctor. */
5566 /* Out of bound array access. Value is undefined, but don't fold. */
5570 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5576 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5577 if (c && TREE_CODE (c) == COMPLEX_CST)
5578 return fold_build1_loc (EXPR_LOCATION (t),
5579 TREE_CODE (t), TREE_TYPE (t), c);
5591 fold_const_aggregate_ref (tree t)
5593 return fold_const_aggregate_ref_1 (t, NULL);
5596 /* Lookup virtual method with index TOKEN in a virtual table V
5598 Set CAN_REFER if non-NULL to false if method
5599 is not referable or if the virtual table is ill-formed (such as rewriten
5600 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5603 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5605 unsigned HOST_WIDE_INT offset,
5608 tree vtable = v, init, fn;
5609 unsigned HOST_WIDE_INT size;
5610 unsigned HOST_WIDE_INT elt_size, access_index;
5616 /* First of all double check we have virtual table. */
5617 if (TREE_CODE (v) != VAR_DECL
5618 || !DECL_VIRTUAL_P (v))
5620 /* Pass down that we lost track of the target. */
5626 init = ctor_for_folding (v);
5628 /* The virtual tables should always be born with constructors
5629 and we always should assume that they are avaialble for
5630 folding. At the moment we do not stream them in all cases,
5631 but it should never happen that ctor seem unreachable. */
5633 if (init == error_mark_node)
5635 gcc_assert (in_lto_p);
5636 /* Pass down that we lost track of the target. */
5641 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5642 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5643 offset *= BITS_PER_UNIT;
5644 offset += token * size;
5646 /* Lookup the value in the constructor that is assumed to be array.
5647 This is equivalent to
5648 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5649 offset, size, NULL);
5650 but in a constant time. We expect that frontend produced a simple
5651 array without indexed initializers. */
5653 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5654 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5655 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5656 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5658 access_index = offset / BITS_PER_UNIT / elt_size;
5659 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5661 /* This code makes an assumption that there are no
5662 indexed fileds produced by C++ FE, so we can directly index the array. */
5663 if (access_index < CONSTRUCTOR_NELTS (init))
5665 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5666 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5672 /* For type inconsistent program we may end up looking up virtual method
5673 in virtual table that does not contain TOKEN entries. We may overrun
5674 the virtual table and pick up a constant or RTTI info pointer.
5675 In any case the call is undefined. */
5677 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5678 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5679 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5682 fn = TREE_OPERAND (fn, 0);
5684 /* When cgraph node is missing and function is not public, we cannot
5685 devirtualize. This can happen in WHOPR when the actual method
5686 ends up in other partition, because we found devirtualization
5687 possibility too late. */
5688 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5699 /* Make sure we create a cgraph node for functions we'll reference.
5700 They can be non-existent if the reference comes from an entry
5701 of an external vtable for example. */
5702 cgraph_node::get_create (fn);
5707 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5708 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5709 KNOWN_BINFO carries the binfo describing the true type of
5710 OBJ_TYPE_REF_OBJECT(REF).
5711 Set CAN_REFER if non-NULL to false if method
5712 is not referable or if the virtual table is ill-formed (such as rewriten
5713 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5716 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5719 unsigned HOST_WIDE_INT offset;
5722 v = BINFO_VTABLE (known_binfo);
5723 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5727 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5733 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5736 /* Given a pointer value OP0, return a simplified version of an
5737 indirection through OP0, or NULL_TREE if no simplification is
5738 possible. Note that the resulting type may be different from
5739 the type pointed to in the sense that it is still compatible
5740 from the langhooks point of view. */
5743 gimple_fold_indirect_ref (tree t)
5745 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5750 subtype = TREE_TYPE (sub);
5751 if (!POINTER_TYPE_P (subtype))
5754 if (TREE_CODE (sub) == ADDR_EXPR)
5756 tree op = TREE_OPERAND (sub, 0);
5757 tree optype = TREE_TYPE (op);
5759 if (useless_type_conversion_p (type, optype))
5762 /* *(foo *)&fooarray => fooarray[0] */
5763 if (TREE_CODE (optype) == ARRAY_TYPE
5764 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5765 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5767 tree type_domain = TYPE_DOMAIN (optype);
5768 tree min_val = size_zero_node;
5769 if (type_domain && TYPE_MIN_VALUE (type_domain))
5770 min_val = TYPE_MIN_VALUE (type_domain);
5771 if (TREE_CODE (min_val) == INTEGER_CST)
5772 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5774 /* *(foo *)&complexfoo => __real__ complexfoo */
5775 else if (TREE_CODE (optype) == COMPLEX_TYPE
5776 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5777 return fold_build1 (REALPART_EXPR, type, op);
5778 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5779 else if (TREE_CODE (optype) == VECTOR_TYPE
5780 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5782 tree part_width = TYPE_SIZE (type);
5783 tree index = bitsize_int (0);
5784 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5788 /* *(p + CST) -> ... */
5789 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5790 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5792 tree addr = TREE_OPERAND (sub, 0);
5793 tree off = TREE_OPERAND (sub, 1);
5797 addrtype = TREE_TYPE (addr);
5799 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5800 if (TREE_CODE (addr) == ADDR_EXPR
5801 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5802 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5803 && tree_fits_uhwi_p (off))
5805 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5806 tree part_width = TYPE_SIZE (type);
5807 unsigned HOST_WIDE_INT part_widthi
5808 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5809 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5810 tree index = bitsize_int (indexi);
5811 if (offset / part_widthi
5812 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5813 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5817 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5818 if (TREE_CODE (addr) == ADDR_EXPR
5819 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5820 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5822 tree size = TYPE_SIZE_UNIT (type);
5823 if (tree_int_cst_equal (size, off))
5824 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5827 /* *(p + CST) -> MEM_REF <p, CST>. */
5828 if (TREE_CODE (addr) != ADDR_EXPR
5829 || DECL_P (TREE_OPERAND (addr, 0)))
5830 return fold_build2 (MEM_REF, type,
5832 wide_int_to_tree (ptype, off));
5835 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5836 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5837 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5838 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5841 tree min_val = size_zero_node;
5843 sub = gimple_fold_indirect_ref (sub);
5845 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5846 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5847 if (type_domain && TYPE_MIN_VALUE (type_domain))
5848 min_val = TYPE_MIN_VALUE (type_domain);
5849 if (TREE_CODE (min_val) == INTEGER_CST)
5850 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5856 /* Return true if CODE is an operation that when operating on signed
5857 integer types involves undefined behavior on overflow and the
5858 operation can be expressed with unsigned arithmetic. */
5861 arith_code_with_undefined_signed_overflow (tree_code code)
5869 case POINTER_PLUS_EXPR:
5876 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5877 operation that can be transformed to unsigned arithmetic by converting
5878 its operand, carrying out the operation in the corresponding unsigned
5879 type and converting the result back to the original type.
5881 Returns a sequence of statements that replace STMT and also contain
5882 a modified form of STMT itself. */
5885 rewrite_to_defined_overflow (gimple *stmt)
5887 if (dump_file && (dump_flags & TDF_DETAILS))
5889 fprintf (dump_file, "rewriting stmt with undefined signed "
5891 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5894 tree lhs = gimple_assign_lhs (stmt);
5895 tree type = unsigned_type_for (TREE_TYPE (lhs));
5896 gimple_seq stmts = NULL;
5897 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5899 tree op = gimple_op (stmt, i);
5900 op = gimple_convert (&stmts, type, op);
5901 gimple_set_op (stmt, i, op);
5903 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5904 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5905 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5906 gimple_seq_add_stmt (&stmts, stmt);
5907 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
5908 gimple_seq_add_stmt (&stmts, cvt);
5914 /* The valueization hook we use for the gimple_build API simplification.
5915 This makes us match fold_buildN behavior by only combining with
5916 statements in the sequence(s) we are currently building. */
5919 gimple_build_valueize (tree op)
5921 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5926 /* Build the expression CODE OP0 of type TYPE with location LOC,
5927 simplifying it first if possible. Returns the built
5928 expression value and appends statements possibly defining it
5932 gimple_build (gimple_seq *seq, location_t loc,
5933 enum tree_code code, tree type, tree op0)
5935 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
5938 if (gimple_in_ssa_p (cfun))
5939 res = make_ssa_name (type);
5941 res = create_tmp_reg (type);
5943 if (code == REALPART_EXPR
5944 || code == IMAGPART_EXPR
5945 || code == VIEW_CONVERT_EXPR)
5946 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
5948 stmt = gimple_build_assign (res, code, op0);
5949 gimple_set_location (stmt, loc);
5950 gimple_seq_add_stmt_without_update (seq, stmt);
5955 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5956 simplifying it first if possible. Returns the built
5957 expression value and appends statements possibly defining it
5961 gimple_build (gimple_seq *seq, location_t loc,
5962 enum tree_code code, tree type, tree op0, tree op1)
5964 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
5967 if (gimple_in_ssa_p (cfun))
5968 res = make_ssa_name (type);
5970 res = create_tmp_reg (type);
5971 gimple *stmt = gimple_build_assign (res, code, op0, op1);
5972 gimple_set_location (stmt, loc);
5973 gimple_seq_add_stmt_without_update (seq, stmt);
5978 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
5979 simplifying it first if possible. Returns the built
5980 expression value and appends statements possibly defining it
5984 gimple_build (gimple_seq *seq, location_t loc,
5985 enum tree_code code, tree type, tree op0, tree op1, tree op2)
5987 tree res = gimple_simplify (code, type, op0, op1, op2,
5988 seq, gimple_build_valueize);
5991 if (gimple_in_ssa_p (cfun))
5992 res = make_ssa_name (type);
5994 res = create_tmp_reg (type);
5996 if (code == BIT_FIELD_REF)
5997 stmt = gimple_build_assign (res, code,
5998 build3 (code, type, op0, op1, op2));
6000 stmt = gimple_build_assign (res, code, op0, op1, op2);
6001 gimple_set_location (stmt, loc);
6002 gimple_seq_add_stmt_without_update (seq, stmt);
6007 /* Build the call FN (ARG0) with a result of type TYPE
6008 (or no result if TYPE is void) with location LOC,
6009 simplifying it first if possible. Returns the built
6010 expression value (or NULL_TREE if TYPE is void) and appends
6011 statements possibly defining it to SEQ. */
6014 gimple_build (gimple_seq *seq, location_t loc,
6015 enum built_in_function fn, tree type, tree arg0)
6017 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6020 tree decl = builtin_decl_implicit (fn);
6021 gimple *stmt = gimple_build_call (decl, 1, arg0);
6022 if (!VOID_TYPE_P (type))
6024 if (gimple_in_ssa_p (cfun))
6025 res = make_ssa_name (type);
6027 res = create_tmp_reg (type);
6028 gimple_call_set_lhs (stmt, res);
6030 gimple_set_location (stmt, loc);
6031 gimple_seq_add_stmt_without_update (seq, stmt);
6036 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6037 (or no result if TYPE is void) with location LOC,
6038 simplifying it first if possible. Returns the built
6039 expression value (or NULL_TREE if TYPE is void) and appends
6040 statements possibly defining it to SEQ. */
6043 gimple_build (gimple_seq *seq, location_t loc,
6044 enum built_in_function fn, tree type, tree arg0, tree arg1)
6046 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6049 tree decl = builtin_decl_implicit (fn);
6050 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6051 if (!VOID_TYPE_P (type))
6053 if (gimple_in_ssa_p (cfun))
6054 res = make_ssa_name (type);
6056 res = create_tmp_reg (type);
6057 gimple_call_set_lhs (stmt, res);
6059 gimple_set_location (stmt, loc);
6060 gimple_seq_add_stmt_without_update (seq, stmt);
6065 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6066 (or no result if TYPE is void) with location LOC,
6067 simplifying it first if possible. Returns the built
6068 expression value (or NULL_TREE if TYPE is void) and appends
6069 statements possibly defining it to SEQ. */
6072 gimple_build (gimple_seq *seq, location_t loc,
6073 enum built_in_function fn, tree type,
6074 tree arg0, tree arg1, tree arg2)
6076 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6077 seq, gimple_build_valueize);
6080 tree decl = builtin_decl_implicit (fn);
6081 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6082 if (!VOID_TYPE_P (type))
6084 if (gimple_in_ssa_p (cfun))
6085 res = make_ssa_name (type);
6087 res = create_tmp_reg (type);
6088 gimple_call_set_lhs (stmt, res);
6090 gimple_set_location (stmt, loc);
6091 gimple_seq_add_stmt_without_update (seq, stmt);
6096 /* Build the conversion (TYPE) OP with a result of type TYPE
6097 with location LOC if such conversion is neccesary in GIMPLE,
6098 simplifying it first.
6099 Returns the built expression value and appends
6100 statements possibly defining it to SEQ. */
6103 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6105 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6107 return gimple_build (seq, loc, NOP_EXPR, type, op);
6110 /* Build the conversion (ptrofftype) OP with a result of a type
6111 compatible with ptrofftype with location LOC if such conversion
6112 is neccesary in GIMPLE, simplifying it first.
6113 Returns the built expression value and appends
6114 statements possibly defining it to SEQ. */
6117 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6119 if (ptrofftype_p (TREE_TYPE (op)))
6121 return gimple_convert (seq, loc, sizetype, op);
6124 /* Return true if the result of assignment STMT is known to be non-negative.
6125 If the return value is based on the assumption that signed overflow is
6126 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6127 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6130 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6133 enum tree_code code = gimple_assign_rhs_code (stmt);
6134 switch (get_gimple_rhs_class (code))
6136 case GIMPLE_UNARY_RHS:
6137 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6138 gimple_expr_type (stmt),
6139 gimple_assign_rhs1 (stmt),
6140 strict_overflow_p, depth);
6141 case GIMPLE_BINARY_RHS:
6142 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6143 gimple_expr_type (stmt),
6144 gimple_assign_rhs1 (stmt),
6145 gimple_assign_rhs2 (stmt),
6146 strict_overflow_p, depth);
6147 case GIMPLE_TERNARY_RHS:
6149 case GIMPLE_SINGLE_RHS:
6150 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6151 strict_overflow_p, depth);
6152 case GIMPLE_INVALID_RHS:
6158 /* Return true if return value of call STMT is known to be non-negative.
6159 If the return value is based on the assumption that signed overflow is
6160 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6161 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6164 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6167 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6168 gimple_call_arg (stmt, 0) : NULL_TREE;
6169 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6170 gimple_call_arg (stmt, 1) : NULL_TREE;
6172 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6173 gimple_call_fndecl (stmt),
6176 strict_overflow_p, depth);
6179 /* Return true if return value of call STMT is known to be non-negative.
6180 If the return value is based on the assumption that signed overflow is
6181 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6182 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6185 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6188 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6190 tree arg = gimple_phi_arg_def (stmt, i);
6191 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6197 /* Return true if STMT is known to compute a non-negative value.
6198 If the return value is based on the assumption that signed overflow is
6199 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6200 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6203 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6206 switch (gimple_code (stmt))
6209 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6212 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6215 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6222 /* Return true if the floating-point value computed by assignment STMT
6223 is known to have an integer value. We also allow +Inf, -Inf and NaN
6224 to be considered integer values.
6226 DEPTH is the current nesting depth of the query. */
6229 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6231 enum tree_code code = gimple_assign_rhs_code (stmt);
6232 switch (get_gimple_rhs_class (code))
6234 case GIMPLE_UNARY_RHS:
6235 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6236 gimple_assign_rhs1 (stmt), depth);
6237 case GIMPLE_BINARY_RHS:
6238 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6239 gimple_assign_rhs1 (stmt),
6240 gimple_assign_rhs2 (stmt), depth);
6241 case GIMPLE_TERNARY_RHS:
6243 case GIMPLE_SINGLE_RHS:
6244 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6245 case GIMPLE_INVALID_RHS:
6251 /* Return true if the floating-point value computed by call STMT is known
6252 to have an integer value. We also allow +Inf, -Inf and NaN to be
6253 considered integer values.
6255 DEPTH is the current nesting depth of the query. */
6258 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6260 tree arg0 = (gimple_call_num_args (stmt) > 0
6261 ? gimple_call_arg (stmt, 0)
6263 tree arg1 = (gimple_call_num_args (stmt) > 1
6264 ? gimple_call_arg (stmt, 1)
6266 return integer_valued_real_call_p (gimple_call_fndecl (stmt),
6270 /* Return true if the floating-point result of phi STMT is known to have
6271 an integer value. We also allow +Inf, -Inf and NaN to be considered
6274 DEPTH is the current nesting depth of the query. */
6277 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6279 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6281 tree arg = gimple_phi_arg_def (stmt, i);
6282 if (!integer_valued_real_single_p (arg, depth + 1))
6288 /* Return true if the floating-point value computed by STMT is known
6289 to have an integer value. We also allow +Inf, -Inf and NaN to be
6290 considered integer values.
6292 DEPTH is the current nesting depth of the query. */
6295 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6297 switch (gimple_code (stmt))
6300 return gimple_assign_integer_valued_real_p (stmt, depth);
6302 return gimple_call_integer_valued_real_p (stmt, depth);
6304 return gimple_phi_integer_valued_real_p (stmt, depth);