1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
36 #include "stor-layout.h"
38 #include "gimple-fold.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
58 #include "fold-const-call.h"
60 /* Return true if T is a constant and the value cast to a target char
61 can be represented by a host char.
62 Store the casted char constant in *P if so. */
65 target_char_cst_p (tree t, char *p)
67 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
70 *p = (char)tree_to_uhwi (t);
74 /* Return true when DECL can be referenced from current unit.
75 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
76 We can get declarations that are not possible to reference for various
79 1) When analyzing C++ virtual tables.
80 C++ virtual tables do have known constructors even
81 when they are keyed to other compilation unit.
82 Those tables can contain pointers to methods and vars
83 in other units. Those methods have both STATIC and EXTERNAL
85 2) In WHOPR mode devirtualization might lead to reference
86 to method that was partitioned elsehwere.
87 In this case we have static VAR_DECL or FUNCTION_DECL
88 that has no corresponding callgraph/varpool node
90 3) COMDAT functions referred by external vtables that
91 we devirtualize only during final compilation stage.
92 At this time we already decided that we will not output
93 the function body and thus we can't reference the symbol
97 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
100 struct cgraph_node *node;
103 if (DECL_ABSTRACT_P (decl))
106 /* We are concerned only about static/external vars and functions. */
107 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
108 || !VAR_OR_FUNCTION_DECL_P (decl))
111 /* Static objects can be referred only if they was not optimized out yet. */
112 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
114 /* Before we start optimizing unreachable code we can be sure all
115 static objects are defined. */
116 if (symtab->function_flags_ready)
118 snode = symtab_node::get (decl);
119 if (!snode || !snode->definition)
121 node = dyn_cast <cgraph_node *> (snode);
122 return !node || !node->global.inlined_to;
125 /* We will later output the initializer, so we can refer to it.
126 So we are concerned only when DECL comes from initializer of
127 external var or var that has been optimized out. */
129 || !VAR_P (from_decl)
130 || (!DECL_EXTERNAL (from_decl)
131 && (vnode = varpool_node::get (from_decl)) != NULL
132 && vnode->definition)
134 && (vnode = varpool_node::get (from_decl)) != NULL
135 && vnode->in_other_partition))
137 /* We are folding reference from external vtable. The vtable may reffer
138 to a symbol keyed to other compilation unit. The other compilation
139 unit may be in separate DSO and the symbol may be hidden. */
140 if (DECL_VISIBILITY_SPECIFIED (decl)
141 && DECL_EXTERNAL (decl)
142 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
143 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
145 /* When function is public, we always can introduce new reference.
146 Exception are the COMDAT functions where introducing a direct
147 reference imply need to include function body in the curren tunit. */
148 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
150 /* We have COMDAT. We are going to check if we still have definition
151 or if the definition is going to be output in other partition.
152 Bypass this when gimplifying; all needed functions will be produced.
154 As observed in PR20991 for already optimized out comdat virtual functions
155 it may be tempting to not necessarily give up because the copy will be
156 output elsewhere when corresponding vtable is output.
157 This is however not possible - ABI specify that COMDATs are output in
158 units where they are used and when the other unit was compiled with LTO
159 it is possible that vtable was kept public while the function itself
161 if (!symtab->function_flags_ready)
164 snode = symtab_node::get (decl);
166 || ((!snode->definition || DECL_EXTERNAL (decl))
167 && (!snode->in_other_partition
168 || (!snode->forced_by_abi && !snode->force_output))))
170 node = dyn_cast <cgraph_node *> (snode);
171 return !node || !node->global.inlined_to;
174 /* Create a temporary for TYPE for a statement STMT. If the current function
175 is in SSA form, a SSA name is created. Otherwise a temporary register
179 create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
181 if (gimple_in_ssa_p (cfun))
182 return make_ssa_name (type, stmt);
184 return create_tmp_reg (type);
187 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
188 acceptable form for is_gimple_min_invariant.
189 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
192 canonicalize_constructor_val (tree cval, tree from_decl)
194 tree orig_cval = cval;
196 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
197 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
199 tree ptr = TREE_OPERAND (cval, 0);
200 if (is_gimple_min_invariant (ptr))
201 cval = build1_loc (EXPR_LOCATION (cval),
202 ADDR_EXPR, TREE_TYPE (ptr),
203 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
205 fold_convert (ptr_type_node,
206 TREE_OPERAND (cval, 1))));
208 if (TREE_CODE (cval) == ADDR_EXPR)
210 tree base = NULL_TREE;
211 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
213 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
215 TREE_OPERAND (cval, 0) = base;
218 base = get_base_address (TREE_OPERAND (cval, 0));
222 if (VAR_OR_FUNCTION_DECL_P (base)
223 && !can_refer_decl_in_current_unit_p (base, from_decl))
225 if (TREE_TYPE (base) == error_mark_node)
228 TREE_ADDRESSABLE (base) = 1;
229 else if (TREE_CODE (base) == FUNCTION_DECL)
231 /* Make sure we create a cgraph node for functions we'll reference.
232 They can be non-existent if the reference comes from an entry
233 of an external vtable for example. */
234 cgraph_node::get_create (base);
236 /* Fixup types in global initializers. */
237 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
238 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
240 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
241 cval = fold_convert (TREE_TYPE (orig_cval), cval);
244 if (TREE_OVERFLOW_P (cval))
245 return drop_tree_overflow (cval);
249 /* If SYM is a constant variable with known value, return the value.
250 NULL_TREE is returned otherwise. */
253 get_symbol_constant_value (tree sym)
255 tree val = ctor_for_folding (sym);
256 if (val != error_mark_node)
260 val = canonicalize_constructor_val (unshare_expr (val), sym);
261 if (val && is_gimple_min_invariant (val))
266 /* Variables declared 'const' without an initializer
267 have zero as the initializer if they may not be
268 overridden at link or run time. */
270 && is_gimple_reg_type (TREE_TYPE (sym)))
271 return build_zero_cst (TREE_TYPE (sym));
279 /* Subroutine of fold_stmt. We perform several simplifications of the
280 memory reference tree EXPR and make sure to re-gimplify them properly
281 after propagation of constant addresses. IS_LHS is true if the
282 reference is supposed to be an lvalue. */
285 maybe_fold_reference (tree expr, bool is_lhs)
289 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
290 || TREE_CODE (expr) == REALPART_EXPR
291 || TREE_CODE (expr) == IMAGPART_EXPR)
292 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
293 return fold_unary_loc (EXPR_LOCATION (expr),
296 TREE_OPERAND (expr, 0));
297 else if (TREE_CODE (expr) == BIT_FIELD_REF
298 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
299 return fold_ternary_loc (EXPR_LOCATION (expr),
302 TREE_OPERAND (expr, 0),
303 TREE_OPERAND (expr, 1),
304 TREE_OPERAND (expr, 2));
307 && (result = fold_const_aggregate_ref (expr))
308 && is_gimple_min_invariant (result))
315 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
316 replacement rhs for the statement or NULL_TREE if no simplification
317 could be made. It is assumed that the operands have been previously
321 fold_gimple_assign (gimple_stmt_iterator *si)
323 gimple *stmt = gsi_stmt (*si);
324 enum tree_code subcode = gimple_assign_rhs_code (stmt);
325 location_t loc = gimple_location (stmt);
327 tree result = NULL_TREE;
329 switch (get_gimple_rhs_class (subcode))
331 case GIMPLE_SINGLE_RHS:
333 tree rhs = gimple_assign_rhs1 (stmt);
335 if (TREE_CLOBBER_P (rhs))
338 if (REFERENCE_CLASS_P (rhs))
339 return maybe_fold_reference (rhs, false);
341 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
343 tree val = OBJ_TYPE_REF_EXPR (rhs);
344 if (is_gimple_min_invariant (val))
346 else if (flag_devirtualize && virtual_method_call_p (rhs))
349 vec <cgraph_node *>targets
350 = possible_polymorphic_call_targets (rhs, stmt, &final);
351 if (final && targets.length () <= 1 && dbg_cnt (devirt))
353 if (dump_enabled_p ())
355 location_t loc = gimple_location_safe (stmt);
356 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
357 "resolving virtual function address "
358 "reference to function %s\n",
359 targets.length () == 1
360 ? targets[0]->name ()
363 if (targets.length () == 1)
365 val = fold_convert (TREE_TYPE (val),
366 build_fold_addr_expr_loc
367 (loc, targets[0]->decl));
368 STRIP_USELESS_TYPE_CONVERSION (val);
371 /* We can not use __builtin_unreachable here because it
372 can not have address taken. */
373 val = build_int_cst (TREE_TYPE (val), 0);
379 else if (TREE_CODE (rhs) == ADDR_EXPR)
381 tree ref = TREE_OPERAND (rhs, 0);
382 tree tem = maybe_fold_reference (ref, true);
384 && TREE_CODE (tem) == MEM_REF
385 && integer_zerop (TREE_OPERAND (tem, 1)))
386 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
388 result = fold_convert (TREE_TYPE (rhs),
389 build_fold_addr_expr_loc (loc, tem));
390 else if (TREE_CODE (ref) == MEM_REF
391 && integer_zerop (TREE_OPERAND (ref, 1)))
392 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
396 /* Strip away useless type conversions. Both the
397 NON_LVALUE_EXPR that may have been added by fold, and
398 "useless" type conversions that might now be apparent
399 due to propagation. */
400 STRIP_USELESS_TYPE_CONVERSION (result);
402 if (result != rhs && valid_gimple_rhs_p (result))
407 else if (TREE_CODE (rhs) == CONSTRUCTOR
408 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
410 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
414 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
415 if (! CONSTANT_CLASS_P (val))
418 return build_vector_from_ctor (TREE_TYPE (rhs),
419 CONSTRUCTOR_ELTS (rhs));
422 else if (DECL_P (rhs))
423 return get_symbol_constant_value (rhs);
427 case GIMPLE_UNARY_RHS:
430 case GIMPLE_BINARY_RHS:
433 case GIMPLE_TERNARY_RHS:
434 result = fold_ternary_loc (loc, subcode,
435 TREE_TYPE (gimple_assign_lhs (stmt)),
436 gimple_assign_rhs1 (stmt),
437 gimple_assign_rhs2 (stmt),
438 gimple_assign_rhs3 (stmt));
442 STRIP_USELESS_TYPE_CONVERSION (result);
443 if (valid_gimple_rhs_p (result))
448 case GIMPLE_INVALID_RHS:
456 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
457 adjusting the replacement stmts location and virtual operands.
458 If the statement has a lhs the last stmt in the sequence is expected
459 to assign to that lhs. */
462 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
464 gimple *stmt = gsi_stmt (*si_p);
466 if (gimple_has_location (stmt))
467 annotate_all_with_location (stmts, gimple_location (stmt));
469 /* First iterate over the replacement statements backward, assigning
470 virtual operands to their defining statements. */
471 gimple *laststore = NULL;
472 for (gimple_stmt_iterator i = gsi_last (stmts);
473 !gsi_end_p (i); gsi_prev (&i))
475 gimple *new_stmt = gsi_stmt (i);
476 if ((gimple_assign_single_p (new_stmt)
477 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
478 || (is_gimple_call (new_stmt)
479 && (gimple_call_flags (new_stmt)
480 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
484 vdef = gimple_vdef (stmt);
486 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
487 gimple_set_vdef (new_stmt, vdef);
488 if (vdef && TREE_CODE (vdef) == SSA_NAME)
489 SSA_NAME_DEF_STMT (vdef) = new_stmt;
490 laststore = new_stmt;
494 /* Second iterate over the statements forward, assigning virtual
495 operands to their uses. */
496 tree reaching_vuse = gimple_vuse (stmt);
497 for (gimple_stmt_iterator i = gsi_start (stmts);
498 !gsi_end_p (i); gsi_next (&i))
500 gimple *new_stmt = gsi_stmt (i);
501 /* If the new statement possibly has a VUSE, update it with exact SSA
502 name we know will reach this one. */
503 if (gimple_has_mem_ops (new_stmt))
504 gimple_set_vuse (new_stmt, reaching_vuse);
505 gimple_set_modified (new_stmt, true);
506 if (gimple_vdef (new_stmt))
507 reaching_vuse = gimple_vdef (new_stmt);
510 /* If the new sequence does not do a store release the virtual
511 definition of the original statement. */
513 && reaching_vuse == gimple_vuse (stmt))
515 tree vdef = gimple_vdef (stmt);
517 && TREE_CODE (vdef) == SSA_NAME)
519 unlink_stmt_vdef (stmt);
520 release_ssa_name (vdef);
524 /* Finally replace the original statement with the sequence. */
525 gsi_replace_with_seq (si_p, stmts, false);
528 /* Convert EXPR into a GIMPLE value suitable for substitution on the
529 RHS of an assignment. Insert the necessary statements before
530 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
531 is replaced. If the call is expected to produces a result, then it
532 is replaced by an assignment of the new RHS to the result variable.
533 If the result is to be ignored, then the call is replaced by a
534 GIMPLE_NOP. A proper VDEF chain is retained by making the first
535 VUSE and the last VDEF of the whole sequence be the same as the replaced
536 statement and using new SSA names for stores in between. */
539 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
542 gimple *stmt, *new_stmt;
543 gimple_stmt_iterator i;
544 gimple_seq stmts = NULL;
546 stmt = gsi_stmt (*si_p);
548 gcc_assert (is_gimple_call (stmt));
550 push_gimplify_context (gimple_in_ssa_p (cfun));
552 lhs = gimple_call_lhs (stmt);
553 if (lhs == NULL_TREE)
555 gimplify_and_add (expr, &stmts);
556 /* We can end up with folding a memcpy of an empty class assignment
557 which gets optimized away by C++ gimplification. */
558 if (gimple_seq_empty_p (stmts))
560 pop_gimplify_context (NULL);
561 if (gimple_in_ssa_p (cfun))
563 unlink_stmt_vdef (stmt);
566 gsi_replace (si_p, gimple_build_nop (), false);
572 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
573 new_stmt = gimple_build_assign (lhs, tmp);
574 i = gsi_last (stmts);
575 gsi_insert_after_without_update (&i, new_stmt,
576 GSI_CONTINUE_LINKING);
579 pop_gimplify_context (NULL);
581 gsi_replace_with_seq_vops (si_p, stmts);
585 /* Replace the call at *GSI with the gimple value VAL. */
588 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
590 gimple *stmt = gsi_stmt (*gsi);
591 tree lhs = gimple_call_lhs (stmt);
595 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
596 val = fold_convert (TREE_TYPE (lhs), val);
597 repl = gimple_build_assign (lhs, val);
600 repl = gimple_build_nop ();
601 tree vdef = gimple_vdef (stmt);
602 if (vdef && TREE_CODE (vdef) == SSA_NAME)
604 unlink_stmt_vdef (stmt);
605 release_ssa_name (vdef);
607 gsi_replace (gsi, repl, false);
610 /* Replace the call at *GSI with the new call REPL and fold that
614 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
616 gimple *stmt = gsi_stmt (*gsi);
617 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
618 gimple_set_location (repl, gimple_location (stmt));
619 if (gimple_vdef (stmt)
620 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
622 gimple_set_vdef (repl, gimple_vdef (stmt));
623 gimple_set_vuse (repl, gimple_vuse (stmt));
624 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
626 gsi_replace (gsi, repl, false);
630 /* Return true if VAR is a VAR_DECL or a component thereof. */
633 var_decl_component_p (tree var)
636 while (handled_component_p (inner))
637 inner = TREE_OPERAND (inner, 0);
638 return SSA_VAR_P (inner);
641 /* Fold function call to builtin mem{{,p}cpy,move}. Return
642 false if no simplification can be made.
643 If ENDP is 0, return DEST (like memcpy).
644 If ENDP is 1, return DEST+LEN (like mempcpy).
645 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
646 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
650 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
651 tree dest, tree src, int endp)
653 gimple *stmt = gsi_stmt (*gsi);
654 tree lhs = gimple_call_lhs (stmt);
655 tree len = gimple_call_arg (stmt, 2);
656 tree destvar, srcvar;
657 location_t loc = gimple_location (stmt);
659 /* If the LEN parameter is zero, return DEST. */
660 if (integer_zerop (len))
663 if (gimple_call_lhs (stmt))
664 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
666 repl = gimple_build_nop ();
667 tree vdef = gimple_vdef (stmt);
668 if (vdef && TREE_CODE (vdef) == SSA_NAME)
670 unlink_stmt_vdef (stmt);
671 release_ssa_name (vdef);
673 gsi_replace (gsi, repl, false);
677 /* If SRC and DEST are the same (and not volatile), return
678 DEST{,+LEN,+LEN-1}. */
679 if (operand_equal_p (src, dest, 0))
681 unlink_stmt_vdef (stmt);
682 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
683 release_ssa_name (gimple_vdef (stmt));
686 gsi_replace (gsi, gimple_build_nop (), false);
693 tree srctype, desttype;
694 unsigned int src_align, dest_align;
697 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
698 pointers as wide integer) and also may result in huge function
699 size because of inlined bounds copy. Thus don't inline for
700 functions we want to instrument. */
701 if (flag_check_pointer_bounds
702 && chkp_instrumentable_p (cfun->decl)
703 /* Even if data may contain pointers we can inline if copy
704 less than a pointer size. */
705 && (!tree_fits_uhwi_p (len)
706 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
709 /* Build accesses at offset zero with a ref-all character type. */
710 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
713 /* If we can perform the copy efficiently with first doing all loads
714 and then all stores inline it that way. Currently efficiently
715 means that we can load all the memory into a single integer
716 register which is what MOVE_MAX gives us. */
717 src_align = get_pointer_alignment (src);
718 dest_align = get_pointer_alignment (dest);
719 if (tree_fits_uhwi_p (len)
720 && compare_tree_int (len, MOVE_MAX) <= 0
721 /* ??? Don't transform copies from strings with known length this
722 confuses the tree-ssa-strlen.c. This doesn't handle
723 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
725 && !c_strlen (src, 2))
727 unsigned ilen = tree_to_uhwi (len);
728 if (pow2p_hwi (ilen))
730 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
732 && TYPE_MODE (type) != BLKmode
733 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
735 /* If the destination pointer is not aligned we must be able
736 to emit an unaligned store. */
737 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
738 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
739 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
740 != CODE_FOR_nothing)))
743 tree desttype = type;
744 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
745 srctype = build_aligned_type (type, src_align);
746 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
747 tree tem = fold_const_aggregate_ref (srcmem);
750 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
751 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
753 && (optab_handler (movmisalign_optab,
755 == CODE_FOR_nothing))
760 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
762 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
764 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
766 gimple_assign_set_lhs (new_stmt, srcmem);
767 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
768 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
770 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
771 desttype = build_aligned_type (type, dest_align);
773 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
776 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
777 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
778 if (gimple_vdef (new_stmt)
779 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
780 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
783 gsi_replace (gsi, new_stmt, false);
786 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
795 /* Both DEST and SRC must be pointer types.
796 ??? This is what old code did. Is the testing for pointer types
799 If either SRC is readonly or length is 1, we can use memcpy. */
800 if (!dest_align || !src_align)
802 if (readonly_data_expr (src)
803 || (tree_fits_uhwi_p (len)
804 && (MIN (src_align, dest_align) / BITS_PER_UNIT
805 >= tree_to_uhwi (len))))
807 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
810 gimple_call_set_fndecl (stmt, fn);
811 gimple_call_set_arg (stmt, 0, dest);
812 gimple_call_set_arg (stmt, 1, src);
817 /* If *src and *dest can't overlap, optimize into memcpy as well. */
818 if (TREE_CODE (src) == ADDR_EXPR
819 && TREE_CODE (dest) == ADDR_EXPR)
821 tree src_base, dest_base, fn;
822 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
823 HOST_WIDE_INT maxsize;
825 srcvar = TREE_OPERAND (src, 0);
826 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
827 if (src_base == NULL)
829 destvar = TREE_OPERAND (dest, 0);
830 dest_base = get_addr_base_and_unit_offset (destvar,
832 if (dest_base == NULL)
834 if (tree_fits_uhwi_p (len))
835 maxsize = tree_to_uhwi (len);
838 if (SSA_VAR_P (src_base)
839 && SSA_VAR_P (dest_base))
841 if (operand_equal_p (src_base, dest_base, 0)
842 && ranges_overlap_p (src_offset, maxsize,
843 dest_offset, maxsize))
846 else if (TREE_CODE (src_base) == MEM_REF
847 && TREE_CODE (dest_base) == MEM_REF)
849 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
850 TREE_OPERAND (dest_base, 0), 0))
852 offset_int off = mem_ref_offset (src_base) + src_offset;
853 if (!wi::fits_shwi_p (off))
855 src_offset = off.to_shwi ();
857 off = mem_ref_offset (dest_base) + dest_offset;
858 if (!wi::fits_shwi_p (off))
860 dest_offset = off.to_shwi ();
861 if (ranges_overlap_p (src_offset, maxsize,
862 dest_offset, maxsize))
868 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
871 gimple_call_set_fndecl (stmt, fn);
872 gimple_call_set_arg (stmt, 0, dest);
873 gimple_call_set_arg (stmt, 1, src);
878 /* If the destination and source do not alias optimize into
880 if ((is_gimple_min_invariant (dest)
881 || TREE_CODE (dest) == SSA_NAME)
882 && (is_gimple_min_invariant (src)
883 || TREE_CODE (src) == SSA_NAME))
886 ao_ref_init_from_ptr_and_size (&destr, dest, len);
887 ao_ref_init_from_ptr_and_size (&srcr, src, len);
888 if (!refs_may_alias_p_1 (&destr, &srcr, false))
891 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
894 gimple_call_set_fndecl (stmt, fn);
895 gimple_call_set_arg (stmt, 0, dest);
896 gimple_call_set_arg (stmt, 1, src);
905 if (!tree_fits_shwi_p (len))
908 This logic lose for arguments like (type *)malloc (sizeof (type)),
909 since we strip the casts of up to VOID return value from malloc.
910 Perhaps we ought to inherit type from non-VOID argument here? */
913 if (!POINTER_TYPE_P (TREE_TYPE (src))
914 || !POINTER_TYPE_P (TREE_TYPE (dest)))
916 /* In the following try to find a type that is most natural to be
917 used for the memcpy source and destination and that allows
918 the most optimization when memcpy is turned into a plain assignment
919 using that type. In theory we could always use a char[len] type
920 but that only gains us that the destination and source possibly
921 no longer will have their address taken. */
922 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
923 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
925 tree tem = TREE_OPERAND (src, 0);
927 if (tem != TREE_OPERAND (src, 0))
928 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
930 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
932 tree tem = TREE_OPERAND (dest, 0);
934 if (tem != TREE_OPERAND (dest, 0))
935 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
937 srctype = TREE_TYPE (TREE_TYPE (src));
938 if (TREE_CODE (srctype) == ARRAY_TYPE
939 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
941 srctype = TREE_TYPE (srctype);
943 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
945 desttype = TREE_TYPE (TREE_TYPE (dest));
946 if (TREE_CODE (desttype) == ARRAY_TYPE
947 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
949 desttype = TREE_TYPE (desttype);
951 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
953 if (TREE_ADDRESSABLE (srctype)
954 || TREE_ADDRESSABLE (desttype))
957 /* Make sure we are not copying using a floating-point mode or
958 a type whose size possibly does not match its precision. */
959 if (FLOAT_MODE_P (TYPE_MODE (desttype))
960 || TREE_CODE (desttype) == BOOLEAN_TYPE
961 || TREE_CODE (desttype) == ENUMERAL_TYPE)
962 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
963 if (FLOAT_MODE_P (TYPE_MODE (srctype))
964 || TREE_CODE (srctype) == BOOLEAN_TYPE
965 || TREE_CODE (srctype) == ENUMERAL_TYPE)
966 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
974 src_align = get_pointer_alignment (src);
975 dest_align = get_pointer_alignment (dest);
976 if (dest_align < TYPE_ALIGN (desttype)
977 || src_align < TYPE_ALIGN (srctype))
981 STRIP_NOPS (destvar);
982 if (TREE_CODE (destvar) == ADDR_EXPR
983 && var_decl_component_p (TREE_OPERAND (destvar, 0))
984 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
985 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
991 if (TREE_CODE (srcvar) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
996 || src_align >= TYPE_ALIGN (desttype))
997 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
999 else if (!STRICT_ALIGNMENT)
1001 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1003 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1011 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 if (srcvar == NULL_TREE)
1017 if (src_align >= TYPE_ALIGN (desttype))
1018 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1021 if (STRICT_ALIGNMENT)
1023 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1028 else if (destvar == NULL_TREE)
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1035 if (STRICT_ALIGNMENT)
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1044 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1046 tree tem = fold_const_aggregate_ref (srcvar);
1049 if (! is_gimple_min_invariant (srcvar))
1051 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1052 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1054 gimple_assign_set_lhs (new_stmt, srcvar);
1055 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1056 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1059 new_stmt = gimple_build_assign (destvar, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1062 if (gimple_vdef (new_stmt)
1063 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1064 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1067 gsi_replace (gsi, new_stmt, false);
1070 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1074 gimple_seq stmts = NULL;
1075 if (endp == 0 || endp == 3)
1078 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1080 if (endp == 2 || endp == 1)
1082 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1083 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1084 TREE_TYPE (dest), dest, len);
1087 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1088 gimple *repl = gimple_build_assign (lhs, dest);
1089 gsi_replace (gsi, repl, false);
1093 /* Fold function call to builtin memset or bzero at *GSI setting the
1094 memory of size LEN to VAL. Return whether a simplification was made. */
1097 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1099 gimple *stmt = gsi_stmt (*gsi);
1101 unsigned HOST_WIDE_INT length, cval;
1103 /* If the LEN parameter is zero, return DEST. */
1104 if (integer_zerop (len))
1106 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1110 if (! tree_fits_uhwi_p (len))
1113 if (TREE_CODE (c) != INTEGER_CST)
1116 tree dest = gimple_call_arg (stmt, 0);
1118 if (TREE_CODE (var) != ADDR_EXPR)
1121 var = TREE_OPERAND (var, 0);
1122 if (TREE_THIS_VOLATILE (var))
1125 etype = TREE_TYPE (var);
1126 if (TREE_CODE (etype) == ARRAY_TYPE)
1127 etype = TREE_TYPE (etype);
1129 if (!INTEGRAL_TYPE_P (etype)
1130 && !POINTER_TYPE_P (etype))
1133 if (! var_decl_component_p (var))
1136 length = tree_to_uhwi (len);
1137 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1138 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1141 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1144 if (integer_zerop (c))
1148 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1151 cval = TREE_INT_CST_LOW (c);
1155 cval |= (cval << 31) << 1;
1158 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1159 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1160 gimple_set_vuse (store, gimple_vuse (stmt));
1161 tree vdef = gimple_vdef (stmt);
1162 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1164 gimple_set_vdef (store, gimple_vdef (stmt));
1165 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1167 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1168 if (gimple_call_lhs (stmt))
1170 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1171 gsi_replace (gsi, asgn, false);
1175 gimple_stmt_iterator gsi2 = *gsi;
1177 gsi_remove (&gsi2, true);
1184 /* Obtain the minimum and maximum string length or minimum and maximum
1185 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1186 If ARG is an SSA name variable, follow its use-def chains. When
1187 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1188 if we are unable to determine the length or value, return False.
1189 VISITED is a bitmap of visited variables.
1190 TYPE is 0 if string length should be obtained, 1 for maximum string
1191 length and 2 for maximum value ARG can have.
1192 When FUZZY is set and the length of a string cannot be determined,
1193 the function instead considers as the maximum possible length the
1194 size of a character array it may refer to. */
1197 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1203 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1204 but MINLEN may be cleared during the execution of the function. */
1205 tree *minlen = length;
1206 tree *const maxlen = length + 1;
1208 if (TREE_CODE (arg) != SSA_NAME)
1210 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1211 if (TREE_CODE (arg) == ADDR_EXPR
1212 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1213 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1215 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1216 if (TREE_CODE (aop0) == INDIRECT_REF
1217 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1218 return get_range_strlen (TREE_OPERAND (aop0, 0),
1219 length, visited, type, fuzzy);
1225 if (TREE_CODE (val) != INTEGER_CST
1226 || tree_int_cst_sgn (val) < 0)
1230 val = c_strlen (arg, 1);
1234 if (TREE_CODE (arg) == ADDR_EXPR)
1235 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1236 visited, type, fuzzy);
1238 if (TREE_CODE (arg) == COMPONENT_REF
1239 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1241 /* Use the type of the member array to determine the upper
1242 bound on the length of the array. This may be overly
1243 optimistic if the array itself isn't NUL-terminated and
1244 the caller relies on the subsequent member to contain
1246 arg = TREE_OPERAND (arg, 1);
1247 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1248 if (!val || integer_zerop (val))
1250 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1252 /* Avoid using the array size as the minimum. */
1263 && TREE_CODE (*minlen) == INTEGER_CST
1264 && TREE_CODE (val) == INTEGER_CST
1265 && tree_int_cst_lt (val, *minlen))))
1272 if (TREE_CODE (*maxlen) != INTEGER_CST
1273 || TREE_CODE (val) != INTEGER_CST)
1276 if (tree_int_cst_lt (*maxlen, val))
1280 else if (simple_cst_equal (val, *maxlen) != 1)
1288 /* If ARG is registered for SSA update we cannot look at its defining
1290 if (name_registered_for_update_p (arg))
1293 /* If we were already here, break the infinite cycle. */
1295 *visited = BITMAP_ALLOC (NULL);
1296 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1300 def_stmt = SSA_NAME_DEF_STMT (var);
1302 switch (gimple_code (def_stmt))
1305 /* The RHS of the statement defining VAR must either have a
1306 constant length or come from another SSA_NAME with a constant
1308 if (gimple_assign_single_p (def_stmt)
1309 || gimple_assign_unary_nop_p (def_stmt))
1311 tree rhs = gimple_assign_rhs1 (def_stmt);
1312 return get_range_strlen (rhs, length, visited, type, fuzzy);
1314 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1316 tree op2 = gimple_assign_rhs2 (def_stmt);
1317 tree op3 = gimple_assign_rhs3 (def_stmt);
1318 return get_range_strlen (op2, length, visited, type, fuzzy)
1319 && get_range_strlen (op3, length, visited, type, fuzzy);
1325 /* All the arguments of the PHI node must have the same constant
1329 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1331 tree arg = gimple_phi_arg (def_stmt, i)->def;
1333 /* If this PHI has itself as an argument, we cannot
1334 determine the string length of this argument. However,
1335 if we can find a constant string length for the other
1336 PHI args then we can still be sure that this is a
1337 constant string length. So be optimistic and just
1338 continue with the next argument. */
1339 if (arg == gimple_phi_result (def_stmt))
1342 if (!get_range_strlen (arg, length, visited, type, fuzzy))
1345 *maxlen = build_all_ones_cst (size_type_node);
1358 /* Determine the minimum and maximum value or string length that ARG
1359 refers to and store each in the first two elements of MINMAXLEN.
1360 For expressions that point to strings of unknown lengths that are
1361 character arrays, use the upper bound of the array as the maximum
1362 length. For example, given an expression like 'x ? array : "xyz"'
1363 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1364 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1368 void get_range_strlen (tree arg, tree minmaxlen[2])
1370 bitmap visited = NULL;
1372 minmaxlen[0] = NULL_TREE;
1373 minmaxlen[1] = NULL_TREE;
1375 get_range_strlen (arg, minmaxlen, &visited, 1, true);
1378 BITMAP_FREE (visited);
1382 get_maxval_strlen (tree arg, int type)
1384 bitmap visited = NULL;
1385 tree len[2] = { NULL_TREE, NULL_TREE };
1386 if (!get_range_strlen (arg, len, &visited, type, false))
1389 BITMAP_FREE (visited);
1395 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1396 If LEN is not NULL, it represents the length of the string to be
1397 copied. Return NULL_TREE if no simplification can be made. */
1400 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1401 tree dest, tree src)
1403 location_t loc = gimple_location (gsi_stmt (*gsi));
1406 /* If SRC and DEST are the same (and not volatile), return DEST. */
1407 if (operand_equal_p (src, dest, 0))
1409 replace_call_with_value (gsi, dest);
1413 if (optimize_function_for_size_p (cfun))
1416 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1420 tree len = get_maxval_strlen (src, 0);
1424 len = fold_convert_loc (loc, size_type_node, len);
1425 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1426 len = force_gimple_operand_gsi (gsi, len, true,
1427 NULL_TREE, true, GSI_SAME_STMT);
1428 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1429 replace_call_with_call_and_fold (gsi, repl);
1433 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1434 If SLEN is not NULL, it represents the length of the source string.
1435 Return NULL_TREE if no simplification can be made. */
1438 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1439 tree dest, tree src, tree len)
1441 location_t loc = gimple_location (gsi_stmt (*gsi));
1444 /* If the LEN parameter is zero, return DEST. */
1445 if (integer_zerop (len))
1447 replace_call_with_value (gsi, dest);
1451 /* We can't compare slen with len as constants below if len is not a
1453 if (TREE_CODE (len) != INTEGER_CST)
1456 /* Now, we must be passed a constant src ptr parameter. */
1457 tree slen = get_maxval_strlen (src, 0);
1458 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1461 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1463 /* We do not support simplification of this case, though we do
1464 support it when expanding trees into RTL. */
1465 /* FIXME: generate a call to __builtin_memset. */
1466 if (tree_int_cst_lt (slen, len))
1469 /* OK transform into builtin memcpy. */
1470 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1474 len = fold_convert_loc (loc, size_type_node, len);
1475 len = force_gimple_operand_gsi (gsi, len, true,
1476 NULL_TREE, true, GSI_SAME_STMT);
1477 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1478 replace_call_with_call_and_fold (gsi, repl);
1482 /* Fold function call to builtin strchr or strrchr.
1483 If both arguments are constant, evaluate and fold the result,
1484 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1485 In general strlen is significantly faster than strchr
1486 due to being a simpler operation. */
1488 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1490 gimple *stmt = gsi_stmt (*gsi);
1491 tree str = gimple_call_arg (stmt, 0);
1492 tree c = gimple_call_arg (stmt, 1);
1493 location_t loc = gimple_location (stmt);
1497 if (!gimple_call_lhs (stmt))
1500 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1502 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1506 replace_call_with_value (gsi, integer_zero_node);
1510 tree len = build_int_cst (size_type_node, p1 - p);
1511 gimple_seq stmts = NULL;
1512 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1513 POINTER_PLUS_EXPR, str, len);
1514 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1515 gsi_replace_with_seq_vops (gsi, stmts);
1519 if (!integer_zerop (c))
1522 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1523 if (optimize_function_for_size_p (cfun))
1525 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1527 if (is_strrchr && strchr_fn)
1529 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1530 replace_call_with_call_and_fold (gsi, repl);
1538 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1543 /* Create newstr = strlen (str). */
1544 gimple_seq stmts = NULL;
1545 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1546 gimple_set_location (new_stmt, loc);
1547 len = create_tmp_reg_or_ssa_name (size_type_node);
1548 gimple_call_set_lhs (new_stmt, len);
1549 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1551 /* Create (str p+ strlen (str)). */
1552 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1553 POINTER_PLUS_EXPR, str, len);
1554 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1555 gsi_replace_with_seq_vops (gsi, stmts);
1556 /* gsi now points at the assignment to the lhs, get a
1557 stmt iterator to the strlen.
1558 ??? We can't use gsi_for_stmt as that doesn't work when the
1559 CFG isn't built yet. */
1560 gimple_stmt_iterator gsi2 = *gsi;
1566 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1569 Return NULL_TREE if no simplification was possible, otherwise return the
1570 simplified form of the call as a tree.
1572 The simplified form may be a constant or other expression which
1573 computes the same value, but in a more efficient manner (including
1574 calls to other builtin functions).
1576 The call may contain arguments which need to be evaluated, but
1577 which are not useful to determine the result of the call. In
1578 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1579 COMPOUND_EXPR will be an argument which must be evaluated.
1580 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1581 COMPOUND_EXPR in the chain will contain the tree for the simplified
1582 form of the builtin function call. */
1585 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1587 gimple *stmt = gsi_stmt (*gsi);
1588 location_t loc = gimple_location (stmt);
1590 const char *p = c_getstr (src);
1592 /* If the string length is zero, return the dst parameter. */
1593 if (p && *p == '\0')
1595 replace_call_with_value (gsi, dst);
1599 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1602 /* See if we can store by pieces into (dst + strlen(dst)). */
1604 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1605 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1607 if (!strlen_fn || !memcpy_fn)
1610 /* If the length of the source string isn't computable don't
1611 split strcat into strlen and memcpy. */
1612 tree len = get_maxval_strlen (src, 0);
1616 /* Create strlen (dst). */
1617 gimple_seq stmts = NULL, stmts2;
1618 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1619 gimple_set_location (repl, loc);
1620 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1621 gimple_call_set_lhs (repl, newdst);
1622 gimple_seq_add_stmt_without_update (&stmts, repl);
1624 /* Create (dst p+ strlen (dst)). */
1625 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1626 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1627 gimple_seq_add_seq_without_update (&stmts, stmts2);
1629 len = fold_convert_loc (loc, size_type_node, len);
1630 len = size_binop_loc (loc, PLUS_EXPR, len,
1631 build_int_cst (size_type_node, 1));
1632 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1633 gimple_seq_add_seq_without_update (&stmts, stmts2);
1635 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1636 gimple_seq_add_stmt_without_update (&stmts, repl);
1637 if (gimple_call_lhs (stmt))
1639 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1640 gimple_seq_add_stmt_without_update (&stmts, repl);
1641 gsi_replace_with_seq_vops (gsi, stmts);
1642 /* gsi now points at the assignment to the lhs, get a
1643 stmt iterator to the memcpy call.
1644 ??? We can't use gsi_for_stmt as that doesn't work when the
1645 CFG isn't built yet. */
1646 gimple_stmt_iterator gsi2 = *gsi;
1652 gsi_replace_with_seq_vops (gsi, stmts);
1658 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1659 are the arguments to the call. */
1662 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1664 gimple *stmt = gsi_stmt (*gsi);
1665 tree dest = gimple_call_arg (stmt, 0);
1666 tree src = gimple_call_arg (stmt, 1);
1667 tree size = gimple_call_arg (stmt, 2);
1673 /* If the SRC parameter is "", return DEST. */
1674 if (p && *p == '\0')
1676 replace_call_with_value (gsi, dest);
1680 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1683 /* If __builtin_strcat_chk is used, assume strcat is available. */
1684 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1688 gimple *repl = gimple_build_call (fn, 2, dest, src);
1689 replace_call_with_call_and_fold (gsi, repl);
1693 /* Simplify a call to the strncat builtin. */
1696 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1698 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1699 tree dst = gimple_call_arg (stmt, 0);
1700 tree src = gimple_call_arg (stmt, 1);
1701 tree len = gimple_call_arg (stmt, 2);
1703 const char *p = c_getstr (src);
1705 /* If the requested length is zero, or the src parameter string
1706 length is zero, return the dst parameter. */
1707 if (integer_zerop (len) || (p && *p == '\0'))
1709 replace_call_with_value (gsi, dst);
1713 /* If the requested len is greater than or equal to the string
1714 length, call strcat. */
1715 if (TREE_CODE (len) == INTEGER_CST && p
1716 && compare_tree_int (len, strlen (p)) >= 0)
1718 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1720 /* If the replacement _DECL isn't initialized, don't do the
1725 gcall *repl = gimple_build_call (fn, 2, dst, src);
1726 replace_call_with_call_and_fold (gsi, repl);
1733 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1737 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1739 gimple *stmt = gsi_stmt (*gsi);
1740 tree dest = gimple_call_arg (stmt, 0);
1741 tree src = gimple_call_arg (stmt, 1);
1742 tree len = gimple_call_arg (stmt, 2);
1743 tree size = gimple_call_arg (stmt, 3);
1748 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1749 if ((p && *p == '\0')
1750 || integer_zerop (len))
1752 replace_call_with_value (gsi, dest);
1756 if (! tree_fits_uhwi_p (size))
1759 if (! integer_all_onesp (size))
1761 tree src_len = c_strlen (src, 1);
1763 && tree_fits_uhwi_p (src_len)
1764 && tree_fits_uhwi_p (len)
1765 && ! tree_int_cst_lt (len, src_len))
1767 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1768 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1772 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
1773 replace_call_with_call_and_fold (gsi, repl);
1779 /* If __builtin_strncat_chk is used, assume strncat is available. */
1780 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1784 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1785 replace_call_with_call_and_fold (gsi, repl);
1789 /* Build and append gimple statements to STMTS that would load a first
1790 character of a memory location identified by STR. LOC is location
1791 of the statement. */
1794 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
1798 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
1799 tree cst_uchar_ptr_node
1800 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
1801 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
1803 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
1804 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
1805 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
1807 gimple_assign_set_lhs (stmt, var);
1808 gimple_seq_add_stmt_without_update (stmts, stmt);
1813 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
1814 FCODE is the name of the builtin. */
1817 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
1819 gimple *stmt = gsi_stmt (*gsi);
1820 tree callee = gimple_call_fndecl (stmt);
1821 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
1823 tree type = integer_type_node;
1824 tree str1 = gimple_call_arg (stmt, 0);
1825 tree str2 = gimple_call_arg (stmt, 1);
1826 tree lhs = gimple_call_lhs (stmt);
1827 HOST_WIDE_INT length = -1;
1829 /* Handle strncmp and strncasecmp functions. */
1830 if (gimple_call_num_args (stmt) == 3)
1832 tree len = gimple_call_arg (stmt, 2);
1833 if (tree_fits_uhwi_p (len))
1834 length = tree_to_uhwi (len);
1837 /* If the LEN parameter is zero, return zero. */
1840 replace_call_with_value (gsi, integer_zero_node);
1844 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
1845 if (operand_equal_p (str1, str2, 0))
1847 replace_call_with_value (gsi, integer_zero_node);
1851 const char *p1 = c_getstr (str1);
1852 const char *p2 = c_getstr (str2);
1854 /* For known strings, return an immediate value. */
1858 bool known_result = false;
1862 case BUILT_IN_STRCMP:
1864 r = strcmp (p1, p2);
1865 known_result = true;
1868 case BUILT_IN_STRNCMP:
1872 r = strncmp (p1, p2, length);
1873 known_result = true;
1876 /* Only handleable situation is where the string are equal (result 0),
1877 which is already handled by operand_equal_p case. */
1878 case BUILT_IN_STRCASECMP:
1880 case BUILT_IN_STRNCASECMP:
1884 r = strncmp (p1, p2, length);
1886 known_result = true;
1895 replace_call_with_value (gsi, build_cmp_result (type, r));
1900 bool nonzero_length = length >= 1
1901 || fcode == BUILT_IN_STRCMP
1902 || fcode == BUILT_IN_STRCASECMP;
1904 location_t loc = gimple_location (stmt);
1906 /* If the second arg is "", return *(const unsigned char*)arg1. */
1907 if (p2 && *p2 == '\0' && nonzero_length)
1909 gimple_seq stmts = NULL;
1910 tree var = gimple_load_first_char (loc, str1, &stmts);
1913 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
1914 gimple_seq_add_stmt_without_update (&stmts, stmt);
1917 gsi_replace_with_seq_vops (gsi, stmts);
1921 /* If the first arg is "", return -*(const unsigned char*)arg2. */
1922 if (p1 && *p1 == '\0' && nonzero_length)
1924 gimple_seq stmts = NULL;
1925 tree var = gimple_load_first_char (loc, str2, &stmts);
1929 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
1930 stmt = gimple_build_assign (c, NOP_EXPR, var);
1931 gimple_seq_add_stmt_without_update (&stmts, stmt);
1933 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
1934 gimple_seq_add_stmt_without_update (&stmts, stmt);
1937 gsi_replace_with_seq_vops (gsi, stmts);
1941 /* If len parameter is one, return an expression corresponding to
1942 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
1943 if (fcode == BUILT_IN_STRNCMP && length == 1)
1945 gimple_seq stmts = NULL;
1946 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
1947 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
1951 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
1952 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
1953 gimple_seq_add_stmt_without_update (&stmts, convert1);
1955 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
1956 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
1957 gimple_seq_add_stmt_without_update (&stmts, convert2);
1959 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
1960 gimple_seq_add_stmt_without_update (&stmts, stmt);
1963 gsi_replace_with_seq_vops (gsi, stmts);
1971 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1972 to the call. IGNORE is true if the value returned
1973 by the builtin will be ignored. UNLOCKED is true is true if this
1974 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1975 the known length of the string. Return NULL_TREE if no simplification
1979 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1980 tree arg0, tree arg1,
1983 gimple *stmt = gsi_stmt (*gsi);
1985 /* If we're using an unlocked function, assume the other unlocked
1986 functions exist explicitly. */
1987 tree const fn_fputc = (unlocked
1988 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1989 : builtin_decl_implicit (BUILT_IN_FPUTC));
1990 tree const fn_fwrite = (unlocked
1991 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1992 : builtin_decl_implicit (BUILT_IN_FWRITE));
1994 /* If the return value is used, don't do the transformation. */
1995 if (gimple_call_lhs (stmt))
1998 /* Get the length of the string passed to fputs. If the length
1999 can't be determined, punt. */
2000 tree len = get_maxval_strlen (arg0, 0);
2002 || TREE_CODE (len) != INTEGER_CST)
2005 switch (compare_tree_int (len, 1))
2007 case -1: /* length is 0, delete the call entirely . */
2008 replace_call_with_value (gsi, integer_zero_node);
2011 case 0: /* length is 1, call fputc. */
2013 const char *p = c_getstr (arg0);
2019 gimple *repl = gimple_build_call (fn_fputc, 2,
2021 (integer_type_node, p[0]), arg1);
2022 replace_call_with_call_and_fold (gsi, repl);
2027 case 1: /* length is greater than 1, call fwrite. */
2029 /* If optimizing for size keep fputs. */
2030 if (optimize_function_for_size_p (cfun))
2032 /* New argument list transforming fputs(string, stream) to
2033 fwrite(string, 1, len, stream). */
2037 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2038 size_one_node, len, arg1);
2039 replace_call_with_call_and_fold (gsi, repl);
2048 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2049 DEST, SRC, LEN, and SIZE are the arguments to the call.
2050 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2051 code of the builtin. If MAXLEN is not NULL, it is maximum length
2052 passed as third argument. */
2055 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2056 tree dest, tree src, tree len, tree size,
2057 enum built_in_function fcode)
2059 gimple *stmt = gsi_stmt (*gsi);
2060 location_t loc = gimple_location (stmt);
2061 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2064 /* If SRC and DEST are the same (and not volatile), return DEST
2065 (resp. DEST+LEN for __mempcpy_chk). */
2066 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2068 if (fcode != BUILT_IN_MEMPCPY_CHK)
2070 replace_call_with_value (gsi, dest);
2075 gimple_seq stmts = NULL;
2076 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2077 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2078 TREE_TYPE (dest), dest, len);
2079 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2080 replace_call_with_value (gsi, temp);
2085 if (! tree_fits_uhwi_p (size))
2088 tree maxlen = get_maxval_strlen (len, 2);
2089 if (! integer_all_onesp (size))
2091 if (! tree_fits_uhwi_p (len))
2093 /* If LEN is not constant, try MAXLEN too.
2094 For MAXLEN only allow optimizing into non-_ocs function
2095 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2096 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2098 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2100 /* (void) __mempcpy_chk () can be optimized into
2101 (void) __memcpy_chk (). */
2102 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2106 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2107 replace_call_with_call_and_fold (gsi, repl);
2116 if (tree_int_cst_lt (size, maxlen))
2121 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2122 mem{cpy,pcpy,move,set} is available. */
2125 case BUILT_IN_MEMCPY_CHK:
2126 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2128 case BUILT_IN_MEMPCPY_CHK:
2129 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2131 case BUILT_IN_MEMMOVE_CHK:
2132 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2134 case BUILT_IN_MEMSET_CHK:
2135 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2144 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2145 replace_call_with_call_and_fold (gsi, repl);
2149 /* Fold a call to the __st[rp]cpy_chk builtin.
2150 DEST, SRC, and SIZE are the arguments to the call.
2151 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2152 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2153 strings passed as second argument. */
2156 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2158 tree src, tree size,
2159 enum built_in_function fcode)
2161 gimple *stmt = gsi_stmt (*gsi);
2162 location_t loc = gimple_location (stmt);
2163 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2166 /* If SRC and DEST are the same (and not volatile), return DEST. */
2167 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2169 replace_call_with_value (gsi, dest);
2173 if (! tree_fits_uhwi_p (size))
2176 tree maxlen = get_maxval_strlen (src, 1);
2177 if (! integer_all_onesp (size))
2179 len = c_strlen (src, 1);
2180 if (! len || ! tree_fits_uhwi_p (len))
2182 /* If LEN is not constant, try MAXLEN too.
2183 For MAXLEN only allow optimizing into non-_ocs function
2184 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2185 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2187 if (fcode == BUILT_IN_STPCPY_CHK)
2192 /* If return value of __stpcpy_chk is ignored,
2193 optimize into __strcpy_chk. */
2194 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2198 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2199 replace_call_with_call_and_fold (gsi, repl);
2203 if (! len || TREE_SIDE_EFFECTS (len))
2206 /* If c_strlen returned something, but not a constant,
2207 transform __strcpy_chk into __memcpy_chk. */
2208 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2212 gimple_seq stmts = NULL;
2213 len = gimple_convert (&stmts, loc, size_type_node, len);
2214 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2215 build_int_cst (size_type_node, 1));
2216 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2217 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2218 replace_call_with_call_and_fold (gsi, repl);
2225 if (! tree_int_cst_lt (maxlen, size))
2229 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2230 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2231 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2235 gimple *repl = gimple_build_call (fn, 2, dest, src);
2236 replace_call_with_call_and_fold (gsi, repl);
2240 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2241 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2242 length passed as third argument. IGNORE is true if return value can be
2243 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2246 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2247 tree dest, tree src,
2248 tree len, tree size,
2249 enum built_in_function fcode)
2251 gimple *stmt = gsi_stmt (*gsi);
2252 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2255 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2257 /* If return value of __stpncpy_chk is ignored,
2258 optimize into __strncpy_chk. */
2259 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2262 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2263 replace_call_with_call_and_fold (gsi, repl);
2268 if (! tree_fits_uhwi_p (size))
2271 tree maxlen = get_maxval_strlen (len, 2);
2272 if (! integer_all_onesp (size))
2274 if (! tree_fits_uhwi_p (len))
2276 /* If LEN is not constant, try MAXLEN too.
2277 For MAXLEN only allow optimizing into non-_ocs function
2278 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2279 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2285 if (tree_int_cst_lt (size, maxlen))
2289 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2290 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2291 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2295 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2296 replace_call_with_call_and_fold (gsi, repl);
2300 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2301 Return NULL_TREE if no simplification can be made. */
2304 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2306 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2307 location_t loc = gimple_location (stmt);
2308 tree dest = gimple_call_arg (stmt, 0);
2309 tree src = gimple_call_arg (stmt, 1);
2310 tree fn, len, lenp1;
2312 /* If the result is unused, replace stpcpy with strcpy. */
2313 if (gimple_call_lhs (stmt) == NULL_TREE)
2315 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2318 gimple_call_set_fndecl (stmt, fn);
2323 len = c_strlen (src, 1);
2325 || TREE_CODE (len) != INTEGER_CST)
2328 if (optimize_function_for_size_p (cfun)
2329 /* If length is zero it's small enough. */
2330 && !integer_zerop (len))
2333 /* If the source has a known length replace stpcpy with memcpy. */
2334 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2338 gimple_seq stmts = NULL;
2339 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2340 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2341 tem, build_int_cst (size_type_node, 1));
2342 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2343 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2344 gimple_set_vuse (repl, gimple_vuse (stmt));
2345 gimple_set_vdef (repl, gimple_vdef (stmt));
2346 if (gimple_vdef (repl)
2347 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2348 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2349 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2350 /* Replace the result with dest + len. */
2352 tem = gimple_convert (&stmts, loc, sizetype, len);
2353 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2354 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2355 POINTER_PLUS_EXPR, dest, tem);
2356 gsi_replace (gsi, ret, false);
2357 /* Finally fold the memcpy call. */
2358 gimple_stmt_iterator gsi2 = *gsi;
2364 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2365 NULL_TREE if a normal call should be emitted rather than expanding
2366 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2367 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2368 passed as second argument. */
2371 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2372 enum built_in_function fcode)
2374 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2375 tree dest, size, len, fn, fmt, flag;
2376 const char *fmt_str;
2378 /* Verify the required arguments in the original call. */
2379 if (gimple_call_num_args (stmt) < 5)
2382 dest = gimple_call_arg (stmt, 0);
2383 len = gimple_call_arg (stmt, 1);
2384 flag = gimple_call_arg (stmt, 2);
2385 size = gimple_call_arg (stmt, 3);
2386 fmt = gimple_call_arg (stmt, 4);
2388 if (! tree_fits_uhwi_p (size))
2391 if (! integer_all_onesp (size))
2393 tree maxlen = get_maxval_strlen (len, 2);
2394 if (! tree_fits_uhwi_p (len))
2396 /* If LEN is not constant, try MAXLEN too.
2397 For MAXLEN only allow optimizing into non-_ocs function
2398 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2399 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2405 if (tree_int_cst_lt (size, maxlen))
2409 if (!init_target_chars ())
2412 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2413 or if format doesn't contain % chars or is "%s". */
2414 if (! integer_zerop (flag))
2416 fmt_str = c_getstr (fmt);
2417 if (fmt_str == NULL)
2419 if (strchr (fmt_str, target_percent) != NULL
2420 && strcmp (fmt_str, target_percent_s))
2424 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2426 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2427 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2431 /* Replace the called function and the first 5 argument by 3 retaining
2432 trailing varargs. */
2433 gimple_call_set_fndecl (stmt, fn);
2434 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2435 gimple_call_set_arg (stmt, 0, dest);
2436 gimple_call_set_arg (stmt, 1, len);
2437 gimple_call_set_arg (stmt, 2, fmt);
2438 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2439 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2440 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2445 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2446 Return NULL_TREE if a normal call should be emitted rather than
2447 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2448 or BUILT_IN_VSPRINTF_CHK. */
2451 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2452 enum built_in_function fcode)
2454 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2455 tree dest, size, len, fn, fmt, flag;
2456 const char *fmt_str;
2457 unsigned nargs = gimple_call_num_args (stmt);
2459 /* Verify the required arguments in the original call. */
2462 dest = gimple_call_arg (stmt, 0);
2463 flag = gimple_call_arg (stmt, 1);
2464 size = gimple_call_arg (stmt, 2);
2465 fmt = gimple_call_arg (stmt, 3);
2467 if (! tree_fits_uhwi_p (size))
2472 if (!init_target_chars ())
2475 /* Check whether the format is a literal string constant. */
2476 fmt_str = c_getstr (fmt);
2477 if (fmt_str != NULL)
2479 /* If the format doesn't contain % args or %%, we know the size. */
2480 if (strchr (fmt_str, target_percent) == 0)
2482 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2483 len = build_int_cstu (size_type_node, strlen (fmt_str));
2485 /* If the format is "%s" and first ... argument is a string literal,
2486 we know the size too. */
2487 else if (fcode == BUILT_IN_SPRINTF_CHK
2488 && strcmp (fmt_str, target_percent_s) == 0)
2494 arg = gimple_call_arg (stmt, 4);
2495 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2497 len = c_strlen (arg, 1);
2498 if (! len || ! tree_fits_uhwi_p (len))
2505 if (! integer_all_onesp (size))
2507 if (! len || ! tree_int_cst_lt (len, size))
2511 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2512 or if format doesn't contain % chars or is "%s". */
2513 if (! integer_zerop (flag))
2515 if (fmt_str == NULL)
2517 if (strchr (fmt_str, target_percent) != NULL
2518 && strcmp (fmt_str, target_percent_s))
2522 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2523 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2524 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2528 /* Replace the called function and the first 4 argument by 2 retaining
2529 trailing varargs. */
2530 gimple_call_set_fndecl (stmt, fn);
2531 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2532 gimple_call_set_arg (stmt, 0, dest);
2533 gimple_call_set_arg (stmt, 1, fmt);
2534 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2535 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2536 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2541 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2542 ORIG may be null if this is a 2-argument call. We don't attempt to
2543 simplify calls with more than 3 arguments.
2545 Return NULL_TREE if no simplification was possible, otherwise return the
2546 simplified form of the call as a tree. If IGNORED is true, it means that
2547 the caller does not use the returned value of the function. */
2550 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2552 gimple *stmt = gsi_stmt (*gsi);
2553 tree dest = gimple_call_arg (stmt, 0);
2554 tree fmt = gimple_call_arg (stmt, 1);
2555 tree orig = NULL_TREE;
2556 const char *fmt_str = NULL;
2558 /* Verify the required arguments in the original call. We deal with two
2559 types of sprintf() calls: 'sprintf (str, fmt)' and
2560 'sprintf (dest, "%s", orig)'. */
2561 if (gimple_call_num_args (stmt) > 3)
2564 if (gimple_call_num_args (stmt) == 3)
2565 orig = gimple_call_arg (stmt, 2);
2567 /* Check whether the format is a literal string constant. */
2568 fmt_str = c_getstr (fmt);
2569 if (fmt_str == NULL)
2572 if (!init_target_chars ())
2575 /* If the format doesn't contain % args or %%, use strcpy. */
2576 if (strchr (fmt_str, target_percent) == NULL)
2578 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2583 /* Don't optimize sprintf (buf, "abc", ptr++). */
2587 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2588 'format' is known to contain no % formats. */
2589 gimple_seq stmts = NULL;
2590 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2591 gimple_seq_add_stmt_without_update (&stmts, repl);
2592 if (gimple_call_lhs (stmt))
2594 repl = gimple_build_assign (gimple_call_lhs (stmt),
2595 build_int_cst (integer_type_node,
2597 gimple_seq_add_stmt_without_update (&stmts, repl);
2598 gsi_replace_with_seq_vops (gsi, stmts);
2599 /* gsi now points at the assignment to the lhs, get a
2600 stmt iterator to the memcpy call.
2601 ??? We can't use gsi_for_stmt as that doesn't work when the
2602 CFG isn't built yet. */
2603 gimple_stmt_iterator gsi2 = *gsi;
2609 gsi_replace_with_seq_vops (gsi, stmts);
2615 /* If the format is "%s", use strcpy if the result isn't used. */
2616 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2619 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2624 /* Don't crash on sprintf (str1, "%s"). */
2628 tree orig_len = NULL_TREE;
2629 if (gimple_call_lhs (stmt))
2631 orig_len = get_maxval_strlen (orig, 0);
2636 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2637 gimple_seq stmts = NULL;
2638 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2639 gimple_seq_add_stmt_without_update (&stmts, repl);
2640 if (gimple_call_lhs (stmt))
2642 if (!useless_type_conversion_p (integer_type_node,
2643 TREE_TYPE (orig_len)))
2644 orig_len = fold_convert (integer_type_node, orig_len);
2645 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2646 gimple_seq_add_stmt_without_update (&stmts, repl);
2647 gsi_replace_with_seq_vops (gsi, stmts);
2648 /* gsi now points at the assignment to the lhs, get a
2649 stmt iterator to the memcpy call.
2650 ??? We can't use gsi_for_stmt as that doesn't work when the
2651 CFG isn't built yet. */
2652 gimple_stmt_iterator gsi2 = *gsi;
2658 gsi_replace_with_seq_vops (gsi, stmts);
2666 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2667 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2668 attempt to simplify calls with more than 4 arguments.
2670 Return NULL_TREE if no simplification was possible, otherwise return the
2671 simplified form of the call as a tree. If IGNORED is true, it means that
2672 the caller does not use the returned value of the function. */
2675 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2677 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2678 tree dest = gimple_call_arg (stmt, 0);
2679 tree destsize = gimple_call_arg (stmt, 1);
2680 tree fmt = gimple_call_arg (stmt, 2);
2681 tree orig = NULL_TREE;
2682 const char *fmt_str = NULL;
2684 if (gimple_call_num_args (stmt) > 4)
2687 if (gimple_call_num_args (stmt) == 4)
2688 orig = gimple_call_arg (stmt, 3);
2690 if (!tree_fits_uhwi_p (destsize))
2692 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2694 /* Check whether the format is a literal string constant. */
2695 fmt_str = c_getstr (fmt);
2696 if (fmt_str == NULL)
2699 if (!init_target_chars ())
2702 /* If the format doesn't contain % args or %%, use strcpy. */
2703 if (strchr (fmt_str, target_percent) == NULL)
2705 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2709 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2713 /* We could expand this as
2714 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2716 memcpy (str, fmt_with_nul_at_cstm1, cst);
2717 but in the former case that might increase code size
2718 and in the latter case grow .rodata section too much.
2720 size_t len = strlen (fmt_str);
2724 gimple_seq stmts = NULL;
2725 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2726 gimple_seq_add_stmt_without_update (&stmts, repl);
2727 if (gimple_call_lhs (stmt))
2729 repl = gimple_build_assign (gimple_call_lhs (stmt),
2730 build_int_cst (integer_type_node, len));
2731 gimple_seq_add_stmt_without_update (&stmts, repl);
2732 gsi_replace_with_seq_vops (gsi, stmts);
2733 /* gsi now points at the assignment to the lhs, get a
2734 stmt iterator to the memcpy call.
2735 ??? We can't use gsi_for_stmt as that doesn't work when the
2736 CFG isn't built yet. */
2737 gimple_stmt_iterator gsi2 = *gsi;
2743 gsi_replace_with_seq_vops (gsi, stmts);
2749 /* If the format is "%s", use strcpy if the result isn't used. */
2750 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2752 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2756 /* Don't crash on snprintf (str1, cst, "%s"). */
2760 tree orig_len = get_maxval_strlen (orig, 0);
2761 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2764 /* We could expand this as
2765 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2767 memcpy (str1, str2_with_nul_at_cstm1, cst);
2768 but in the former case that might increase code size
2769 and in the latter case grow .rodata section too much.
2771 if (compare_tree_int (orig_len, destlen) >= 0)
2774 /* Convert snprintf (str1, cst, "%s", str2) into
2775 strcpy (str1, str2) if strlen (str2) < cst. */
2776 gimple_seq stmts = NULL;
2777 gimple *repl = gimple_build_call (fn, 2, dest, orig);
2778 gimple_seq_add_stmt_without_update (&stmts, repl);
2779 if (gimple_call_lhs (stmt))
2781 if (!useless_type_conversion_p (integer_type_node,
2782 TREE_TYPE (orig_len)))
2783 orig_len = fold_convert (integer_type_node, orig_len);
2784 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2785 gimple_seq_add_stmt_without_update (&stmts, repl);
2786 gsi_replace_with_seq_vops (gsi, stmts);
2787 /* gsi now points at the assignment to the lhs, get a
2788 stmt iterator to the memcpy call.
2789 ??? We can't use gsi_for_stmt as that doesn't work when the
2790 CFG isn't built yet. */
2791 gimple_stmt_iterator gsi2 = *gsi;
2797 gsi_replace_with_seq_vops (gsi, stmts);
2805 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2806 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2807 more than 3 arguments, and ARG may be null in the 2-argument case.
2809 Return NULL_TREE if no simplification was possible, otherwise return the
2810 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2811 code of the function to be simplified. */
2814 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2815 tree fp, tree fmt, tree arg,
2816 enum built_in_function fcode)
2818 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2819 tree fn_fputc, fn_fputs;
2820 const char *fmt_str = NULL;
2822 /* If the return value is used, don't do the transformation. */
2823 if (gimple_call_lhs (stmt) != NULL_TREE)
2826 /* Check whether the format is a literal string constant. */
2827 fmt_str = c_getstr (fmt);
2828 if (fmt_str == NULL)
2831 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2833 /* If we're using an unlocked function, assume the other
2834 unlocked functions exist explicitly. */
2835 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2836 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2840 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2841 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2844 if (!init_target_chars ())
2847 /* If the format doesn't contain % args or %%, use strcpy. */
2848 if (strchr (fmt_str, target_percent) == NULL)
2850 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2854 /* If the format specifier was "", fprintf does nothing. */
2855 if (fmt_str[0] == '\0')
2857 replace_call_with_value (gsi, NULL_TREE);
2861 /* When "string" doesn't contain %, replace all cases of
2862 fprintf (fp, string) with fputs (string, fp). The fputs
2863 builtin will take care of special cases like length == 1. */
2866 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2867 replace_call_with_call_and_fold (gsi, repl);
2872 /* The other optimizations can be done only on the non-va_list variants. */
2873 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2876 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2877 else if (strcmp (fmt_str, target_percent_s) == 0)
2879 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2883 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2884 replace_call_with_call_and_fold (gsi, repl);
2889 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2890 else if (strcmp (fmt_str, target_percent_c) == 0)
2893 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2897 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2898 replace_call_with_call_and_fold (gsi, repl);
2906 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2907 FMT and ARG are the arguments to the call; we don't fold cases with
2908 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2910 Return NULL_TREE if no simplification was possible, otherwise return the
2911 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2912 code of the function to be simplified. */
2915 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2916 tree arg, enum built_in_function fcode)
2918 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2919 tree fn_putchar, fn_puts, newarg;
2920 const char *fmt_str = NULL;
2922 /* If the return value is used, don't do the transformation. */
2923 if (gimple_call_lhs (stmt) != NULL_TREE)
2926 /* Check whether the format is a literal string constant. */
2927 fmt_str = c_getstr (fmt);
2928 if (fmt_str == NULL)
2931 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2933 /* If we're using an unlocked function, assume the other
2934 unlocked functions exist explicitly. */
2935 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2936 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2940 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2941 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2944 if (!init_target_chars ())
2947 if (strcmp (fmt_str, target_percent_s) == 0
2948 || strchr (fmt_str, target_percent) == NULL)
2952 if (strcmp (fmt_str, target_percent_s) == 0)
2954 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2957 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2960 str = c_getstr (arg);
2966 /* The format specifier doesn't contain any '%' characters. */
2967 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2973 /* If the string was "", printf does nothing. */
2976 replace_call_with_value (gsi, NULL_TREE);
2980 /* If the string has length of 1, call putchar. */
2983 /* Given printf("c"), (where c is any one character,)
2984 convert "c"[0] to an int and pass that to the replacement
2986 newarg = build_int_cst (integer_type_node, str[0]);
2989 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2990 replace_call_with_call_and_fold (gsi, repl);
2996 /* If the string was "string\n", call puts("string"). */
2997 size_t len = strlen (str);
2998 if ((unsigned char)str[len - 1] == target_newline
2999 && (size_t) (int) len == len
3003 tree offset_node, string_cst;
3005 /* Create a NUL-terminated string that's one char shorter
3006 than the original, stripping off the trailing '\n'. */
3007 newarg = build_string_literal (len, str);
3008 string_cst = string_constant (newarg, &offset_node);
3009 gcc_checking_assert (string_cst
3010 && (TREE_STRING_LENGTH (string_cst)
3012 && integer_zerop (offset_node)
3014 TREE_STRING_POINTER (string_cst)[len - 1]
3016 /* build_string_literal creates a new STRING_CST,
3017 modify it in place to avoid double copying. */
3018 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3019 newstr[len - 1] = '\0';
3022 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3023 replace_call_with_call_and_fold (gsi, repl);
3028 /* We'd like to arrange to call fputs(string,stdout) here,
3029 but we need stdout and don't have a way to get it yet. */
3034 /* The other optimizations can be done only on the non-va_list variants. */
3035 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3038 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3039 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3041 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3045 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3046 replace_call_with_call_and_fold (gsi, repl);
3051 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3052 else if (strcmp (fmt_str, target_percent_c) == 0)
3054 if (!arg || ! useless_type_conversion_p (integer_type_node,
3059 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3060 replace_call_with_call_and_fold (gsi, repl);
3070 /* Fold a call to __builtin_strlen with known length LEN. */
3073 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3075 gimple *stmt = gsi_stmt (*gsi);
3076 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3079 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3080 replace_call_with_value (gsi, len);
3084 /* Fold a call to __builtin_acc_on_device. */
3087 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3089 /* Defer folding until we know which compiler we're in. */
3090 if (symtab->state != EXPANSION)
3093 unsigned val_host = GOMP_DEVICE_HOST;
3094 unsigned val_dev = GOMP_DEVICE_NONE;
3096 #ifdef ACCEL_COMPILER
3097 val_host = GOMP_DEVICE_NOT_HOST;
3098 val_dev = ACCEL_COMPILER_acc_device;
3101 location_t loc = gimple_location (gsi_stmt (*gsi));
3103 tree host_eq = make_ssa_name (boolean_type_node);
3104 gimple *host_ass = gimple_build_assign
3105 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3106 gimple_set_location (host_ass, loc);
3107 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3109 tree dev_eq = make_ssa_name (boolean_type_node);
3110 gimple *dev_ass = gimple_build_assign
3111 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3112 gimple_set_location (dev_ass, loc);
3113 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3115 tree result = make_ssa_name (boolean_type_node);
3116 gimple *result_ass = gimple_build_assign
3117 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3118 gimple_set_location (result_ass, loc);
3119 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3121 replace_call_with_value (gsi, result);
3126 /* Fold the non-target builtin at *GSI and return whether any simplification
3130 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3132 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3133 tree callee = gimple_call_fndecl (stmt);
3135 /* Give up for always_inline inline builtins until they are
3137 if (avoid_folding_inline_builtin (callee))
3140 unsigned n = gimple_call_num_args (stmt);
3141 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3144 case BUILT_IN_BZERO:
3145 return gimple_fold_builtin_memset (gsi, integer_zero_node,
3146 gimple_call_arg (stmt, 1));
3147 case BUILT_IN_MEMSET:
3148 return gimple_fold_builtin_memset (gsi,
3149 gimple_call_arg (stmt, 1),
3150 gimple_call_arg (stmt, 2));
3151 case BUILT_IN_BCOPY:
3152 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
3153 gimple_call_arg (stmt, 0), 3);
3154 case BUILT_IN_MEMCPY:
3155 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3156 gimple_call_arg (stmt, 1), 0);
3157 case BUILT_IN_MEMPCPY:
3158 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3159 gimple_call_arg (stmt, 1), 1);
3160 case BUILT_IN_MEMMOVE:
3161 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3162 gimple_call_arg (stmt, 1), 3);
3163 case BUILT_IN_SPRINTF_CHK:
3164 case BUILT_IN_VSPRINTF_CHK:
3165 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3166 case BUILT_IN_STRCAT_CHK:
3167 return gimple_fold_builtin_strcat_chk (gsi);
3168 case BUILT_IN_STRNCAT_CHK:
3169 return gimple_fold_builtin_strncat_chk (gsi);
3170 case BUILT_IN_STRLEN:
3171 return gimple_fold_builtin_strlen (gsi);
3172 case BUILT_IN_STRCPY:
3173 return gimple_fold_builtin_strcpy (gsi,
3174 gimple_call_arg (stmt, 0),
3175 gimple_call_arg (stmt, 1));
3176 case BUILT_IN_STRNCPY:
3177 return gimple_fold_builtin_strncpy (gsi,
3178 gimple_call_arg (stmt, 0),
3179 gimple_call_arg (stmt, 1),
3180 gimple_call_arg (stmt, 2));
3181 case BUILT_IN_STRCAT:
3182 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3183 gimple_call_arg (stmt, 1));
3184 case BUILT_IN_STRNCAT:
3185 return gimple_fold_builtin_strncat (gsi);
3186 case BUILT_IN_INDEX:
3187 case BUILT_IN_STRCHR:
3188 return gimple_fold_builtin_strchr (gsi, false);
3189 case BUILT_IN_RINDEX:
3190 case BUILT_IN_STRRCHR:
3191 return gimple_fold_builtin_strchr (gsi, true);
3192 case BUILT_IN_STRCMP:
3193 case BUILT_IN_STRCASECMP:
3194 case BUILT_IN_STRNCMP:
3195 case BUILT_IN_STRNCASECMP:
3196 return gimple_fold_builtin_string_compare (gsi);
3197 case BUILT_IN_FPUTS:
3198 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3199 gimple_call_arg (stmt, 1), false);
3200 case BUILT_IN_FPUTS_UNLOCKED:
3201 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3202 gimple_call_arg (stmt, 1), true);
3203 case BUILT_IN_MEMCPY_CHK:
3204 case BUILT_IN_MEMPCPY_CHK:
3205 case BUILT_IN_MEMMOVE_CHK:
3206 case BUILT_IN_MEMSET_CHK:
3207 return gimple_fold_builtin_memory_chk (gsi,
3208 gimple_call_arg (stmt, 0),
3209 gimple_call_arg (stmt, 1),
3210 gimple_call_arg (stmt, 2),
3211 gimple_call_arg (stmt, 3),
3213 case BUILT_IN_STPCPY:
3214 return gimple_fold_builtin_stpcpy (gsi);
3215 case BUILT_IN_STRCPY_CHK:
3216 case BUILT_IN_STPCPY_CHK:
3217 return gimple_fold_builtin_stxcpy_chk (gsi,
3218 gimple_call_arg (stmt, 0),
3219 gimple_call_arg (stmt, 1),
3220 gimple_call_arg (stmt, 2),
3222 case BUILT_IN_STRNCPY_CHK:
3223 case BUILT_IN_STPNCPY_CHK:
3224 return gimple_fold_builtin_stxncpy_chk (gsi,
3225 gimple_call_arg (stmt, 0),
3226 gimple_call_arg (stmt, 1),
3227 gimple_call_arg (stmt, 2),
3228 gimple_call_arg (stmt, 3),
3230 case BUILT_IN_SNPRINTF_CHK:
3231 case BUILT_IN_VSNPRINTF_CHK:
3232 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3233 case BUILT_IN_SNPRINTF:
3234 return gimple_fold_builtin_snprintf (gsi);
3235 case BUILT_IN_SPRINTF:
3236 return gimple_fold_builtin_sprintf (gsi);
3237 case BUILT_IN_FPRINTF:
3238 case BUILT_IN_FPRINTF_UNLOCKED:
3239 case BUILT_IN_VFPRINTF:
3240 if (n == 2 || n == 3)
3241 return gimple_fold_builtin_fprintf (gsi,
3242 gimple_call_arg (stmt, 0),
3243 gimple_call_arg (stmt, 1),
3245 ? gimple_call_arg (stmt, 2)
3249 case BUILT_IN_FPRINTF_CHK:
3250 case BUILT_IN_VFPRINTF_CHK:
3251 if (n == 3 || n == 4)
3252 return gimple_fold_builtin_fprintf (gsi,
3253 gimple_call_arg (stmt, 0),
3254 gimple_call_arg (stmt, 2),
3256 ? gimple_call_arg (stmt, 3)
3260 case BUILT_IN_PRINTF:
3261 case BUILT_IN_PRINTF_UNLOCKED:
3262 case BUILT_IN_VPRINTF:
3263 if (n == 1 || n == 2)
3264 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3266 ? gimple_call_arg (stmt, 1)
3267 : NULL_TREE, fcode);
3269 case BUILT_IN_PRINTF_CHK:
3270 case BUILT_IN_VPRINTF_CHK:
3271 if (n == 2 || n == 3)
3272 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3274 ? gimple_call_arg (stmt, 2)
3275 : NULL_TREE, fcode);
3277 case BUILT_IN_ACC_ON_DEVICE:
3278 return gimple_fold_builtin_acc_on_device (gsi,
3279 gimple_call_arg (stmt, 0));
3283 /* Try the generic builtin folder. */
3284 bool ignore = (gimple_call_lhs (stmt) == NULL);
3285 tree result = fold_call_stmt (stmt, ignore);
3289 STRIP_NOPS (result);
3291 result = fold_convert (gimple_call_return_type (stmt), result);
3292 if (!update_call_from_tree (gsi, result))
3293 gimplify_and_update_call_from_tree (gsi, result);
3300 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3301 function calls to constants, where possible. */
3304 fold_internal_goacc_dim (const gimple *call)
3306 int axis = get_oacc_ifn_dim_arg (call);
3307 int size = get_oacc_fn_dim_size (current_function_decl, axis);
3308 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3309 tree result = NULL_TREE;
3311 /* If the size is 1, or we only want the size and it is not dynamic,
3312 we know the answer. */
3313 if (size == 1 || (!is_pos && size))
3315 tree type = TREE_TYPE (gimple_call_lhs (call));
3316 result = build_int_cst (type, size - is_pos);
3322 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3323 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3324 &var where var is only addressable because of such calls. */
3327 optimize_atomic_compare_exchange_p (gimple *stmt)
3329 if (gimple_call_num_args (stmt) != 6
3330 || !flag_inline_atomics
3332 || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
3333 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3334 || !gimple_vdef (stmt)
3335 || !gimple_vuse (stmt))
3338 tree fndecl = gimple_call_fndecl (stmt);
3339 switch (DECL_FUNCTION_CODE (fndecl))
3341 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3342 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3343 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3344 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3345 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3351 tree expected = gimple_call_arg (stmt, 1);
3352 if (TREE_CODE (expected) != ADDR_EXPR
3353 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3356 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3357 if (!is_gimple_reg_type (etype)
3358 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3359 || TREE_THIS_VOLATILE (etype)
3360 || VECTOR_TYPE_P (etype)
3361 || TREE_CODE (etype) == COMPLEX_TYPE
3362 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3363 might not preserve all the bits. See PR71716. */
3364 || SCALAR_FLOAT_TYPE_P (etype)
3365 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3368 tree weak = gimple_call_arg (stmt, 3);
3369 if (!integer_zerop (weak) && !integer_onep (weak))
3372 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3373 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3374 machine_mode mode = TYPE_MODE (itype);
3376 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3378 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3381 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3388 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3390 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3391 i = IMAGPART_EXPR <t>;
3393 e = REALPART_EXPR <t>; */
3396 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3398 gimple *stmt = gsi_stmt (*gsi);
3399 tree fndecl = gimple_call_fndecl (stmt);
3400 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3401 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3402 tree ctype = build_complex_type (itype);
3403 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3404 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3406 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3407 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3408 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3410 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3411 build1 (VIEW_CONVERT_EXPR, itype,
3412 gimple_assign_lhs (g)));
3413 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3415 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3416 + int_size_in_bytes (itype);
3417 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3418 gimple_call_arg (stmt, 0),
3419 gimple_assign_lhs (g),
3420 gimple_call_arg (stmt, 2),
3421 build_int_cst (integer_type_node, flag),
3422 gimple_call_arg (stmt, 4),
3423 gimple_call_arg (stmt, 5));
3424 tree lhs = make_ssa_name (ctype);
3425 gimple_call_set_lhs (g, lhs);
3426 gimple_set_vdef (g, gimple_vdef (stmt));
3427 gimple_set_vuse (g, gimple_vuse (stmt));
3428 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3429 if (gimple_call_lhs (stmt))
3431 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3432 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3433 build1 (IMAGPART_EXPR, itype, lhs));
3434 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3435 g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
3436 gimple_assign_lhs (g));
3438 gsi_replace (gsi, g, true);
3439 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3440 build1 (REALPART_EXPR, itype, lhs));
3441 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3442 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3444 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3446 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3447 gimple_assign_lhs (g)));
3448 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3450 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3451 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3455 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3456 doesn't fit into TYPE. The test for overflow should be regardless of
3457 -fwrapv, and even for unsigned types. */
3460 arith_overflowed_p (enum tree_code code, const_tree type,
3461 const_tree arg0, const_tree arg1)
3463 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3464 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3466 widest2_int warg0 = widest2_int_cst (arg0);
3467 widest2_int warg1 = widest2_int_cst (arg1);
3471 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3472 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3473 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3474 default: gcc_unreachable ();
3476 signop sign = TYPE_SIGN (type);
3477 if (sign == UNSIGNED && wi::neg_p (wres))
3479 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3482 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3483 The statement may be replaced by another statement, e.g., if the call
3484 simplifies to a constant value. Return true if any changes were made.
3485 It is assumed that the operands have been previously folded. */
3488 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3490 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3492 bool changed = false;
3495 /* Fold *& in call arguments. */
3496 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3497 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3499 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3502 gimple_call_set_arg (stmt, i, tmp);
3507 /* Check for virtual calls that became direct calls. */
3508 callee = gimple_call_fn (stmt);
3509 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3511 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3513 if (dump_file && virtual_method_call_p (callee)
3514 && !possible_polymorphic_call_target_p
3515 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3516 (OBJ_TYPE_REF_EXPR (callee)))))
3519 "Type inheritance inconsistent devirtualization of ");
3520 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3521 fprintf (dump_file, " to ");
3522 print_generic_expr (dump_file, callee, TDF_SLIM);
3523 fprintf (dump_file, "\n");
3526 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3529 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3532 vec <cgraph_node *>targets
3533 = possible_polymorphic_call_targets (callee, stmt, &final);
3534 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3536 tree lhs = gimple_call_lhs (stmt);
3537 if (dump_enabled_p ())
3539 location_t loc = gimple_location_safe (stmt);
3540 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3541 "folding virtual function call to %s\n",
3542 targets.length () == 1
3543 ? targets[0]->name ()
3544 : "__builtin_unreachable");
3546 if (targets.length () == 1)
3548 tree fndecl = targets[0]->decl;
3549 gimple_call_set_fndecl (stmt, fndecl);
3551 /* If changing the call to __cxa_pure_virtual
3552 or similar noreturn function, adjust gimple_call_fntype
3554 if (gimple_call_noreturn_p (stmt)
3555 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
3556 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
3557 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
3559 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
3560 /* If the call becomes noreturn, remove the lhs. */
3562 && gimple_call_noreturn_p (stmt)
3563 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
3564 || should_remove_lhs_p (lhs)))
3566 if (TREE_CODE (lhs) == SSA_NAME)
3568 tree var = create_tmp_var (TREE_TYPE (lhs));
3569 tree def = get_or_create_ssa_default_def (cfun, var);
3570 gimple *new_stmt = gimple_build_assign (lhs, def);
3571 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3573 gimple_call_set_lhs (stmt, NULL_TREE);
3575 maybe_remove_unused_call_args (cfun, stmt);
3579 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3580 gimple *new_stmt = gimple_build_call (fndecl, 0);
3581 gimple_set_location (new_stmt, gimple_location (stmt));
3582 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3584 tree var = create_tmp_var (TREE_TYPE (lhs));
3585 tree def = get_or_create_ssa_default_def (cfun, var);
3587 /* To satisfy condition for
3588 cgraph_update_edges_for_call_stmt_node,
3589 we need to preserve GIMPLE_CALL statement
3590 at position of GSI iterator. */
3591 update_call_from_tree (gsi, def);
3592 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3596 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3597 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3598 gsi_replace (gsi, new_stmt, false);
3606 /* Check for indirect calls that became direct calls, and then
3607 no longer require a static chain. */
3608 if (gimple_call_chain (stmt))
3610 tree fn = gimple_call_fndecl (stmt);
3611 if (fn && !DECL_STATIC_CHAIN (fn))
3613 gimple_call_set_chain (stmt, NULL);
3618 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3621 gimple_call_set_chain (stmt, tmp);
3630 /* Check for builtins that CCP can handle using information not
3631 available in the generic fold routines. */
3632 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3634 if (gimple_fold_builtin (gsi))
3637 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3639 changed |= targetm.gimple_fold_builtin (gsi);
3641 else if (gimple_call_internal_p (stmt))
3643 enum tree_code subcode = ERROR_MARK;
3644 tree result = NULL_TREE;
3645 bool cplx_result = false;
3646 tree overflow = NULL_TREE;
3647 switch (gimple_call_internal_fn (stmt))
3649 case IFN_BUILTIN_EXPECT:
3650 result = fold_builtin_expect (gimple_location (stmt),
3651 gimple_call_arg (stmt, 0),
3652 gimple_call_arg (stmt, 1),
3653 gimple_call_arg (stmt, 2));
3655 case IFN_UBSAN_OBJECT_SIZE:
3656 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3657 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3658 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3659 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3660 gimple_call_arg (stmt, 2))))
3662 gsi_replace (gsi, gimple_build_nop (), false);
3663 unlink_stmt_vdef (stmt);
3664 release_defs (stmt);
3668 case IFN_GOACC_DIM_SIZE:
3669 case IFN_GOACC_DIM_POS:
3670 result = fold_internal_goacc_dim (stmt);
3672 case IFN_UBSAN_CHECK_ADD:
3673 subcode = PLUS_EXPR;
3675 case IFN_UBSAN_CHECK_SUB:
3676 subcode = MINUS_EXPR;
3678 case IFN_UBSAN_CHECK_MUL:
3679 subcode = MULT_EXPR;
3681 case IFN_ADD_OVERFLOW:
3682 subcode = PLUS_EXPR;
3685 case IFN_SUB_OVERFLOW:
3686 subcode = MINUS_EXPR;
3689 case IFN_MUL_OVERFLOW:
3690 subcode = MULT_EXPR;
3696 if (subcode != ERROR_MARK)
3698 tree arg0 = gimple_call_arg (stmt, 0);
3699 tree arg1 = gimple_call_arg (stmt, 1);
3700 tree type = TREE_TYPE (arg0);
3703 tree lhs = gimple_call_lhs (stmt);
3704 if (lhs == NULL_TREE)
3707 type = TREE_TYPE (TREE_TYPE (lhs));
3709 if (type == NULL_TREE)
3711 /* x = y + 0; x = y - 0; x = y * 0; */
3712 else if (integer_zerop (arg1))
3713 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3714 /* x = 0 + y; x = 0 * y; */
3715 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3716 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3718 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3719 result = integer_zero_node;
3720 /* x = y * 1; x = 1 * y; */
3721 else if (subcode == MULT_EXPR && integer_onep (arg1))
3723 else if (subcode == MULT_EXPR && integer_onep (arg0))
3725 else if (TREE_CODE (arg0) == INTEGER_CST
3726 && TREE_CODE (arg1) == INTEGER_CST)
3729 result = int_const_binop (subcode, fold_convert (type, arg0),
3730 fold_convert (type, arg1));
3732 result = int_const_binop (subcode, arg0, arg1);
3733 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3736 overflow = build_one_cst (type);
3743 if (result == integer_zero_node)
3744 result = build_zero_cst (type);
3745 else if (cplx_result && TREE_TYPE (result) != type)
3747 if (TREE_CODE (result) == INTEGER_CST)
3749 if (arith_overflowed_p (PLUS_EXPR, type, result,
3751 overflow = build_one_cst (type);
3753 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3754 && TYPE_UNSIGNED (type))
3755 || (TYPE_PRECISION (type)
3756 < (TYPE_PRECISION (TREE_TYPE (result))
3757 + (TYPE_UNSIGNED (TREE_TYPE (result))
3758 && !TYPE_UNSIGNED (type)))))
3761 result = fold_convert (type, result);
3768 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3769 result = drop_tree_overflow (result);
3772 if (overflow == NULL_TREE)
3773 overflow = build_zero_cst (TREE_TYPE (result));
3774 tree ctype = build_complex_type (TREE_TYPE (result));
3775 if (TREE_CODE (result) == INTEGER_CST
3776 && TREE_CODE (overflow) == INTEGER_CST)
3777 result = build_complex (ctype, result, overflow);
3779 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3780 ctype, result, overflow);
3782 if (!update_call_from_tree (gsi, result))
3783 gimplify_and_update_call_from_tree (gsi, result);
3792 /* Return true whether NAME has a use on STMT. */
3795 has_use_on_stmt (tree name, gimple *stmt)
3797 imm_use_iterator iter;
3798 use_operand_p use_p;
3799 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3800 if (USE_STMT (use_p) == stmt)
3805 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3808 Replaces *GSI with the simplification result in RCODE and OPS
3809 and the associated statements in *SEQ. Does the replacement
3810 according to INPLACE and returns true if the operation succeeded. */
3813 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3814 code_helper rcode, tree *ops,
3815 gimple_seq *seq, bool inplace)
3817 gimple *stmt = gsi_stmt (*gsi);
3819 /* Play safe and do not allow abnormals to be mentioned in
3820 newly created statements. See also maybe_push_res_to_seq.
3821 As an exception allow such uses if there was a use of the
3822 same SSA name on the old stmt. */
3823 if ((TREE_CODE (ops[0]) == SSA_NAME
3824 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3825 && !has_use_on_stmt (ops[0], stmt))
3827 && TREE_CODE (ops[1]) == SSA_NAME
3828 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3829 && !has_use_on_stmt (ops[1], stmt))
3831 && TREE_CODE (ops[2]) == SSA_NAME
3832 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3833 && !has_use_on_stmt (ops[2], stmt))
3834 || (COMPARISON_CLASS_P (ops[0])
3835 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3836 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3837 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3838 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3839 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3840 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
3843 /* Don't insert new statements when INPLACE is true, even if we could
3844 reuse STMT for the final statement. */
3845 if (inplace && !gimple_seq_empty_p (*seq))
3848 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3850 gcc_assert (rcode.is_tree_code ());
3851 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3852 /* GIMPLE_CONDs condition may not throw. */
3853 && (!flag_exceptions
3854 || !cfun->can_throw_non_call_exceptions
3855 || !operation_could_trap_p (rcode,
3856 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3858 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3859 else if (rcode == SSA_NAME)
3860 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3861 build_zero_cst (TREE_TYPE (ops[0])));
3862 else if (rcode == INTEGER_CST)
3864 if (integer_zerop (ops[0]))
3865 gimple_cond_make_false (cond_stmt);
3867 gimple_cond_make_true (cond_stmt);
3871 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3875 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3876 build_zero_cst (TREE_TYPE (res)));
3880 if (dump_file && (dump_flags & TDF_DETAILS))
3882 fprintf (dump_file, "gimple_simplified to ");
3883 if (!gimple_seq_empty_p (*seq))
3884 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3885 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3888 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3891 else if (is_gimple_assign (stmt)
3892 && rcode.is_tree_code ())
3895 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3897 maybe_build_generic_op (rcode,
3898 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
3899 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3900 if (dump_file && (dump_flags & TDF_DETAILS))
3902 fprintf (dump_file, "gimple_simplified to ");
3903 if (!gimple_seq_empty_p (*seq))
3904 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3905 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3908 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3912 else if (rcode.is_fn_code ()
3913 && gimple_call_combined_fn (stmt) == rcode)
3916 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3918 gcc_assert (ops[i] != NULL_TREE);
3919 gimple_call_set_arg (stmt, i, ops[i]);
3922 gcc_assert (ops[i] == NULL_TREE);
3923 if (dump_file && (dump_flags & TDF_DETAILS))
3925 fprintf (dump_file, "gimple_simplified to ");
3926 if (!gimple_seq_empty_p (*seq))
3927 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3928 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3930 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3935 if (gimple_has_lhs (stmt))
3937 tree lhs = gimple_get_lhs (stmt);
3938 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3941 if (dump_file && (dump_flags & TDF_DETAILS))
3943 fprintf (dump_file, "gimple_simplified to ");
3944 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3946 gsi_replace_with_seq_vops (gsi, *seq);
3956 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3959 maybe_canonicalize_mem_ref_addr (tree *t)
3963 if (TREE_CODE (*t) == ADDR_EXPR)
3964 t = &TREE_OPERAND (*t, 0);
3966 /* The C and C++ frontends use an ARRAY_REF for indexing with their
3967 generic vector extension. The actual vector referenced is
3968 view-converted to an array type for this purpose. If the index
3969 is constant the canonical representation in the middle-end is a
3970 BIT_FIELD_REF so re-write the former to the latter here. */
3971 if (TREE_CODE (*t) == ARRAY_REF
3972 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
3973 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
3974 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
3976 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
3977 if (VECTOR_TYPE_P (vtype))
3979 tree low = array_ref_low_bound (*t);
3980 if (TREE_CODE (low) == INTEGER_CST)
3982 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
3984 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
3985 wi::to_widest (low));
3986 idx = wi::mul (idx, wi::to_widest
3987 (TYPE_SIZE (TREE_TYPE (*t))));
3989 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
3990 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
3992 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
3994 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
3995 TYPE_SIZE (TREE_TYPE (*t)),
3996 wide_int_to_tree (sizetype, idx));
4004 while (handled_component_p (*t))
4005 t = &TREE_OPERAND (*t, 0);
4007 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4008 of invariant addresses into a SSA name MEM_REF address. */
4009 if (TREE_CODE (*t) == MEM_REF
4010 || TREE_CODE (*t) == TARGET_MEM_REF)
4012 tree addr = TREE_OPERAND (*t, 0);
4013 if (TREE_CODE (addr) == ADDR_EXPR
4014 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4015 || handled_component_p (TREE_OPERAND (addr, 0))))
4018 HOST_WIDE_INT coffset;
4019 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4024 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4025 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4026 TREE_OPERAND (*t, 1),
4027 size_int (coffset));
4030 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4031 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4034 /* Canonicalize back MEM_REFs to plain reference trees if the object
4035 accessed is a decl that has the same access semantics as the MEM_REF. */
4036 if (TREE_CODE (*t) == MEM_REF
4037 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4038 && integer_zerop (TREE_OPERAND (*t, 1))
4039 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4041 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4042 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4043 if (/* Same volatile qualification. */
4044 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4045 /* Same TBAA behavior with -fstrict-aliasing. */
4046 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4047 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4048 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4049 /* Same alignment. */
4050 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4051 /* We have to look out here to not drop a required conversion
4052 from the rhs to the lhs if *t appears on the lhs or vice-versa
4053 if it appears on the rhs. Thus require strict type
4055 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4057 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4062 /* Canonicalize TARGET_MEM_REF in particular with respect to
4063 the indexes becoming constant. */
4064 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4066 tree tem = maybe_fold_tmr (*t);
4077 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4078 distinguishes both cases. */
4081 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4083 bool changed = false;
4084 gimple *stmt = gsi_stmt (*gsi);
4085 bool nowarning = gimple_no_warning_p (stmt);
4087 fold_defer_overflow_warnings ();
4089 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4091 ??? This shouldn't be done in generic folding but in the
4092 propagation helpers which also know whether an address was
4094 Also canonicalize operand order. */
4095 switch (gimple_code (stmt))
4098 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4100 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4101 if ((REFERENCE_CLASS_P (*rhs)
4102 || TREE_CODE (*rhs) == ADDR_EXPR)
4103 && maybe_canonicalize_mem_ref_addr (rhs))
4105 tree *lhs = gimple_assign_lhs_ptr (stmt);
4106 if (REFERENCE_CLASS_P (*lhs)
4107 && maybe_canonicalize_mem_ref_addr (lhs))
4112 /* Canonicalize operand order. */
4113 enum tree_code code = gimple_assign_rhs_code (stmt);
4114 if (TREE_CODE_CLASS (code) == tcc_comparison
4115 || commutative_tree_code (code)
4116 || commutative_ternary_tree_code (code))
4118 tree rhs1 = gimple_assign_rhs1 (stmt);
4119 tree rhs2 = gimple_assign_rhs2 (stmt);
4120 if (tree_swap_operands_p (rhs1, rhs2, false))
4122 gimple_assign_set_rhs1 (stmt, rhs2);
4123 gimple_assign_set_rhs2 (stmt, rhs1);
4124 if (TREE_CODE_CLASS (code) == tcc_comparison)
4125 gimple_assign_set_rhs_code (stmt,
4126 swap_tree_comparison (code));
4134 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4136 tree *arg = gimple_call_arg_ptr (stmt, i);
4137 if (REFERENCE_CLASS_P (*arg)
4138 && maybe_canonicalize_mem_ref_addr (arg))
4141 tree *lhs = gimple_call_lhs_ptr (stmt);
4143 && REFERENCE_CLASS_P (*lhs)
4144 && maybe_canonicalize_mem_ref_addr (lhs))
4150 gasm *asm_stmt = as_a <gasm *> (stmt);
4151 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4153 tree link = gimple_asm_output_op (asm_stmt, i);
4154 tree op = TREE_VALUE (link);
4155 if (REFERENCE_CLASS_P (op)
4156 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4159 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4161 tree link = gimple_asm_input_op (asm_stmt, i);
4162 tree op = TREE_VALUE (link);
4163 if ((REFERENCE_CLASS_P (op)
4164 || TREE_CODE (op) == ADDR_EXPR)
4165 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4171 if (gimple_debug_bind_p (stmt))
4173 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4175 && (REFERENCE_CLASS_P (*val)
4176 || TREE_CODE (*val) == ADDR_EXPR)
4177 && maybe_canonicalize_mem_ref_addr (val))
4183 /* Canonicalize operand order. */
4184 tree lhs = gimple_cond_lhs (stmt);
4185 tree rhs = gimple_cond_rhs (stmt);
4186 if (tree_swap_operands_p (lhs, rhs, false))
4188 gcond *gc = as_a <gcond *> (stmt);
4189 gimple_cond_set_lhs (gc, rhs);
4190 gimple_cond_set_rhs (gc, lhs);
4191 gimple_cond_set_code (gc,
4192 swap_tree_comparison (gimple_cond_code (gc)));
4199 /* Dispatch to pattern-based folding. */
4201 || is_gimple_assign (stmt)
4202 || gimple_code (stmt) == GIMPLE_COND)
4204 gimple_seq seq = NULL;
4207 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4208 valueize, valueize))
4210 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4213 gimple_seq_discard (seq);
4217 stmt = gsi_stmt (*gsi);
4219 /* Fold the main computation performed by the statement. */
4220 switch (gimple_code (stmt))
4224 /* Try to canonicalize for boolean-typed X the comparisons
4225 X == 0, X == 1, X != 0, and X != 1. */
4226 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4227 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4229 tree lhs = gimple_assign_lhs (stmt);
4230 tree op1 = gimple_assign_rhs1 (stmt);
4231 tree op2 = gimple_assign_rhs2 (stmt);
4232 tree type = TREE_TYPE (op1);
4234 /* Check whether the comparison operands are of the same boolean
4235 type as the result type is.
4236 Check that second operand is an integer-constant with value
4238 if (TREE_CODE (op2) == INTEGER_CST
4239 && (integer_zerop (op2) || integer_onep (op2))
4240 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4242 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4243 bool is_logical_not = false;
4245 /* X == 0 and X != 1 is a logical-not.of X
4246 X == 1 and X != 0 is X */
4247 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4248 || (cmp_code == NE_EXPR && integer_onep (op2)))
4249 is_logical_not = true;
4251 if (is_logical_not == false)
4252 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4253 /* Only for one-bit precision typed X the transformation
4254 !X -> ~X is valied. */
4255 else if (TYPE_PRECISION (type) == 1)
4256 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4257 /* Otherwise we use !X -> X ^ 1. */
4259 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4260 build_int_cst (type, 1));
4266 unsigned old_num_ops = gimple_num_ops (stmt);
4267 tree lhs = gimple_assign_lhs (stmt);
4268 tree new_rhs = fold_gimple_assign (gsi);
4270 && !useless_type_conversion_p (TREE_TYPE (lhs),
4271 TREE_TYPE (new_rhs)))
4272 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4275 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4277 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4284 changed |= gimple_fold_call (gsi, inplace);
4288 /* Fold *& in asm operands. */
4290 gasm *asm_stmt = as_a <gasm *> (stmt);
4292 const char **oconstraints;
4293 const char *constraint;
4294 bool allows_mem, allows_reg;
4296 noutputs = gimple_asm_noutputs (asm_stmt);
4297 oconstraints = XALLOCAVEC (const char *, noutputs);
4299 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4301 tree link = gimple_asm_output_op (asm_stmt, i);
4302 tree op = TREE_VALUE (link);
4304 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4305 if (REFERENCE_CLASS_P (op)
4306 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4308 TREE_VALUE (link) = op;
4312 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4314 tree link = gimple_asm_input_op (asm_stmt, i);
4315 tree op = TREE_VALUE (link);
4317 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4318 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4319 oconstraints, &allows_mem, &allows_reg);
4320 if (REFERENCE_CLASS_P (op)
4321 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4324 TREE_VALUE (link) = op;
4332 if (gimple_debug_bind_p (stmt))
4334 tree val = gimple_debug_bind_get_value (stmt);
4336 && REFERENCE_CLASS_P (val))
4338 tree tem = maybe_fold_reference (val, false);
4341 gimple_debug_bind_set_value (stmt, tem);
4346 && TREE_CODE (val) == ADDR_EXPR)
4348 tree ref = TREE_OPERAND (val, 0);
4349 tree tem = maybe_fold_reference (ref, false);
4352 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4353 gimple_debug_bind_set_value (stmt, tem);
4363 stmt = gsi_stmt (*gsi);
4365 /* Fold *& on the lhs. */
4366 if (gimple_has_lhs (stmt))
4368 tree lhs = gimple_get_lhs (stmt);
4369 if (lhs && REFERENCE_CLASS_P (lhs))
4371 tree new_lhs = maybe_fold_reference (lhs, true);
4374 gimple_set_lhs (stmt, new_lhs);
4380 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4384 /* Valueziation callback that ends up not following SSA edges. */
4387 no_follow_ssa_edges (tree)
4392 /* Valueization callback that ends up following single-use SSA edges only. */
4395 follow_single_use_edges (tree val)
4397 if (TREE_CODE (val) == SSA_NAME
4398 && !has_single_use (val))
4403 /* Fold the statement pointed to by GSI. In some cases, this function may
4404 replace the whole statement with a new one. Returns true iff folding
4406 The statement pointed to by GSI should be in valid gimple form but may
4407 be in unfolded state as resulting from for example constant propagation
4408 which can produce *&x = 0. */
4411 fold_stmt (gimple_stmt_iterator *gsi)
4413 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4417 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4419 return fold_stmt_1 (gsi, false, valueize);
4422 /* Perform the minimal folding on statement *GSI. Only operations like
4423 *&x created by constant propagation are handled. The statement cannot
4424 be replaced with a new one. Return true if the statement was
4425 changed, false otherwise.
4426 The statement *GSI should be in valid gimple form but may
4427 be in unfolded state as resulting from for example constant propagation
4428 which can produce *&x = 0. */
4431 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4433 gimple *stmt = gsi_stmt (*gsi);
4434 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4435 gcc_assert (gsi_stmt (*gsi) == stmt);
4439 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4440 if EXPR is null or we don't know how.
4441 If non-null, the result always has boolean type. */
4444 canonicalize_bool (tree expr, bool invert)
4450 if (integer_nonzerop (expr))
4451 return boolean_false_node;
4452 else if (integer_zerop (expr))
4453 return boolean_true_node;
4454 else if (TREE_CODE (expr) == SSA_NAME)
4455 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4456 build_int_cst (TREE_TYPE (expr), 0));
4457 else if (COMPARISON_CLASS_P (expr))
4458 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4460 TREE_OPERAND (expr, 0),
4461 TREE_OPERAND (expr, 1));
4467 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4469 if (integer_nonzerop (expr))
4470 return boolean_true_node;
4471 else if (integer_zerop (expr))
4472 return boolean_false_node;
4473 else if (TREE_CODE (expr) == SSA_NAME)
4474 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4475 build_int_cst (TREE_TYPE (expr), 0));
4476 else if (COMPARISON_CLASS_P (expr))
4477 return fold_build2 (TREE_CODE (expr),
4479 TREE_OPERAND (expr, 0),
4480 TREE_OPERAND (expr, 1));
4486 /* Check to see if a boolean expression EXPR is logically equivalent to the
4487 comparison (OP1 CODE OP2). Check for various identities involving
4491 same_bool_comparison_p (const_tree expr, enum tree_code code,
4492 const_tree op1, const_tree op2)
4496 /* The obvious case. */
4497 if (TREE_CODE (expr) == code
4498 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4499 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4502 /* Check for comparing (name, name != 0) and the case where expr
4503 is an SSA_NAME with a definition matching the comparison. */
4504 if (TREE_CODE (expr) == SSA_NAME
4505 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4507 if (operand_equal_p (expr, op1, 0))
4508 return ((code == NE_EXPR && integer_zerop (op2))
4509 || (code == EQ_EXPR && integer_nonzerop (op2)));
4510 s = SSA_NAME_DEF_STMT (expr);
4511 if (is_gimple_assign (s)
4512 && gimple_assign_rhs_code (s) == code
4513 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4514 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4518 /* If op1 is of the form (name != 0) or (name == 0), and the definition
4519 of name is a comparison, recurse. */
4520 if (TREE_CODE (op1) == SSA_NAME
4521 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
4523 s = SSA_NAME_DEF_STMT (op1);
4524 if (is_gimple_assign (s)
4525 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
4527 enum tree_code c = gimple_assign_rhs_code (s);
4528 if ((c == NE_EXPR && integer_zerop (op2))
4529 || (c == EQ_EXPR && integer_nonzerop (op2)))
4530 return same_bool_comparison_p (expr, c,
4531 gimple_assign_rhs1 (s),
4532 gimple_assign_rhs2 (s));
4533 if ((c == EQ_EXPR && integer_zerop (op2))
4534 || (c == NE_EXPR && integer_nonzerop (op2)))
4535 return same_bool_comparison_p (expr,
4536 invert_tree_comparison (c, false),
4537 gimple_assign_rhs1 (s),
4538 gimple_assign_rhs2 (s));
4544 /* Check to see if two boolean expressions OP1 and OP2 are logically
4548 same_bool_result_p (const_tree op1, const_tree op2)
4550 /* Simple cases first. */
4551 if (operand_equal_p (op1, op2, 0))
4554 /* Check the cases where at least one of the operands is a comparison.
4555 These are a bit smarter than operand_equal_p in that they apply some
4556 identifies on SSA_NAMEs. */
4557 if (COMPARISON_CLASS_P (op2)
4558 && same_bool_comparison_p (op1, TREE_CODE (op2),
4559 TREE_OPERAND (op2, 0),
4560 TREE_OPERAND (op2, 1)))
4562 if (COMPARISON_CLASS_P (op1)
4563 && same_bool_comparison_p (op2, TREE_CODE (op1),
4564 TREE_OPERAND (op1, 0),
4565 TREE_OPERAND (op1, 1)))
4572 /* Forward declarations for some mutually recursive functions. */
4575 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4576 enum tree_code code2, tree op2a, tree op2b);
4578 and_var_with_comparison (tree var, bool invert,
4579 enum tree_code code2, tree op2a, tree op2b);
4581 and_var_with_comparison_1 (gimple *stmt,
4582 enum tree_code code2, tree op2a, tree op2b);
4584 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4585 enum tree_code code2, tree op2a, tree op2b);
4587 or_var_with_comparison (tree var, bool invert,
4588 enum tree_code code2, tree op2a, tree op2b);
4590 or_var_with_comparison_1 (gimple *stmt,
4591 enum tree_code code2, tree op2a, tree op2b);
4593 /* Helper function for and_comparisons_1: try to simplify the AND of the
4594 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4595 If INVERT is true, invert the value of the VAR before doing the AND.
4596 Return NULL_EXPR if we can't simplify this to a single expression. */
4599 and_var_with_comparison (tree var, bool invert,
4600 enum tree_code code2, tree op2a, tree op2b)
4603 gimple *stmt = SSA_NAME_DEF_STMT (var);
4605 /* We can only deal with variables whose definitions are assignments. */
4606 if (!is_gimple_assign (stmt))
4609 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4610 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4611 Then we only have to consider the simpler non-inverted cases. */
4613 t = or_var_with_comparison_1 (stmt,
4614 invert_tree_comparison (code2, false),
4617 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4618 return canonicalize_bool (t, invert);
4621 /* Try to simplify the AND of the ssa variable defined by the assignment
4622 STMT with the comparison specified by (OP2A CODE2 OP2B).
4623 Return NULL_EXPR if we can't simplify this to a single expression. */
4626 and_var_with_comparison_1 (gimple *stmt,
4627 enum tree_code code2, tree op2a, tree op2b)
4629 tree var = gimple_assign_lhs (stmt);
4630 tree true_test_var = NULL_TREE;
4631 tree false_test_var = NULL_TREE;
4632 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4634 /* Check for identities like (var AND (var == 0)) => false. */
4635 if (TREE_CODE (op2a) == SSA_NAME
4636 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4638 if ((code2 == NE_EXPR && integer_zerop (op2b))
4639 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4641 true_test_var = op2a;
4642 if (var == true_test_var)
4645 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4646 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4648 false_test_var = op2a;
4649 if (var == false_test_var)
4650 return boolean_false_node;
4654 /* If the definition is a comparison, recurse on it. */
4655 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4657 tree t = and_comparisons_1 (innercode,
4658 gimple_assign_rhs1 (stmt),
4659 gimple_assign_rhs2 (stmt),
4667 /* If the definition is an AND or OR expression, we may be able to
4668 simplify by reassociating. */
4669 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4670 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4672 tree inner1 = gimple_assign_rhs1 (stmt);
4673 tree inner2 = gimple_assign_rhs2 (stmt);
4676 tree partial = NULL_TREE;
4677 bool is_and = (innercode == BIT_AND_EXPR);
4679 /* Check for boolean identities that don't require recursive examination
4681 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4682 inner1 AND (inner1 OR inner2) => inner1
4683 !inner1 AND (inner1 AND inner2) => false
4684 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4685 Likewise for similar cases involving inner2. */
4686 if (inner1 == true_test_var)
4687 return (is_and ? var : inner1);
4688 else if (inner2 == true_test_var)
4689 return (is_and ? var : inner2);
4690 else if (inner1 == false_test_var)
4692 ? boolean_false_node
4693 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4694 else if (inner2 == false_test_var)
4696 ? boolean_false_node
4697 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4699 /* Next, redistribute/reassociate the AND across the inner tests.
4700 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4701 if (TREE_CODE (inner1) == SSA_NAME
4702 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4703 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4704 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4705 gimple_assign_rhs1 (s),
4706 gimple_assign_rhs2 (s),
4707 code2, op2a, op2b)))
4709 /* Handle the AND case, where we are reassociating:
4710 (inner1 AND inner2) AND (op2a code2 op2b)
4712 If the partial result t is a constant, we win. Otherwise
4713 continue on to try reassociating with the other inner test. */
4716 if (integer_onep (t))
4718 else if (integer_zerop (t))
4719 return boolean_false_node;
4722 /* Handle the OR case, where we are redistributing:
4723 (inner1 OR inner2) AND (op2a code2 op2b)
4724 => (t OR (inner2 AND (op2a code2 op2b))) */
4725 else if (integer_onep (t))
4726 return boolean_true_node;
4728 /* Save partial result for later. */
4732 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4733 if (TREE_CODE (inner2) == SSA_NAME
4734 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4735 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4736 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4737 gimple_assign_rhs1 (s),
4738 gimple_assign_rhs2 (s),
4739 code2, op2a, op2b)))
4741 /* Handle the AND case, where we are reassociating:
4742 (inner1 AND inner2) AND (op2a code2 op2b)
4743 => (inner1 AND t) */
4746 if (integer_onep (t))
4748 else if (integer_zerop (t))
4749 return boolean_false_node;
4750 /* If both are the same, we can apply the identity
4752 else if (partial && same_bool_result_p (t, partial))
4756 /* Handle the OR case. where we are redistributing:
4757 (inner1 OR inner2) AND (op2a code2 op2b)
4758 => (t OR (inner1 AND (op2a code2 op2b)))
4759 => (t OR partial) */
4762 if (integer_onep (t))
4763 return boolean_true_node;
4766 /* We already got a simplification for the other
4767 operand to the redistributed OR expression. The
4768 interesting case is when at least one is false.
4769 Or, if both are the same, we can apply the identity
4771 if (integer_zerop (partial))
4773 else if (integer_zerop (t))
4775 else if (same_bool_result_p (t, partial))
4784 /* Try to simplify the AND of two comparisons defined by
4785 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4786 If this can be done without constructing an intermediate value,
4787 return the resulting tree; otherwise NULL_TREE is returned.
4788 This function is deliberately asymmetric as it recurses on SSA_DEFs
4789 in the first comparison but not the second. */
4792 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4793 enum tree_code code2, tree op2a, tree op2b)
4795 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4797 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4798 if (operand_equal_p (op1a, op2a, 0)
4799 && operand_equal_p (op1b, op2b, 0))
4801 /* Result will be either NULL_TREE, or a combined comparison. */
4802 tree t = combine_comparisons (UNKNOWN_LOCATION,
4803 TRUTH_ANDIF_EXPR, code1, code2,
4804 truth_type, op1a, op1b);
4809 /* Likewise the swapped case of the above. */
4810 if (operand_equal_p (op1a, op2b, 0)
4811 && operand_equal_p (op1b, op2a, 0))
4813 /* Result will be either NULL_TREE, or a combined comparison. */
4814 tree t = combine_comparisons (UNKNOWN_LOCATION,
4815 TRUTH_ANDIF_EXPR, code1,
4816 swap_tree_comparison (code2),
4817 truth_type, op1a, op1b);
4822 /* If both comparisons are of the same value against constants, we might
4823 be able to merge them. */
4824 if (operand_equal_p (op1a, op2a, 0)
4825 && TREE_CODE (op1b) == INTEGER_CST
4826 && TREE_CODE (op2b) == INTEGER_CST)
4828 int cmp = tree_int_cst_compare (op1b, op2b);
4830 /* If we have (op1a == op1b), we should either be able to
4831 return that or FALSE, depending on whether the constant op1b
4832 also satisfies the other comparison against op2b. */
4833 if (code1 == EQ_EXPR)
4839 case EQ_EXPR: val = (cmp == 0); break;
4840 case NE_EXPR: val = (cmp != 0); break;
4841 case LT_EXPR: val = (cmp < 0); break;
4842 case GT_EXPR: val = (cmp > 0); break;
4843 case LE_EXPR: val = (cmp <= 0); break;
4844 case GE_EXPR: val = (cmp >= 0); break;
4845 default: done = false;
4850 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4852 return boolean_false_node;
4855 /* Likewise if the second comparison is an == comparison. */
4856 else if (code2 == EQ_EXPR)
4862 case EQ_EXPR: val = (cmp == 0); break;
4863 case NE_EXPR: val = (cmp != 0); break;
4864 case LT_EXPR: val = (cmp > 0); break;
4865 case GT_EXPR: val = (cmp < 0); break;
4866 case LE_EXPR: val = (cmp >= 0); break;
4867 case GE_EXPR: val = (cmp <= 0); break;
4868 default: done = false;
4873 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4875 return boolean_false_node;
4879 /* Same business with inequality tests. */
4880 else if (code1 == NE_EXPR)
4885 case EQ_EXPR: val = (cmp != 0); break;
4886 case NE_EXPR: val = (cmp == 0); break;
4887 case LT_EXPR: val = (cmp >= 0); break;
4888 case GT_EXPR: val = (cmp <= 0); break;
4889 case LE_EXPR: val = (cmp > 0); break;
4890 case GE_EXPR: val = (cmp < 0); break;
4895 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4897 else if (code2 == NE_EXPR)
4902 case EQ_EXPR: val = (cmp == 0); break;
4903 case NE_EXPR: val = (cmp != 0); break;
4904 case LT_EXPR: val = (cmp <= 0); break;
4905 case GT_EXPR: val = (cmp >= 0); break;
4906 case LE_EXPR: val = (cmp < 0); break;
4907 case GE_EXPR: val = (cmp > 0); break;
4912 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4915 /* Chose the more restrictive of two < or <= comparisons. */
4916 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4917 && (code2 == LT_EXPR || code2 == LE_EXPR))
4919 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4920 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4922 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4925 /* Likewise chose the more restrictive of two > or >= comparisons. */
4926 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4927 && (code2 == GT_EXPR || code2 == GE_EXPR))
4929 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4930 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4932 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4935 /* Check for singleton ranges. */
4937 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4938 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4939 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4941 /* Check for disjoint ranges. */
4943 && (code1 == LT_EXPR || code1 == LE_EXPR)
4944 && (code2 == GT_EXPR || code2 == GE_EXPR))
4945 return boolean_false_node;
4947 && (code1 == GT_EXPR || code1 == GE_EXPR)
4948 && (code2 == LT_EXPR || code2 == LE_EXPR))
4949 return boolean_false_node;
4952 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4953 NAME's definition is a truth value. See if there are any simplifications
4954 that can be done against the NAME's definition. */
4955 if (TREE_CODE (op1a) == SSA_NAME
4956 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4957 && (integer_zerop (op1b) || integer_onep (op1b)))
4959 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4960 || (code1 == NE_EXPR && integer_onep (op1b)));
4961 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
4962 switch (gimple_code (stmt))
4965 /* Try to simplify by copy-propagating the definition. */
4966 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4969 /* If every argument to the PHI produces the same result when
4970 ANDed with the second comparison, we win.
4971 Do not do this unless the type is bool since we need a bool
4972 result here anyway. */
4973 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4975 tree result = NULL_TREE;
4977 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4979 tree arg = gimple_phi_arg_def (stmt, i);
4981 /* If this PHI has itself as an argument, ignore it.
4982 If all the other args produce the same result,
4984 if (arg == gimple_phi_result (stmt))
4986 else if (TREE_CODE (arg) == INTEGER_CST)
4988 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4991 result = boolean_false_node;
4992 else if (!integer_zerop (result))
4996 result = fold_build2 (code2, boolean_type_node,
4998 else if (!same_bool_comparison_p (result,
5002 else if (TREE_CODE (arg) == SSA_NAME
5003 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5006 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5007 /* In simple cases we can look through PHI nodes,
5008 but we have to be careful with loops.
5010 if (! dom_info_available_p (CDI_DOMINATORS)
5011 || gimple_bb (def_stmt) == gimple_bb (stmt)
5012 || dominated_by_p (CDI_DOMINATORS,
5013 gimple_bb (def_stmt),
5016 temp = and_var_with_comparison (arg, invert, code2,
5022 else if (!same_bool_result_p (result, temp))
5038 /* Try to simplify the AND of two comparisons, specified by
5039 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5040 If this can be simplified to a single expression (without requiring
5041 introducing more SSA variables to hold intermediate values),
5042 return the resulting tree. Otherwise return NULL_TREE.
5043 If the result expression is non-null, it has boolean type. */
5046 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5047 enum tree_code code2, tree op2a, tree op2b)
5049 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5053 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5056 /* Helper function for or_comparisons_1: try to simplify the OR of the
5057 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5058 If INVERT is true, invert the value of VAR before doing the OR.
5059 Return NULL_EXPR if we can't simplify this to a single expression. */
5062 or_var_with_comparison (tree var, bool invert,
5063 enum tree_code code2, tree op2a, tree op2b)
5066 gimple *stmt = SSA_NAME_DEF_STMT (var);
5068 /* We can only deal with variables whose definitions are assignments. */
5069 if (!is_gimple_assign (stmt))
5072 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5073 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5074 Then we only have to consider the simpler non-inverted cases. */
5076 t = and_var_with_comparison_1 (stmt,
5077 invert_tree_comparison (code2, false),
5080 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5081 return canonicalize_bool (t, invert);
5084 /* Try to simplify the OR of the ssa variable defined by the assignment
5085 STMT with the comparison specified by (OP2A CODE2 OP2B).
5086 Return NULL_EXPR if we can't simplify this to a single expression. */
5089 or_var_with_comparison_1 (gimple *stmt,
5090 enum tree_code code2, tree op2a, tree op2b)
5092 tree var = gimple_assign_lhs (stmt);
5093 tree true_test_var = NULL_TREE;
5094 tree false_test_var = NULL_TREE;
5095 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5097 /* Check for identities like (var OR (var != 0)) => true . */
5098 if (TREE_CODE (op2a) == SSA_NAME
5099 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5101 if ((code2 == NE_EXPR && integer_zerop (op2b))
5102 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5104 true_test_var = op2a;
5105 if (var == true_test_var)
5108 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5109 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5111 false_test_var = op2a;
5112 if (var == false_test_var)
5113 return boolean_true_node;
5117 /* If the definition is a comparison, recurse on it. */
5118 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5120 tree t = or_comparisons_1 (innercode,
5121 gimple_assign_rhs1 (stmt),
5122 gimple_assign_rhs2 (stmt),
5130 /* If the definition is an AND or OR expression, we may be able to
5131 simplify by reassociating. */
5132 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5133 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5135 tree inner1 = gimple_assign_rhs1 (stmt);
5136 tree inner2 = gimple_assign_rhs2 (stmt);
5139 tree partial = NULL_TREE;
5140 bool is_or = (innercode == BIT_IOR_EXPR);
5142 /* Check for boolean identities that don't require recursive examination
5144 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5145 inner1 OR (inner1 AND inner2) => inner1
5146 !inner1 OR (inner1 OR inner2) => true
5147 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5149 if (inner1 == true_test_var)
5150 return (is_or ? var : inner1);
5151 else if (inner2 == true_test_var)
5152 return (is_or ? var : inner2);
5153 else if (inner1 == false_test_var)
5156 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5157 else if (inner2 == false_test_var)
5160 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5162 /* Next, redistribute/reassociate the OR across the inner tests.
5163 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5164 if (TREE_CODE (inner1) == SSA_NAME
5165 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5166 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5167 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5168 gimple_assign_rhs1 (s),
5169 gimple_assign_rhs2 (s),
5170 code2, op2a, op2b)))
5172 /* Handle the OR case, where we are reassociating:
5173 (inner1 OR inner2) OR (op2a code2 op2b)
5175 If the partial result t is a constant, we win. Otherwise
5176 continue on to try reassociating with the other inner test. */
5179 if (integer_onep (t))
5180 return boolean_true_node;
5181 else if (integer_zerop (t))
5185 /* Handle the AND case, where we are redistributing:
5186 (inner1 AND inner2) OR (op2a code2 op2b)
5187 => (t AND (inner2 OR (op2a code op2b))) */
5188 else if (integer_zerop (t))
5189 return boolean_false_node;
5191 /* Save partial result for later. */
5195 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5196 if (TREE_CODE (inner2) == SSA_NAME
5197 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5198 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5199 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5200 gimple_assign_rhs1 (s),
5201 gimple_assign_rhs2 (s),
5202 code2, op2a, op2b)))
5204 /* Handle the OR case, where we are reassociating:
5205 (inner1 OR inner2) OR (op2a code2 op2b)
5207 => (t OR partial) */
5210 if (integer_zerop (t))
5212 else if (integer_onep (t))
5213 return boolean_true_node;
5214 /* If both are the same, we can apply the identity
5216 else if (partial && same_bool_result_p (t, partial))
5220 /* Handle the AND case, where we are redistributing:
5221 (inner1 AND inner2) OR (op2a code2 op2b)
5222 => (t AND (inner1 OR (op2a code2 op2b)))
5223 => (t AND partial) */
5226 if (integer_zerop (t))
5227 return boolean_false_node;
5230 /* We already got a simplification for the other
5231 operand to the redistributed AND expression. The
5232 interesting case is when at least one is true.
5233 Or, if both are the same, we can apply the identity
5235 if (integer_onep (partial))
5237 else if (integer_onep (t))
5239 else if (same_bool_result_p (t, partial))
5248 /* Try to simplify the OR of two comparisons defined by
5249 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5250 If this can be done without constructing an intermediate value,
5251 return the resulting tree; otherwise NULL_TREE is returned.
5252 This function is deliberately asymmetric as it recurses on SSA_DEFs
5253 in the first comparison but not the second. */
5256 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5257 enum tree_code code2, tree op2a, tree op2b)
5259 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5261 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5262 if (operand_equal_p (op1a, op2a, 0)
5263 && operand_equal_p (op1b, op2b, 0))
5265 /* Result will be either NULL_TREE, or a combined comparison. */
5266 tree t = combine_comparisons (UNKNOWN_LOCATION,
5267 TRUTH_ORIF_EXPR, code1, code2,
5268 truth_type, op1a, op1b);
5273 /* Likewise the swapped case of the above. */
5274 if (operand_equal_p (op1a, op2b, 0)
5275 && operand_equal_p (op1b, op2a, 0))
5277 /* Result will be either NULL_TREE, or a combined comparison. */
5278 tree t = combine_comparisons (UNKNOWN_LOCATION,
5279 TRUTH_ORIF_EXPR, code1,
5280 swap_tree_comparison (code2),
5281 truth_type, op1a, op1b);
5286 /* If both comparisons are of the same value against constants, we might
5287 be able to merge them. */
5288 if (operand_equal_p (op1a, op2a, 0)
5289 && TREE_CODE (op1b) == INTEGER_CST
5290 && TREE_CODE (op2b) == INTEGER_CST)
5292 int cmp = tree_int_cst_compare (op1b, op2b);
5294 /* If we have (op1a != op1b), we should either be able to
5295 return that or TRUE, depending on whether the constant op1b
5296 also satisfies the other comparison against op2b. */
5297 if (code1 == NE_EXPR)
5303 case EQ_EXPR: val = (cmp == 0); break;
5304 case NE_EXPR: val = (cmp != 0); break;
5305 case LT_EXPR: val = (cmp < 0); break;
5306 case GT_EXPR: val = (cmp > 0); break;
5307 case LE_EXPR: val = (cmp <= 0); break;
5308 case GE_EXPR: val = (cmp >= 0); break;
5309 default: done = false;
5314 return boolean_true_node;
5316 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5319 /* Likewise if the second comparison is a != comparison. */
5320 else if (code2 == NE_EXPR)
5326 case EQ_EXPR: val = (cmp == 0); break;
5327 case NE_EXPR: val = (cmp != 0); break;
5328 case LT_EXPR: val = (cmp > 0); break;
5329 case GT_EXPR: val = (cmp < 0); break;
5330 case LE_EXPR: val = (cmp >= 0); break;
5331 case GE_EXPR: val = (cmp <= 0); break;
5332 default: done = false;
5337 return boolean_true_node;
5339 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5343 /* See if an equality test is redundant with the other comparison. */
5344 else if (code1 == EQ_EXPR)
5349 case EQ_EXPR: val = (cmp == 0); break;
5350 case NE_EXPR: val = (cmp != 0); break;
5351 case LT_EXPR: val = (cmp < 0); break;
5352 case GT_EXPR: val = (cmp > 0); break;
5353 case LE_EXPR: val = (cmp <= 0); break;
5354 case GE_EXPR: val = (cmp >= 0); break;
5359 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5361 else if (code2 == EQ_EXPR)
5366 case EQ_EXPR: val = (cmp == 0); break;
5367 case NE_EXPR: val = (cmp != 0); break;
5368 case LT_EXPR: val = (cmp > 0); break;
5369 case GT_EXPR: val = (cmp < 0); break;
5370 case LE_EXPR: val = (cmp >= 0); break;
5371 case GE_EXPR: val = (cmp <= 0); break;
5376 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5379 /* Chose the less restrictive of two < or <= comparisons. */
5380 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5381 && (code2 == LT_EXPR || code2 == LE_EXPR))
5383 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5384 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5386 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5389 /* Likewise chose the less restrictive of two > or >= comparisons. */
5390 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5391 && (code2 == GT_EXPR || code2 == GE_EXPR))
5393 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5394 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5396 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5399 /* Check for singleton ranges. */
5401 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5402 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5403 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5405 /* Check for less/greater pairs that don't restrict the range at all. */
5407 && (code1 == LT_EXPR || code1 == LE_EXPR)
5408 && (code2 == GT_EXPR || code2 == GE_EXPR))
5409 return boolean_true_node;
5411 && (code1 == GT_EXPR || code1 == GE_EXPR)
5412 && (code2 == LT_EXPR || code2 == LE_EXPR))
5413 return boolean_true_node;
5416 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5417 NAME's definition is a truth value. See if there are any simplifications
5418 that can be done against the NAME's definition. */
5419 if (TREE_CODE (op1a) == SSA_NAME
5420 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5421 && (integer_zerop (op1b) || integer_onep (op1b)))
5423 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5424 || (code1 == NE_EXPR && integer_onep (op1b)));
5425 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5426 switch (gimple_code (stmt))
5429 /* Try to simplify by copy-propagating the definition. */
5430 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5433 /* If every argument to the PHI produces the same result when
5434 ORed with the second comparison, we win.
5435 Do not do this unless the type is bool since we need a bool
5436 result here anyway. */
5437 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5439 tree result = NULL_TREE;
5441 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5443 tree arg = gimple_phi_arg_def (stmt, i);
5445 /* If this PHI has itself as an argument, ignore it.
5446 If all the other args produce the same result,
5448 if (arg == gimple_phi_result (stmt))
5450 else if (TREE_CODE (arg) == INTEGER_CST)
5452 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5455 result = boolean_true_node;
5456 else if (!integer_onep (result))
5460 result = fold_build2 (code2, boolean_type_node,
5462 else if (!same_bool_comparison_p (result,
5466 else if (TREE_CODE (arg) == SSA_NAME
5467 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5470 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5471 /* In simple cases we can look through PHI nodes,
5472 but we have to be careful with loops.
5474 if (! dom_info_available_p (CDI_DOMINATORS)
5475 || gimple_bb (def_stmt) == gimple_bb (stmt)
5476 || dominated_by_p (CDI_DOMINATORS,
5477 gimple_bb (def_stmt),
5480 temp = or_var_with_comparison (arg, invert, code2,
5486 else if (!same_bool_result_p (result, temp))
5502 /* Try to simplify the OR of two comparisons, specified by
5503 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5504 If this can be simplified to a single expression (without requiring
5505 introducing more SSA variables to hold intermediate values),
5506 return the resulting tree. Otherwise return NULL_TREE.
5507 If the result expression is non-null, it has boolean type. */
5510 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5511 enum tree_code code2, tree op2a, tree op2b)
5513 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5517 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5521 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5523 Either NULL_TREE, a simplified but non-constant or a constant
5526 ??? This should go into a gimple-fold-inline.h file to be eventually
5527 privatized with the single valueize function used in the various TUs
5528 to avoid the indirect function call overhead. */
5531 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
5532 tree (*gvalueize) (tree))
5536 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5537 edges if there are intermediate VARYING defs. For this reason
5538 do not follow SSA edges here even though SCCVN can technically
5539 just deal fine with that. */
5540 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
5542 tree res = NULL_TREE;
5543 if (gimple_simplified_result_is_gimple_val (rcode, ops))
5545 else if (mprts_hook)
5546 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
5549 if (dump_file && dump_flags & TDF_DETAILS)
5551 fprintf (dump_file, "Match-and-simplified ");
5552 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
5553 fprintf (dump_file, " to ");
5554 print_generic_expr (dump_file, res, 0);
5555 fprintf (dump_file, "\n");
5561 location_t loc = gimple_location (stmt);
5562 switch (gimple_code (stmt))
5566 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5568 switch (get_gimple_rhs_class (subcode))
5570 case GIMPLE_SINGLE_RHS:
5572 tree rhs = gimple_assign_rhs1 (stmt);
5573 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5575 if (TREE_CODE (rhs) == SSA_NAME)
5577 /* If the RHS is an SSA_NAME, return its known constant value,
5579 return (*valueize) (rhs);
5581 /* Handle propagating invariant addresses into address
5583 else if (TREE_CODE (rhs) == ADDR_EXPR
5584 && !is_gimple_min_invariant (rhs))
5586 HOST_WIDE_INT offset = 0;
5588 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5592 && (CONSTANT_CLASS_P (base)
5593 || decl_address_invariant_p (base)))
5594 return build_invariant_address (TREE_TYPE (rhs),
5597 else if (TREE_CODE (rhs) == CONSTRUCTOR
5598 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5599 && (CONSTRUCTOR_NELTS (rhs)
5600 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5605 vec = XALLOCAVEC (tree,
5606 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
5607 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5609 val = (*valueize) (val);
5610 if (TREE_CODE (val) == INTEGER_CST
5611 || TREE_CODE (val) == REAL_CST
5612 || TREE_CODE (val) == FIXED_CST)
5618 return build_vector (TREE_TYPE (rhs), vec);
5620 if (subcode == OBJ_TYPE_REF)
5622 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5623 /* If callee is constant, we can fold away the wrapper. */
5624 if (is_gimple_min_invariant (val))
5628 if (kind == tcc_reference)
5630 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5631 || TREE_CODE (rhs) == REALPART_EXPR
5632 || TREE_CODE (rhs) == IMAGPART_EXPR)
5633 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5635 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5636 return fold_unary_loc (EXPR_LOCATION (rhs),
5638 TREE_TYPE (rhs), val);
5640 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5641 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5643 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5644 return fold_ternary_loc (EXPR_LOCATION (rhs),
5646 TREE_TYPE (rhs), val,
5647 TREE_OPERAND (rhs, 1),
5648 TREE_OPERAND (rhs, 2));
5650 else if (TREE_CODE (rhs) == MEM_REF
5651 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5653 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5654 if (TREE_CODE (val) == ADDR_EXPR
5655 && is_gimple_min_invariant (val))
5657 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5659 TREE_OPERAND (rhs, 1));
5664 return fold_const_aggregate_ref_1 (rhs, valueize);
5666 else if (kind == tcc_declaration)
5667 return get_symbol_constant_value (rhs);
5671 case GIMPLE_UNARY_RHS:
5674 case GIMPLE_BINARY_RHS:
5675 /* Translate &x + CST into an invariant form suitable for
5676 further propagation. */
5677 if (subcode == POINTER_PLUS_EXPR)
5679 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5680 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5681 if (TREE_CODE (op0) == ADDR_EXPR
5682 && TREE_CODE (op1) == INTEGER_CST)
5684 tree off = fold_convert (ptr_type_node, op1);
5685 return build_fold_addr_expr_loc
5687 fold_build2 (MEM_REF,
5688 TREE_TYPE (TREE_TYPE (op0)),
5689 unshare_expr (op0), off));
5692 /* Canonicalize bool != 0 and bool == 0 appearing after
5693 valueization. While gimple_simplify handles this
5694 it can get confused by the ~X == 1 -> X == 0 transform
5695 which we cant reduce to a SSA name or a constant
5696 (and we have no way to tell gimple_simplify to not
5697 consider those transforms in the first place). */
5698 else if (subcode == EQ_EXPR
5699 || subcode == NE_EXPR)
5701 tree lhs = gimple_assign_lhs (stmt);
5702 tree op0 = gimple_assign_rhs1 (stmt);
5703 if (useless_type_conversion_p (TREE_TYPE (lhs),
5706 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5707 op0 = (*valueize) (op0);
5708 if (TREE_CODE (op0) == INTEGER_CST)
5709 std::swap (op0, op1);
5710 if (TREE_CODE (op1) == INTEGER_CST
5711 && ((subcode == NE_EXPR && integer_zerop (op1))
5712 || (subcode == EQ_EXPR && integer_onep (op1))))
5718 case GIMPLE_TERNARY_RHS:
5720 /* Handle ternary operators that can appear in GIMPLE form. */
5721 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5722 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5723 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5724 return fold_ternary_loc (loc, subcode,
5725 gimple_expr_type (stmt), op0, op1, op2);
5736 gcall *call_stmt = as_a <gcall *> (stmt);
5738 if (gimple_call_internal_p (stmt))
5740 enum tree_code subcode = ERROR_MARK;
5741 switch (gimple_call_internal_fn (stmt))
5743 case IFN_UBSAN_CHECK_ADD:
5744 subcode = PLUS_EXPR;
5746 case IFN_UBSAN_CHECK_SUB:
5747 subcode = MINUS_EXPR;
5749 case IFN_UBSAN_CHECK_MUL:
5750 subcode = MULT_EXPR;
5752 case IFN_BUILTIN_EXPECT:
5754 tree arg0 = gimple_call_arg (stmt, 0);
5755 tree op0 = (*valueize) (arg0);
5756 if (TREE_CODE (op0) == INTEGER_CST)
5763 tree arg0 = gimple_call_arg (stmt, 0);
5764 tree arg1 = gimple_call_arg (stmt, 1);
5765 tree op0 = (*valueize) (arg0);
5766 tree op1 = (*valueize) (arg1);
5768 if (TREE_CODE (op0) != INTEGER_CST
5769 || TREE_CODE (op1) != INTEGER_CST)
5774 /* x * 0 = 0 * x = 0 without overflow. */
5775 if (integer_zerop (op0) || integer_zerop (op1))
5776 return build_zero_cst (TREE_TYPE (arg0));
5779 /* y - y = 0 without overflow. */
5780 if (operand_equal_p (op0, op1, 0))
5781 return build_zero_cst (TREE_TYPE (arg0));
5788 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5790 && TREE_CODE (res) == INTEGER_CST
5791 && !TREE_OVERFLOW (res))
5796 fn = (*valueize) (gimple_call_fn (stmt));
5797 if (TREE_CODE (fn) == ADDR_EXPR
5798 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5799 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5800 && gimple_builtin_call_types_compatible_p (stmt,
5801 TREE_OPERAND (fn, 0)))
5803 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5806 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5807 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5808 retval = fold_builtin_call_array (loc,
5809 gimple_call_return_type (call_stmt),
5810 fn, gimple_call_num_args (stmt), args);
5813 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5814 STRIP_NOPS (retval);
5815 retval = fold_convert (gimple_call_return_type (call_stmt),
5828 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5829 Returns NULL_TREE if folding to a constant is not possible, otherwise
5830 returns a constant according to is_gimple_min_invariant. */
5833 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
5835 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5836 if (res && is_gimple_min_invariant (res))
5842 /* The following set of functions are supposed to fold references using
5843 their constant initializers. */
5845 /* See if we can find constructor defining value of BASE.
5846 When we know the consructor with constant offset (such as
5847 base is array[40] and we do know constructor of array), then
5848 BIT_OFFSET is adjusted accordingly.
5850 As a special case, return error_mark_node when constructor
5851 is not explicitly available, but it is known to be zero
5852 such as 'static const int a;'. */
5854 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5855 tree (*valueize)(tree))
5857 HOST_WIDE_INT bit_offset2, size, max_size;
5860 if (TREE_CODE (base) == MEM_REF)
5862 if (!integer_zerop (TREE_OPERAND (base, 1)))
5864 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5866 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5871 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5872 base = valueize (TREE_OPERAND (base, 0));
5873 if (!base || TREE_CODE (base) != ADDR_EXPR)
5875 base = TREE_OPERAND (base, 0);
5878 && TREE_CODE (base) == SSA_NAME)
5879 base = valueize (base);
5881 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5882 DECL_INITIAL. If BASE is a nested reference into another
5883 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5884 the inner reference. */
5885 switch (TREE_CODE (base))
5890 tree init = ctor_for_folding (base);
5892 /* Our semantic is exact opposite of ctor_for_folding;
5893 NULL means unknown, while error_mark_node is 0. */
5894 if (init == error_mark_node)
5897 return error_mark_node;
5901 case VIEW_CONVERT_EXPR:
5902 return get_base_constructor (TREE_OPERAND (base, 0),
5903 bit_offset, valueize);
5907 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5909 if (max_size == -1 || size != max_size)
5911 *bit_offset += bit_offset2;
5912 return get_base_constructor (base, bit_offset, valueize);
5918 if (CONSTANT_CLASS_P (base))
5925 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5926 SIZE to the memory at bit OFFSET. */
5929 fold_array_ctor_reference (tree type, tree ctor,
5930 unsigned HOST_WIDE_INT offset,
5931 unsigned HOST_WIDE_INT size,
5934 offset_int low_bound;
5935 offset_int elt_size;
5936 offset_int access_index;
5937 tree domain_type = NULL_TREE;
5938 HOST_WIDE_INT inner_offset;
5940 /* Compute low bound and elt size. */
5941 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5942 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5943 if (domain_type && TYPE_MIN_VALUE (domain_type))
5945 /* Static constructors for variably sized objects makes no sense. */
5946 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
5948 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5952 /* Static constructors for variably sized objects makes no sense. */
5953 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
5955 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5957 /* We can handle only constantly sized accesses that are known to not
5958 be larger than size of array element. */
5959 if (!TYPE_SIZE_UNIT (type)
5960 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5961 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
5965 /* Compute the array index we look for. */
5966 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5968 access_index += low_bound;
5970 /* And offset within the access. */
5971 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5973 /* See if the array field is large enough to span whole access. We do not
5974 care to fold accesses spanning multiple array indexes. */
5975 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5977 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5978 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
5980 /* When memory is not explicitely mentioned in constructor,
5981 it is 0 (or out of range). */
5982 return build_zero_cst (type);
5985 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5986 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5989 fold_nonarray_ctor_reference (tree type, tree ctor,
5990 unsigned HOST_WIDE_INT offset,
5991 unsigned HOST_WIDE_INT size,
5994 unsigned HOST_WIDE_INT cnt;
5997 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6000 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6001 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6002 tree field_size = DECL_SIZE (cfield);
6003 offset_int bitoffset;
6004 offset_int bitoffset_end, access_end;
6006 /* Variable sized objects in static constructors makes no sense,
6007 but field_size can be NULL for flexible array members. */
6008 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6009 && TREE_CODE (byte_offset) == INTEGER_CST
6010 && (field_size != NULL_TREE
6011 ? TREE_CODE (field_size) == INTEGER_CST
6012 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6014 /* Compute bit offset of the field. */
6015 bitoffset = (wi::to_offset (field_offset)
6016 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6017 /* Compute bit offset where the field ends. */
6018 if (field_size != NULL_TREE)
6019 bitoffset_end = bitoffset + wi::to_offset (field_size);
6023 access_end = offset_int (offset) + size;
6025 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6026 [BITOFFSET, BITOFFSET_END)? */
6027 if (wi::cmps (access_end, bitoffset) > 0
6028 && (field_size == NULL_TREE
6029 || wi::lts_p (offset, bitoffset_end)))
6031 offset_int inner_offset = offset_int (offset) - bitoffset;
6032 /* We do have overlap. Now see if field is large enough to
6033 cover the access. Give up for accesses spanning multiple
6035 if (wi::cmps (access_end, bitoffset_end) > 0)
6037 if (offset < bitoffset)
6039 return fold_ctor_reference (type, cval,
6040 inner_offset.to_uhwi (), size,
6044 /* When memory is not explicitely mentioned in constructor, it is 0. */
6045 return build_zero_cst (type);
6048 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
6049 to the memory at bit OFFSET. */
6052 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6053 unsigned HOST_WIDE_INT size, tree from_decl)
6057 /* We found the field with exact match. */
6058 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6060 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6062 /* We are at the end of walk, see if we can view convert the
6064 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6065 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6066 && !compare_tree_int (TYPE_SIZE (type), size)
6067 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6069 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6070 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6072 STRIP_USELESS_TYPE_CONVERSION (ret);
6075 /* For constants and byte-aligned/sized reads try to go through
6076 native_encode/interpret. */
6077 if (CONSTANT_CLASS_P (ctor)
6078 && BITS_PER_UNIT == 8
6079 && offset % BITS_PER_UNIT == 0
6080 && size % BITS_PER_UNIT == 0
6081 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6083 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6084 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6085 offset / BITS_PER_UNIT);
6087 return native_interpret_expr (type, buf, len);
6089 if (TREE_CODE (ctor) == CONSTRUCTOR)
6092 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6093 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6094 return fold_array_ctor_reference (type, ctor, offset, size,
6097 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6104 /* Return the tree representing the element referenced by T if T is an
6105 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6106 names using VALUEIZE. Return NULL_TREE otherwise. */
6109 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6111 tree ctor, idx, base;
6112 HOST_WIDE_INT offset, size, max_size;
6116 if (TREE_THIS_VOLATILE (t))
6120 return get_symbol_constant_value (t);
6122 tem = fold_read_from_constant_string (t);
6126 switch (TREE_CODE (t))
6129 case ARRAY_RANGE_REF:
6130 /* Constant indexes are handled well by get_base_constructor.
6131 Only special case variable offsets.
6132 FIXME: This code can't handle nested references with variable indexes
6133 (they will be handled only by iteration of ccp). Perhaps we can bring
6134 get_ref_base_and_extent here and make it use a valueize callback. */
6135 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6137 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6138 && TREE_CODE (idx) == INTEGER_CST)
6140 tree low_bound, unit_size;
6142 /* If the resulting bit-offset is constant, track it. */
6143 if ((low_bound = array_ref_low_bound (t),
6144 TREE_CODE (low_bound) == INTEGER_CST)
6145 && (unit_size = array_ref_element_size (t),
6146 tree_fits_uhwi_p (unit_size)))
6149 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6150 TYPE_PRECISION (TREE_TYPE (idx)));
6152 if (wi::fits_shwi_p (woffset))
6154 offset = woffset.to_shwi ();
6155 /* TODO: This code seems wrong, multiply then check
6156 to see if it fits. */
6157 offset *= tree_to_uhwi (unit_size);
6158 offset *= BITS_PER_UNIT;
6160 base = TREE_OPERAND (t, 0);
6161 ctor = get_base_constructor (base, &offset, valueize);
6162 /* Empty constructor. Always fold to 0. */
6163 if (ctor == error_mark_node)
6164 return build_zero_cst (TREE_TYPE (t));
6165 /* Out of bound array access. Value is undefined,
6169 /* We can not determine ctor. */
6172 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6173 tree_to_uhwi (unit_size)
6183 case TARGET_MEM_REF:
6185 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6186 ctor = get_base_constructor (base, &offset, valueize);
6188 /* Empty constructor. Always fold to 0. */
6189 if (ctor == error_mark_node)
6190 return build_zero_cst (TREE_TYPE (t));
6191 /* We do not know precise address. */
6192 if (max_size == -1 || max_size != size)
6194 /* We can not determine ctor. */
6198 /* Out of bound array access. Value is undefined, but don't fold. */
6202 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6208 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6209 if (c && TREE_CODE (c) == COMPLEX_CST)
6210 return fold_build1_loc (EXPR_LOCATION (t),
6211 TREE_CODE (t), TREE_TYPE (t), c);
6223 fold_const_aggregate_ref (tree t)
6225 return fold_const_aggregate_ref_1 (t, NULL);
6228 /* Lookup virtual method with index TOKEN in a virtual table V
6230 Set CAN_REFER if non-NULL to false if method
6231 is not referable or if the virtual table is ill-formed (such as rewriten
6232 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6235 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6237 unsigned HOST_WIDE_INT offset,
6240 tree vtable = v, init, fn;
6241 unsigned HOST_WIDE_INT size;
6242 unsigned HOST_WIDE_INT elt_size, access_index;
6248 /* First of all double check we have virtual table. */
6249 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6251 /* Pass down that we lost track of the target. */
6257 init = ctor_for_folding (v);
6259 /* The virtual tables should always be born with constructors
6260 and we always should assume that they are avaialble for
6261 folding. At the moment we do not stream them in all cases,
6262 but it should never happen that ctor seem unreachable. */
6264 if (init == error_mark_node)
6266 gcc_assert (in_lto_p);
6267 /* Pass down that we lost track of the target. */
6272 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6273 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6274 offset *= BITS_PER_UNIT;
6275 offset += token * size;
6277 /* Lookup the value in the constructor that is assumed to be array.
6278 This is equivalent to
6279 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6280 offset, size, NULL);
6281 but in a constant time. We expect that frontend produced a simple
6282 array without indexed initializers. */
6284 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6285 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6286 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6287 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6289 access_index = offset / BITS_PER_UNIT / elt_size;
6290 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6292 /* This code makes an assumption that there are no
6293 indexed fileds produced by C++ FE, so we can directly index the array. */
6294 if (access_index < CONSTRUCTOR_NELTS (init))
6296 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6297 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6303 /* For type inconsistent program we may end up looking up virtual method
6304 in virtual table that does not contain TOKEN entries. We may overrun
6305 the virtual table and pick up a constant or RTTI info pointer.
6306 In any case the call is undefined. */
6308 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6309 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6310 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6313 fn = TREE_OPERAND (fn, 0);
6315 /* When cgraph node is missing and function is not public, we cannot
6316 devirtualize. This can happen in WHOPR when the actual method
6317 ends up in other partition, because we found devirtualization
6318 possibility too late. */
6319 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6330 /* Make sure we create a cgraph node for functions we'll reference.
6331 They can be non-existent if the reference comes from an entry
6332 of an external vtable for example. */
6333 cgraph_node::get_create (fn);
6338 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6339 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6340 KNOWN_BINFO carries the binfo describing the true type of
6341 OBJ_TYPE_REF_OBJECT(REF).
6342 Set CAN_REFER if non-NULL to false if method
6343 is not referable or if the virtual table is ill-formed (such as rewriten
6344 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6347 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6350 unsigned HOST_WIDE_INT offset;
6353 v = BINFO_VTABLE (known_binfo);
6354 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6358 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6364 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6367 /* Given a pointer value OP0, return a simplified version of an
6368 indirection through OP0, or NULL_TREE if no simplification is
6369 possible. Note that the resulting type may be different from
6370 the type pointed to in the sense that it is still compatible
6371 from the langhooks point of view. */
6374 gimple_fold_indirect_ref (tree t)
6376 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6381 subtype = TREE_TYPE (sub);
6382 if (!POINTER_TYPE_P (subtype))
6385 if (TREE_CODE (sub) == ADDR_EXPR)
6387 tree op = TREE_OPERAND (sub, 0);
6388 tree optype = TREE_TYPE (op);
6390 if (useless_type_conversion_p (type, optype))
6393 /* *(foo *)&fooarray => fooarray[0] */
6394 if (TREE_CODE (optype) == ARRAY_TYPE
6395 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6396 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6398 tree type_domain = TYPE_DOMAIN (optype);
6399 tree min_val = size_zero_node;
6400 if (type_domain && TYPE_MIN_VALUE (type_domain))
6401 min_val = TYPE_MIN_VALUE (type_domain);
6402 if (TREE_CODE (min_val) == INTEGER_CST)
6403 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6405 /* *(foo *)&complexfoo => __real__ complexfoo */
6406 else if (TREE_CODE (optype) == COMPLEX_TYPE
6407 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6408 return fold_build1 (REALPART_EXPR, type, op);
6409 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6410 else if (TREE_CODE (optype) == VECTOR_TYPE
6411 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6413 tree part_width = TYPE_SIZE (type);
6414 tree index = bitsize_int (0);
6415 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6419 /* *(p + CST) -> ... */
6420 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6421 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6423 tree addr = TREE_OPERAND (sub, 0);
6424 tree off = TREE_OPERAND (sub, 1);
6428 addrtype = TREE_TYPE (addr);
6430 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6431 if (TREE_CODE (addr) == ADDR_EXPR
6432 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6433 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6434 && tree_fits_uhwi_p (off))
6436 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6437 tree part_width = TYPE_SIZE (type);
6438 unsigned HOST_WIDE_INT part_widthi
6439 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6440 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6441 tree index = bitsize_int (indexi);
6442 if (offset / part_widthi
6443 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6444 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6448 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6449 if (TREE_CODE (addr) == ADDR_EXPR
6450 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6451 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6453 tree size = TYPE_SIZE_UNIT (type);
6454 if (tree_int_cst_equal (size, off))
6455 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6458 /* *(p + CST) -> MEM_REF <p, CST>. */
6459 if (TREE_CODE (addr) != ADDR_EXPR
6460 || DECL_P (TREE_OPERAND (addr, 0)))
6461 return fold_build2 (MEM_REF, type,
6463 wide_int_to_tree (ptype, off));
6466 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6467 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6468 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6469 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6472 tree min_val = size_zero_node;
6474 sub = gimple_fold_indirect_ref (sub);
6476 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6477 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6478 if (type_domain && TYPE_MIN_VALUE (type_domain))
6479 min_val = TYPE_MIN_VALUE (type_domain);
6480 if (TREE_CODE (min_val) == INTEGER_CST)
6481 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6487 /* Return true if CODE is an operation that when operating on signed
6488 integer types involves undefined behavior on overflow and the
6489 operation can be expressed with unsigned arithmetic. */
6492 arith_code_with_undefined_signed_overflow (tree_code code)
6500 case POINTER_PLUS_EXPR:
6507 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6508 operation that can be transformed to unsigned arithmetic by converting
6509 its operand, carrying out the operation in the corresponding unsigned
6510 type and converting the result back to the original type.
6512 Returns a sequence of statements that replace STMT and also contain
6513 a modified form of STMT itself. */
6516 rewrite_to_defined_overflow (gimple *stmt)
6518 if (dump_file && (dump_flags & TDF_DETAILS))
6520 fprintf (dump_file, "rewriting stmt with undefined signed "
6522 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6525 tree lhs = gimple_assign_lhs (stmt);
6526 tree type = unsigned_type_for (TREE_TYPE (lhs));
6527 gimple_seq stmts = NULL;
6528 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6530 tree op = gimple_op (stmt, i);
6531 op = gimple_convert (&stmts, type, op);
6532 gimple_set_op (stmt, i, op);
6534 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6535 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6536 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6537 gimple_seq_add_stmt (&stmts, stmt);
6538 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6539 gimple_seq_add_stmt (&stmts, cvt);
6545 /* The valueization hook we use for the gimple_build API simplification.
6546 This makes us match fold_buildN behavior by only combining with
6547 statements in the sequence(s) we are currently building. */
6550 gimple_build_valueize (tree op)
6552 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6557 /* Build the expression CODE OP0 of type TYPE with location LOC,
6558 simplifying it first if possible. Returns the built
6559 expression value and appends statements possibly defining it
6563 gimple_build (gimple_seq *seq, location_t loc,
6564 enum tree_code code, tree type, tree op0)
6566 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6569 res = create_tmp_reg_or_ssa_name (type);
6571 if (code == REALPART_EXPR
6572 || code == IMAGPART_EXPR
6573 || code == VIEW_CONVERT_EXPR)
6574 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6576 stmt = gimple_build_assign (res, code, op0);
6577 gimple_set_location (stmt, loc);
6578 gimple_seq_add_stmt_without_update (seq, stmt);
6583 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6584 simplifying it first if possible. Returns the built
6585 expression value and appends statements possibly defining it
6589 gimple_build (gimple_seq *seq, location_t loc,
6590 enum tree_code code, tree type, tree op0, tree op1)
6592 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6595 res = create_tmp_reg_or_ssa_name (type);
6596 gimple *stmt = gimple_build_assign (res, code, op0, op1);
6597 gimple_set_location (stmt, loc);
6598 gimple_seq_add_stmt_without_update (seq, stmt);
6603 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6604 simplifying it first if possible. Returns the built
6605 expression value and appends statements possibly defining it
6609 gimple_build (gimple_seq *seq, location_t loc,
6610 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6612 tree res = gimple_simplify (code, type, op0, op1, op2,
6613 seq, gimple_build_valueize);
6616 res = create_tmp_reg_or_ssa_name (type);
6618 if (code == BIT_FIELD_REF)
6619 stmt = gimple_build_assign (res, code,
6620 build3 (code, type, op0, op1, op2));
6622 stmt = gimple_build_assign (res, code, op0, op1, op2);
6623 gimple_set_location (stmt, loc);
6624 gimple_seq_add_stmt_without_update (seq, stmt);
6629 /* Build the call FN (ARG0) with a result of type TYPE
6630 (or no result if TYPE is void) with location LOC,
6631 simplifying it first if possible. Returns the built
6632 expression value (or NULL_TREE if TYPE is void) and appends
6633 statements possibly defining it to SEQ. */
6636 gimple_build (gimple_seq *seq, location_t loc,
6637 enum built_in_function fn, tree type, tree arg0)
6639 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6642 tree decl = builtin_decl_implicit (fn);
6643 gimple *stmt = gimple_build_call (decl, 1, arg0);
6644 if (!VOID_TYPE_P (type))
6646 res = create_tmp_reg_or_ssa_name (type);
6647 gimple_call_set_lhs (stmt, res);
6649 gimple_set_location (stmt, loc);
6650 gimple_seq_add_stmt_without_update (seq, stmt);
6655 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6656 (or no result if TYPE is void) with location LOC,
6657 simplifying it first if possible. Returns the built
6658 expression value (or NULL_TREE if TYPE is void) and appends
6659 statements possibly defining it to SEQ. */
6662 gimple_build (gimple_seq *seq, location_t loc,
6663 enum built_in_function fn, tree type, tree arg0, tree arg1)
6665 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6668 tree decl = builtin_decl_implicit (fn);
6669 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
6670 if (!VOID_TYPE_P (type))
6672 res = create_tmp_reg_or_ssa_name (type);
6673 gimple_call_set_lhs (stmt, res);
6675 gimple_set_location (stmt, loc);
6676 gimple_seq_add_stmt_without_update (seq, stmt);
6681 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6682 (or no result if TYPE is void) with location LOC,
6683 simplifying it first if possible. Returns the built
6684 expression value (or NULL_TREE if TYPE is void) and appends
6685 statements possibly defining it to SEQ. */
6688 gimple_build (gimple_seq *seq, location_t loc,
6689 enum built_in_function fn, tree type,
6690 tree arg0, tree arg1, tree arg2)
6692 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6693 seq, gimple_build_valueize);
6696 tree decl = builtin_decl_implicit (fn);
6697 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6698 if (!VOID_TYPE_P (type))
6700 res = create_tmp_reg_or_ssa_name (type);
6701 gimple_call_set_lhs (stmt, res);
6703 gimple_set_location (stmt, loc);
6704 gimple_seq_add_stmt_without_update (seq, stmt);
6709 /* Build the conversion (TYPE) OP with a result of type TYPE
6710 with location LOC if such conversion is neccesary in GIMPLE,
6711 simplifying it first.
6712 Returns the built expression value and appends
6713 statements possibly defining it to SEQ. */
6716 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6718 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6720 return gimple_build (seq, loc, NOP_EXPR, type, op);
6723 /* Build the conversion (ptrofftype) OP with a result of a type
6724 compatible with ptrofftype with location LOC if such conversion
6725 is neccesary in GIMPLE, simplifying it first.
6726 Returns the built expression value and appends
6727 statements possibly defining it to SEQ. */
6730 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6732 if (ptrofftype_p (TREE_TYPE (op)))
6734 return gimple_convert (seq, loc, sizetype, op);
6737 /* Return true if the result of assignment STMT is known to be non-negative.
6738 If the return value is based on the assumption that signed overflow is
6739 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6740 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6743 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6746 enum tree_code code = gimple_assign_rhs_code (stmt);
6747 switch (get_gimple_rhs_class (code))
6749 case GIMPLE_UNARY_RHS:
6750 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6751 gimple_expr_type (stmt),
6752 gimple_assign_rhs1 (stmt),
6753 strict_overflow_p, depth);
6754 case GIMPLE_BINARY_RHS:
6755 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6756 gimple_expr_type (stmt),
6757 gimple_assign_rhs1 (stmt),
6758 gimple_assign_rhs2 (stmt),
6759 strict_overflow_p, depth);
6760 case GIMPLE_TERNARY_RHS:
6762 case GIMPLE_SINGLE_RHS:
6763 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6764 strict_overflow_p, depth);
6765 case GIMPLE_INVALID_RHS:
6771 /* Return true if return value of call STMT is known to be non-negative.
6772 If the return value is based on the assumption that signed overflow is
6773 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6774 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6777 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6780 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6781 gimple_call_arg (stmt, 0) : NULL_TREE;
6782 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6783 gimple_call_arg (stmt, 1) : NULL_TREE;
6785 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
6786 gimple_call_combined_fn (stmt),
6789 strict_overflow_p, depth);
6792 /* Return true if return value of call STMT is known to be non-negative.
6793 If the return value is based on the assumption that signed overflow is
6794 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6795 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6798 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6801 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6803 tree arg = gimple_phi_arg_def (stmt, i);
6804 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6810 /* Return true if STMT is known to compute a non-negative value.
6811 If the return value is based on the assumption that signed overflow is
6812 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6813 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6816 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6819 switch (gimple_code (stmt))
6822 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6825 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6828 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6835 /* Return true if the floating-point value computed by assignment STMT
6836 is known to have an integer value. We also allow +Inf, -Inf and NaN
6837 to be considered integer values. Return false for signaling NaN.
6839 DEPTH is the current nesting depth of the query. */
6842 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6844 enum tree_code code = gimple_assign_rhs_code (stmt);
6845 switch (get_gimple_rhs_class (code))
6847 case GIMPLE_UNARY_RHS:
6848 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6849 gimple_assign_rhs1 (stmt), depth);
6850 case GIMPLE_BINARY_RHS:
6851 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6852 gimple_assign_rhs1 (stmt),
6853 gimple_assign_rhs2 (stmt), depth);
6854 case GIMPLE_TERNARY_RHS:
6856 case GIMPLE_SINGLE_RHS:
6857 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6858 case GIMPLE_INVALID_RHS:
6864 /* Return true if the floating-point value computed by call STMT is known
6865 to have an integer value. We also allow +Inf, -Inf and NaN to be
6866 considered integer values. Return false for signaling NaN.
6868 DEPTH is the current nesting depth of the query. */
6871 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6873 tree arg0 = (gimple_call_num_args (stmt) > 0
6874 ? gimple_call_arg (stmt, 0)
6876 tree arg1 = (gimple_call_num_args (stmt) > 1
6877 ? gimple_call_arg (stmt, 1)
6879 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
6883 /* Return true if the floating-point result of phi STMT is known to have
6884 an integer value. We also allow +Inf, -Inf and NaN to be considered
6885 integer values. Return false for signaling NaN.
6887 DEPTH is the current nesting depth of the query. */
6890 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6892 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6894 tree arg = gimple_phi_arg_def (stmt, i);
6895 if (!integer_valued_real_single_p (arg, depth + 1))
6901 /* Return true if the floating-point value computed by STMT is known
6902 to have an integer value. We also allow +Inf, -Inf and NaN to be
6903 considered integer values. Return false for signaling NaN.
6905 DEPTH is the current nesting depth of the query. */
6908 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6910 switch (gimple_code (stmt))
6913 return gimple_assign_integer_valued_real_p (stmt, depth);
6915 return gimple_call_integer_valued_real_p (stmt, depth);
6917 return gimple_phi_integer_valued_real_p (stmt, depth);