1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
62 struct target_builtins *this_target_builtins = &default_target_builtins;
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
194 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
196 static rtx expand_builtin_object_size (tree);
197 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
198 enum built_in_function);
199 static void maybe_emit_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_free_warning (tree);
202 static tree fold_builtin_object_size (tree, tree);
203 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
204 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
205 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
209 static bool init_target_chars (void);
211 static unsigned HOST_WIDE_INT target_newline;
212 static unsigned HOST_WIDE_INT target_percent;
213 static unsigned HOST_WIDE_INT target_c;
214 static unsigned HOST_WIDE_INT target_s;
215 static char target_percent_c[3];
216 static char target_percent_s[3];
217 static char target_percent_s_newline[4];
218 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
219 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
220 static tree do_mpfr_arg2 (tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_arg3 (tree, tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_sincos (tree, tree, tree);
225 static tree do_mpfr_bessel_n (tree, tree, tree,
226 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_remquo (tree, tree, tree);
229 static tree do_mpfr_lgamma_r (tree, tree, tree);
231 /* Return true if NAME starts with __builtin_ or __sync_. */
234 is_builtin_name (const char *name)
236 if (strncmp (name, "__builtin_", 10) == 0)
238 if (strncmp (name, "__sync_", 7) == 0)
244 /* Return true if DECL is a function symbol representing a built-in. */
247 is_builtin_fn (tree decl)
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
258 called_as_built_in (tree node)
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
267 /* Compute values M and N such that M divides (address of EXP - N) and
268 such that N < M. Store N in *BITPOSP and return M.
270 Note that the address (and thus the alignment) computed here is based
271 on the address to which a symbol resolves, whereas DECL_ALIGN is based
272 on the address at which an object is actually located. These two
273 addresses are not always the same. For example, on ARM targets,
274 the address &foo of a Thumb function foo() has the lowest bit set,
275 whereas foo() itself starts on an even address. */
278 get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
280 HOST_WIDE_INT bitsize, bitpos;
282 enum machine_mode mode;
283 int unsignedp, volatilep;
284 unsigned int align, inner;
286 /* Get the innermost object and the constant (bitpos) and possibly
287 variable (offset) offset of the access. */
288 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
289 &mode, &unsignedp, &volatilep, true);
291 /* Extract alignment information from the innermost object and
292 possibly adjust bitpos and offset. */
293 if (TREE_CODE (exp) == CONST_DECL)
294 exp = DECL_INITIAL (exp);
296 && TREE_CODE (exp) != LABEL_DECL)
298 if (TREE_CODE (exp) == FUNCTION_DECL)
300 /* Function addresses can encode extra information besides their
301 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
302 allows the low bit to be used as a virtual bit, we know
303 that the address itself must be 2-byte aligned. */
304 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
305 align = 2 * BITS_PER_UNIT;
307 align = BITS_PER_UNIT;
310 align = DECL_ALIGN (exp);
312 else if (CONSTANT_CLASS_P (exp))
314 align = TYPE_ALIGN (TREE_TYPE (exp));
315 #ifdef CONSTANT_ALIGNMENT
316 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
319 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
320 align = TYPE_ALIGN (TREE_TYPE (exp));
321 else if (TREE_CODE (exp) == INDIRECT_REF)
322 align = TYPE_ALIGN (TREE_TYPE (exp));
323 else if (TREE_CODE (exp) == MEM_REF)
325 tree addr = TREE_OPERAND (exp, 0);
326 struct ptr_info_def *pi;
327 if (TREE_CODE (addr) == BIT_AND_EXPR
328 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
330 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
331 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
332 align *= BITS_PER_UNIT;
333 addr = TREE_OPERAND (addr, 0);
336 align = BITS_PER_UNIT;
337 if (TREE_CODE (addr) == SSA_NAME
338 && (pi = SSA_NAME_PTR_INFO (addr)))
340 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
341 align = MAX (pi->align * BITS_PER_UNIT, align);
343 else if (TREE_CODE (addr) == ADDR_EXPR)
344 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
345 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
347 else if (TREE_CODE (exp) == TARGET_MEM_REF)
349 struct ptr_info_def *pi;
350 tree addr = TMR_BASE (exp);
351 if (TREE_CODE (addr) == BIT_AND_EXPR
352 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
354 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
355 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
356 align *= BITS_PER_UNIT;
357 addr = TREE_OPERAND (addr, 0);
360 align = BITS_PER_UNIT;
361 if (TREE_CODE (addr) == SSA_NAME
362 && (pi = SSA_NAME_PTR_INFO (addr)))
364 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
365 align = MAX (pi->align * BITS_PER_UNIT, align);
367 else if (TREE_CODE (addr) == ADDR_EXPR)
368 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
369 if (TMR_OFFSET (exp))
370 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
371 if (TMR_INDEX (exp) && TMR_STEP (exp))
373 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
374 align = MIN (align, (step & -step) * BITS_PER_UNIT);
376 else if (TMR_INDEX (exp))
377 align = BITS_PER_UNIT;
378 if (TMR_INDEX2 (exp))
379 align = BITS_PER_UNIT;
382 align = BITS_PER_UNIT;
384 /* If there is a non-constant offset part extract the maximum
385 alignment that can prevail. */
391 if (TREE_CODE (offset) == PLUS_EXPR)
393 next_offset = TREE_OPERAND (offset, 0);
394 offset = TREE_OPERAND (offset, 1);
398 if (host_integerp (offset, 1))
400 /* Any overflow in calculating offset_bits won't change
403 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
406 inner = MIN (inner, (offset_bits & -offset_bits));
408 else if (TREE_CODE (offset) == MULT_EXPR
409 && host_integerp (TREE_OPERAND (offset, 1), 1))
411 /* Any overflow in calculating offset_factor won't change
413 unsigned offset_factor
414 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
418 inner = MIN (inner, (offset_factor & -offset_factor));
422 inner = MIN (inner, BITS_PER_UNIT);
425 offset = next_offset;
428 /* Alignment is innermost object alignment adjusted by the constant
429 and non-constant offset parts. */
430 align = MIN (align, inner);
431 bitpos = bitpos & (align - 1);
437 /* Return the alignment in bits of EXP, an object. */
440 get_object_alignment (tree exp)
442 unsigned HOST_WIDE_INT bitpos = 0;
445 align = get_object_alignment_1 (exp, &bitpos);
447 /* align and bitpos now specify known low bits of the pointer.
448 ptr & (align - 1) == bitpos. */
451 align = (bitpos & -bitpos);
456 /* Return the alignment in bits of EXP, a pointer valued expression.
457 The alignment returned is, by default, the alignment of the thing that
458 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
460 Otherwise, look at the expression to see if we can do better, i.e., if the
461 expression is actually pointing at an object whose alignment is tighter. */
464 get_pointer_alignment (tree exp)
468 if (TREE_CODE (exp) == ADDR_EXPR)
469 return get_object_alignment (TREE_OPERAND (exp, 0));
470 else if (TREE_CODE (exp) == SSA_NAME
471 && POINTER_TYPE_P (TREE_TYPE (exp)))
473 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
476 return BITS_PER_UNIT;
477 if (pi->misalign != 0)
478 align = (pi->misalign & -pi->misalign);
481 return align * BITS_PER_UNIT;
484 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
487 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
488 way, because it could contain a zero byte in the middle.
489 TREE_STRING_LENGTH is the size of the character array, not the string.
491 ONLY_VALUE should be nonzero if the result is not going to be emitted
492 into the instruction stream and zero if it is going to be expanded.
493 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
494 is returned, otherwise NULL, since
495 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
496 evaluate the side-effects.
498 The value returned is of type `ssizetype'.
500 Unfortunately, string_constant can't access the values of const char
501 arrays with initializers, so neither can we do so here. */
504 c_strlen (tree src, int only_value)
507 HOST_WIDE_INT offset;
513 if (TREE_CODE (src) == COND_EXPR
514 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
518 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
519 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
520 if (tree_int_cst_equal (len1, len2))
524 if (TREE_CODE (src) == COMPOUND_EXPR
525 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
526 return c_strlen (TREE_OPERAND (src, 1), only_value);
528 loc = EXPR_LOC_OR_HERE (src);
530 src = string_constant (src, &offset_node);
534 max = TREE_STRING_LENGTH (src) - 1;
535 ptr = TREE_STRING_POINTER (src);
537 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
539 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
540 compute the offset to the following null if we don't know where to
541 start searching for it. */
544 for (i = 0; i < max; i++)
548 /* We don't know the starting offset, but we do know that the string
549 has no internal zero bytes. We can assume that the offset falls
550 within the bounds of the string; otherwise, the programmer deserves
551 what he gets. Subtract the offset from the length of the string,
552 and return that. This would perhaps not be valid if we were dealing
553 with named arrays in addition to literal string constants. */
555 return size_diffop_loc (loc, size_int (max), offset_node);
558 /* We have a known offset into the string. Start searching there for
559 a null character if we can represent it as a single HOST_WIDE_INT. */
560 if (offset_node == 0)
562 else if (! host_integerp (offset_node, 0))
565 offset = tree_low_cst (offset_node, 0);
567 /* If the offset is known to be out of bounds, warn, and call strlen at
569 if (offset < 0 || offset > max)
571 /* Suppress multiple warnings for propagated constant strings. */
572 if (! TREE_NO_WARNING (src))
574 warning_at (loc, 0, "offset outside bounds of constant string");
575 TREE_NO_WARNING (src) = 1;
580 /* Use strlen to search for the first zero byte. Since any strings
581 constructed with build_string will have nulls appended, we win even
582 if we get handed something like (char[4])"abcd".
584 Since OFFSET is our starting index into the string, no further
585 calculation is needed. */
586 return ssize_int (strlen (ptr + offset));
589 /* Return a char pointer for a C string if it is a string constant
590 or sum of string constant and integer constant. */
597 src = string_constant (src, &offset_node);
601 if (offset_node == 0)
602 return TREE_STRING_POINTER (src);
603 else if (!host_integerp (offset_node, 1)
604 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
607 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
610 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
611 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
614 c_readstr (const char *str, enum machine_mode mode)
620 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
625 for (i = 0; i < GET_MODE_SIZE (mode); i++)
628 if (WORDS_BIG_ENDIAN)
629 j = GET_MODE_SIZE (mode) - i - 1;
630 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
631 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
632 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
634 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
637 ch = (unsigned char) str[i];
638 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
640 return immed_double_const (c[0], c[1], mode);
643 /* Cast a target constant CST to target CHAR and if that value fits into
644 host char type, return zero and put that value into variable pointed to by
648 target_char_cast (tree cst, char *p)
650 unsigned HOST_WIDE_INT val, hostval;
652 if (TREE_CODE (cst) != INTEGER_CST
653 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
656 val = TREE_INT_CST_LOW (cst);
657 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
658 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
661 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
662 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
671 /* Similar to save_expr, but assumes that arbitrary code is not executed
672 in between the multiple evaluations. In particular, we assume that a
673 non-addressable local variable will not be modified. */
676 builtin_save_expr (tree exp)
678 if (TREE_CODE (exp) == SSA_NAME
679 || (TREE_ADDRESSABLE (exp) == 0
680 && (TREE_CODE (exp) == PARM_DECL
681 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
684 return save_expr (exp);
687 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
688 times to get the address of either a higher stack frame, or a return
689 address located within it (depending on FNDECL_CODE). */
692 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
696 #ifdef INITIAL_FRAME_ADDRESS_RTX
697 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
701 /* For a zero count with __builtin_return_address, we don't care what
702 frame address we return, because target-specific definitions will
703 override us. Therefore frame pointer elimination is OK, and using
704 the soft frame pointer is OK.
706 For a nonzero count, or a zero count with __builtin_frame_address,
707 we require a stable offset from the current frame pointer to the
708 previous one, so we must use the hard frame pointer, and
709 we must disable frame pointer elimination. */
710 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
711 tem = frame_pointer_rtx;
714 tem = hard_frame_pointer_rtx;
716 /* Tell reload not to eliminate the frame pointer. */
717 crtl->accesses_prior_frames = 1;
721 /* Some machines need special handling before we can access
722 arbitrary frames. For example, on the SPARC, we must first flush
723 all register windows to the stack. */
724 #ifdef SETUP_FRAME_ADDRESSES
726 SETUP_FRAME_ADDRESSES ();
729 /* On the SPARC, the return address is not in the frame, it is in a
730 register. There is no way to access it off of the current frame
731 pointer, but it can be accessed off the previous frame pointer by
732 reading the value from the register window save area. */
733 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
734 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
738 /* Scan back COUNT frames to the specified frame. */
739 for (i = 0; i < count; i++)
741 /* Assume the dynamic chain pointer is in the word that the
742 frame address points to, unless otherwise specified. */
743 #ifdef DYNAMIC_CHAIN_ADDRESS
744 tem = DYNAMIC_CHAIN_ADDRESS (tem);
746 tem = memory_address (Pmode, tem);
747 tem = gen_frame_mem (Pmode, tem);
748 tem = copy_to_reg (tem);
751 /* For __builtin_frame_address, return what we've got. But, on
752 the SPARC for example, we may have to add a bias. */
753 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
754 #ifdef FRAME_ADDR_RTX
755 return FRAME_ADDR_RTX (tem);
760 /* For __builtin_return_address, get the return address from that frame. */
761 #ifdef RETURN_ADDR_RTX
762 tem = RETURN_ADDR_RTX (count, tem);
764 tem = memory_address (Pmode,
765 plus_constant (tem, GET_MODE_SIZE (Pmode)));
766 tem = gen_frame_mem (Pmode, tem);
771 /* Alias set used for setjmp buffer. */
772 static alias_set_type setjmp_alias_set = -1;
774 /* Construct the leading half of a __builtin_setjmp call. Control will
775 return to RECEIVER_LABEL. This is also called directly by the SJLJ
776 exception handling code. */
779 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
781 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
785 if (setjmp_alias_set == -1)
786 setjmp_alias_set = new_alias_set ();
788 buf_addr = convert_memory_address (Pmode, buf_addr);
790 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
792 /* We store the frame pointer and the address of receiver_label in
793 the buffer and use the rest of it for the stack save area, which
794 is machine-dependent. */
796 mem = gen_rtx_MEM (Pmode, buf_addr);
797 set_mem_alias_set (mem, setjmp_alias_set);
798 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
800 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
801 set_mem_alias_set (mem, setjmp_alias_set);
803 emit_move_insn (validize_mem (mem),
804 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
806 stack_save = gen_rtx_MEM (sa_mode,
807 plus_constant (buf_addr,
808 2 * GET_MODE_SIZE (Pmode)));
809 set_mem_alias_set (stack_save, setjmp_alias_set);
810 emit_stack_save (SAVE_NONLOCAL, &stack_save);
812 /* If there is further processing to do, do it. */
813 #ifdef HAVE_builtin_setjmp_setup
814 if (HAVE_builtin_setjmp_setup)
815 emit_insn (gen_builtin_setjmp_setup (buf_addr));
818 /* We have a nonlocal label. */
819 cfun->has_nonlocal_label = 1;
822 /* Construct the trailing part of a __builtin_setjmp call. This is
823 also called directly by the SJLJ exception handling code. */
826 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
830 /* Clobber the FP when we get here, so we have to make sure it's
831 marked as used by this function. */
832 emit_use (hard_frame_pointer_rtx);
834 /* Mark the static chain as clobbered here so life information
835 doesn't get messed up for it. */
836 chain = targetm.calls.static_chain (current_function_decl, true);
837 if (chain && REG_P (chain))
838 emit_clobber (chain);
840 /* Now put in the code to restore the frame pointer, and argument
841 pointer, if needed. */
842 #ifdef HAVE_nonlocal_goto
843 if (! HAVE_nonlocal_goto)
846 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
847 /* This might change the hard frame pointer in ways that aren't
848 apparent to early optimization passes, so force a clobber. */
849 emit_clobber (hard_frame_pointer_rtx);
852 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
853 if (fixed_regs[ARG_POINTER_REGNUM])
855 #ifdef ELIMINABLE_REGS
857 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
859 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
860 if (elim_regs[i].from == ARG_POINTER_REGNUM
861 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
864 if (i == ARRAY_SIZE (elim_regs))
867 /* Now restore our arg pointer from the address at which it
868 was saved in our stack frame. */
869 emit_move_insn (crtl->args.internal_arg_pointer,
870 copy_to_reg (get_arg_pointer_save_area ()));
875 #ifdef HAVE_builtin_setjmp_receiver
876 if (HAVE_builtin_setjmp_receiver)
877 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
880 #ifdef HAVE_nonlocal_goto_receiver
881 if (HAVE_nonlocal_goto_receiver)
882 emit_insn (gen_nonlocal_goto_receiver ());
887 /* We must not allow the code we just generated to be reordered by
888 scheduling. Specifically, the update of the frame pointer must
889 happen immediately, not later. */
890 emit_insn (gen_blockage ());
893 /* __builtin_longjmp is passed a pointer to an array of five words (not
894 all will be used on all machines). It operates similarly to the C
895 library function of the same name, but is more efficient. Much of
896 the code below is copied from the handling of non-local gotos. */
899 expand_builtin_longjmp (rtx buf_addr, rtx value)
901 rtx fp, lab, stack, insn, last;
902 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
904 /* DRAP is needed for stack realign if longjmp is expanded to current
906 if (SUPPORTS_STACK_ALIGNMENT)
907 crtl->need_drap = true;
909 if (setjmp_alias_set == -1)
910 setjmp_alias_set = new_alias_set ();
912 buf_addr = convert_memory_address (Pmode, buf_addr);
914 buf_addr = force_reg (Pmode, buf_addr);
916 /* We require that the user must pass a second argument of 1, because
917 that is what builtin_setjmp will return. */
918 gcc_assert (value == const1_rtx);
920 last = get_last_insn ();
921 #ifdef HAVE_builtin_longjmp
922 if (HAVE_builtin_longjmp)
923 emit_insn (gen_builtin_longjmp (buf_addr));
927 fp = gen_rtx_MEM (Pmode, buf_addr);
928 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
929 GET_MODE_SIZE (Pmode)));
931 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
932 2 * GET_MODE_SIZE (Pmode)));
933 set_mem_alias_set (fp, setjmp_alias_set);
934 set_mem_alias_set (lab, setjmp_alias_set);
935 set_mem_alias_set (stack, setjmp_alias_set);
937 /* Pick up FP, label, and SP from the block and jump. This code is
938 from expand_goto in stmt.c; see there for detailed comments. */
939 #ifdef HAVE_nonlocal_goto
940 if (HAVE_nonlocal_goto)
941 /* We have to pass a value to the nonlocal_goto pattern that will
942 get copied into the static_chain pointer, but it does not matter
943 what that value is, because builtin_setjmp does not use it. */
944 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
948 lab = copy_to_reg (lab);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 emit_move_insn (hard_frame_pointer_rtx, fp);
954 emit_stack_restore (SAVE_NONLOCAL, stack);
956 emit_use (hard_frame_pointer_rtx);
957 emit_use (stack_pointer_rtx);
958 emit_indirect_jump (lab);
962 /* Search backwards and mark the jump insn as a non-local goto.
963 Note that this precludes the use of __builtin_longjmp to a
964 __builtin_setjmp target in the same function. However, we've
965 already cautioned the user that these functions are for
966 internal exception handling use only. */
967 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
969 gcc_assert (insn != last);
973 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
976 else if (CALL_P (insn))
981 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
982 and the address of the save area. */
985 expand_builtin_nonlocal_goto (tree exp)
987 tree t_label, t_save_area;
988 rtx r_label, r_save_area, r_fp, r_sp, insn;
990 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
993 t_label = CALL_EXPR_ARG (exp, 0);
994 t_save_area = CALL_EXPR_ARG (exp, 1);
996 r_label = expand_normal (t_label);
997 r_label = convert_memory_address (Pmode, r_label);
998 r_save_area = expand_normal (t_save_area);
999 r_save_area = convert_memory_address (Pmode, r_save_area);
1000 /* Copy the address of the save location to a register just in case it was
1001 based on the frame pointer. */
1002 r_save_area = copy_to_reg (r_save_area);
1003 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1004 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1005 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
1007 crtl->has_nonlocal_goto = 1;
1009 #ifdef HAVE_nonlocal_goto
1010 /* ??? We no longer need to pass the static chain value, afaik. */
1011 if (HAVE_nonlocal_goto)
1012 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1016 r_label = copy_to_reg (r_label);
1018 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1019 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1021 /* Restore frame pointer for containing function. */
1022 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1023 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1025 /* USE of hard_frame_pointer_rtx added for consistency;
1026 not clear if really needed. */
1027 emit_use (hard_frame_pointer_rtx);
1028 emit_use (stack_pointer_rtx);
1030 /* If the architecture is using a GP register, we must
1031 conservatively assume that the target function makes use of it.
1032 The prologue of functions with nonlocal gotos must therefore
1033 initialize the GP register to the appropriate value, and we
1034 must then make sure that this value is live at the point
1035 of the jump. (Note that this doesn't necessarily apply
1036 to targets with a nonlocal_goto pattern; they are free
1037 to implement it in their own way. Note also that this is
1038 a no-op if the GP register is a global invariant.) */
1039 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1040 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1041 emit_use (pic_offset_table_rtx);
1043 emit_indirect_jump (r_label);
1046 /* Search backwards to the jump insn and mark it as a
1048 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1052 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1055 else if (CALL_P (insn))
1062 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1063 (not all will be used on all machines) that was passed to __builtin_setjmp.
1064 It updates the stack pointer in that block to correspond to the current
1068 expand_builtin_update_setjmp_buf (rtx buf_addr)
1070 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1072 = gen_rtx_MEM (sa_mode,
1075 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1077 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1080 /* Expand a call to __builtin_prefetch. For a target that does not support
1081 data prefetch, evaluate the memory address argument in case it has side
1085 expand_builtin_prefetch (tree exp)
1087 tree arg0, arg1, arg2;
1091 if (!validate_arglist (exp, POINTER_TYPE, 0))
1094 arg0 = CALL_EXPR_ARG (exp, 0);
1096 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1097 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1099 nargs = call_expr_nargs (exp);
1101 arg1 = CALL_EXPR_ARG (exp, 1);
1103 arg1 = integer_zero_node;
1105 arg2 = CALL_EXPR_ARG (exp, 2);
1107 arg2 = integer_three_node;
1109 /* Argument 0 is an address. */
1110 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1112 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1113 if (TREE_CODE (arg1) != INTEGER_CST)
1115 error ("second argument to %<__builtin_prefetch%> must be a constant");
1116 arg1 = integer_zero_node;
1118 op1 = expand_normal (arg1);
1119 /* Argument 1 must be either zero or one. */
1120 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1122 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1127 /* Argument 2 (locality) must be a compile-time constant int. */
1128 if (TREE_CODE (arg2) != INTEGER_CST)
1130 error ("third argument to %<__builtin_prefetch%> must be a constant");
1131 arg2 = integer_zero_node;
1133 op2 = expand_normal (arg2);
1134 /* Argument 2 must be 0, 1, 2, or 3. */
1135 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1137 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1141 #ifdef HAVE_prefetch
1144 struct expand_operand ops[3];
1146 create_address_operand (&ops[0], op0);
1147 create_integer_operand (&ops[1], INTVAL (op1));
1148 create_integer_operand (&ops[2], INTVAL (op2));
1149 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1154 /* Don't do anything with direct references to volatile memory, but
1155 generate code to handle other side effects. */
1156 if (!MEM_P (op0) && side_effects_p (op0))
1160 /* Get a MEM rtx for expression EXP which is the address of an operand
1161 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1162 the maximum length of the block of memory that might be accessed or
1166 get_memory_rtx (tree exp, tree len)
1168 tree orig_exp = exp;
1172 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1173 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1174 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1175 exp = TREE_OPERAND (exp, 0);
1177 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1178 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1180 /* Get an expression we can use to find the attributes to assign to MEM.
1181 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1182 we can. First remove any nops. */
1183 while (CONVERT_EXPR_P (exp)
1184 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1185 exp = TREE_OPERAND (exp, 0);
1188 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1189 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1190 && host_integerp (TREE_OPERAND (exp, 1), 0)
1191 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1192 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1193 else if (TREE_CODE (exp) == ADDR_EXPR)
1194 exp = TREE_OPERAND (exp, 0);
1195 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1196 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1200 /* Honor attributes derived from exp, except for the alias set
1201 (as builtin stringops may alias with anything) and the size
1202 (as stringops may access multiple array elements). */
1205 set_mem_attributes (mem, exp, 0);
1208 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1210 /* Allow the string and memory builtins to overflow from one
1211 field into another, see http://gcc.gnu.org/PR23561.
1212 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1213 memory accessed by the string or memory builtin will fit
1214 within the field. */
1215 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1217 tree mem_expr = MEM_EXPR (mem);
1218 HOST_WIDE_INT offset = -1, length = -1;
1221 while (TREE_CODE (inner) == ARRAY_REF
1222 || CONVERT_EXPR_P (inner)
1223 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1224 || TREE_CODE (inner) == SAVE_EXPR)
1225 inner = TREE_OPERAND (inner, 0);
1227 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1229 if (MEM_OFFSET_KNOWN_P (mem))
1230 offset = MEM_OFFSET (mem);
1232 if (offset >= 0 && len && host_integerp (len, 0))
1233 length = tree_low_cst (len, 0);
1235 while (TREE_CODE (inner) == COMPONENT_REF)
1237 tree field = TREE_OPERAND (inner, 1);
1238 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1239 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1241 /* Bitfields are generally not byte-addressable. */
1242 gcc_assert (!DECL_BIT_FIELD (field)
1243 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1244 % BITS_PER_UNIT) == 0
1245 && host_integerp (DECL_SIZE (field), 0)
1246 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1247 % BITS_PER_UNIT) == 0));
1249 /* If we can prove that the memory starting at XEXP (mem, 0) and
1250 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1251 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1252 fields without DECL_SIZE_UNIT like flexible array members. */
1254 && DECL_SIZE_UNIT (field)
1255 && host_integerp (DECL_SIZE_UNIT (field), 0))
1258 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1261 && offset + length <= size)
1266 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1267 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1268 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1276 mem_expr = TREE_OPERAND (mem_expr, 0);
1277 inner = TREE_OPERAND (inner, 0);
1280 if (mem_expr == NULL)
1282 if (mem_expr != MEM_EXPR (mem))
1284 set_mem_expr (mem, mem_expr);
1286 set_mem_offset (mem, offset);
1288 clear_mem_offset (mem);
1291 set_mem_alias_set (mem, 0);
1292 clear_mem_size (mem);
1298 /* Built-in functions to perform an untyped call and return. */
1300 #define apply_args_mode \
1301 (this_target_builtins->x_apply_args_mode)
1302 #define apply_result_mode \
1303 (this_target_builtins->x_apply_result_mode)
1305 /* Return the size required for the block returned by __builtin_apply_args,
1306 and initialize apply_args_mode. */
1309 apply_args_size (void)
1311 static int size = -1;
1314 enum machine_mode mode;
1316 /* The values computed by this function never change. */
1319 /* The first value is the incoming arg-pointer. */
1320 size = GET_MODE_SIZE (Pmode);
1322 /* The second value is the structure value address unless this is
1323 passed as an "invisible" first argument. */
1324 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1325 size += GET_MODE_SIZE (Pmode);
1327 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1328 if (FUNCTION_ARG_REGNO_P (regno))
1330 mode = targetm.calls.get_raw_arg_mode (regno);
1332 gcc_assert (mode != VOIDmode);
1334 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1335 if (size % align != 0)
1336 size = CEIL (size, align) * align;
1337 size += GET_MODE_SIZE (mode);
1338 apply_args_mode[regno] = mode;
1342 apply_args_mode[regno] = VOIDmode;
1348 /* Return the size required for the block returned by __builtin_apply,
1349 and initialize apply_result_mode. */
1352 apply_result_size (void)
1354 static int size = -1;
1356 enum machine_mode mode;
1358 /* The values computed by this function never change. */
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if (targetm.calls.function_value_regno_p (regno))
1366 mode = targetm.calls.get_raw_result_mode (regno);
1368 gcc_assert (mode != VOIDmode);
1370 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1371 if (size % align != 0)
1372 size = CEIL (size, align) * align;
1373 size += GET_MODE_SIZE (mode);
1374 apply_result_mode[regno] = mode;
1377 apply_result_mode[regno] = VOIDmode;
1379 /* Allow targets that use untyped_call and untyped_return to override
1380 the size so that machine-specific information can be stored here. */
1381 #ifdef APPLY_RESULT_SIZE
1382 size = APPLY_RESULT_SIZE;
1388 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1389 /* Create a vector describing the result block RESULT. If SAVEP is true,
1390 the result block is used to save the values; otherwise it is used to
1391 restore the values. */
1394 result_vector (int savep, rtx result)
1396 int regno, size, align, nelts;
1397 enum machine_mode mode;
1399 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1402 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1403 if ((mode = apply_result_mode[regno]) != VOIDmode)
1405 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1406 if (size % align != 0)
1407 size = CEIL (size, align) * align;
1408 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1409 mem = adjust_address (result, mode, size);
1410 savevec[nelts++] = (savep
1411 ? gen_rtx_SET (VOIDmode, mem, reg)
1412 : gen_rtx_SET (VOIDmode, reg, mem));
1413 size += GET_MODE_SIZE (mode);
1415 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1417 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1419 /* Save the state required to perform an untyped call with the same
1420 arguments as were passed to the current function. */
1423 expand_builtin_apply_args_1 (void)
1426 int size, align, regno;
1427 enum machine_mode mode;
1428 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1430 /* Create a block where the arg-pointer, structure value address,
1431 and argument registers can be saved. */
1432 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1434 /* Walk past the arg-pointer and structure value address. */
1435 size = GET_MODE_SIZE (Pmode);
1436 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1437 size += GET_MODE_SIZE (Pmode);
1439 /* Save each register used in calling a function to the block. */
1440 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1441 if ((mode = apply_args_mode[regno]) != VOIDmode)
1443 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1444 if (size % align != 0)
1445 size = CEIL (size, align) * align;
1447 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1449 emit_move_insn (adjust_address (registers, mode, size), tem);
1450 size += GET_MODE_SIZE (mode);
1453 /* Save the arg pointer to the block. */
1454 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1455 #ifdef STACK_GROWS_DOWNWARD
1456 /* We need the pointer as the caller actually passed them to us, not
1457 as we might have pretended they were passed. Make sure it's a valid
1458 operand, as emit_move_insn isn't expected to handle a PLUS. */
1460 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1463 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1465 size = GET_MODE_SIZE (Pmode);
1467 /* Save the structure value address unless this is passed as an
1468 "invisible" first argument. */
1469 if (struct_incoming_value)
1471 emit_move_insn (adjust_address (registers, Pmode, size),
1472 copy_to_reg (struct_incoming_value));
1473 size += GET_MODE_SIZE (Pmode);
1476 /* Return the address of the block. */
1477 return copy_addr_to_reg (XEXP (registers, 0));
1480 /* __builtin_apply_args returns block of memory allocated on
1481 the stack into which is stored the arg pointer, structure
1482 value address, static chain, and all the registers that might
1483 possibly be used in performing a function call. The code is
1484 moved to the start of the function so the incoming values are
1488 expand_builtin_apply_args (void)
1490 /* Don't do __builtin_apply_args more than once in a function.
1491 Save the result of the first call and reuse it. */
1492 if (apply_args_value != 0)
1493 return apply_args_value;
1495 /* When this function is called, it means that registers must be
1496 saved on entry to this function. So we migrate the
1497 call to the first insn of this function. */
1502 temp = expand_builtin_apply_args_1 ();
1506 apply_args_value = temp;
1508 /* Put the insns after the NOTE that starts the function.
1509 If this is inside a start_sequence, make the outer-level insn
1510 chain current, so the code is placed at the start of the
1511 function. If internal_arg_pointer is a non-virtual pseudo,
1512 it needs to be placed after the function that initializes
1514 push_topmost_sequence ();
1515 if (REG_P (crtl->args.internal_arg_pointer)
1516 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1517 emit_insn_before (seq, parm_birth_insn);
1519 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1520 pop_topmost_sequence ();
1525 /* Perform an untyped call and save the state required to perform an
1526 untyped return of whatever value was returned by the given function. */
1529 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1531 int size, align, regno;
1532 enum machine_mode mode;
1533 rtx incoming_args, result, reg, dest, src, call_insn;
1534 rtx old_stack_level = 0;
1535 rtx call_fusage = 0;
1536 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1538 arguments = convert_memory_address (Pmode, arguments);
1540 /* Create a block where the return registers can be saved. */
1541 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1543 /* Fetch the arg pointer from the ARGUMENTS block. */
1544 incoming_args = gen_reg_rtx (Pmode);
1545 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1546 #ifndef STACK_GROWS_DOWNWARD
1547 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1548 incoming_args, 0, OPTAB_LIB_WIDEN);
1551 /* Push a new argument block and copy the arguments. Do not allow
1552 the (potential) memcpy call below to interfere with our stack
1554 do_pending_stack_adjust ();
1557 /* Save the stack with nonlocal if available. */
1558 #ifdef HAVE_save_stack_nonlocal
1559 if (HAVE_save_stack_nonlocal)
1560 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1563 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1565 /* Allocate a block of memory onto the stack and copy the memory
1566 arguments to the outgoing arguments address. We can pass TRUE
1567 as the 4th argument because we just saved the stack pointer
1568 and will restore it right after the call. */
1569 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1571 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1572 may have already set current_function_calls_alloca to true.
1573 current_function_calls_alloca won't be set if argsize is zero,
1574 so we have to guarantee need_drap is true here. */
1575 if (SUPPORTS_STACK_ALIGNMENT)
1576 crtl->need_drap = true;
1578 dest = virtual_outgoing_args_rtx;
1579 #ifndef STACK_GROWS_DOWNWARD
1580 if (CONST_INT_P (argsize))
1581 dest = plus_constant (dest, -INTVAL (argsize));
1583 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1585 dest = gen_rtx_MEM (BLKmode, dest);
1586 set_mem_align (dest, PARM_BOUNDARY);
1587 src = gen_rtx_MEM (BLKmode, incoming_args);
1588 set_mem_align (src, PARM_BOUNDARY);
1589 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1591 /* Refer to the argument block. */
1593 arguments = gen_rtx_MEM (BLKmode, arguments);
1594 set_mem_align (arguments, PARM_BOUNDARY);
1596 /* Walk past the arg-pointer and structure value address. */
1597 size = GET_MODE_SIZE (Pmode);
1599 size += GET_MODE_SIZE (Pmode);
1601 /* Restore each of the registers previously saved. Make USE insns
1602 for each of these registers for use in making the call. */
1603 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1604 if ((mode = apply_args_mode[regno]) != VOIDmode)
1606 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1607 if (size % align != 0)
1608 size = CEIL (size, align) * align;
1609 reg = gen_rtx_REG (mode, regno);
1610 emit_move_insn (reg, adjust_address (arguments, mode, size));
1611 use_reg (&call_fusage, reg);
1612 size += GET_MODE_SIZE (mode);
1615 /* Restore the structure value address unless this is passed as an
1616 "invisible" first argument. */
1617 size = GET_MODE_SIZE (Pmode);
1620 rtx value = gen_reg_rtx (Pmode);
1621 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1622 emit_move_insn (struct_value, value);
1623 if (REG_P (struct_value))
1624 use_reg (&call_fusage, struct_value);
1625 size += GET_MODE_SIZE (Pmode);
1628 /* All arguments and registers used for the call are set up by now! */
1629 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1631 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1632 and we don't want to load it into a register as an optimization,
1633 because prepare_call_address already did it if it should be done. */
1634 if (GET_CODE (function) != SYMBOL_REF)
1635 function = memory_address (FUNCTION_MODE, function);
1637 /* Generate the actual call instruction and save the return value. */
1638 #ifdef HAVE_untyped_call
1639 if (HAVE_untyped_call)
1640 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1641 result, result_vector (1, result)));
1644 #ifdef HAVE_call_value
1645 if (HAVE_call_value)
1649 /* Locate the unique return register. It is not possible to
1650 express a call that sets more than one return register using
1651 call_value; use untyped_call for that. In fact, untyped_call
1652 only needs to save the return registers in the given block. */
1653 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1654 if ((mode = apply_result_mode[regno]) != VOIDmode)
1656 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1658 valreg = gen_rtx_REG (mode, regno);
1661 emit_call_insn (GEN_CALL_VALUE (valreg,
1662 gen_rtx_MEM (FUNCTION_MODE, function),
1663 const0_rtx, NULL_RTX, const0_rtx));
1665 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1671 /* Find the CALL insn we just emitted, and attach the register usage
1673 call_insn = last_call_insn ();
1674 add_function_usage_to (call_insn, call_fusage);
1676 /* Restore the stack. */
1677 #ifdef HAVE_save_stack_nonlocal
1678 if (HAVE_save_stack_nonlocal)
1679 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1682 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1683 fixup_args_size_notes (call_insn, get_last_insn(), 0);
1687 /* Return the address of the result block. */
1688 result = copy_addr_to_reg (XEXP (result, 0));
1689 return convert_memory_address (ptr_mode, result);
1692 /* Perform an untyped return. */
1695 expand_builtin_return (rtx result)
1697 int size, align, regno;
1698 enum machine_mode mode;
1700 rtx call_fusage = 0;
1702 result = convert_memory_address (Pmode, result);
1704 apply_result_size ();
1705 result = gen_rtx_MEM (BLKmode, result);
1707 #ifdef HAVE_untyped_return
1708 if (HAVE_untyped_return)
1710 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1716 /* Restore the return value and note that each value is used. */
1718 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1719 if ((mode = apply_result_mode[regno]) != VOIDmode)
1721 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1722 if (size % align != 0)
1723 size = CEIL (size, align) * align;
1724 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1725 emit_move_insn (reg, adjust_address (result, mode, size));
1727 push_to_sequence (call_fusage);
1729 call_fusage = get_insns ();
1731 size += GET_MODE_SIZE (mode);
1734 /* Put the USE insns before the return. */
1735 emit_insn (call_fusage);
1737 /* Return whatever values was restored by jumping directly to the end
1739 expand_naked_return ();
1742 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1744 static enum type_class
1745 type_to_class (tree type)
1747 switch (TREE_CODE (type))
1749 case VOID_TYPE: return void_type_class;
1750 case INTEGER_TYPE: return integer_type_class;
1751 case ENUMERAL_TYPE: return enumeral_type_class;
1752 case BOOLEAN_TYPE: return boolean_type_class;
1753 case POINTER_TYPE: return pointer_type_class;
1754 case REFERENCE_TYPE: return reference_type_class;
1755 case OFFSET_TYPE: return offset_type_class;
1756 case REAL_TYPE: return real_type_class;
1757 case COMPLEX_TYPE: return complex_type_class;
1758 case FUNCTION_TYPE: return function_type_class;
1759 case METHOD_TYPE: return method_type_class;
1760 case RECORD_TYPE: return record_type_class;
1762 case QUAL_UNION_TYPE: return union_type_class;
1763 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1764 ? string_type_class : array_type_class);
1765 case LANG_TYPE: return lang_type_class;
1766 default: return no_type_class;
1770 /* Expand a call EXP to __builtin_classify_type. */
1773 expand_builtin_classify_type (tree exp)
1775 if (call_expr_nargs (exp))
1776 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1777 return GEN_INT (no_type_class);
1780 /* This helper macro, meant to be used in mathfn_built_in below,
1781 determines which among a set of three builtin math functions is
1782 appropriate for a given type mode. The `F' and `L' cases are
1783 automatically generated from the `double' case. */
1784 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1785 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1786 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1787 fcodel = BUILT_IN_MATHFN##L ; break;
1788 /* Similar to above, but appends _R after any F/L suffix. */
1789 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1790 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1791 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1792 fcodel = BUILT_IN_MATHFN##L_R ; break;
1794 /* Return mathematic function equivalent to FN but operating directly
1795 on TYPE, if available. If IMPLICIT is true find the function in
1796 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1797 can't do the conversion, return zero. */
1800 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1802 tree const *const fn_arr
1803 = implicit ? implicit_built_in_decls : built_in_decls;
1804 enum built_in_function fcode, fcodef, fcodel;
1808 CASE_MATHFN (BUILT_IN_ACOS)
1809 CASE_MATHFN (BUILT_IN_ACOSH)
1810 CASE_MATHFN (BUILT_IN_ASIN)
1811 CASE_MATHFN (BUILT_IN_ASINH)
1812 CASE_MATHFN (BUILT_IN_ATAN)
1813 CASE_MATHFN (BUILT_IN_ATAN2)
1814 CASE_MATHFN (BUILT_IN_ATANH)
1815 CASE_MATHFN (BUILT_IN_CBRT)
1816 CASE_MATHFN (BUILT_IN_CEIL)
1817 CASE_MATHFN (BUILT_IN_CEXPI)
1818 CASE_MATHFN (BUILT_IN_COPYSIGN)
1819 CASE_MATHFN (BUILT_IN_COS)
1820 CASE_MATHFN (BUILT_IN_COSH)
1821 CASE_MATHFN (BUILT_IN_DREM)
1822 CASE_MATHFN (BUILT_IN_ERF)
1823 CASE_MATHFN (BUILT_IN_ERFC)
1824 CASE_MATHFN (BUILT_IN_EXP)
1825 CASE_MATHFN (BUILT_IN_EXP10)
1826 CASE_MATHFN (BUILT_IN_EXP2)
1827 CASE_MATHFN (BUILT_IN_EXPM1)
1828 CASE_MATHFN (BUILT_IN_FABS)
1829 CASE_MATHFN (BUILT_IN_FDIM)
1830 CASE_MATHFN (BUILT_IN_FLOOR)
1831 CASE_MATHFN (BUILT_IN_FMA)
1832 CASE_MATHFN (BUILT_IN_FMAX)
1833 CASE_MATHFN (BUILT_IN_FMIN)
1834 CASE_MATHFN (BUILT_IN_FMOD)
1835 CASE_MATHFN (BUILT_IN_FREXP)
1836 CASE_MATHFN (BUILT_IN_GAMMA)
1837 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1838 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1839 CASE_MATHFN (BUILT_IN_HYPOT)
1840 CASE_MATHFN (BUILT_IN_ILOGB)
1841 CASE_MATHFN (BUILT_IN_ICEIL)
1842 CASE_MATHFN (BUILT_IN_IFLOOR)
1843 CASE_MATHFN (BUILT_IN_INF)
1844 CASE_MATHFN (BUILT_IN_IRINT)
1845 CASE_MATHFN (BUILT_IN_IROUND)
1846 CASE_MATHFN (BUILT_IN_ISINF)
1847 CASE_MATHFN (BUILT_IN_J0)
1848 CASE_MATHFN (BUILT_IN_J1)
1849 CASE_MATHFN (BUILT_IN_JN)
1850 CASE_MATHFN (BUILT_IN_LCEIL)
1851 CASE_MATHFN (BUILT_IN_LDEXP)
1852 CASE_MATHFN (BUILT_IN_LFLOOR)
1853 CASE_MATHFN (BUILT_IN_LGAMMA)
1854 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1855 CASE_MATHFN (BUILT_IN_LLCEIL)
1856 CASE_MATHFN (BUILT_IN_LLFLOOR)
1857 CASE_MATHFN (BUILT_IN_LLRINT)
1858 CASE_MATHFN (BUILT_IN_LLROUND)
1859 CASE_MATHFN (BUILT_IN_LOG)
1860 CASE_MATHFN (BUILT_IN_LOG10)
1861 CASE_MATHFN (BUILT_IN_LOG1P)
1862 CASE_MATHFN (BUILT_IN_LOG2)
1863 CASE_MATHFN (BUILT_IN_LOGB)
1864 CASE_MATHFN (BUILT_IN_LRINT)
1865 CASE_MATHFN (BUILT_IN_LROUND)
1866 CASE_MATHFN (BUILT_IN_MODF)
1867 CASE_MATHFN (BUILT_IN_NAN)
1868 CASE_MATHFN (BUILT_IN_NANS)
1869 CASE_MATHFN (BUILT_IN_NEARBYINT)
1870 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1871 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1872 CASE_MATHFN (BUILT_IN_POW)
1873 CASE_MATHFN (BUILT_IN_POWI)
1874 CASE_MATHFN (BUILT_IN_POW10)
1875 CASE_MATHFN (BUILT_IN_REMAINDER)
1876 CASE_MATHFN (BUILT_IN_REMQUO)
1877 CASE_MATHFN (BUILT_IN_RINT)
1878 CASE_MATHFN (BUILT_IN_ROUND)
1879 CASE_MATHFN (BUILT_IN_SCALB)
1880 CASE_MATHFN (BUILT_IN_SCALBLN)
1881 CASE_MATHFN (BUILT_IN_SCALBN)
1882 CASE_MATHFN (BUILT_IN_SIGNBIT)
1883 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1884 CASE_MATHFN (BUILT_IN_SIN)
1885 CASE_MATHFN (BUILT_IN_SINCOS)
1886 CASE_MATHFN (BUILT_IN_SINH)
1887 CASE_MATHFN (BUILT_IN_SQRT)
1888 CASE_MATHFN (BUILT_IN_TAN)
1889 CASE_MATHFN (BUILT_IN_TANH)
1890 CASE_MATHFN (BUILT_IN_TGAMMA)
1891 CASE_MATHFN (BUILT_IN_TRUNC)
1892 CASE_MATHFN (BUILT_IN_Y0)
1893 CASE_MATHFN (BUILT_IN_Y1)
1894 CASE_MATHFN (BUILT_IN_YN)
1900 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1901 return fn_arr[fcode];
1902 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1903 return fn_arr[fcodef];
1904 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1905 return fn_arr[fcodel];
1910 /* Like mathfn_built_in_1(), but always use the implicit array. */
1913 mathfn_built_in (tree type, enum built_in_function fn)
1915 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1918 /* If errno must be maintained, expand the RTL to check if the result,
1919 TARGET, of a built-in function call, EXP, is NaN, and if so set
1923 expand_errno_check (tree exp, rtx target)
1925 rtx lab = gen_label_rtx ();
1927 /* Test the result; if it is NaN, set errno=EDOM because
1928 the argument was not in the domain. */
1929 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1930 NULL_RTX, NULL_RTX, lab,
1931 /* The jump is very likely. */
1932 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1935 /* If this built-in doesn't throw an exception, set errno directly. */
1936 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1938 #ifdef GEN_ERRNO_RTX
1939 rtx errno_rtx = GEN_ERRNO_RTX;
1942 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1944 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1950 /* Make sure the library call isn't expanded as a tail call. */
1951 CALL_EXPR_TAILCALL (exp) = 0;
1953 /* We can't set errno=EDOM directly; let the library call do it.
1954 Pop the arguments right away in case the call gets deleted. */
1956 expand_call (exp, target, 0);
1961 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1962 Return NULL_RTX if a normal call should be emitted rather than expanding
1963 the function in-line. EXP is the expression that is a call to the builtin
1964 function; if convenient, the result should be placed in TARGET.
1965 SUBTARGET may be used as the target for computing one of EXP's operands. */
1968 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1970 optab builtin_optab;
1972 tree fndecl = get_callee_fndecl (exp);
1973 enum machine_mode mode;
1974 bool errno_set = false;
1977 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1980 arg = CALL_EXPR_ARG (exp, 0);
1982 switch (DECL_FUNCTION_CODE (fndecl))
1984 CASE_FLT_FN (BUILT_IN_SQRT):
1985 errno_set = ! tree_expr_nonnegative_p (arg);
1986 builtin_optab = sqrt_optab;
1988 CASE_FLT_FN (BUILT_IN_EXP):
1989 errno_set = true; builtin_optab = exp_optab; break;
1990 CASE_FLT_FN (BUILT_IN_EXP10):
1991 CASE_FLT_FN (BUILT_IN_POW10):
1992 errno_set = true; builtin_optab = exp10_optab; break;
1993 CASE_FLT_FN (BUILT_IN_EXP2):
1994 errno_set = true; builtin_optab = exp2_optab; break;
1995 CASE_FLT_FN (BUILT_IN_EXPM1):
1996 errno_set = true; builtin_optab = expm1_optab; break;
1997 CASE_FLT_FN (BUILT_IN_LOGB):
1998 errno_set = true; builtin_optab = logb_optab; break;
1999 CASE_FLT_FN (BUILT_IN_LOG):
2000 errno_set = true; builtin_optab = log_optab; break;
2001 CASE_FLT_FN (BUILT_IN_LOG10):
2002 errno_set = true; builtin_optab = log10_optab; break;
2003 CASE_FLT_FN (BUILT_IN_LOG2):
2004 errno_set = true; builtin_optab = log2_optab; break;
2005 CASE_FLT_FN (BUILT_IN_LOG1P):
2006 errno_set = true; builtin_optab = log1p_optab; break;
2007 CASE_FLT_FN (BUILT_IN_ASIN):
2008 builtin_optab = asin_optab; break;
2009 CASE_FLT_FN (BUILT_IN_ACOS):
2010 builtin_optab = acos_optab; break;
2011 CASE_FLT_FN (BUILT_IN_TAN):
2012 builtin_optab = tan_optab; break;
2013 CASE_FLT_FN (BUILT_IN_ATAN):
2014 builtin_optab = atan_optab; break;
2015 CASE_FLT_FN (BUILT_IN_FLOOR):
2016 builtin_optab = floor_optab; break;
2017 CASE_FLT_FN (BUILT_IN_CEIL):
2018 builtin_optab = ceil_optab; break;
2019 CASE_FLT_FN (BUILT_IN_TRUNC):
2020 builtin_optab = btrunc_optab; break;
2021 CASE_FLT_FN (BUILT_IN_ROUND):
2022 builtin_optab = round_optab; break;
2023 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2024 builtin_optab = nearbyint_optab;
2025 if (flag_trapping_math)
2027 /* Else fallthrough and expand as rint. */
2028 CASE_FLT_FN (BUILT_IN_RINT):
2029 builtin_optab = rint_optab; break;
2030 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2031 builtin_optab = significand_optab; break;
2036 /* Make a suitable register to place result in. */
2037 mode = TYPE_MODE (TREE_TYPE (exp));
2039 if (! flag_errno_math || ! HONOR_NANS (mode))
2042 /* Before working hard, check whether the instruction is available. */
2043 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2044 && (!errno_set || !optimize_insn_for_size_p ()))
2046 target = gen_reg_rtx (mode);
2048 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2049 need to expand the argument again. This way, we will not perform
2050 side-effects more the once. */
2051 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2053 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2057 /* Compute into TARGET.
2058 Set TARGET to wherever the result comes back. */
2059 target = expand_unop (mode, builtin_optab, op0, target, 0);
2064 expand_errno_check (exp, target);
2066 /* Output the entire sequence. */
2067 insns = get_insns ();
2073 /* If we were unable to expand via the builtin, stop the sequence
2074 (without outputting the insns) and call to the library function
2075 with the stabilized argument list. */
2079 return expand_call (exp, target, target == const0_rtx);
2082 /* Expand a call to the builtin binary math functions (pow and atan2).
2083 Return NULL_RTX if a normal call should be emitted rather than expanding the
2084 function in-line. EXP is the expression that is a call to the builtin
2085 function; if convenient, the result should be placed in TARGET.
2086 SUBTARGET may be used as the target for computing one of EXP's
2090 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2092 optab builtin_optab;
2093 rtx op0, op1, insns;
2094 int op1_type = REAL_TYPE;
2095 tree fndecl = get_callee_fndecl (exp);
2097 enum machine_mode mode;
2098 bool errno_set = true;
2100 switch (DECL_FUNCTION_CODE (fndecl))
2102 CASE_FLT_FN (BUILT_IN_SCALBN):
2103 CASE_FLT_FN (BUILT_IN_SCALBLN):
2104 CASE_FLT_FN (BUILT_IN_LDEXP):
2105 op1_type = INTEGER_TYPE;
2110 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2113 arg0 = CALL_EXPR_ARG (exp, 0);
2114 arg1 = CALL_EXPR_ARG (exp, 1);
2116 switch (DECL_FUNCTION_CODE (fndecl))
2118 CASE_FLT_FN (BUILT_IN_POW):
2119 builtin_optab = pow_optab; break;
2120 CASE_FLT_FN (BUILT_IN_ATAN2):
2121 builtin_optab = atan2_optab; break;
2122 CASE_FLT_FN (BUILT_IN_SCALB):
2123 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2125 builtin_optab = scalb_optab; break;
2126 CASE_FLT_FN (BUILT_IN_SCALBN):
2127 CASE_FLT_FN (BUILT_IN_SCALBLN):
2128 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2130 /* Fall through... */
2131 CASE_FLT_FN (BUILT_IN_LDEXP):
2132 builtin_optab = ldexp_optab; break;
2133 CASE_FLT_FN (BUILT_IN_FMOD):
2134 builtin_optab = fmod_optab; break;
2135 CASE_FLT_FN (BUILT_IN_REMAINDER):
2136 CASE_FLT_FN (BUILT_IN_DREM):
2137 builtin_optab = remainder_optab; break;
2142 /* Make a suitable register to place result in. */
2143 mode = TYPE_MODE (TREE_TYPE (exp));
2145 /* Before working hard, check whether the instruction is available. */
2146 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2149 target = gen_reg_rtx (mode);
2151 if (! flag_errno_math || ! HONOR_NANS (mode))
2154 if (errno_set && optimize_insn_for_size_p ())
2157 /* Always stabilize the argument list. */
2158 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2159 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2161 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2162 op1 = expand_normal (arg1);
2166 /* Compute into TARGET.
2167 Set TARGET to wherever the result comes back. */
2168 target = expand_binop (mode, builtin_optab, op0, op1,
2169 target, 0, OPTAB_DIRECT);
2171 /* If we were unable to expand via the builtin, stop the sequence
2172 (without outputting the insns) and call to the library function
2173 with the stabilized argument list. */
2177 return expand_call (exp, target, target == const0_rtx);
2181 expand_errno_check (exp, target);
2183 /* Output the entire sequence. */
2184 insns = get_insns ();
2191 /* Expand a call to the builtin trinary math functions (fma).
2192 Return NULL_RTX if a normal call should be emitted rather than expanding the
2193 function in-line. EXP is the expression that is a call to the builtin
2194 function; if convenient, the result should be placed in TARGET.
2195 SUBTARGET may be used as the target for computing one of EXP's
2199 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2201 optab builtin_optab;
2202 rtx op0, op1, op2, insns;
2203 tree fndecl = get_callee_fndecl (exp);
2204 tree arg0, arg1, arg2;
2205 enum machine_mode mode;
2207 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2210 arg0 = CALL_EXPR_ARG (exp, 0);
2211 arg1 = CALL_EXPR_ARG (exp, 1);
2212 arg2 = CALL_EXPR_ARG (exp, 2);
2214 switch (DECL_FUNCTION_CODE (fndecl))
2216 CASE_FLT_FN (BUILT_IN_FMA):
2217 builtin_optab = fma_optab; break;
2222 /* Make a suitable register to place result in. */
2223 mode = TYPE_MODE (TREE_TYPE (exp));
2225 /* Before working hard, check whether the instruction is available. */
2226 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2229 target = gen_reg_rtx (mode);
2231 /* Always stabilize the argument list. */
2232 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2233 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2234 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2236 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2237 op1 = expand_normal (arg1);
2238 op2 = expand_normal (arg2);
2242 /* Compute into TARGET.
2243 Set TARGET to wherever the result comes back. */
2244 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2247 /* If we were unable to expand via the builtin, stop the sequence
2248 (without outputting the insns) and call to the library function
2249 with the stabilized argument list. */
2253 return expand_call (exp, target, target == const0_rtx);
2256 /* Output the entire sequence. */
2257 insns = get_insns ();
2264 /* Expand a call to the builtin sin and cos math functions.
2265 Return NULL_RTX if a normal call should be emitted rather than expanding the
2266 function in-line. EXP is the expression that is a call to the builtin
2267 function; if convenient, the result should be placed in TARGET.
2268 SUBTARGET may be used as the target for computing one of EXP's
2272 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2274 optab builtin_optab;
2276 tree fndecl = get_callee_fndecl (exp);
2277 enum machine_mode mode;
2280 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2283 arg = CALL_EXPR_ARG (exp, 0);
2285 switch (DECL_FUNCTION_CODE (fndecl))
2287 CASE_FLT_FN (BUILT_IN_SIN):
2288 CASE_FLT_FN (BUILT_IN_COS):
2289 builtin_optab = sincos_optab; break;
2294 /* Make a suitable register to place result in. */
2295 mode = TYPE_MODE (TREE_TYPE (exp));
2297 /* Check if sincos insn is available, otherwise fallback
2298 to sin or cos insn. */
2299 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2300 switch (DECL_FUNCTION_CODE (fndecl))
2302 CASE_FLT_FN (BUILT_IN_SIN):
2303 builtin_optab = sin_optab; break;
2304 CASE_FLT_FN (BUILT_IN_COS):
2305 builtin_optab = cos_optab; break;
2310 /* Before working hard, check whether the instruction is available. */
2311 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2313 target = gen_reg_rtx (mode);
2315 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2316 need to expand the argument again. This way, we will not perform
2317 side-effects more the once. */
2318 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2320 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2324 /* Compute into TARGET.
2325 Set TARGET to wherever the result comes back. */
2326 if (builtin_optab == sincos_optab)
2330 switch (DECL_FUNCTION_CODE (fndecl))
2332 CASE_FLT_FN (BUILT_IN_SIN):
2333 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2335 CASE_FLT_FN (BUILT_IN_COS):
2336 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2341 gcc_assert (result);
2345 target = expand_unop (mode, builtin_optab, op0, target, 0);
2350 /* Output the entire sequence. */
2351 insns = get_insns ();
2357 /* If we were unable to expand via the builtin, stop the sequence
2358 (without outputting the insns) and call to the library function
2359 with the stabilized argument list. */
2363 target = expand_call (exp, target, target == const0_rtx);
2368 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2369 return an RTL instruction code that implements the functionality.
2370 If that isn't possible or available return CODE_FOR_nothing. */
2372 static enum insn_code
2373 interclass_mathfn_icode (tree arg, tree fndecl)
2375 bool errno_set = false;
2376 optab builtin_optab = 0;
2377 enum machine_mode mode;
2379 switch (DECL_FUNCTION_CODE (fndecl))
2381 CASE_FLT_FN (BUILT_IN_ILOGB):
2382 errno_set = true; builtin_optab = ilogb_optab; break;
2383 CASE_FLT_FN (BUILT_IN_ISINF):
2384 builtin_optab = isinf_optab; break;
2385 case BUILT_IN_ISNORMAL:
2386 case BUILT_IN_ISFINITE:
2387 CASE_FLT_FN (BUILT_IN_FINITE):
2388 case BUILT_IN_FINITED32:
2389 case BUILT_IN_FINITED64:
2390 case BUILT_IN_FINITED128:
2391 case BUILT_IN_ISINFD32:
2392 case BUILT_IN_ISINFD64:
2393 case BUILT_IN_ISINFD128:
2394 /* These builtins have no optabs (yet). */
2400 /* There's no easy way to detect the case we need to set EDOM. */
2401 if (flag_errno_math && errno_set)
2402 return CODE_FOR_nothing;
2404 /* Optab mode depends on the mode of the input argument. */
2405 mode = TYPE_MODE (TREE_TYPE (arg));
2408 return optab_handler (builtin_optab, mode);
2409 return CODE_FOR_nothing;
2412 /* Expand a call to one of the builtin math functions that operate on
2413 floating point argument and output an integer result (ilogb, isinf,
2415 Return 0 if a normal call should be emitted rather than expanding the
2416 function in-line. EXP is the expression that is a call to the builtin
2417 function; if convenient, the result should be placed in TARGET. */
2420 expand_builtin_interclass_mathfn (tree exp, rtx target)
2422 enum insn_code icode = CODE_FOR_nothing;
2424 tree fndecl = get_callee_fndecl (exp);
2425 enum machine_mode mode;
2428 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2431 arg = CALL_EXPR_ARG (exp, 0);
2432 icode = interclass_mathfn_icode (arg, fndecl);
2433 mode = TYPE_MODE (TREE_TYPE (arg));
2435 if (icode != CODE_FOR_nothing)
2437 struct expand_operand ops[1];
2438 rtx last = get_last_insn ();
2439 tree orig_arg = arg;
2441 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2442 need to expand the argument again. This way, we will not perform
2443 side-effects more the once. */
2444 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2446 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2448 if (mode != GET_MODE (op0))
2449 op0 = convert_to_mode (mode, op0, 0);
2451 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2452 if (maybe_legitimize_operands (icode, 0, 1, ops)
2453 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2454 return ops[0].value;
2456 delete_insns_since (last);
2457 CALL_EXPR_ARG (exp, 0) = orig_arg;
2463 /* Expand a call to the builtin sincos math function.
2464 Return NULL_RTX if a normal call should be emitted rather than expanding the
2465 function in-line. EXP is the expression that is a call to the builtin
2469 expand_builtin_sincos (tree exp)
2471 rtx op0, op1, op2, target1, target2;
2472 enum machine_mode mode;
2473 tree arg, sinp, cosp;
2475 location_t loc = EXPR_LOCATION (exp);
2476 tree alias_type, alias_off;
2478 if (!validate_arglist (exp, REAL_TYPE,
2479 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2482 arg = CALL_EXPR_ARG (exp, 0);
2483 sinp = CALL_EXPR_ARG (exp, 1);
2484 cosp = CALL_EXPR_ARG (exp, 2);
2486 /* Make a suitable register to place result in. */
2487 mode = TYPE_MODE (TREE_TYPE (arg));
2489 /* Check if sincos insn is available, otherwise emit the call. */
2490 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2493 target1 = gen_reg_rtx (mode);
2494 target2 = gen_reg_rtx (mode);
2496 op0 = expand_normal (arg);
2497 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2498 alias_off = build_int_cst (alias_type, 0);
2499 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2501 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2504 /* Compute into target1 and target2.
2505 Set TARGET to wherever the result comes back. */
2506 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2507 gcc_assert (result);
2509 /* Move target1 and target2 to the memory locations indicated
2511 emit_move_insn (op1, target1);
2512 emit_move_insn (op2, target2);
2517 /* Expand a call to the internal cexpi builtin to the sincos math function.
2518 EXP is the expression that is a call to the builtin function; if convenient,
2519 the result should be placed in TARGET. */
2522 expand_builtin_cexpi (tree exp, rtx target)
2524 tree fndecl = get_callee_fndecl (exp);
2526 enum machine_mode mode;
2528 location_t loc = EXPR_LOCATION (exp);
2530 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2533 arg = CALL_EXPR_ARG (exp, 0);
2534 type = TREE_TYPE (arg);
2535 mode = TYPE_MODE (TREE_TYPE (arg));
2537 /* Try expanding via a sincos optab, fall back to emitting a libcall
2538 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2539 is only generated from sincos, cexp or if we have either of them. */
2540 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2542 op1 = gen_reg_rtx (mode);
2543 op2 = gen_reg_rtx (mode);
2545 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2547 /* Compute into op1 and op2. */
2548 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2550 else if (TARGET_HAS_SINCOS)
2552 tree call, fn = NULL_TREE;
2556 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2557 fn = built_in_decls[BUILT_IN_SINCOSF];
2558 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2559 fn = built_in_decls[BUILT_IN_SINCOS];
2560 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2561 fn = built_in_decls[BUILT_IN_SINCOSL];
2565 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2566 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2567 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2568 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2569 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2570 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2572 /* Make sure not to fold the sincos call again. */
2573 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2574 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2575 call, 3, arg, top1, top2));
2579 tree call, fn = NULL_TREE, narg;
2580 tree ctype = build_complex_type (type);
2582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2583 fn = built_in_decls[BUILT_IN_CEXPF];
2584 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2585 fn = built_in_decls[BUILT_IN_CEXP];
2586 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2587 fn = built_in_decls[BUILT_IN_CEXPL];
2591 /* If we don't have a decl for cexp create one. This is the
2592 friendliest fallback if the user calls __builtin_cexpi
2593 without full target C99 function support. */
2594 if (fn == NULL_TREE)
2597 const char *name = NULL;
2599 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2601 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2603 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2606 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2607 fn = build_fn_decl (name, fntype);
2610 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2611 build_real (type, dconst0), arg);
2613 /* Make sure not to fold the cexp call again. */
2614 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2615 return expand_expr (build_call_nary (ctype, call, 1, narg),
2616 target, VOIDmode, EXPAND_NORMAL);
2619 /* Now build the proper return type. */
2620 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2621 make_tree (TREE_TYPE (arg), op2),
2622 make_tree (TREE_TYPE (arg), op1)),
2623 target, VOIDmode, EXPAND_NORMAL);
2626 /* Conveniently construct a function call expression. FNDECL names the
2627 function to be called, N is the number of arguments, and the "..."
2628 parameters are the argument expressions. Unlike build_call_exr
2629 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2632 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2635 tree fntype = TREE_TYPE (fndecl);
2636 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2639 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2641 SET_EXPR_LOCATION (fn, loc);
2645 /* Expand a call to one of the builtin rounding functions gcc defines
2646 as an extension (lfloor and lceil). As these are gcc extensions we
2647 do not need to worry about setting errno to EDOM.
2648 If expanding via optab fails, lower expression to (int)(floor(x)).
2649 EXP is the expression that is a call to the builtin function;
2650 if convenient, the result should be placed in TARGET. */
2653 expand_builtin_int_roundingfn (tree exp, rtx target)
2655 convert_optab builtin_optab;
2656 rtx op0, insns, tmp;
2657 tree fndecl = get_callee_fndecl (exp);
2658 enum built_in_function fallback_fn;
2659 tree fallback_fndecl;
2660 enum machine_mode mode;
2663 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2666 arg = CALL_EXPR_ARG (exp, 0);
2668 switch (DECL_FUNCTION_CODE (fndecl))
2670 CASE_FLT_FN (BUILT_IN_ICEIL):
2671 CASE_FLT_FN (BUILT_IN_LCEIL):
2672 CASE_FLT_FN (BUILT_IN_LLCEIL):
2673 builtin_optab = lceil_optab;
2674 fallback_fn = BUILT_IN_CEIL;
2677 CASE_FLT_FN (BUILT_IN_IFLOOR):
2678 CASE_FLT_FN (BUILT_IN_LFLOOR):
2679 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2680 builtin_optab = lfloor_optab;
2681 fallback_fn = BUILT_IN_FLOOR;
2688 /* Make a suitable register to place result in. */
2689 mode = TYPE_MODE (TREE_TYPE (exp));
2691 target = gen_reg_rtx (mode);
2693 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2694 need to expand the argument again. This way, we will not perform
2695 side-effects more the once. */
2696 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2698 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2702 /* Compute into TARGET. */
2703 if (expand_sfix_optab (target, op0, builtin_optab))
2705 /* Output the entire sequence. */
2706 insns = get_insns ();
2712 /* If we were unable to expand via the builtin, stop the sequence
2713 (without outputting the insns). */
2716 /* Fall back to floating point rounding optab. */
2717 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2719 /* For non-C99 targets we may end up without a fallback fndecl here
2720 if the user called __builtin_lfloor directly. In this case emit
2721 a call to the floor/ceil variants nevertheless. This should result
2722 in the best user experience for not full C99 targets. */
2723 if (fallback_fndecl == NULL_TREE)
2726 const char *name = NULL;
2728 switch (DECL_FUNCTION_CODE (fndecl))
2730 case BUILT_IN_ICEIL:
2731 case BUILT_IN_LCEIL:
2732 case BUILT_IN_LLCEIL:
2735 case BUILT_IN_ICEILF:
2736 case BUILT_IN_LCEILF:
2737 case BUILT_IN_LLCEILF:
2740 case BUILT_IN_ICEILL:
2741 case BUILT_IN_LCEILL:
2742 case BUILT_IN_LLCEILL:
2745 case BUILT_IN_IFLOOR:
2746 case BUILT_IN_LFLOOR:
2747 case BUILT_IN_LLFLOOR:
2750 case BUILT_IN_IFLOORF:
2751 case BUILT_IN_LFLOORF:
2752 case BUILT_IN_LLFLOORF:
2755 case BUILT_IN_IFLOORL:
2756 case BUILT_IN_LFLOORL:
2757 case BUILT_IN_LLFLOORL:
2764 fntype = build_function_type_list (TREE_TYPE (arg),
2765 TREE_TYPE (arg), NULL_TREE);
2766 fallback_fndecl = build_fn_decl (name, fntype);
2769 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2771 tmp = expand_normal (exp);
2773 /* Truncate the result of floating point optab to integer
2774 via expand_fix (). */
2775 target = gen_reg_rtx (mode);
2776 expand_fix (target, tmp, 0);
2781 /* Expand a call to one of the builtin math functions doing integer
2783 Return 0 if a normal call should be emitted rather than expanding the
2784 function in-line. EXP is the expression that is a call to the builtin
2785 function; if convenient, the result should be placed in TARGET. */
2788 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2790 convert_optab builtin_optab;
2792 tree fndecl = get_callee_fndecl (exp);
2794 enum machine_mode mode;
2796 /* There's no easy way to detect the case we need to set EDOM. */
2797 if (flag_errno_math)
2800 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2803 arg = CALL_EXPR_ARG (exp, 0);
2805 switch (DECL_FUNCTION_CODE (fndecl))
2807 CASE_FLT_FN (BUILT_IN_IRINT):
2808 CASE_FLT_FN (BUILT_IN_LRINT):
2809 CASE_FLT_FN (BUILT_IN_LLRINT):
2810 builtin_optab = lrint_optab; break;
2812 CASE_FLT_FN (BUILT_IN_IROUND):
2813 CASE_FLT_FN (BUILT_IN_LROUND):
2814 CASE_FLT_FN (BUILT_IN_LLROUND):
2815 builtin_optab = lround_optab; break;
2821 /* Make a suitable register to place result in. */
2822 mode = TYPE_MODE (TREE_TYPE (exp));
2824 target = gen_reg_rtx (mode);
2826 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2827 need to expand the argument again. This way, we will not perform
2828 side-effects more the once. */
2829 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2831 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2835 if (expand_sfix_optab (target, op0, builtin_optab))
2837 /* Output the entire sequence. */
2838 insns = get_insns ();
2844 /* If we were unable to expand via the builtin, stop the sequence
2845 (without outputting the insns) and call to the library function
2846 with the stabilized argument list. */
2849 target = expand_call (exp, target, target == const0_rtx);
2854 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2855 a normal call should be emitted rather than expanding the function
2856 in-line. EXP is the expression that is a call to the builtin
2857 function; if convenient, the result should be placed in TARGET. */
2860 expand_builtin_powi (tree exp, rtx target)
2864 enum machine_mode mode;
2865 enum machine_mode mode2;
2867 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2870 arg0 = CALL_EXPR_ARG (exp, 0);
2871 arg1 = CALL_EXPR_ARG (exp, 1);
2872 mode = TYPE_MODE (TREE_TYPE (exp));
2874 /* Emit a libcall to libgcc. */
2876 /* Mode of the 2nd argument must match that of an int. */
2877 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2879 if (target == NULL_RTX)
2880 target = gen_reg_rtx (mode);
2882 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2883 if (GET_MODE (op0) != mode)
2884 op0 = convert_to_mode (mode, op0, 0);
2885 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2886 if (GET_MODE (op1) != mode2)
2887 op1 = convert_to_mode (mode2, op1, 0);
2889 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2890 target, LCT_CONST, mode, 2,
2891 op0, mode, op1, mode2);
2896 /* Expand expression EXP which is a call to the strlen builtin. Return
2897 NULL_RTX if we failed the caller should emit a normal call, otherwise
2898 try to get the result in TARGET, if convenient. */
2901 expand_builtin_strlen (tree exp, rtx target,
2902 enum machine_mode target_mode)
2904 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2908 struct expand_operand ops[4];
2911 tree src = CALL_EXPR_ARG (exp, 0);
2912 rtx src_reg, before_strlen;
2913 enum machine_mode insn_mode = target_mode;
2914 enum insn_code icode = CODE_FOR_nothing;
2917 /* If the length can be computed at compile-time, return it. */
2918 len = c_strlen (src, 0);
2920 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2922 /* If the length can be computed at compile-time and is constant
2923 integer, but there are side-effects in src, evaluate
2924 src for side-effects, then return len.
2925 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2926 can be optimized into: i++; x = 3; */
2927 len = c_strlen (src, 1);
2928 if (len && TREE_CODE (len) == INTEGER_CST)
2930 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2931 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2934 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2936 /* If SRC is not a pointer type, don't do this operation inline. */
2940 /* Bail out if we can't compute strlen in the right mode. */
2941 while (insn_mode != VOIDmode)
2943 icode = optab_handler (strlen_optab, insn_mode);
2944 if (icode != CODE_FOR_nothing)
2947 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2949 if (insn_mode == VOIDmode)
2952 /* Make a place to hold the source address. We will not expand
2953 the actual source until we are sure that the expansion will
2954 not fail -- there are trees that cannot be expanded twice. */
2955 src_reg = gen_reg_rtx (Pmode);
2957 /* Mark the beginning of the strlen sequence so we can emit the
2958 source operand later. */
2959 before_strlen = get_last_insn ();
2961 create_output_operand (&ops[0], target, insn_mode);
2962 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2963 create_integer_operand (&ops[2], 0);
2964 create_integer_operand (&ops[3], align);
2965 if (!maybe_expand_insn (icode, 4, ops))
2968 /* Now that we are assured of success, expand the source. */
2970 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2973 #ifdef POINTERS_EXTEND_UNSIGNED
2974 if (GET_MODE (pat) != Pmode)
2975 pat = convert_to_mode (Pmode, pat,
2976 POINTERS_EXTEND_UNSIGNED);
2978 emit_move_insn (src_reg, pat);
2984 emit_insn_after (pat, before_strlen);
2986 emit_insn_before (pat, get_insns ());
2988 /* Return the value in the proper mode for this function. */
2989 if (GET_MODE (ops[0].value) == target_mode)
2990 target = ops[0].value;
2991 else if (target != 0)
2992 convert_move (target, ops[0].value, 0);
2994 target = convert_to_mode (target_mode, ops[0].value, 0);
3000 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3001 bytes from constant string DATA + OFFSET and return it as target
3005 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3006 enum machine_mode mode)
3008 const char *str = (const char *) data;
3010 gcc_assert (offset >= 0
3011 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3012 <= strlen (str) + 1));
3014 return c_readstr (str + offset, mode);
3017 /* Expand a call EXP to the memcpy builtin.
3018 Return NULL_RTX if we failed, the caller should emit a normal call,
3019 otherwise try to get the result in TARGET, if convenient (and in
3020 mode MODE if that's convenient). */
3023 expand_builtin_memcpy (tree exp, rtx target)
3025 if (!validate_arglist (exp,
3026 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3030 tree dest = CALL_EXPR_ARG (exp, 0);
3031 tree src = CALL_EXPR_ARG (exp, 1);
3032 tree len = CALL_EXPR_ARG (exp, 2);
3033 const char *src_str;
3034 unsigned int src_align = get_pointer_alignment (src);
3035 unsigned int dest_align = get_pointer_alignment (dest);
3036 rtx dest_mem, src_mem, dest_addr, len_rtx;
3037 HOST_WIDE_INT expected_size = -1;
3038 unsigned int expected_align = 0;
3040 /* If DEST is not a pointer type, call the normal function. */
3041 if (dest_align == 0)
3044 /* If either SRC is not a pointer type, don't do this
3045 operation in-line. */
3049 if (currently_expanding_gimple_stmt)
3050 stringop_block_profile (currently_expanding_gimple_stmt,
3051 &expected_align, &expected_size);
3053 if (expected_align < dest_align)
3054 expected_align = dest_align;
3055 dest_mem = get_memory_rtx (dest, len);
3056 set_mem_align (dest_mem, dest_align);
3057 len_rtx = expand_normal (len);
3058 src_str = c_getstr (src);
3060 /* If SRC is a string constant and block move would be done
3061 by pieces, we can avoid loading the string from memory
3062 and only stored the computed constants. */
3064 && CONST_INT_P (len_rtx)
3065 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3066 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3067 CONST_CAST (char *, src_str),
3070 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3071 builtin_memcpy_read_str,
3072 CONST_CAST (char *, src_str),
3073 dest_align, false, 0);
3074 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3075 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3079 src_mem = get_memory_rtx (src, len);
3080 set_mem_align (src_mem, src_align);
3082 /* Copy word part most expediently. */
3083 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3084 CALL_EXPR_TAILCALL (exp)
3085 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3086 expected_align, expected_size);
3090 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3091 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3097 /* Expand a call EXP to the mempcpy builtin.
3098 Return NULL_RTX if we failed; the caller should emit a normal call,
3099 otherwise try to get the result in TARGET, if convenient (and in
3100 mode MODE if that's convenient). If ENDP is 0 return the
3101 destination pointer, if ENDP is 1 return the end pointer ala
3102 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3106 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3108 if (!validate_arglist (exp,
3109 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3113 tree dest = CALL_EXPR_ARG (exp, 0);
3114 tree src = CALL_EXPR_ARG (exp, 1);
3115 tree len = CALL_EXPR_ARG (exp, 2);
3116 return expand_builtin_mempcpy_args (dest, src, len,
3117 target, mode, /*endp=*/ 1);
3121 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3122 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3123 so that this can also be called without constructing an actual CALL_EXPR.
3124 The other arguments and return value are the same as for
3125 expand_builtin_mempcpy. */
3128 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3129 rtx target, enum machine_mode mode, int endp)
3131 /* If return value is ignored, transform mempcpy into memcpy. */
3132 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3134 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3135 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3137 return expand_expr (result, target, mode, EXPAND_NORMAL);
3141 const char *src_str;
3142 unsigned int src_align = get_pointer_alignment (src);
3143 unsigned int dest_align = get_pointer_alignment (dest);
3144 rtx dest_mem, src_mem, len_rtx;
3146 /* If either SRC or DEST is not a pointer type, don't do this
3147 operation in-line. */
3148 if (dest_align == 0 || src_align == 0)
3151 /* If LEN is not constant, call the normal function. */
3152 if (! host_integerp (len, 1))
3155 len_rtx = expand_normal (len);
3156 src_str = c_getstr (src);
3158 /* If SRC is a string constant and block move would be done
3159 by pieces, we can avoid loading the string from memory
3160 and only stored the computed constants. */
3162 && CONST_INT_P (len_rtx)
3163 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3164 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3165 CONST_CAST (char *, src_str),
3168 dest_mem = get_memory_rtx (dest, len);
3169 set_mem_align (dest_mem, dest_align);
3170 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3171 builtin_memcpy_read_str,
3172 CONST_CAST (char *, src_str),
3173 dest_align, false, endp);
3174 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3175 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3179 if (CONST_INT_P (len_rtx)
3180 && can_move_by_pieces (INTVAL (len_rtx),
3181 MIN (dest_align, src_align)))
3183 dest_mem = get_memory_rtx (dest, len);
3184 set_mem_align (dest_mem, dest_align);
3185 src_mem = get_memory_rtx (src, len);
3186 set_mem_align (src_mem, src_align);
3187 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3188 MIN (dest_align, src_align), endp);
3189 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3190 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3199 # define HAVE_movstr 0
3200 # define CODE_FOR_movstr CODE_FOR_nothing
3203 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3204 we failed, the caller should emit a normal call, otherwise try to
3205 get the result in TARGET, if convenient. If ENDP is 0 return the
3206 destination pointer, if ENDP is 1 return the end pointer ala
3207 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3211 expand_movstr (tree dest, tree src, rtx target, int endp)
3213 struct expand_operand ops[3];
3220 dest_mem = get_memory_rtx (dest, NULL);
3221 src_mem = get_memory_rtx (src, NULL);
3224 target = force_reg (Pmode, XEXP (dest_mem, 0));
3225 dest_mem = replace_equiv_address (dest_mem, target);
3228 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3229 create_fixed_operand (&ops[1], dest_mem);
3230 create_fixed_operand (&ops[2], src_mem);
3231 expand_insn (CODE_FOR_movstr, 3, ops);
3233 if (endp && target != const0_rtx)
3235 target = ops[0].value;
3236 /* movstr is supposed to set end to the address of the NUL
3237 terminator. If the caller requested a mempcpy-like return value,
3241 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3242 emit_move_insn (target, force_operand (tem, NULL_RTX));
3248 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3249 NULL_RTX if we failed the caller should emit a normal call, otherwise
3250 try to get the result in TARGET, if convenient (and in mode MODE if that's
3254 expand_builtin_strcpy (tree exp, rtx target)
3256 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3258 tree dest = CALL_EXPR_ARG (exp, 0);
3259 tree src = CALL_EXPR_ARG (exp, 1);
3260 return expand_builtin_strcpy_args (dest, src, target);
3265 /* Helper function to do the actual work for expand_builtin_strcpy. The
3266 arguments to the builtin_strcpy call DEST and SRC are broken out
3267 so that this can also be called without constructing an actual CALL_EXPR.
3268 The other arguments and return value are the same as for
3269 expand_builtin_strcpy. */
3272 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3274 return expand_movstr (dest, src, target, /*endp=*/0);
3277 /* Expand a call EXP to the stpcpy builtin.
3278 Return NULL_RTX if we failed the caller should emit a normal call,
3279 otherwise try to get the result in TARGET, if convenient (and in
3280 mode MODE if that's convenient). */
3283 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3286 location_t loc = EXPR_LOCATION (exp);
3288 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3291 dst = CALL_EXPR_ARG (exp, 0);
3292 src = CALL_EXPR_ARG (exp, 1);
3294 /* If return value is ignored, transform stpcpy into strcpy. */
3295 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3297 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3298 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3299 return expand_expr (result, target, mode, EXPAND_NORMAL);
3306 /* Ensure we get an actual string whose length can be evaluated at
3307 compile-time, not an expression containing a string. This is
3308 because the latter will potentially produce pessimized code
3309 when used to produce the return value. */
3310 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3311 return expand_movstr (dst, src, target, /*endp=*/2);
3313 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3314 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3315 target, mode, /*endp=*/2);
3320 if (TREE_CODE (len) == INTEGER_CST)
3322 rtx len_rtx = expand_normal (len);
3324 if (CONST_INT_P (len_rtx))
3326 ret = expand_builtin_strcpy_args (dst, src, target);
3332 if (mode != VOIDmode)
3333 target = gen_reg_rtx (mode);
3335 target = gen_reg_rtx (GET_MODE (ret));
3337 if (GET_MODE (target) != GET_MODE (ret))
3338 ret = gen_lowpart (GET_MODE (target), ret);
3340 ret = plus_constant (ret, INTVAL (len_rtx));
3341 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3349 return expand_movstr (dst, src, target, /*endp=*/2);
3353 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3354 bytes from constant string DATA + OFFSET and return it as target
3358 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3359 enum machine_mode mode)
3361 const char *str = (const char *) data;
3363 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3366 return c_readstr (str + offset, mode);
3369 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3370 NULL_RTX if we failed the caller should emit a normal call. */
3373 expand_builtin_strncpy (tree exp, rtx target)
3375 location_t loc = EXPR_LOCATION (exp);
3377 if (validate_arglist (exp,
3378 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3380 tree dest = CALL_EXPR_ARG (exp, 0);
3381 tree src = CALL_EXPR_ARG (exp, 1);
3382 tree len = CALL_EXPR_ARG (exp, 2);
3383 tree slen = c_strlen (src, 1);
3385 /* We must be passed a constant len and src parameter. */
3386 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3389 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3391 /* We're required to pad with trailing zeros if the requested
3392 len is greater than strlen(s2)+1. In that case try to
3393 use store_by_pieces, if it fails, punt. */
3394 if (tree_int_cst_lt (slen, len))
3396 unsigned int dest_align = get_pointer_alignment (dest);
3397 const char *p = c_getstr (src);
3400 if (!p || dest_align == 0 || !host_integerp (len, 1)
3401 || !can_store_by_pieces (tree_low_cst (len, 1),
3402 builtin_strncpy_read_str,
3403 CONST_CAST (char *, p),
3407 dest_mem = get_memory_rtx (dest, len);
3408 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3409 builtin_strncpy_read_str,
3410 CONST_CAST (char *, p), dest_align, false, 0);
3411 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3412 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3419 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3420 bytes from constant string DATA + OFFSET and return it as target
3424 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3425 enum machine_mode mode)
3427 const char *c = (const char *) data;
3428 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3430 memset (p, *c, GET_MODE_SIZE (mode));
3432 return c_readstr (p, mode);
3435 /* Callback routine for store_by_pieces. Return the RTL of a register
3436 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3437 char value given in the RTL register data. For example, if mode is
3438 4 bytes wide, return the RTL for 0x01010101*data. */
3441 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3442 enum machine_mode mode)
3448 size = GET_MODE_SIZE (mode);
3452 p = XALLOCAVEC (char, size);
3453 memset (p, 1, size);
3454 coeff = c_readstr (p, mode);
3456 target = convert_to_mode (mode, (rtx) data, 1);
3457 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3458 return force_reg (mode, target);
3461 /* Expand expression EXP, which is a call to the memset builtin. Return
3462 NULL_RTX if we failed the caller should emit a normal call, otherwise
3463 try to get the result in TARGET, if convenient (and in mode MODE if that's
3467 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3469 if (!validate_arglist (exp,
3470 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3474 tree dest = CALL_EXPR_ARG (exp, 0);
3475 tree val = CALL_EXPR_ARG (exp, 1);
3476 tree len = CALL_EXPR_ARG (exp, 2);
3477 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3481 /* Helper function to do the actual work for expand_builtin_memset. The
3482 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3483 so that this can also be called without constructing an actual CALL_EXPR.
3484 The other arguments and return value are the same as for
3485 expand_builtin_memset. */
3488 expand_builtin_memset_args (tree dest, tree val, tree len,
3489 rtx target, enum machine_mode mode, tree orig_exp)
3492 enum built_in_function fcode;
3493 enum machine_mode val_mode;
3495 unsigned int dest_align;
3496 rtx dest_mem, dest_addr, len_rtx;
3497 HOST_WIDE_INT expected_size = -1;
3498 unsigned int expected_align = 0;
3500 dest_align = get_pointer_alignment (dest);
3502 /* If DEST is not a pointer type, don't do this operation in-line. */
3503 if (dest_align == 0)
3506 if (currently_expanding_gimple_stmt)
3507 stringop_block_profile (currently_expanding_gimple_stmt,
3508 &expected_align, &expected_size);
3510 if (expected_align < dest_align)
3511 expected_align = dest_align;
3513 /* If the LEN parameter is zero, return DEST. */
3514 if (integer_zerop (len))
3516 /* Evaluate and ignore VAL in case it has side-effects. */
3517 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3518 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3521 /* Stabilize the arguments in case we fail. */
3522 dest = builtin_save_expr (dest);
3523 val = builtin_save_expr (val);
3524 len = builtin_save_expr (len);
3526 len_rtx = expand_normal (len);
3527 dest_mem = get_memory_rtx (dest, len);
3528 val_mode = TYPE_MODE (unsigned_char_type_node);
3530 if (TREE_CODE (val) != INTEGER_CST)
3534 val_rtx = expand_normal (val);
3535 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3537 /* Assume that we can memset by pieces if we can store
3538 * the coefficients by pieces (in the required modes).
3539 * We can't pass builtin_memset_gen_str as that emits RTL. */
3541 if (host_integerp (len, 1)
3542 && can_store_by_pieces (tree_low_cst (len, 1),
3543 builtin_memset_read_str, &c, dest_align,
3546 val_rtx = force_reg (val_mode, val_rtx);
3547 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3548 builtin_memset_gen_str, val_rtx, dest_align,
3551 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3552 dest_align, expected_align,
3556 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3557 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3561 if (target_char_cast (val, &c))
3566 if (host_integerp (len, 1)
3567 && can_store_by_pieces (tree_low_cst (len, 1),
3568 builtin_memset_read_str, &c, dest_align,
3570 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3571 builtin_memset_read_str, &c, dest_align, true, 0);
3572 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3573 gen_int_mode (c, val_mode),
3574 dest_align, expected_align,
3578 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3579 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3583 set_mem_align (dest_mem, dest_align);
3584 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3585 CALL_EXPR_TAILCALL (orig_exp)
3586 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3587 expected_align, expected_size);
3591 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3592 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3598 fndecl = get_callee_fndecl (orig_exp);
3599 fcode = DECL_FUNCTION_CODE (fndecl);
3600 if (fcode == BUILT_IN_MEMSET)
3601 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3603 else if (fcode == BUILT_IN_BZERO)
3604 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3608 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3609 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3610 return expand_call (fn, target, target == const0_rtx);
3613 /* Expand expression EXP, which is a call to the bzero builtin. Return
3614 NULL_RTX if we failed the caller should emit a normal call. */
3617 expand_builtin_bzero (tree exp)
3620 location_t loc = EXPR_LOCATION (exp);
3622 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3625 dest = CALL_EXPR_ARG (exp, 0);
3626 size = CALL_EXPR_ARG (exp, 1);
3628 /* New argument list transforming bzero(ptr x, int y) to
3629 memset(ptr x, int 0, size_t y). This is done this way
3630 so that if it isn't expanded inline, we fallback to
3631 calling bzero instead of memset. */
3633 return expand_builtin_memset_args (dest, integer_zero_node,
3634 fold_convert_loc (loc, sizetype, size),
3635 const0_rtx, VOIDmode, exp);
3638 /* Expand expression EXP, which is a call to the memcmp built-in function.
3639 Return NULL_RTX if we failed and the caller should emit a normal call,
3640 otherwise try to get the result in TARGET, if convenient (and in mode
3641 MODE, if that's convenient). */
3644 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3645 ATTRIBUTE_UNUSED enum machine_mode mode)
3647 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3649 if (!validate_arglist (exp,
3650 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3653 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3654 implementing memcmp because it will stop if it encounters two
3656 #if defined HAVE_cmpmemsi
3658 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3661 tree arg1 = CALL_EXPR_ARG (exp, 0);
3662 tree arg2 = CALL_EXPR_ARG (exp, 1);
3663 tree len = CALL_EXPR_ARG (exp, 2);
3665 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3666 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3667 enum machine_mode insn_mode;
3670 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3674 /* If we don't have POINTER_TYPE, call the function. */
3675 if (arg1_align == 0 || arg2_align == 0)
3678 /* Make a place to write the result of the instruction. */
3681 && REG_P (result) && GET_MODE (result) == insn_mode
3682 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3683 result = gen_reg_rtx (insn_mode);
3685 arg1_rtx = get_memory_rtx (arg1, len);
3686 arg2_rtx = get_memory_rtx (arg2, len);
3687 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3689 /* Set MEM_SIZE as appropriate. */
3690 if (CONST_INT_P (arg3_rtx))
3692 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3693 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3697 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3698 GEN_INT (MIN (arg1_align, arg2_align)));
3705 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3706 TYPE_MODE (integer_type_node), 3,
3707 XEXP (arg1_rtx, 0), Pmode,
3708 XEXP (arg2_rtx, 0), Pmode,
3709 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3710 TYPE_UNSIGNED (sizetype)),
3711 TYPE_MODE (sizetype));
3713 /* Return the value in the proper mode for this function. */
3714 mode = TYPE_MODE (TREE_TYPE (exp));
3715 if (GET_MODE (result) == mode)
3717 else if (target != 0)
3719 convert_move (target, result, 0);
3723 return convert_to_mode (mode, result, 0);
3725 #endif /* HAVE_cmpmemsi. */
3730 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3731 if we failed the caller should emit a normal call, otherwise try to get
3732 the result in TARGET, if convenient. */
3735 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3737 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3740 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3741 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3742 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3744 rtx arg1_rtx, arg2_rtx;
3745 rtx result, insn = NULL_RTX;
3747 tree arg1 = CALL_EXPR_ARG (exp, 0);
3748 tree arg2 = CALL_EXPR_ARG (exp, 1);
3750 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3751 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3753 /* If we don't have POINTER_TYPE, call the function. */
3754 if (arg1_align == 0 || arg2_align == 0)
3757 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3758 arg1 = builtin_save_expr (arg1);
3759 arg2 = builtin_save_expr (arg2);
3761 arg1_rtx = get_memory_rtx (arg1, NULL);
3762 arg2_rtx = get_memory_rtx (arg2, NULL);
3764 #ifdef HAVE_cmpstrsi
3765 /* Try to call cmpstrsi. */
3768 enum machine_mode insn_mode
3769 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3771 /* Make a place to write the result of the instruction. */
3774 && REG_P (result) && GET_MODE (result) == insn_mode
3775 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3776 result = gen_reg_rtx (insn_mode);
3778 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3779 GEN_INT (MIN (arg1_align, arg2_align)));
3782 #ifdef HAVE_cmpstrnsi
3783 /* Try to determine at least one length and call cmpstrnsi. */
3784 if (!insn && HAVE_cmpstrnsi)
3789 enum machine_mode insn_mode
3790 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3791 tree len1 = c_strlen (arg1, 1);
3792 tree len2 = c_strlen (arg2, 1);
3795 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3797 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3799 /* If we don't have a constant length for the first, use the length
3800 of the second, if we know it. We don't require a constant for
3801 this case; some cost analysis could be done if both are available
3802 but neither is constant. For now, assume they're equally cheap,
3803 unless one has side effects. If both strings have constant lengths,
3810 else if (TREE_SIDE_EFFECTS (len1))
3812 else if (TREE_SIDE_EFFECTS (len2))
3814 else if (TREE_CODE (len1) != INTEGER_CST)
3816 else if (TREE_CODE (len2) != INTEGER_CST)
3818 else if (tree_int_cst_lt (len1, len2))
3823 /* If both arguments have side effects, we cannot optimize. */
3824 if (!len || TREE_SIDE_EFFECTS (len))
3827 arg3_rtx = expand_normal (len);
3829 /* Make a place to write the result of the instruction. */
3832 && REG_P (result) && GET_MODE (result) == insn_mode
3833 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3834 result = gen_reg_rtx (insn_mode);
3836 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3837 GEN_INT (MIN (arg1_align, arg2_align)));
3843 enum machine_mode mode;
3846 /* Return the value in the proper mode for this function. */
3847 mode = TYPE_MODE (TREE_TYPE (exp));
3848 if (GET_MODE (result) == mode)
3851 return convert_to_mode (mode, result, 0);
3852 convert_move (target, result, 0);
3856 /* Expand the library call ourselves using a stabilized argument
3857 list to avoid re-evaluating the function's arguments twice. */
3858 #ifdef HAVE_cmpstrnsi
3861 fndecl = get_callee_fndecl (exp);
3862 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3863 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3864 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3865 return expand_call (fn, target, target == const0_rtx);
3871 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3872 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3873 the result in TARGET, if convenient. */
3876 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3877 ATTRIBUTE_UNUSED enum machine_mode mode)
3879 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3881 if (!validate_arglist (exp,
3882 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3885 /* If c_strlen can determine an expression for one of the string
3886 lengths, and it doesn't have side effects, then emit cmpstrnsi
3887 using length MIN(strlen(string)+1, arg3). */
3888 #ifdef HAVE_cmpstrnsi
3891 tree len, len1, len2;
3892 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3895 tree arg1 = CALL_EXPR_ARG (exp, 0);
3896 tree arg2 = CALL_EXPR_ARG (exp, 1);
3897 tree arg3 = CALL_EXPR_ARG (exp, 2);
3899 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3900 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3901 enum machine_mode insn_mode
3902 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3904 len1 = c_strlen (arg1, 1);
3905 len2 = c_strlen (arg2, 1);
3908 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3910 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3912 /* If we don't have a constant length for the first, use the length
3913 of the second, if we know it. We don't require a constant for
3914 this case; some cost analysis could be done if both are available
3915 but neither is constant. For now, assume they're equally cheap,
3916 unless one has side effects. If both strings have constant lengths,
3923 else if (TREE_SIDE_EFFECTS (len1))
3925 else if (TREE_SIDE_EFFECTS (len2))
3927 else if (TREE_CODE (len1) != INTEGER_CST)
3929 else if (TREE_CODE (len2) != INTEGER_CST)
3931 else if (tree_int_cst_lt (len1, len2))
3936 /* If both arguments have side effects, we cannot optimize. */
3937 if (!len || TREE_SIDE_EFFECTS (len))
3940 /* The actual new length parameter is MIN(len,arg3). */
3941 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3942 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3944 /* If we don't have POINTER_TYPE, call the function. */
3945 if (arg1_align == 0 || arg2_align == 0)
3948 /* Make a place to write the result of the instruction. */
3951 && REG_P (result) && GET_MODE (result) == insn_mode
3952 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3953 result = gen_reg_rtx (insn_mode);
3955 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3956 arg1 = builtin_save_expr (arg1);
3957 arg2 = builtin_save_expr (arg2);
3958 len = builtin_save_expr (len);
3960 arg1_rtx = get_memory_rtx (arg1, len);
3961 arg2_rtx = get_memory_rtx (arg2, len);
3962 arg3_rtx = expand_normal (len);
3963 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3964 GEN_INT (MIN (arg1_align, arg2_align)));
3969 /* Return the value in the proper mode for this function. */
3970 mode = TYPE_MODE (TREE_TYPE (exp));
3971 if (GET_MODE (result) == mode)
3974 return convert_to_mode (mode, result, 0);
3975 convert_move (target, result, 0);
3979 /* Expand the library call ourselves using a stabilized argument
3980 list to avoid re-evaluating the function's arguments twice. */
3981 fndecl = get_callee_fndecl (exp);
3982 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3984 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3985 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3986 return expand_call (fn, target, target == const0_rtx);
3992 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3993 if that's convenient. */
3996 expand_builtin_saveregs (void)
4000 /* Don't do __builtin_saveregs more than once in a function.
4001 Save the result of the first call and reuse it. */
4002 if (saveregs_value != 0)
4003 return saveregs_value;
4005 /* When this function is called, it means that registers must be
4006 saved on entry to this function. So we migrate the call to the
4007 first insn of this function. */
4011 /* Do whatever the machine needs done in this case. */
4012 val = targetm.calls.expand_builtin_saveregs ();
4017 saveregs_value = val;
4019 /* Put the insns after the NOTE that starts the function. If this
4020 is inside a start_sequence, make the outer-level insn chain current, so
4021 the code is placed at the start of the function. */
4022 push_topmost_sequence ();
4023 emit_insn_after (seq, entry_of_function ());
4024 pop_topmost_sequence ();
4029 /* Expand a call to __builtin_next_arg. */
4032 expand_builtin_next_arg (void)
4034 /* Checking arguments is already done in fold_builtin_next_arg
4035 that must be called before this function. */
4036 return expand_binop (ptr_mode, add_optab,
4037 crtl->args.internal_arg_pointer,
4038 crtl->args.arg_offset_rtx,
4039 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4042 /* Make it easier for the backends by protecting the valist argument
4043 from multiple evaluations. */
4046 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4048 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4050 /* The current way of determining the type of valist is completely
4051 bogus. We should have the information on the va builtin instead. */
4053 vatype = targetm.fn_abi_va_list (cfun->decl);
4055 if (TREE_CODE (vatype) == ARRAY_TYPE)
4057 if (TREE_SIDE_EFFECTS (valist))
4058 valist = save_expr (valist);
4060 /* For this case, the backends will be expecting a pointer to
4061 vatype, but it's possible we've actually been given an array
4062 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4064 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4066 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4067 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4072 tree pt = build_pointer_type (vatype);
4076 if (! TREE_SIDE_EFFECTS (valist))
4079 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4080 TREE_SIDE_EFFECTS (valist) = 1;
4083 if (TREE_SIDE_EFFECTS (valist))
4084 valist = save_expr (valist);
4085 valist = fold_build2_loc (loc, MEM_REF,
4086 vatype, valist, build_int_cst (pt, 0));
4092 /* The "standard" definition of va_list is void*. */
4095 std_build_builtin_va_list (void)
4097 return ptr_type_node;
4100 /* The "standard" abi va_list is va_list_type_node. */
4103 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4105 return va_list_type_node;
4108 /* The "standard" type of va_list is va_list_type_node. */
4111 std_canonical_va_list_type (tree type)
4115 if (INDIRECT_REF_P (type))
4116 type = TREE_TYPE (type);
4117 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4118 type = TREE_TYPE (type);
4119 wtype = va_list_type_node;
4121 /* Treat structure va_list types. */
4122 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4123 htype = TREE_TYPE (htype);
4124 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4126 /* If va_list is an array type, the argument may have decayed
4127 to a pointer type, e.g. by being passed to another function.
4128 In that case, unwrap both types so that we can compare the
4129 underlying records. */
4130 if (TREE_CODE (htype) == ARRAY_TYPE
4131 || POINTER_TYPE_P (htype))
4133 wtype = TREE_TYPE (wtype);
4134 htype = TREE_TYPE (htype);
4137 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4138 return va_list_type_node;
4143 /* The "standard" implementation of va_start: just assign `nextarg' to
4147 std_expand_builtin_va_start (tree valist, rtx nextarg)
4149 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4150 convert_move (va_r, nextarg, 0);
4153 /* Expand EXP, a call to __builtin_va_start. */
4156 expand_builtin_va_start (tree exp)
4160 location_t loc = EXPR_LOCATION (exp);
4162 if (call_expr_nargs (exp) < 2)
4164 error_at (loc, "too few arguments to function %<va_start%>");
4168 if (fold_builtin_next_arg (exp, true))
4171 nextarg = expand_builtin_next_arg ();
4172 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4174 if (targetm.expand_builtin_va_start)
4175 targetm.expand_builtin_va_start (valist, nextarg);
4177 std_expand_builtin_va_start (valist, nextarg);
4182 /* The "standard" implementation of va_arg: read the value from the
4183 current (padded) address and increment by the (padded) size. */
4186 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4189 tree addr, t, type_size, rounded_size, valist_tmp;
4190 unsigned HOST_WIDE_INT align, boundary;
4193 #ifdef ARGS_GROW_DOWNWARD
4194 /* All of the alignment and movement below is for args-grow-up machines.
4195 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4196 implement their own specialized gimplify_va_arg_expr routines. */
4200 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4202 type = build_pointer_type (type);
4204 align = PARM_BOUNDARY / BITS_PER_UNIT;
4205 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4207 /* When we align parameter on stack for caller, if the parameter
4208 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4209 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4210 here with caller. */
4211 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4212 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4214 boundary /= BITS_PER_UNIT;
4216 /* Hoist the valist value into a temporary for the moment. */
4217 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4219 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4220 requires greater alignment, we must perform dynamic alignment. */
4221 if (boundary > align
4222 && !integer_zerop (TYPE_SIZE (type)))
4224 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4225 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4226 gimplify_and_add (t, pre_p);
4228 t = fold_convert (sizetype, valist_tmp);
4229 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4230 fold_convert (TREE_TYPE (valist),
4231 fold_build2 (BIT_AND_EXPR, sizetype, t,
4232 size_int (-boundary))));
4233 gimplify_and_add (t, pre_p);
4238 /* If the actual alignment is less than the alignment of the type,
4239 adjust the type accordingly so that we don't assume strict alignment
4240 when dereferencing the pointer. */
4241 boundary *= BITS_PER_UNIT;
4242 if (boundary < TYPE_ALIGN (type))
4244 type = build_variant_type_copy (type);
4245 TYPE_ALIGN (type) = boundary;
4248 /* Compute the rounded size of the type. */
4249 type_size = size_in_bytes (type);
4250 rounded_size = round_up (type_size, align);
4252 /* Reduce rounded_size so it's sharable with the postqueue. */
4253 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4257 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4259 /* Small args are padded downward. */
4260 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4261 rounded_size, size_int (align));
4262 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4263 size_binop (MINUS_EXPR, rounded_size, type_size));
4264 addr = fold_build_pointer_plus (addr, t);
4267 /* Compute new value for AP. */
4268 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4269 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4270 gimplify_and_add (t, pre_p);
4272 addr = fold_convert (build_pointer_type (type), addr);
4275 addr = build_va_arg_indirect_ref (addr);
4277 return build_va_arg_indirect_ref (addr);
4280 /* Build an indirect-ref expression over the given TREE, which represents a
4281 piece of a va_arg() expansion. */
4283 build_va_arg_indirect_ref (tree addr)
4285 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4287 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4293 /* Return a dummy expression of type TYPE in order to keep going after an
4297 dummy_object (tree type)
4299 tree t = build_int_cst (build_pointer_type (type), 0);
4300 return build2 (MEM_REF, type, t, t);
4303 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4304 builtin function, but a very special sort of operator. */
4306 enum gimplify_status
4307 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4309 tree promoted_type, have_va_type;
4310 tree valist = TREE_OPERAND (*expr_p, 0);
4311 tree type = TREE_TYPE (*expr_p);
4313 location_t loc = EXPR_LOCATION (*expr_p);
4315 /* Verify that valist is of the proper type. */
4316 have_va_type = TREE_TYPE (valist);
4317 if (have_va_type == error_mark_node)
4319 have_va_type = targetm.canonical_va_list_type (have_va_type);
4321 if (have_va_type == NULL_TREE)
4323 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4327 /* Generate a diagnostic for requesting data of a type that cannot
4328 be passed through `...' due to type promotion at the call site. */
4329 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4332 static bool gave_help;
4335 /* Unfortunately, this is merely undefined, rather than a constraint
4336 violation, so we cannot make this an error. If this call is never
4337 executed, the program is still strictly conforming. */
4338 warned = warning_at (loc, 0,
4339 "%qT is promoted to %qT when passed through %<...%>",
4340 type, promoted_type);
4341 if (!gave_help && warned)
4344 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4345 promoted_type, type);
4348 /* We can, however, treat "undefined" any way we please.
4349 Call abort to encourage the user to fix the program. */
4351 inform (loc, "if this code is reached, the program will abort");
4352 /* Before the abort, allow the evaluation of the va_list
4353 expression to exit or longjmp. */
4354 gimplify_and_add (valist, pre_p);
4355 t = build_call_expr_loc (loc,
4356 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4357 gimplify_and_add (t, pre_p);
4359 /* This is dead code, but go ahead and finish so that the
4360 mode of the result comes out right. */
4361 *expr_p = dummy_object (type);
4366 /* Make it easier for the backends by protecting the valist argument
4367 from multiple evaluations. */
4368 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4370 /* For this case, the backends will be expecting a pointer to
4371 TREE_TYPE (abi), but it's possible we've
4372 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4374 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4376 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4377 valist = fold_convert_loc (loc, p1,
4378 build_fold_addr_expr_loc (loc, valist));
4381 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4384 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4386 if (!targetm.gimplify_va_arg_expr)
4387 /* FIXME: Once most targets are converted we should merely
4388 assert this is non-null. */
4391 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4396 /* Expand EXP, a call to __builtin_va_end. */
4399 expand_builtin_va_end (tree exp)
4401 tree valist = CALL_EXPR_ARG (exp, 0);
4403 /* Evaluate for side effects, if needed. I hate macros that don't
4405 if (TREE_SIDE_EFFECTS (valist))
4406 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4411 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4412 builtin rather than just as an assignment in stdarg.h because of the
4413 nastiness of array-type va_list types. */
4416 expand_builtin_va_copy (tree exp)
4419 location_t loc = EXPR_LOCATION (exp);
4421 dst = CALL_EXPR_ARG (exp, 0);
4422 src = CALL_EXPR_ARG (exp, 1);
4424 dst = stabilize_va_list_loc (loc, dst, 1);
4425 src = stabilize_va_list_loc (loc, src, 0);
4427 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4429 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4431 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4432 TREE_SIDE_EFFECTS (t) = 1;
4433 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4437 rtx dstb, srcb, size;
4439 /* Evaluate to pointers. */
4440 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4441 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4442 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4443 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4445 dstb = convert_memory_address (Pmode, dstb);
4446 srcb = convert_memory_address (Pmode, srcb);
4448 /* "Dereference" to BLKmode memories. */
4449 dstb = gen_rtx_MEM (BLKmode, dstb);
4450 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4451 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4452 srcb = gen_rtx_MEM (BLKmode, srcb);
4453 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4454 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4457 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4463 /* Expand a call to one of the builtin functions __builtin_frame_address or
4464 __builtin_return_address. */
4467 expand_builtin_frame_address (tree fndecl, tree exp)
4469 /* The argument must be a nonnegative integer constant.
4470 It counts the number of frames to scan up the stack.
4471 The value is the return address saved in that frame. */
4472 if (call_expr_nargs (exp) == 0)
4473 /* Warning about missing arg was already issued. */
4475 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4477 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4478 error ("invalid argument to %<__builtin_frame_address%>");
4480 error ("invalid argument to %<__builtin_return_address%>");
4486 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4487 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4489 /* Some ports cannot access arbitrary stack frames. */
4492 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4493 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4495 warning (0, "unsupported argument to %<__builtin_return_address%>");
4499 /* For __builtin_frame_address, return what we've got. */
4500 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4504 && ! CONSTANT_P (tem))
4505 tem = copy_to_mode_reg (Pmode, tem);
4510 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4511 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4512 is the same as for allocate_dynamic_stack_space. */
4515 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4520 /* Emit normal call if marked not-inlineable. */
4521 if (CALL_CANNOT_INLINE_P (exp))
4524 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4527 /* Compute the argument. */
4528 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4530 /* Allocate the desired space. */
4531 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
4533 result = convert_memory_address (ptr_mode, result);
4538 /* Expand a call to a bswap builtin with argument ARG0. MODE
4539 is the mode to expand with. */
4542 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4544 enum machine_mode mode;
4548 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4551 arg = CALL_EXPR_ARG (exp, 0);
4552 mode = TYPE_MODE (TREE_TYPE (arg));
4553 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4555 target = expand_unop (mode, bswap_optab, op0, target, 1);
4557 gcc_assert (target);
4559 return convert_to_mode (mode, target, 0);
4562 /* Expand a call to a unary builtin in EXP.
4563 Return NULL_RTX if a normal call should be emitted rather than expanding the
4564 function in-line. If convenient, the result should be placed in TARGET.
4565 SUBTARGET may be used as the target for computing one of EXP's operands. */
4568 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4569 rtx subtarget, optab op_optab)
4573 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4576 /* Compute the argument. */
4577 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4579 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4580 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4581 VOIDmode, EXPAND_NORMAL);
4582 /* Compute op, into TARGET if possible.
4583 Set TARGET to wherever the result comes back. */
4584 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4585 op_optab, op0, target, op_optab != clrsb_optab);
4586 gcc_assert (target);
4588 return convert_to_mode (target_mode, target, 0);
4591 /* Expand a call to __builtin_expect. We just return our argument
4592 as the builtin_expect semantic should've been already executed by
4593 tree branch prediction pass. */
4596 expand_builtin_expect (tree exp, rtx target)
4600 if (call_expr_nargs (exp) < 2)
4602 arg = CALL_EXPR_ARG (exp, 0);
4604 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4605 /* When guessing was done, the hints should be already stripped away. */
4606 gcc_assert (!flag_guess_branch_prob
4607 || optimize == 0 || seen_error ());
4611 /* Expand a call to __builtin_assume_aligned. We just return our first
4612 argument as the builtin_assume_aligned semantic should've been already
4616 expand_builtin_assume_aligned (tree exp, rtx target)
4618 if (call_expr_nargs (exp) < 2)
4620 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4622 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4623 && (call_expr_nargs (exp) < 3
4624 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4629 expand_builtin_trap (void)
4633 emit_insn (gen_trap ());
4636 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4640 /* Expand a call to __builtin_unreachable. We do nothing except emit
4641 a barrier saying that control flow will not pass here.
4643 It is the responsibility of the program being compiled to ensure
4644 that control flow does never reach __builtin_unreachable. */
4646 expand_builtin_unreachable (void)
4651 /* Expand EXP, a call to fabs, fabsf or fabsl.
4652 Return NULL_RTX if a normal call should be emitted rather than expanding
4653 the function inline. If convenient, the result should be placed
4654 in TARGET. SUBTARGET may be used as the target for computing
4658 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4660 enum machine_mode mode;
4664 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4667 arg = CALL_EXPR_ARG (exp, 0);
4668 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4669 mode = TYPE_MODE (TREE_TYPE (arg));
4670 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4671 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4674 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4675 Return NULL is a normal call should be emitted rather than expanding the
4676 function inline. If convenient, the result should be placed in TARGET.
4677 SUBTARGET may be used as the target for computing the operand. */
4680 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4685 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4688 arg = CALL_EXPR_ARG (exp, 0);
4689 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4691 arg = CALL_EXPR_ARG (exp, 1);
4692 op1 = expand_normal (arg);
4694 return expand_copysign (op0, op1, target);
4697 /* Create a new constant string literal and return a char* pointer to it.
4698 The STRING_CST value is the LEN characters at STR. */
4700 build_string_literal (int len, const char *str)
4702 tree t, elem, index, type;
4704 t = build_string (len, str);
4705 elem = build_type_variant (char_type_node, 1, 0);
4706 index = build_index_type (size_int (len - 1));
4707 type = build_array_type (elem, index);
4708 TREE_TYPE (t) = type;
4709 TREE_CONSTANT (t) = 1;
4710 TREE_READONLY (t) = 1;
4711 TREE_STATIC (t) = 1;
4713 type = build_pointer_type (elem);
4714 t = build1 (ADDR_EXPR, type,
4715 build4 (ARRAY_REF, elem,
4716 t, integer_zero_node, NULL_TREE, NULL_TREE));
4720 /* Expand a call to __builtin___clear_cache. */
4723 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4725 #ifndef HAVE_clear_cache
4726 #ifdef CLEAR_INSN_CACHE
4727 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4728 does something. Just do the default expansion to a call to
4732 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4733 does nothing. There is no need to call it. Do nothing. */
4735 #endif /* CLEAR_INSN_CACHE */
4737 /* We have a "clear_cache" insn, and it will handle everything. */
4739 rtx begin_rtx, end_rtx;
4741 /* We must not expand to a library call. If we did, any
4742 fallback library function in libgcc that might contain a call to
4743 __builtin___clear_cache() would recurse infinitely. */
4744 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4746 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4750 if (HAVE_clear_cache)
4752 struct expand_operand ops[2];
4754 begin = CALL_EXPR_ARG (exp, 0);
4755 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4757 end = CALL_EXPR_ARG (exp, 1);
4758 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4760 create_address_operand (&ops[0], begin_rtx);
4761 create_address_operand (&ops[1], end_rtx);
4762 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4766 #endif /* HAVE_clear_cache */
4769 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4772 round_trampoline_addr (rtx tramp)
4774 rtx temp, addend, mask;
4776 /* If we don't need too much alignment, we'll have been guaranteed
4777 proper alignment by get_trampoline_type. */
4778 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4781 /* Round address up to desired boundary. */
4782 temp = gen_reg_rtx (Pmode);
4783 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4784 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4786 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4787 temp, 0, OPTAB_LIB_WIDEN);
4788 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4789 temp, 0, OPTAB_LIB_WIDEN);
4795 expand_builtin_init_trampoline (tree exp)
4797 tree t_tramp, t_func, t_chain;
4798 rtx m_tramp, r_tramp, r_chain, tmp;
4800 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4801 POINTER_TYPE, VOID_TYPE))
4804 t_tramp = CALL_EXPR_ARG (exp, 0);
4805 t_func = CALL_EXPR_ARG (exp, 1);
4806 t_chain = CALL_EXPR_ARG (exp, 2);
4808 r_tramp = expand_normal (t_tramp);
4809 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4810 MEM_NOTRAP_P (m_tramp) = 1;
4812 /* The TRAMP argument should be the address of a field within the
4813 local function's FRAME decl. Let's see if we can fill in the
4814 to fill in the MEM_ATTRs for this memory. */
4815 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4816 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4819 tmp = round_trampoline_addr (r_tramp);
4822 m_tramp = change_address (m_tramp, BLKmode, tmp);
4823 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4824 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4827 /* The FUNC argument should be the address of the nested function.
4828 Extract the actual function decl to pass to the hook. */
4829 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4830 t_func = TREE_OPERAND (t_func, 0);
4831 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4833 r_chain = expand_normal (t_chain);
4835 /* Generate insns to initialize the trampoline. */
4836 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4838 trampolines_created = 1;
4840 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4841 "trampoline generated for nested function %qD", t_func);
4847 expand_builtin_adjust_trampoline (tree exp)
4851 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4854 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4855 tramp = round_trampoline_addr (tramp);
4856 if (targetm.calls.trampoline_adjust_address)
4857 tramp = targetm.calls.trampoline_adjust_address (tramp);
4862 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4863 function. The function first checks whether the back end provides
4864 an insn to implement signbit for the respective mode. If not, it
4865 checks whether the floating point format of the value is such that
4866 the sign bit can be extracted. If that is not the case, the
4867 function returns NULL_RTX to indicate that a normal call should be
4868 emitted rather than expanding the function in-line. EXP is the
4869 expression that is a call to the builtin function; if convenient,
4870 the result should be placed in TARGET. */
4872 expand_builtin_signbit (tree exp, rtx target)
4874 const struct real_format *fmt;
4875 enum machine_mode fmode, imode, rmode;
4878 enum insn_code icode;
4880 location_t loc = EXPR_LOCATION (exp);
4882 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4885 arg = CALL_EXPR_ARG (exp, 0);
4886 fmode = TYPE_MODE (TREE_TYPE (arg));
4887 rmode = TYPE_MODE (TREE_TYPE (exp));
4888 fmt = REAL_MODE_FORMAT (fmode);
4890 arg = builtin_save_expr (arg);
4892 /* Expand the argument yielding a RTX expression. */
4893 temp = expand_normal (arg);
4895 /* Check if the back end provides an insn that handles signbit for the
4897 icode = optab_handler (signbit_optab, fmode);
4898 if (icode != CODE_FOR_nothing)
4900 rtx last = get_last_insn ();
4901 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4902 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4904 delete_insns_since (last);
4907 /* For floating point formats without a sign bit, implement signbit
4909 bitpos = fmt->signbit_ro;
4912 /* But we can't do this if the format supports signed zero. */
4913 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4916 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4917 build_real (TREE_TYPE (arg), dconst0));
4918 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4921 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4923 imode = int_mode_for_mode (fmode);
4924 if (imode == BLKmode)
4926 temp = gen_lowpart (imode, temp);
4931 /* Handle targets with different FP word orders. */
4932 if (FLOAT_WORDS_BIG_ENDIAN)
4933 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4935 word = bitpos / BITS_PER_WORD;
4936 temp = operand_subword_force (temp, word, fmode);
4937 bitpos = bitpos % BITS_PER_WORD;
4940 /* Force the intermediate word_mode (or narrower) result into a
4941 register. This avoids attempting to create paradoxical SUBREGs
4942 of floating point modes below. */
4943 temp = force_reg (imode, temp);
4945 /* If the bitpos is within the "result mode" lowpart, the operation
4946 can be implement with a single bitwise AND. Otherwise, we need
4947 a right shift and an AND. */
4949 if (bitpos < GET_MODE_BITSIZE (rmode))
4951 double_int mask = double_int_setbit (double_int_zero, bitpos);
4953 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4954 temp = gen_lowpart (rmode, temp);
4955 temp = expand_binop (rmode, and_optab, temp,
4956 immed_double_int_const (mask, rmode),
4957 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4961 /* Perform a logical right shift to place the signbit in the least
4962 significant bit, then truncate the result to the desired mode
4963 and mask just this bit. */
4964 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4965 temp = gen_lowpart (rmode, temp);
4966 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4967 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4973 /* Expand fork or exec calls. TARGET is the desired target of the
4974 call. EXP is the call. FN is the
4975 identificator of the actual function. IGNORE is nonzero if the
4976 value is to be ignored. */
4979 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4984 /* If we are not profiling, just call the function. */
4985 if (!profile_arc_flag)
4988 /* Otherwise call the wrapper. This should be equivalent for the rest of
4989 compiler, so the code does not diverge, and the wrapper may run the
4990 code necessary for keeping the profiling sane. */
4992 switch (DECL_FUNCTION_CODE (fn))
4995 id = get_identifier ("__gcov_fork");
4998 case BUILT_IN_EXECL:
4999 id = get_identifier ("__gcov_execl");
5002 case BUILT_IN_EXECV:
5003 id = get_identifier ("__gcov_execv");
5006 case BUILT_IN_EXECLP:
5007 id = get_identifier ("__gcov_execlp");
5010 case BUILT_IN_EXECLE:
5011 id = get_identifier ("__gcov_execle");
5014 case BUILT_IN_EXECVP:
5015 id = get_identifier ("__gcov_execvp");
5018 case BUILT_IN_EXECVE:
5019 id = get_identifier ("__gcov_execve");
5026 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5027 FUNCTION_DECL, id, TREE_TYPE (fn));
5028 DECL_EXTERNAL (decl) = 1;
5029 TREE_PUBLIC (decl) = 1;
5030 DECL_ARTIFICIAL (decl) = 1;
5031 TREE_NOTHROW (decl) = 1;
5032 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5033 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5034 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5035 return expand_call (call, target, ignore);
5040 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5041 the pointer in these functions is void*, the tree optimizers may remove
5042 casts. The mode computed in expand_builtin isn't reliable either, due
5043 to __sync_bool_compare_and_swap.
5045 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5046 group of builtins. This gives us log2 of the mode size. */
5048 static inline enum machine_mode
5049 get_builtin_sync_mode (int fcode_diff)
5051 /* The size is not negotiable, so ask not to get BLKmode in return
5052 if the target indicates that a smaller size would be better. */
5053 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5056 /* Expand the memory expression LOC and return the appropriate memory operand
5057 for the builtin_sync operations. */
5060 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5064 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5065 addr = convert_memory_address (Pmode, addr);
5067 /* Note that we explicitly do not want any alias information for this
5068 memory, so that we kill all other live memories. Otherwise we don't
5069 satisfy the full barrier semantics of the intrinsic. */
5070 mem = validize_mem (gen_rtx_MEM (mode, addr));
5072 /* The alignment needs to be at least according to that of the mode. */
5073 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5074 get_pointer_alignment (loc)));
5075 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5076 MEM_VOLATILE_P (mem) = 1;
5081 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5082 EXP is the CALL_EXPR. CODE is the rtx code
5083 that corresponds to the arithmetic or logical operation from the name;
5084 an exception here is that NOT actually means NAND. TARGET is an optional
5085 place for us to store the results; AFTER is true if this is the
5086 fetch_and_xxx form. IGNORE is true if we don't actually care about
5087 the result of the operation at all. */
5090 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5091 enum rtx_code code, bool after,
5092 rtx target, bool ignore)
5095 enum machine_mode old_mode;
5096 location_t loc = EXPR_LOCATION (exp);
5098 if (code == NOT && warn_sync_nand)
5100 tree fndecl = get_callee_fndecl (exp);
5101 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5103 static bool warned_f_a_n, warned_n_a_f;
5107 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5108 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5109 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5110 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5111 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5116 fndecl = implicit_built_in_decls[BUILT_IN_SYNC_FETCH_AND_NAND_N];
5117 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5118 warned_f_a_n = true;
5121 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5122 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5123 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5124 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5125 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5130 fndecl = implicit_built_in_decls[BUILT_IN_SYNC_NAND_AND_FETCH_N];
5131 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5132 warned_n_a_f = true;
5140 /* Expand the operands. */
5141 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5143 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5144 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5145 of CONST_INTs, where we know the old_mode only from the call argument. */
5146 old_mode = GET_MODE (val);
5147 if (old_mode == VOIDmode)
5148 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5149 val = convert_modes (mode, old_mode, val, 1);
5152 return expand_sync_operation (mem, val, code);
5154 return expand_sync_fetch_operation (mem, val, code, after, target);
5157 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5158 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5159 true if this is the boolean form. TARGET is a place for us to store the
5160 results; this is NOT optional if IS_BOOL is true. */
5163 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5164 bool is_bool, rtx target)
5166 rtx old_val, new_val, mem;
5167 enum machine_mode old_mode;
5169 /* Expand the operands. */
5170 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5173 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5174 mode, EXPAND_NORMAL);
5175 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5176 of CONST_INTs, where we know the old_mode only from the call argument. */
5177 old_mode = GET_MODE (old_val);
5178 if (old_mode == VOIDmode)
5179 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5180 old_val = convert_modes (mode, old_mode, old_val, 1);
5182 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5183 mode, EXPAND_NORMAL);
5184 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5185 of CONST_INTs, where we know the old_mode only from the call argument. */
5186 old_mode = GET_MODE (new_val);
5187 if (old_mode == VOIDmode)
5188 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5189 new_val = convert_modes (mode, old_mode, new_val, 1);
5192 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5194 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5197 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5198 general form is actually an atomic exchange, and some targets only
5199 support a reduced form with the second argument being a constant 1.
5200 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5204 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5208 enum machine_mode old_mode;
5210 /* Expand the operands. */
5211 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5212 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5213 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5214 of CONST_INTs, where we know the old_mode only from the call argument. */
5215 old_mode = GET_MODE (val);
5216 if (old_mode == VOIDmode)
5217 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5218 val = convert_modes (mode, old_mode, val, 1);
5220 return expand_sync_lock_test_and_set (mem, val, target);
5223 /* Expand the __sync_synchronize intrinsic. */
5226 expand_builtin_sync_synchronize (void)
5229 VEC (tree, gc) *v_clobbers;
5231 #ifdef HAVE_memory_barrier
5232 if (HAVE_memory_barrier)
5234 emit_insn (gen_memory_barrier ());
5239 if (synchronize_libfunc != NULL_RTX)
5241 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5245 /* If no explicit memory barrier instruction is available, create an
5246 empty asm stmt with a memory clobber. */
5247 v_clobbers = VEC_alloc (tree, gc, 1);
5248 VEC_quick_push (tree, v_clobbers,
5249 tree_cons (NULL, build_string (6, "memory"), NULL));
5250 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5251 gimple_asm_set_volatile (x, true);
5252 expand_asm_stmt (x);
5255 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5258 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5260 struct expand_operand ops[2];
5261 enum insn_code icode;
5264 /* Expand the operands. */
5265 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5267 /* If there is an explicit operation in the md file, use it. */
5268 icode = direct_optab_handler (sync_lock_release_optab, mode);
5269 if (icode != CODE_FOR_nothing)
5271 create_fixed_operand (&ops[0], mem);
5272 create_input_operand (&ops[1], const0_rtx, mode);
5273 if (maybe_expand_insn (icode, 2, ops))
5277 /* Otherwise we can implement this operation by emitting a barrier
5278 followed by a store of zero. */
5279 expand_builtin_sync_synchronize ();
5280 emit_move_insn (mem, const0_rtx);
5283 /* Expand an expression EXP that calls a built-in function,
5284 with result going to TARGET if that's convenient
5285 (and in mode MODE if that's convenient).
5286 SUBTARGET may be used as the target for computing one of EXP's operands.
5287 IGNORE is nonzero if the value is to be ignored. */
5290 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5293 tree fndecl = get_callee_fndecl (exp);
5294 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5295 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5298 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5299 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5301 /* When not optimizing, generate calls to library functions for a certain
5304 && !called_as_built_in (fndecl)
5305 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5306 && fcode != BUILT_IN_ALLOCA
5307 && fcode != BUILT_IN_FREE)
5308 return expand_call (exp, target, ignore);
5310 /* The built-in function expanders test for target == const0_rtx
5311 to determine whether the function's result will be ignored. */
5313 target = const0_rtx;
5315 /* If the result of a pure or const built-in function is ignored, and
5316 none of its arguments are volatile, we can avoid expanding the
5317 built-in call and just evaluate the arguments for side-effects. */
5318 if (target == const0_rtx
5319 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5320 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5322 bool volatilep = false;
5324 call_expr_arg_iterator iter;
5326 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5327 if (TREE_THIS_VOLATILE (arg))
5335 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5336 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5343 CASE_FLT_FN (BUILT_IN_FABS):
5344 target = expand_builtin_fabs (exp, target, subtarget);
5349 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5350 target = expand_builtin_copysign (exp, target, subtarget);
5355 /* Just do a normal library call if we were unable to fold
5357 CASE_FLT_FN (BUILT_IN_CABS):
5360 CASE_FLT_FN (BUILT_IN_EXP):
5361 CASE_FLT_FN (BUILT_IN_EXP10):
5362 CASE_FLT_FN (BUILT_IN_POW10):
5363 CASE_FLT_FN (BUILT_IN_EXP2):
5364 CASE_FLT_FN (BUILT_IN_EXPM1):
5365 CASE_FLT_FN (BUILT_IN_LOGB):
5366 CASE_FLT_FN (BUILT_IN_LOG):
5367 CASE_FLT_FN (BUILT_IN_LOG10):
5368 CASE_FLT_FN (BUILT_IN_LOG2):
5369 CASE_FLT_FN (BUILT_IN_LOG1P):
5370 CASE_FLT_FN (BUILT_IN_TAN):
5371 CASE_FLT_FN (BUILT_IN_ASIN):
5372 CASE_FLT_FN (BUILT_IN_ACOS):
5373 CASE_FLT_FN (BUILT_IN_ATAN):
5374 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5375 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5376 because of possible accuracy problems. */
5377 if (! flag_unsafe_math_optimizations)
5379 CASE_FLT_FN (BUILT_IN_SQRT):
5380 CASE_FLT_FN (BUILT_IN_FLOOR):
5381 CASE_FLT_FN (BUILT_IN_CEIL):
5382 CASE_FLT_FN (BUILT_IN_TRUNC):
5383 CASE_FLT_FN (BUILT_IN_ROUND):
5384 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5385 CASE_FLT_FN (BUILT_IN_RINT):
5386 target = expand_builtin_mathfn (exp, target, subtarget);
5391 CASE_FLT_FN (BUILT_IN_FMA):
5392 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5397 CASE_FLT_FN (BUILT_IN_ILOGB):
5398 if (! flag_unsafe_math_optimizations)
5400 CASE_FLT_FN (BUILT_IN_ISINF):
5401 CASE_FLT_FN (BUILT_IN_FINITE):
5402 case BUILT_IN_ISFINITE:
5403 case BUILT_IN_ISNORMAL:
5404 target = expand_builtin_interclass_mathfn (exp, target);
5409 CASE_FLT_FN (BUILT_IN_ICEIL):
5410 CASE_FLT_FN (BUILT_IN_LCEIL):
5411 CASE_FLT_FN (BUILT_IN_LLCEIL):
5412 CASE_FLT_FN (BUILT_IN_LFLOOR):
5413 CASE_FLT_FN (BUILT_IN_IFLOOR):
5414 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5415 target = expand_builtin_int_roundingfn (exp, target);
5420 CASE_FLT_FN (BUILT_IN_IRINT):
5421 CASE_FLT_FN (BUILT_IN_LRINT):
5422 CASE_FLT_FN (BUILT_IN_LLRINT):
5423 CASE_FLT_FN (BUILT_IN_IROUND):
5424 CASE_FLT_FN (BUILT_IN_LROUND):
5425 CASE_FLT_FN (BUILT_IN_LLROUND):
5426 target = expand_builtin_int_roundingfn_2 (exp, target);
5431 CASE_FLT_FN (BUILT_IN_POWI):
5432 target = expand_builtin_powi (exp, target);
5437 CASE_FLT_FN (BUILT_IN_ATAN2):
5438 CASE_FLT_FN (BUILT_IN_LDEXP):
5439 CASE_FLT_FN (BUILT_IN_SCALB):
5440 CASE_FLT_FN (BUILT_IN_SCALBN):
5441 CASE_FLT_FN (BUILT_IN_SCALBLN):
5442 if (! flag_unsafe_math_optimizations)
5445 CASE_FLT_FN (BUILT_IN_FMOD):
5446 CASE_FLT_FN (BUILT_IN_REMAINDER):
5447 CASE_FLT_FN (BUILT_IN_DREM):
5448 CASE_FLT_FN (BUILT_IN_POW):
5449 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5454 CASE_FLT_FN (BUILT_IN_CEXPI):
5455 target = expand_builtin_cexpi (exp, target);
5456 gcc_assert (target);
5459 CASE_FLT_FN (BUILT_IN_SIN):
5460 CASE_FLT_FN (BUILT_IN_COS):
5461 if (! flag_unsafe_math_optimizations)
5463 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5468 CASE_FLT_FN (BUILT_IN_SINCOS):
5469 if (! flag_unsafe_math_optimizations)
5471 target = expand_builtin_sincos (exp);
5476 case BUILT_IN_APPLY_ARGS:
5477 return expand_builtin_apply_args ();
5479 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5480 FUNCTION with a copy of the parameters described by
5481 ARGUMENTS, and ARGSIZE. It returns a block of memory
5482 allocated on the stack into which is stored all the registers
5483 that might possibly be used for returning the result of a
5484 function. ARGUMENTS is the value returned by
5485 __builtin_apply_args. ARGSIZE is the number of bytes of
5486 arguments that must be copied. ??? How should this value be
5487 computed? We'll also need a safe worst case value for varargs
5489 case BUILT_IN_APPLY:
5490 if (!validate_arglist (exp, POINTER_TYPE,
5491 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5492 && !validate_arglist (exp, REFERENCE_TYPE,
5493 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5499 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5500 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5501 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5503 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5506 /* __builtin_return (RESULT) causes the function to return the
5507 value described by RESULT. RESULT is address of the block of
5508 memory returned by __builtin_apply. */
5509 case BUILT_IN_RETURN:
5510 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5511 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5514 case BUILT_IN_SAVEREGS:
5515 return expand_builtin_saveregs ();
5517 case BUILT_IN_VA_ARG_PACK:
5518 /* All valid uses of __builtin_va_arg_pack () are removed during
5520 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5523 case BUILT_IN_VA_ARG_PACK_LEN:
5524 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5526 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5529 /* Return the address of the first anonymous stack arg. */
5530 case BUILT_IN_NEXT_ARG:
5531 if (fold_builtin_next_arg (exp, false))
5533 return expand_builtin_next_arg ();
5535 case BUILT_IN_CLEAR_CACHE:
5536 target = expand_builtin___clear_cache (exp);
5541 case BUILT_IN_CLASSIFY_TYPE:
5542 return expand_builtin_classify_type (exp);
5544 case BUILT_IN_CONSTANT_P:
5547 case BUILT_IN_FRAME_ADDRESS:
5548 case BUILT_IN_RETURN_ADDRESS:
5549 return expand_builtin_frame_address (fndecl, exp);
5551 /* Returns the address of the area where the structure is returned.
5553 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5554 if (call_expr_nargs (exp) != 0
5555 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5556 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5559 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5561 case BUILT_IN_ALLOCA:
5562 /* If the allocation stems from the declaration of a variable-sized
5563 object, it cannot accumulate. */
5564 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5569 case BUILT_IN_STACK_SAVE:
5570 return expand_stack_save ();
5572 case BUILT_IN_STACK_RESTORE:
5573 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5576 case BUILT_IN_BSWAP32:
5577 case BUILT_IN_BSWAP64:
5578 target = expand_builtin_bswap (exp, target, subtarget);
5584 CASE_INT_FN (BUILT_IN_FFS):
5585 case BUILT_IN_FFSIMAX:
5586 target = expand_builtin_unop (target_mode, exp, target,
5587 subtarget, ffs_optab);
5592 CASE_INT_FN (BUILT_IN_CLZ):
5593 case BUILT_IN_CLZIMAX:
5594 target = expand_builtin_unop (target_mode, exp, target,
5595 subtarget, clz_optab);
5600 CASE_INT_FN (BUILT_IN_CTZ):
5601 case BUILT_IN_CTZIMAX:
5602 target = expand_builtin_unop (target_mode, exp, target,
5603 subtarget, ctz_optab);
5608 CASE_INT_FN (BUILT_IN_CLRSB):
5609 case BUILT_IN_CLRSBIMAX:
5610 target = expand_builtin_unop (target_mode, exp, target,
5611 subtarget, clrsb_optab);
5616 CASE_INT_FN (BUILT_IN_POPCOUNT):
5617 case BUILT_IN_POPCOUNTIMAX:
5618 target = expand_builtin_unop (target_mode, exp, target,
5619 subtarget, popcount_optab);
5624 CASE_INT_FN (BUILT_IN_PARITY):
5625 case BUILT_IN_PARITYIMAX:
5626 target = expand_builtin_unop (target_mode, exp, target,
5627 subtarget, parity_optab);
5632 case BUILT_IN_STRLEN:
5633 target = expand_builtin_strlen (exp, target, target_mode);
5638 case BUILT_IN_STRCPY:
5639 target = expand_builtin_strcpy (exp, target);
5644 case BUILT_IN_STRNCPY:
5645 target = expand_builtin_strncpy (exp, target);
5650 case BUILT_IN_STPCPY:
5651 target = expand_builtin_stpcpy (exp, target, mode);
5656 case BUILT_IN_MEMCPY:
5657 target = expand_builtin_memcpy (exp, target);
5662 case BUILT_IN_MEMPCPY:
5663 target = expand_builtin_mempcpy (exp, target, mode);
5668 case BUILT_IN_MEMSET:
5669 target = expand_builtin_memset (exp, target, mode);
5674 case BUILT_IN_BZERO:
5675 target = expand_builtin_bzero (exp);
5680 case BUILT_IN_STRCMP:
5681 target = expand_builtin_strcmp (exp, target);
5686 case BUILT_IN_STRNCMP:
5687 target = expand_builtin_strncmp (exp, target, mode);
5693 case BUILT_IN_MEMCMP:
5694 target = expand_builtin_memcmp (exp, target, mode);
5699 case BUILT_IN_SETJMP:
5700 /* This should have been lowered to the builtins below. */
5703 case BUILT_IN_SETJMP_SETUP:
5704 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5705 and the receiver label. */
5706 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5708 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5709 VOIDmode, EXPAND_NORMAL);
5710 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5711 rtx label_r = label_rtx (label);
5713 /* This is copied from the handling of non-local gotos. */
5714 expand_builtin_setjmp_setup (buf_addr, label_r);
5715 nonlocal_goto_handler_labels
5716 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
5717 nonlocal_goto_handler_labels);
5718 /* ??? Do not let expand_label treat us as such since we would
5719 not want to be both on the list of non-local labels and on
5720 the list of forced labels. */
5721 FORCED_LABEL (label) = 0;
5726 case BUILT_IN_SETJMP_DISPATCHER:
5727 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
5728 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5730 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5731 rtx label_r = label_rtx (label);
5733 /* Remove the dispatcher label from the list of non-local labels
5734 since the receiver labels have been added to it above. */
5735 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
5740 case BUILT_IN_SETJMP_RECEIVER:
5741 /* __builtin_setjmp_receiver is passed the receiver label. */
5742 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5744 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5745 rtx label_r = label_rtx (label);
5747 expand_builtin_setjmp_receiver (label_r);
5752 /* __builtin_longjmp is passed a pointer to an array of five words.
5753 It's similar to the C library longjmp function but works with
5754 __builtin_setjmp above. */
5755 case BUILT_IN_LONGJMP:
5756 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5758 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5759 VOIDmode, EXPAND_NORMAL);
5760 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
5762 if (value != const1_rtx)
5764 error ("%<__builtin_longjmp%> second argument must be 1");
5768 expand_builtin_longjmp (buf_addr, value);
5773 case BUILT_IN_NONLOCAL_GOTO:
5774 target = expand_builtin_nonlocal_goto (exp);
5779 /* This updates the setjmp buffer that is its argument with the value
5780 of the current stack pointer. */
5781 case BUILT_IN_UPDATE_SETJMP_BUF:
5782 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5785 = expand_normal (CALL_EXPR_ARG (exp, 0));
5787 expand_builtin_update_setjmp_buf (buf_addr);
5793 expand_builtin_trap ();
5796 case BUILT_IN_UNREACHABLE:
5797 expand_builtin_unreachable ();
5800 CASE_FLT_FN (BUILT_IN_SIGNBIT):
5801 case BUILT_IN_SIGNBITD32:
5802 case BUILT_IN_SIGNBITD64:
5803 case BUILT_IN_SIGNBITD128:
5804 target = expand_builtin_signbit (exp, target);
5809 /* Various hooks for the DWARF 2 __throw routine. */
5810 case BUILT_IN_UNWIND_INIT:
5811 expand_builtin_unwind_init ();
5813 case BUILT_IN_DWARF_CFA:
5814 return virtual_cfa_rtx;
5815 #ifdef DWARF2_UNWIND_INFO
5816 case BUILT_IN_DWARF_SP_COLUMN:
5817 return expand_builtin_dwarf_sp_column ();
5818 case BUILT_IN_INIT_DWARF_REG_SIZES:
5819 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
5822 case BUILT_IN_FROB_RETURN_ADDR:
5823 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
5824 case BUILT_IN_EXTRACT_RETURN_ADDR:
5825 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
5826 case BUILT_IN_EH_RETURN:
5827 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
5828 CALL_EXPR_ARG (exp, 1));
5830 #ifdef EH_RETURN_DATA_REGNO
5831 case BUILT_IN_EH_RETURN_DATA_REGNO:
5832 return expand_builtin_eh_return_data_regno (exp);
5834 case BUILT_IN_EXTEND_POINTER:
5835 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
5836 case BUILT_IN_EH_POINTER:
5837 return expand_builtin_eh_pointer (exp);
5838 case BUILT_IN_EH_FILTER:
5839 return expand_builtin_eh_filter (exp);
5840 case BUILT_IN_EH_COPY_VALUES:
5841 return expand_builtin_eh_copy_values (exp);
5843 case BUILT_IN_VA_START:
5844 return expand_builtin_va_start (exp);
5845 case BUILT_IN_VA_END:
5846 return expand_builtin_va_end (exp);
5847 case BUILT_IN_VA_COPY:
5848 return expand_builtin_va_copy (exp);
5849 case BUILT_IN_EXPECT:
5850 return expand_builtin_expect (exp, target);
5851 case BUILT_IN_ASSUME_ALIGNED:
5852 return expand_builtin_assume_aligned (exp, target);
5853 case BUILT_IN_PREFETCH:
5854 expand_builtin_prefetch (exp);
5857 case BUILT_IN_INIT_TRAMPOLINE:
5858 return expand_builtin_init_trampoline (exp);
5859 case BUILT_IN_ADJUST_TRAMPOLINE:
5860 return expand_builtin_adjust_trampoline (exp);
5863 case BUILT_IN_EXECL:
5864 case BUILT_IN_EXECV:
5865 case BUILT_IN_EXECLP:
5866 case BUILT_IN_EXECLE:
5867 case BUILT_IN_EXECVP:
5868 case BUILT_IN_EXECVE:
5869 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
5874 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
5875 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
5876 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
5877 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
5878 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
5879 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
5880 target = expand_builtin_sync_operation (mode, exp, PLUS,
5881 false, target, ignore);
5886 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
5887 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
5888 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
5889 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
5890 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
5891 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
5892 target = expand_builtin_sync_operation (mode, exp, MINUS,
5893 false, target, ignore);
5898 case BUILT_IN_SYNC_FETCH_AND_OR_1:
5899 case BUILT_IN_SYNC_FETCH_AND_OR_2:
5900 case BUILT_IN_SYNC_FETCH_AND_OR_4:
5901 case BUILT_IN_SYNC_FETCH_AND_OR_8:
5902 case BUILT_IN_SYNC_FETCH_AND_OR_16:
5903 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
5904 target = expand_builtin_sync_operation (mode, exp, IOR,
5905 false, target, ignore);
5910 case BUILT_IN_SYNC_FETCH_AND_AND_1:
5911 case BUILT_IN_SYNC_FETCH_AND_AND_2:
5912 case BUILT_IN_SYNC_FETCH_AND_AND_4:
5913 case BUILT_IN_SYNC_FETCH_AND_AND_8:
5914 case BUILT_IN_SYNC_FETCH_AND_AND_16:
5915 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
5916 target = expand_builtin_sync_operation (mode, exp, AND,
5917 false, target, ignore);
5922 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
5923 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
5924 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
5925 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
5926 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
5927 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
5928 target = expand_builtin_sync_operation (mode, exp, XOR,
5929 false, target, ignore);
5934 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5935 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5936 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5937 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5938 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5939 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
5940 target = expand_builtin_sync_operation (mode, exp, NOT,
5941 false, target, ignore);
5946 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
5947 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
5948 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
5949 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
5950 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
5951 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
5952 target = expand_builtin_sync_operation (mode, exp, PLUS,
5953 true, target, ignore);
5958 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
5959 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
5960 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
5961 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
5962 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
5963 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
5964 target = expand_builtin_sync_operation (mode, exp, MINUS,
5965 true, target, ignore);
5970 case BUILT_IN_SYNC_OR_AND_FETCH_1:
5971 case BUILT_IN_SYNC_OR_AND_FETCH_2:
5972 case BUILT_IN_SYNC_OR_AND_FETCH_4:
5973 case BUILT_IN_SYNC_OR_AND_FETCH_8:
5974 case BUILT_IN_SYNC_OR_AND_FETCH_16:
5975 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
5976 target = expand_builtin_sync_operation (mode, exp, IOR,
5977 true, target, ignore);
5982 case BUILT_IN_SYNC_AND_AND_FETCH_1:
5983 case BUILT_IN_SYNC_AND_AND_FETCH_2:
5984 case BUILT_IN_SYNC_AND_AND_FETCH_4:
5985 case BUILT_IN_SYNC_AND_AND_FETCH_8:
5986 case BUILT_IN_SYNC_AND_AND_FETCH_16:
5987 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
5988 target = expand_builtin_sync_operation (mode, exp, AND,
5989 true, target, ignore);
5994 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
5995 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
5996 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
5997 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
5998 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
5999 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6000 target = expand_builtin_sync_operation (mode, exp, XOR,
6001 true, target, ignore);
6006 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6007 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6008 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6009 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6010 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6011 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6012 target = expand_builtin_sync_operation (mode, exp, NOT,
6013 true, target, ignore);
6018 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6019 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6020 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6021 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6022 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6023 if (mode == VOIDmode)
6024 mode = TYPE_MODE (boolean_type_node);
6025 if (!target || !register_operand (target, mode))
6026 target = gen_reg_rtx (mode);
6028 mode = get_builtin_sync_mode
6029 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6030 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6035 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6036 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6037 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6038 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6039 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6040 mode = get_builtin_sync_mode
6041 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6042 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6047 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6048 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6049 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6050 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6051 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6052 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6053 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6058 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6059 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6060 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6061 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6062 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6063 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6064 expand_builtin_sync_lock_release (mode, exp);
6067 case BUILT_IN_SYNC_SYNCHRONIZE:
6068 expand_builtin_sync_synchronize ();
6071 case BUILT_IN_OBJECT_SIZE:
6072 return expand_builtin_object_size (exp);
6074 case BUILT_IN_MEMCPY_CHK:
6075 case BUILT_IN_MEMPCPY_CHK:
6076 case BUILT_IN_MEMMOVE_CHK:
6077 case BUILT_IN_MEMSET_CHK:
6078 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6083 case BUILT_IN_STRCPY_CHK:
6084 case BUILT_IN_STPCPY_CHK:
6085 case BUILT_IN_STRNCPY_CHK:
6086 case BUILT_IN_STRCAT_CHK:
6087 case BUILT_IN_STRNCAT_CHK:
6088 case BUILT_IN_SNPRINTF_CHK:
6089 case BUILT_IN_VSNPRINTF_CHK:
6090 maybe_emit_chk_warning (exp, fcode);
6093 case BUILT_IN_SPRINTF_CHK:
6094 case BUILT_IN_VSPRINTF_CHK:
6095 maybe_emit_sprintf_chk_warning (exp, fcode);
6099 maybe_emit_free_warning (exp);
6102 default: /* just do library call, if unknown builtin */
6106 /* The switch statement above can drop through to cause the function
6107 to be called normally. */
6108 return expand_call (exp, target, ignore);
6111 /* Determine whether a tree node represents a call to a built-in
6112 function. If the tree T is a call to a built-in function with
6113 the right number of arguments of the appropriate types, return
6114 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6115 Otherwise the return value is END_BUILTINS. */
6117 enum built_in_function
6118 builtin_mathfn_code (const_tree t)
6120 const_tree fndecl, arg, parmlist;
6121 const_tree argtype, parmtype;
6122 const_call_expr_arg_iterator iter;
6124 if (TREE_CODE (t) != CALL_EXPR
6125 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6126 return END_BUILTINS;
6128 fndecl = get_callee_fndecl (t);
6129 if (fndecl == NULL_TREE
6130 || TREE_CODE (fndecl) != FUNCTION_DECL
6131 || ! DECL_BUILT_IN (fndecl)
6132 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6133 return END_BUILTINS;
6135 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6136 init_const_call_expr_arg_iterator (t, &iter);
6137 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6139 /* If a function doesn't take a variable number of arguments,
6140 the last element in the list will have type `void'. */
6141 parmtype = TREE_VALUE (parmlist);
6142 if (VOID_TYPE_P (parmtype))
6144 if (more_const_call_expr_args_p (&iter))
6145 return END_BUILTINS;
6146 return DECL_FUNCTION_CODE (fndecl);
6149 if (! more_const_call_expr_args_p (&iter))
6150 return END_BUILTINS;
6152 arg = next_const_call_expr_arg (&iter);
6153 argtype = TREE_TYPE (arg);
6155 if (SCALAR_FLOAT_TYPE_P (parmtype))
6157 if (! SCALAR_FLOAT_TYPE_P (argtype))
6158 return END_BUILTINS;
6160 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6162 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6163 return END_BUILTINS;
6165 else if (POINTER_TYPE_P (parmtype))
6167 if (! POINTER_TYPE_P (argtype))
6168 return END_BUILTINS;
6170 else if (INTEGRAL_TYPE_P (parmtype))
6172 if (! INTEGRAL_TYPE_P (argtype))
6173 return END_BUILTINS;
6176 return END_BUILTINS;
6179 /* Variable-length argument list. */
6180 return DECL_FUNCTION_CODE (fndecl);
6183 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6184 evaluate to a constant. */
6187 fold_builtin_constant_p (tree arg)
6189 /* We return 1 for a numeric type that's known to be a constant
6190 value at compile-time or for an aggregate type that's a
6191 literal constant. */
6194 /* If we know this is a constant, emit the constant of one. */
6195 if (CONSTANT_CLASS_P (arg)
6196 || (TREE_CODE (arg) == CONSTRUCTOR
6197 && TREE_CONSTANT (arg)))
6198 return integer_one_node;
6199 if (TREE_CODE (arg) == ADDR_EXPR)
6201 tree op = TREE_OPERAND (arg, 0);
6202 if (TREE_CODE (op) == STRING_CST
6203 || (TREE_CODE (op) == ARRAY_REF
6204 && integer_zerop (TREE_OPERAND (op, 1))
6205 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6206 return integer_one_node;
6209 /* If this expression has side effects, show we don't know it to be a
6210 constant. Likewise if it's a pointer or aggregate type since in
6211 those case we only want literals, since those are only optimized
6212 when generating RTL, not later.
6213 And finally, if we are compiling an initializer, not code, we
6214 need to return a definite result now; there's not going to be any
6215 more optimization done. */
6216 if (TREE_SIDE_EFFECTS (arg)
6217 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6218 || POINTER_TYPE_P (TREE_TYPE (arg))
6220 || folding_initializer)
6221 return integer_zero_node;
6226 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6227 return it as a truthvalue. */
6230 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6232 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6234 fn = built_in_decls[BUILT_IN_EXPECT];
6235 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6236 ret_type = TREE_TYPE (TREE_TYPE (fn));
6237 pred_type = TREE_VALUE (arg_types);
6238 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6240 pred = fold_convert_loc (loc, pred_type, pred);
6241 expected = fold_convert_loc (loc, expected_type, expected);
6242 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6244 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6245 build_int_cst (ret_type, 0));
6248 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6249 NULL_TREE if no simplification is possible. */
6252 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6254 tree inner, fndecl, inner_arg0;
6255 enum tree_code code;
6257 /* Distribute the expected value over short-circuiting operators.
6258 See through the cast from truthvalue_type_node to long. */
6260 while (TREE_CODE (inner_arg0) == NOP_EXPR
6261 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6262 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6263 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6265 /* If this is a builtin_expect within a builtin_expect keep the
6266 inner one. See through a comparison against a constant. It
6267 might have been added to create a thruthvalue. */
6270 if (COMPARISON_CLASS_P (inner)
6271 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6272 inner = TREE_OPERAND (inner, 0);
6274 if (TREE_CODE (inner) == CALL_EXPR
6275 && (fndecl = get_callee_fndecl (inner))
6276 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6277 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6281 code = TREE_CODE (inner);
6282 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6284 tree op0 = TREE_OPERAND (inner, 0);
6285 tree op1 = TREE_OPERAND (inner, 1);
6287 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6288 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6289 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6291 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6294 /* If the argument isn't invariant then there's nothing else we can do. */
6295 if (!TREE_CONSTANT (inner_arg0))
6298 /* If we expect that a comparison against the argument will fold to
6299 a constant return the constant. In practice, this means a true
6300 constant or the address of a non-weak symbol. */
6303 if (TREE_CODE (inner) == ADDR_EXPR)
6307 inner = TREE_OPERAND (inner, 0);
6309 while (TREE_CODE (inner) == COMPONENT_REF
6310 || TREE_CODE (inner) == ARRAY_REF);
6311 if ((TREE_CODE (inner) == VAR_DECL
6312 || TREE_CODE (inner) == FUNCTION_DECL)
6313 && DECL_WEAK (inner))
6317 /* Otherwise, ARG0 already has the proper type for the return value. */
6321 /* Fold a call to __builtin_classify_type with argument ARG. */
6324 fold_builtin_classify_type (tree arg)
6327 return build_int_cst (integer_type_node, no_type_class);
6329 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6332 /* Fold a call to __builtin_strlen with argument ARG. */
6335 fold_builtin_strlen (location_t loc, tree type, tree arg)
6337 if (!validate_arg (arg, POINTER_TYPE))
6341 tree len = c_strlen (arg, 0);
6344 return fold_convert_loc (loc, type, len);
6350 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6353 fold_builtin_inf (location_t loc, tree type, int warn)
6355 REAL_VALUE_TYPE real;
6357 /* __builtin_inff is intended to be usable to define INFINITY on all
6358 targets. If an infinity is not available, INFINITY expands "to a
6359 positive constant of type float that overflows at translation
6360 time", footnote "In this case, using INFINITY will violate the
6361 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6362 Thus we pedwarn to ensure this constraint violation is
6364 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6365 pedwarn (loc, 0, "target format does not support infinity");
6368 return build_real (type, real);
6371 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6374 fold_builtin_nan (tree arg, tree type, int quiet)
6376 REAL_VALUE_TYPE real;
6379 if (!validate_arg (arg, POINTER_TYPE))
6381 str = c_getstr (arg);
6385 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6388 return build_real (type, real);
6391 /* Return true if the floating point expression T has an integer value.
6392 We also allow +Inf, -Inf and NaN to be considered integer values. */
6395 integer_valued_real_p (tree t)
6397 switch (TREE_CODE (t))
6404 return integer_valued_real_p (TREE_OPERAND (t, 0));
6409 return integer_valued_real_p (TREE_OPERAND (t, 1));
6416 return integer_valued_real_p (TREE_OPERAND (t, 0))
6417 && integer_valued_real_p (TREE_OPERAND (t, 1));
6420 return integer_valued_real_p (TREE_OPERAND (t, 1))
6421 && integer_valued_real_p (TREE_OPERAND (t, 2));
6424 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6428 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6429 if (TREE_CODE (type) == INTEGER_TYPE)
6431 if (TREE_CODE (type) == REAL_TYPE)
6432 return integer_valued_real_p (TREE_OPERAND (t, 0));
6437 switch (builtin_mathfn_code (t))
6439 CASE_FLT_FN (BUILT_IN_CEIL):
6440 CASE_FLT_FN (BUILT_IN_FLOOR):
6441 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6442 CASE_FLT_FN (BUILT_IN_RINT):
6443 CASE_FLT_FN (BUILT_IN_ROUND):
6444 CASE_FLT_FN (BUILT_IN_TRUNC):
6447 CASE_FLT_FN (BUILT_IN_FMIN):
6448 CASE_FLT_FN (BUILT_IN_FMAX):
6449 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6450 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6463 /* FNDECL is assumed to be a builtin where truncation can be propagated
6464 across (for instance floor((double)f) == (double)floorf (f).
6465 Do the transformation for a call with argument ARG. */
6468 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6470 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6472 if (!validate_arg (arg, REAL_TYPE))
6475 /* Integer rounding functions are idempotent. */
6476 if (fcode == builtin_mathfn_code (arg))
6479 /* If argument is already integer valued, and we don't need to worry
6480 about setting errno, there's no need to perform rounding. */
6481 if (! flag_errno_math && integer_valued_real_p (arg))
6486 tree arg0 = strip_float_extensions (arg);
6487 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6488 tree newtype = TREE_TYPE (arg0);
6491 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6492 && (decl = mathfn_built_in (newtype, fcode)))
6493 return fold_convert_loc (loc, ftype,
6494 build_call_expr_loc (loc, decl, 1,
6495 fold_convert_loc (loc,
6502 /* FNDECL is assumed to be builtin which can narrow the FP type of
6503 the argument, for instance lround((double)f) -> lroundf (f).
6504 Do the transformation for a call with argument ARG. */
6507 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6509 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6511 if (!validate_arg (arg, REAL_TYPE))
6514 /* If argument is already integer valued, and we don't need to worry
6515 about setting errno, there's no need to perform rounding. */
6516 if (! flag_errno_math && integer_valued_real_p (arg))
6517 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6518 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6522 tree ftype = TREE_TYPE (arg);
6523 tree arg0 = strip_float_extensions (arg);
6524 tree newtype = TREE_TYPE (arg0);
6527 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6528 && (decl = mathfn_built_in (newtype, fcode)))
6529 return build_call_expr_loc (loc, decl, 1,
6530 fold_convert_loc (loc, newtype, arg0));
6533 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
6534 sizeof (int) == sizeof (long). */
6535 if (TYPE_PRECISION (integer_type_node)
6536 == TYPE_PRECISION (long_integer_type_node))
6538 tree newfn = NULL_TREE;
6541 CASE_FLT_FN (BUILT_IN_ICEIL):
6542 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6545 CASE_FLT_FN (BUILT_IN_IFLOOR):
6546 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6549 CASE_FLT_FN (BUILT_IN_IROUND):
6550 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6553 CASE_FLT_FN (BUILT_IN_IRINT):
6554 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6563 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6564 return fold_convert_loc (loc,
6565 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6569 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6570 sizeof (long long) == sizeof (long). */
6571 if (TYPE_PRECISION (long_long_integer_type_node)
6572 == TYPE_PRECISION (long_integer_type_node))
6574 tree newfn = NULL_TREE;
6577 CASE_FLT_FN (BUILT_IN_LLCEIL):
6578 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6581 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6582 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6585 CASE_FLT_FN (BUILT_IN_LLROUND):
6586 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6589 CASE_FLT_FN (BUILT_IN_LLRINT):
6590 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6599 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6600 return fold_convert_loc (loc,
6601 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6608 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6609 return type. Return NULL_TREE if no simplification can be made. */
6612 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6616 if (!validate_arg (arg, COMPLEX_TYPE)
6617 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6620 /* Calculate the result when the argument is a constant. */
6621 if (TREE_CODE (arg) == COMPLEX_CST
6622 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6626 if (TREE_CODE (arg) == COMPLEX_EXPR)
6628 tree real = TREE_OPERAND (arg, 0);
6629 tree imag = TREE_OPERAND (arg, 1);
6631 /* If either part is zero, cabs is fabs of the other. */
6632 if (real_zerop (real))
6633 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6634 if (real_zerop (imag))
6635 return fold_build1_loc (loc, ABS_EXPR, type, real);
6637 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6638 if (flag_unsafe_math_optimizations
6639 && operand_equal_p (real, imag, OEP_PURE_SAME))
6641 const REAL_VALUE_TYPE sqrt2_trunc
6642 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6644 return fold_build2_loc (loc, MULT_EXPR, type,
6645 fold_build1_loc (loc, ABS_EXPR, type, real),
6646 build_real (type, sqrt2_trunc));
6650 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6651 if (TREE_CODE (arg) == NEGATE_EXPR
6652 || TREE_CODE (arg) == CONJ_EXPR)
6653 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6655 /* Don't do this when optimizing for size. */
6656 if (flag_unsafe_math_optimizations
6657 && optimize && optimize_function_for_speed_p (cfun))
6659 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6661 if (sqrtfn != NULL_TREE)
6663 tree rpart, ipart, result;
6665 arg = builtin_save_expr (arg);
6667 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6668 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6670 rpart = builtin_save_expr (rpart);
6671 ipart = builtin_save_expr (ipart);
6673 result = fold_build2_loc (loc, PLUS_EXPR, type,
6674 fold_build2_loc (loc, MULT_EXPR, type,
6676 fold_build2_loc (loc, MULT_EXPR, type,
6679 return build_call_expr_loc (loc, sqrtfn, 1, result);
6686 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
6687 complex tree type of the result. If NEG is true, the imaginary
6688 zero is negative. */
6691 build_complex_cproj (tree type, bool neg)
6693 REAL_VALUE_TYPE rinf, rzero = dconst0;
6697 return build_complex (type, build_real (TREE_TYPE (type), rinf),
6698 build_real (TREE_TYPE (type), rzero));
6701 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
6702 return type. Return NULL_TREE if no simplification can be made. */
6705 fold_builtin_cproj (location_t loc, tree arg, tree type)
6707 if (!validate_arg (arg, COMPLEX_TYPE)
6708 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6711 /* If there are no infinities, return arg. */
6712 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
6713 return non_lvalue_loc (loc, arg);
6715 /* Calculate the result when the argument is a constant. */
6716 if (TREE_CODE (arg) == COMPLEX_CST)
6718 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
6719 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
6721 if (real_isinf (real) || real_isinf (imag))
6722 return build_complex_cproj (type, imag->sign);
6726 else if (TREE_CODE (arg) == COMPLEX_EXPR)
6728 tree real = TREE_OPERAND (arg, 0);
6729 tree imag = TREE_OPERAND (arg, 1);
6734 /* If the real part is inf and the imag part is known to be
6735 nonnegative, return (inf + 0i). Remember side-effects are
6736 possible in the imag part. */
6737 if (TREE_CODE (real) == REAL_CST
6738 && real_isinf (TREE_REAL_CST_PTR (real))
6739 && tree_expr_nonnegative_p (imag))
6740 return omit_one_operand_loc (loc, type,
6741 build_complex_cproj (type, false),
6744 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
6745 Remember side-effects are possible in the real part. */
6746 if (TREE_CODE (imag) == REAL_CST
6747 && real_isinf (TREE_REAL_CST_PTR (imag)))
6749 omit_one_operand_loc (loc, type,
6750 build_complex_cproj (type, TREE_REAL_CST_PTR
6751 (imag)->sign), arg);
6757 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6758 Return NULL_TREE if no simplification can be made. */
6761 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6764 enum built_in_function fcode;
6767 if (!validate_arg (arg, REAL_TYPE))
6770 /* Calculate the result when the argument is a constant. */
6771 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6774 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6775 fcode = builtin_mathfn_code (arg);
6776 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6778 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6779 arg = fold_build2_loc (loc, MULT_EXPR, type,
6780 CALL_EXPR_ARG (arg, 0),
6781 build_real (type, dconsthalf));
6782 return build_call_expr_loc (loc, expfn, 1, arg);
6785 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6786 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6788 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6792 tree arg0 = CALL_EXPR_ARG (arg, 0);
6794 /* The inner root was either sqrt or cbrt. */
6795 /* This was a conditional expression but it triggered a bug
6797 REAL_VALUE_TYPE dconstroot;
6798 if (BUILTIN_SQRT_P (fcode))
6799 dconstroot = dconsthalf;
6801 dconstroot = dconst_third ();
6803 /* Adjust for the outer root. */
6804 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6805 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6806 tree_root = build_real (type, dconstroot);
6807 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6811 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6812 if (flag_unsafe_math_optimizations
6813 && (fcode == BUILT_IN_POW
6814 || fcode == BUILT_IN_POWF
6815 || fcode == BUILT_IN_POWL))
6817 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6818 tree arg0 = CALL_EXPR_ARG (arg, 0);
6819 tree arg1 = CALL_EXPR_ARG (arg, 1);
6821 if (!tree_expr_nonnegative_p (arg0))
6822 arg0 = build1 (ABS_EXPR, type, arg0);
6823 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
6824 build_real (type, dconsthalf));
6825 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
6831 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
6832 Return NULL_TREE if no simplification can be made. */
6835 fold_builtin_cbrt (location_t loc, tree arg, tree type)
6837 const enum built_in_function fcode = builtin_mathfn_code (arg);
6840 if (!validate_arg (arg, REAL_TYPE))
6843 /* Calculate the result when the argument is a constant. */
6844 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
6847 if (flag_unsafe_math_optimizations)
6849 /* Optimize cbrt(expN(x)) -> expN(x/3). */
6850 if (BUILTIN_EXPONENT_P (fcode))
6852 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6853 const REAL_VALUE_TYPE third_trunc =
6854 real_value_truncate (TYPE_MODE (type), dconst_third ());
6855 arg = fold_build2_loc (loc, MULT_EXPR, type,
6856 CALL_EXPR_ARG (arg, 0),
6857 build_real (type, third_trunc));
6858 return build_call_expr_loc (loc, expfn, 1, arg);
6861 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
6862 if (BUILTIN_SQRT_P (fcode))
6864 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6868 tree arg0 = CALL_EXPR_ARG (arg, 0);
6870 REAL_VALUE_TYPE dconstroot = dconst_third ();
6872 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6873 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6874 tree_root = build_real (type, dconstroot);
6875 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6879 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
6880 if (BUILTIN_CBRT_P (fcode))
6882 tree arg0 = CALL_EXPR_ARG (arg, 0);
6883 if (tree_expr_nonnegative_p (arg0))
6885 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6890 REAL_VALUE_TYPE dconstroot;
6892 real_arithmetic (&dconstroot, MULT_EXPR,
6893 dconst_third_ptr (), dconst_third_ptr ());
6894 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6895 tree_root = build_real (type, dconstroot);
6896 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6901 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
6902 if (fcode == BUILT_IN_POW
6903 || fcode == BUILT_IN_POWF
6904 || fcode == BUILT_IN_POWL)
6906 tree arg00 = CALL_EXPR_ARG (arg, 0);
6907 tree arg01 = CALL_EXPR_ARG (arg, 1);
6908 if (tree_expr_nonnegative_p (arg00))
6910 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6911 const REAL_VALUE_TYPE dconstroot
6912 = real_value_truncate (TYPE_MODE (type), dconst_third ());
6913 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
6914 build_real (type, dconstroot));
6915 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
6922 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
6923 TYPE is the type of the return value. Return NULL_TREE if no
6924 simplification can be made. */
6927 fold_builtin_cos (location_t loc,
6928 tree arg, tree type, tree fndecl)
6932 if (!validate_arg (arg, REAL_TYPE))
6935 /* Calculate the result when the argument is a constant. */
6936 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
6939 /* Optimize cos(-x) into cos (x). */
6940 if ((narg = fold_strip_sign_ops (arg)))
6941 return build_call_expr_loc (loc, fndecl, 1, narg);
6946 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
6947 Return NULL_TREE if no simplification can be made. */
6950 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
6952 if (validate_arg (arg, REAL_TYPE))
6956 /* Calculate the result when the argument is a constant. */
6957 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
6960 /* Optimize cosh(-x) into cosh (x). */
6961 if ((narg = fold_strip_sign_ops (arg)))
6962 return build_call_expr_loc (loc, fndecl, 1, narg);
6968 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
6969 argument ARG. TYPE is the type of the return value. Return
6970 NULL_TREE if no simplification can be made. */
6973 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
6976 if (validate_arg (arg, COMPLEX_TYPE)
6977 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
6981 /* Calculate the result when the argument is a constant. */
6982 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
6985 /* Optimize fn(-x) into fn(x). */
6986 if ((tmp = fold_strip_sign_ops (arg)))
6987 return build_call_expr_loc (loc, fndecl, 1, tmp);
6993 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
6994 Return NULL_TREE if no simplification can be made. */
6997 fold_builtin_tan (tree arg, tree type)
6999 enum built_in_function fcode;
7002 if (!validate_arg (arg, REAL_TYPE))
7005 /* Calculate the result when the argument is a constant. */
7006 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7009 /* Optimize tan(atan(x)) = x. */
7010 fcode = builtin_mathfn_code (arg);
7011 if (flag_unsafe_math_optimizations
7012 && (fcode == BUILT_IN_ATAN
7013 || fcode == BUILT_IN_ATANF
7014 || fcode == BUILT_IN_ATANL))
7015 return CALL_EXPR_ARG (arg, 0);
7020 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7021 NULL_TREE if no simplification can be made. */
7024 fold_builtin_sincos (location_t loc,
7025 tree arg0, tree arg1, tree arg2)
7030 if (!validate_arg (arg0, REAL_TYPE)
7031 || !validate_arg (arg1, POINTER_TYPE)
7032 || !validate_arg (arg2, POINTER_TYPE))
7035 type = TREE_TYPE (arg0);
7037 /* Calculate the result when the argument is a constant. */
7038 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7041 /* Canonicalize sincos to cexpi. */
7042 if (!TARGET_C99_FUNCTIONS)
7044 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7048 call = build_call_expr_loc (loc, fn, 1, arg0);
7049 call = builtin_save_expr (call);
7051 return build2 (COMPOUND_EXPR, void_type_node,
7052 build2 (MODIFY_EXPR, void_type_node,
7053 build_fold_indirect_ref_loc (loc, arg1),
7054 build1 (IMAGPART_EXPR, type, call)),
7055 build2 (MODIFY_EXPR, void_type_node,
7056 build_fold_indirect_ref_loc (loc, arg2),
7057 build1 (REALPART_EXPR, type, call)));
7060 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7061 NULL_TREE if no simplification can be made. */
7064 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7067 tree realp, imagp, ifn;
7070 if (!validate_arg (arg0, COMPLEX_TYPE)
7071 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7074 /* Calculate the result when the argument is a constant. */
7075 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7078 rtype = TREE_TYPE (TREE_TYPE (arg0));
7080 /* In case we can figure out the real part of arg0 and it is constant zero
7082 if (!TARGET_C99_FUNCTIONS)
7084 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7088 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7089 && real_zerop (realp))
7091 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7092 return build_call_expr_loc (loc, ifn, 1, narg);
7095 /* In case we can easily decompose real and imaginary parts split cexp
7096 to exp (r) * cexpi (i). */
7097 if (flag_unsafe_math_optimizations
7100 tree rfn, rcall, icall;
7102 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7106 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7110 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7111 icall = builtin_save_expr (icall);
7112 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7113 rcall = builtin_save_expr (rcall);
7114 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7115 fold_build2_loc (loc, MULT_EXPR, rtype,
7117 fold_build1_loc (loc, REALPART_EXPR,
7119 fold_build2_loc (loc, MULT_EXPR, rtype,
7121 fold_build1_loc (loc, IMAGPART_EXPR,
7128 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7129 Return NULL_TREE if no simplification can be made. */
7132 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7134 if (!validate_arg (arg, REAL_TYPE))
7137 /* Optimize trunc of constant value. */
7138 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7140 REAL_VALUE_TYPE r, x;
7141 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7143 x = TREE_REAL_CST (arg);
7144 real_trunc (&r, TYPE_MODE (type), &x);
7145 return build_real (type, r);
7148 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7151 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7152 Return NULL_TREE if no simplification can be made. */
7155 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7157 if (!validate_arg (arg, REAL_TYPE))
7160 /* Optimize floor of constant value. */
7161 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7165 x = TREE_REAL_CST (arg);
7166 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7168 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7171 real_floor (&r, TYPE_MODE (type), &x);
7172 return build_real (type, r);
7176 /* Fold floor (x) where x is nonnegative to trunc (x). */
7177 if (tree_expr_nonnegative_p (arg))
7179 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7181 return build_call_expr_loc (loc, truncfn, 1, arg);
7184 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7187 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7188 Return NULL_TREE if no simplification can be made. */
7191 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7193 if (!validate_arg (arg, REAL_TYPE))
7196 /* Optimize ceil of constant value. */
7197 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7201 x = TREE_REAL_CST (arg);
7202 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7204 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7207 real_ceil (&r, TYPE_MODE (type), &x);
7208 return build_real (type, r);
7212 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7215 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7216 Return NULL_TREE if no simplification can be made. */
7219 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7221 if (!validate_arg (arg, REAL_TYPE))
7224 /* Optimize round of constant value. */
7225 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7229 x = TREE_REAL_CST (arg);
7230 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7232 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7235 real_round (&r, TYPE_MODE (type), &x);
7236 return build_real (type, r);
7240 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7243 /* Fold function call to builtin lround, lroundf or lroundl (or the
7244 corresponding long long versions) and other rounding functions. ARG
7245 is the argument to the call. Return NULL_TREE if no simplification
7249 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7251 if (!validate_arg (arg, REAL_TYPE))
7254 /* Optimize lround of constant value. */
7255 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7257 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7259 if (real_isfinite (&x))
7261 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7262 tree ftype = TREE_TYPE (arg);
7266 switch (DECL_FUNCTION_CODE (fndecl))
7268 CASE_FLT_FN (BUILT_IN_IFLOOR):
7269 CASE_FLT_FN (BUILT_IN_LFLOOR):
7270 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7271 real_floor (&r, TYPE_MODE (ftype), &x);
7274 CASE_FLT_FN (BUILT_IN_ICEIL):
7275 CASE_FLT_FN (BUILT_IN_LCEIL):
7276 CASE_FLT_FN (BUILT_IN_LLCEIL):
7277 real_ceil (&r, TYPE_MODE (ftype), &x);
7280 CASE_FLT_FN (BUILT_IN_IROUND):
7281 CASE_FLT_FN (BUILT_IN_LROUND):
7282 CASE_FLT_FN (BUILT_IN_LLROUND):
7283 real_round (&r, TYPE_MODE (ftype), &x);
7290 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7291 if (double_int_fits_to_tree_p (itype, val))
7292 return double_int_to_tree (itype, val);
7296 switch (DECL_FUNCTION_CODE (fndecl))
7298 CASE_FLT_FN (BUILT_IN_LFLOOR):
7299 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7300 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7301 if (tree_expr_nonnegative_p (arg))
7302 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7303 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7308 return fold_fixed_mathfn (loc, fndecl, arg);
7311 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7312 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7313 the argument to the call. Return NULL_TREE if no simplification can
7317 fold_builtin_bitop (tree fndecl, tree arg)
7319 if (!validate_arg (arg, INTEGER_TYPE))
7322 /* Optimize for constant argument. */
7323 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7325 HOST_WIDE_INT hi, width, result;
7326 unsigned HOST_WIDE_INT lo;
7329 type = TREE_TYPE (arg);
7330 width = TYPE_PRECISION (type);
7331 lo = TREE_INT_CST_LOW (arg);
7333 /* Clear all the bits that are beyond the type's precision. */
7334 if (width > HOST_BITS_PER_WIDE_INT)
7336 hi = TREE_INT_CST_HIGH (arg);
7337 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7338 hi &= ~((unsigned HOST_WIDE_INT) (-1)
7339 << (width - HOST_BITS_PER_WIDE_INT));
7344 if (width < HOST_BITS_PER_WIDE_INT)
7345 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7348 switch (DECL_FUNCTION_CODE (fndecl))
7350 CASE_INT_FN (BUILT_IN_FFS):
7352 result = ffs_hwi (lo);
7354 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7359 CASE_INT_FN (BUILT_IN_CLZ):
7361 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7363 result = width - floor_log2 (lo) - 1;
7364 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7368 CASE_INT_FN (BUILT_IN_CTZ):
7370 result = ctz_hwi (lo);
7372 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7373 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7377 CASE_INT_FN (BUILT_IN_CLRSB):
7378 if (width > HOST_BITS_PER_WIDE_INT
7379 && (hi & ((unsigned HOST_WIDE_INT) 1
7380 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
7382 hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
7383 << (width - HOST_BITS_PER_WIDE_INT - 1));
7386 else if (width <= HOST_BITS_PER_WIDE_INT
7387 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
7388 lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
7390 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
7392 result = width - floor_log2 (lo) - 2;
7397 CASE_INT_FN (BUILT_IN_POPCOUNT):
7400 result++, lo &= lo - 1;
7402 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7405 CASE_INT_FN (BUILT_IN_PARITY):
7408 result++, lo &= lo - 1;
7410 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7418 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7424 /* Fold function call to builtin_bswap and the long and long long
7425 variants. Return NULL_TREE if no simplification can be made. */
7427 fold_builtin_bswap (tree fndecl, tree arg)
7429 if (! validate_arg (arg, INTEGER_TYPE))
7432 /* Optimize constant value. */
7433 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7435 HOST_WIDE_INT hi, width, r_hi = 0;
7436 unsigned HOST_WIDE_INT lo, r_lo = 0;
7439 type = TREE_TYPE (arg);
7440 width = TYPE_PRECISION (type);
7441 lo = TREE_INT_CST_LOW (arg);
7442 hi = TREE_INT_CST_HIGH (arg);
7444 switch (DECL_FUNCTION_CODE (fndecl))
7446 case BUILT_IN_BSWAP32:
7447 case BUILT_IN_BSWAP64:
7451 for (s = 0; s < width; s += 8)
7453 int d = width - s - 8;
7454 unsigned HOST_WIDE_INT byte;
7456 if (s < HOST_BITS_PER_WIDE_INT)
7457 byte = (lo >> s) & 0xff;
7459 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7461 if (d < HOST_BITS_PER_WIDE_INT)
7464 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7474 if (width < HOST_BITS_PER_WIDE_INT)
7475 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7477 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7483 /* A subroutine of fold_builtin to fold the various logarithmic
7484 functions. Return NULL_TREE if no simplification can me made.
7485 FUNC is the corresponding MPFR logarithm function. */
7488 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7489 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7491 if (validate_arg (arg, REAL_TYPE))
7493 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7495 const enum built_in_function fcode = builtin_mathfn_code (arg);
7497 /* Calculate the result when the argument is a constant. */
7498 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7501 /* Special case, optimize logN(expN(x)) = x. */
7502 if (flag_unsafe_math_optimizations
7503 && ((func == mpfr_log
7504 && (fcode == BUILT_IN_EXP
7505 || fcode == BUILT_IN_EXPF
7506 || fcode == BUILT_IN_EXPL))
7507 || (func == mpfr_log2
7508 && (fcode == BUILT_IN_EXP2
7509 || fcode == BUILT_IN_EXP2F
7510 || fcode == BUILT_IN_EXP2L))
7511 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7512 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7514 /* Optimize logN(func()) for various exponential functions. We
7515 want to determine the value "x" and the power "exponent" in
7516 order to transform logN(x**exponent) into exponent*logN(x). */
7517 if (flag_unsafe_math_optimizations)
7519 tree exponent = 0, x = 0;
7523 CASE_FLT_FN (BUILT_IN_EXP):
7524 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7525 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7527 exponent = CALL_EXPR_ARG (arg, 0);
7529 CASE_FLT_FN (BUILT_IN_EXP2):
7530 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7531 x = build_real (type, dconst2);
7532 exponent = CALL_EXPR_ARG (arg, 0);
7534 CASE_FLT_FN (BUILT_IN_EXP10):
7535 CASE_FLT_FN (BUILT_IN_POW10):
7536 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7538 REAL_VALUE_TYPE dconst10;
7539 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7540 x = build_real (type, dconst10);
7542 exponent = CALL_EXPR_ARG (arg, 0);
7544 CASE_FLT_FN (BUILT_IN_SQRT):
7545 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7546 x = CALL_EXPR_ARG (arg, 0);
7547 exponent = build_real (type, dconsthalf);
7549 CASE_FLT_FN (BUILT_IN_CBRT):
7550 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7551 x = CALL_EXPR_ARG (arg, 0);
7552 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7555 CASE_FLT_FN (BUILT_IN_POW):
7556 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7557 x = CALL_EXPR_ARG (arg, 0);
7558 exponent = CALL_EXPR_ARG (arg, 1);
7564 /* Now perform the optimization. */
7567 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7568 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7576 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7577 NULL_TREE if no simplification can be made. */
7580 fold_builtin_hypot (location_t loc, tree fndecl,
7581 tree arg0, tree arg1, tree type)
7583 tree res, narg0, narg1;
7585 if (!validate_arg (arg0, REAL_TYPE)
7586 || !validate_arg (arg1, REAL_TYPE))
7589 /* Calculate the result when the argument is a constant. */
7590 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7593 /* If either argument to hypot has a negate or abs, strip that off.
7594 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7595 narg0 = fold_strip_sign_ops (arg0);
7596 narg1 = fold_strip_sign_ops (arg1);
7599 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7600 narg1 ? narg1 : arg1);
7603 /* If either argument is zero, hypot is fabs of the other. */
7604 if (real_zerop (arg0))
7605 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7606 else if (real_zerop (arg1))
7607 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7609 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7610 if (flag_unsafe_math_optimizations
7611 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7613 const REAL_VALUE_TYPE sqrt2_trunc
7614 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7615 return fold_build2_loc (loc, MULT_EXPR, type,
7616 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7617 build_real (type, sqrt2_trunc));
7624 /* Fold a builtin function call to pow, powf, or powl. Return
7625 NULL_TREE if no simplification can be made. */
7627 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7631 if (!validate_arg (arg0, REAL_TYPE)
7632 || !validate_arg (arg1, REAL_TYPE))
7635 /* Calculate the result when the argument is a constant. */
7636 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7639 /* Optimize pow(1.0,y) = 1.0. */
7640 if (real_onep (arg0))
7641 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7643 if (TREE_CODE (arg1) == REAL_CST
7644 && !TREE_OVERFLOW (arg1))
7646 REAL_VALUE_TYPE cint;
7650 c = TREE_REAL_CST (arg1);
7652 /* Optimize pow(x,0.0) = 1.0. */
7653 if (REAL_VALUES_EQUAL (c, dconst0))
7654 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7657 /* Optimize pow(x,1.0) = x. */
7658 if (REAL_VALUES_EQUAL (c, dconst1))
7661 /* Optimize pow(x,-1.0) = 1.0/x. */
7662 if (REAL_VALUES_EQUAL (c, dconstm1))
7663 return fold_build2_loc (loc, RDIV_EXPR, type,
7664 build_real (type, dconst1), arg0);
7666 /* Optimize pow(x,0.5) = sqrt(x). */
7667 if (flag_unsafe_math_optimizations
7668 && REAL_VALUES_EQUAL (c, dconsthalf))
7670 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7672 if (sqrtfn != NULL_TREE)
7673 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7676 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7677 if (flag_unsafe_math_optimizations)
7679 const REAL_VALUE_TYPE dconstroot
7680 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7682 if (REAL_VALUES_EQUAL (c, dconstroot))
7684 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7685 if (cbrtfn != NULL_TREE)
7686 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7690 /* Check for an integer exponent. */
7691 n = real_to_integer (&c);
7692 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7693 if (real_identical (&c, &cint))
7695 /* Attempt to evaluate pow at compile-time, unless this should
7696 raise an exception. */
7697 if (TREE_CODE (arg0) == REAL_CST
7698 && !TREE_OVERFLOW (arg0)
7700 || (!flag_trapping_math && !flag_errno_math)
7701 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7706 x = TREE_REAL_CST (arg0);
7707 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7708 if (flag_unsafe_math_optimizations || !inexact)
7709 return build_real (type, x);
7712 /* Strip sign ops from even integer powers. */
7713 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7715 tree narg0 = fold_strip_sign_ops (arg0);
7717 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7722 if (flag_unsafe_math_optimizations)
7724 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7726 /* Optimize pow(expN(x),y) = expN(x*y). */
7727 if (BUILTIN_EXPONENT_P (fcode))
7729 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7730 tree arg = CALL_EXPR_ARG (arg0, 0);
7731 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7732 return build_call_expr_loc (loc, expfn, 1, arg);
7735 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7736 if (BUILTIN_SQRT_P (fcode))
7738 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7739 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7740 build_real (type, dconsthalf));
7741 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7744 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7745 if (BUILTIN_CBRT_P (fcode))
7747 tree arg = CALL_EXPR_ARG (arg0, 0);
7748 if (tree_expr_nonnegative_p (arg))
7750 const REAL_VALUE_TYPE dconstroot
7751 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7752 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7753 build_real (type, dconstroot));
7754 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7758 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7759 if (fcode == BUILT_IN_POW
7760 || fcode == BUILT_IN_POWF
7761 || fcode == BUILT_IN_POWL)
7763 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7764 if (tree_expr_nonnegative_p (arg00))
7766 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7767 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7768 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7776 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7777 Return NULL_TREE if no simplification can be made. */
7779 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7780 tree arg0, tree arg1, tree type)
7782 if (!validate_arg (arg0, REAL_TYPE)
7783 || !validate_arg (arg1, INTEGER_TYPE))
7786 /* Optimize pow(1.0,y) = 1.0. */
7787 if (real_onep (arg0))
7788 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7790 if (host_integerp (arg1, 0))
7792 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7794 /* Evaluate powi at compile-time. */
7795 if (TREE_CODE (arg0) == REAL_CST
7796 && !TREE_OVERFLOW (arg0))
7799 x = TREE_REAL_CST (arg0);
7800 real_powi (&x, TYPE_MODE (type), &x, c);
7801 return build_real (type, x);
7804 /* Optimize pow(x,0) = 1.0. */
7806 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7809 /* Optimize pow(x,1) = x. */
7813 /* Optimize pow(x,-1) = 1.0/x. */
7815 return fold_build2_loc (loc, RDIV_EXPR, type,
7816 build_real (type, dconst1), arg0);
7822 /* A subroutine of fold_builtin to fold the various exponent
7823 functions. Return NULL_TREE if no simplification can be made.
7824 FUNC is the corresponding MPFR exponent function. */
7827 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7828 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7830 if (validate_arg (arg, REAL_TYPE))
7832 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7835 /* Calculate the result when the argument is a constant. */
7836 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7839 /* Optimize expN(logN(x)) = x. */
7840 if (flag_unsafe_math_optimizations)
7842 const enum built_in_function fcode = builtin_mathfn_code (arg);
7844 if ((func == mpfr_exp
7845 && (fcode == BUILT_IN_LOG
7846 || fcode == BUILT_IN_LOGF
7847 || fcode == BUILT_IN_LOGL))
7848 || (func == mpfr_exp2
7849 && (fcode == BUILT_IN_LOG2
7850 || fcode == BUILT_IN_LOG2F
7851 || fcode == BUILT_IN_LOG2L))
7852 || (func == mpfr_exp10
7853 && (fcode == BUILT_IN_LOG10
7854 || fcode == BUILT_IN_LOG10F
7855 || fcode == BUILT_IN_LOG10L)))
7856 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7863 /* Return true if VAR is a VAR_DECL or a component thereof. */
7866 var_decl_component_p (tree var)
7869 while (handled_component_p (inner))
7870 inner = TREE_OPERAND (inner, 0);
7871 return SSA_VAR_P (inner);
7874 /* Fold function call to builtin memset. Return
7875 NULL_TREE if no simplification can be made. */
7878 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
7879 tree type, bool ignore)
7881 tree var, ret, etype;
7882 unsigned HOST_WIDE_INT length, cval;
7884 if (! validate_arg (dest, POINTER_TYPE)
7885 || ! validate_arg (c, INTEGER_TYPE)
7886 || ! validate_arg (len, INTEGER_TYPE))
7889 if (! host_integerp (len, 1))
7892 /* If the LEN parameter is zero, return DEST. */
7893 if (integer_zerop (len))
7894 return omit_one_operand_loc (loc, type, dest, c);
7896 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
7901 if (TREE_CODE (var) != ADDR_EXPR)
7904 var = TREE_OPERAND (var, 0);
7905 if (TREE_THIS_VOLATILE (var))
7908 etype = TREE_TYPE (var);
7909 if (TREE_CODE (etype) == ARRAY_TYPE)
7910 etype = TREE_TYPE (etype);
7912 if (!INTEGRAL_TYPE_P (etype)
7913 && !POINTER_TYPE_P (etype))
7916 if (! var_decl_component_p (var))
7919 length = tree_low_cst (len, 1);
7920 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
7921 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
7924 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
7927 if (integer_zerop (c))
7931 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
7934 cval = TREE_INT_CST_LOW (c);
7938 cval |= (cval << 31) << 1;
7941 ret = build_int_cst_type (etype, cval);
7942 var = build_fold_indirect_ref_loc (loc,
7943 fold_convert_loc (loc,
7944 build_pointer_type (etype),
7946 ret = build2 (MODIFY_EXPR, etype, var, ret);
7950 return omit_one_operand_loc (loc, type, dest, ret);
7953 /* Fold function call to builtin memset. Return
7954 NULL_TREE if no simplification can be made. */
7957 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
7959 if (! validate_arg (dest, POINTER_TYPE)
7960 || ! validate_arg (size, INTEGER_TYPE))
7966 /* New argument list transforming bzero(ptr x, int y) to
7967 memset(ptr x, int 0, size_t y). This is done this way
7968 so that if it isn't expanded inline, we fallback to
7969 calling bzero instead of memset. */
7971 return fold_builtin_memset (loc, dest, integer_zero_node,
7972 fold_convert_loc (loc, sizetype, size),
7973 void_type_node, ignore);
7976 /* Fold function call to builtin mem{{,p}cpy,move}. Return
7977 NULL_TREE if no simplification can be made.
7978 If ENDP is 0, return DEST (like memcpy).
7979 If ENDP is 1, return DEST+LEN (like mempcpy).
7980 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
7981 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
7985 fold_builtin_memory_op (location_t loc, tree dest, tree src,
7986 tree len, tree type, bool ignore, int endp)
7988 tree destvar, srcvar, expr;
7990 if (! validate_arg (dest, POINTER_TYPE)
7991 || ! validate_arg (src, POINTER_TYPE)
7992 || ! validate_arg (len, INTEGER_TYPE))
7995 /* If the LEN parameter is zero, return DEST. */
7996 if (integer_zerop (len))
7997 return omit_one_operand_loc (loc, type, dest, src);
7999 /* If SRC and DEST are the same (and not volatile), return
8000 DEST{,+LEN,+LEN-1}. */
8001 if (operand_equal_p (src, dest, 0))
8005 tree srctype, desttype;
8006 unsigned int src_align, dest_align;
8011 src_align = get_pointer_alignment (src);
8012 dest_align = get_pointer_alignment (dest);
8014 /* Both DEST and SRC must be pointer types.
8015 ??? This is what old code did. Is the testing for pointer types
8018 If either SRC is readonly or length is 1, we can use memcpy. */
8019 if (!dest_align || !src_align)
8021 if (readonly_data_expr (src)
8022 || (host_integerp (len, 1)
8023 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8024 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8026 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8029 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8032 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8033 if (TREE_CODE (src) == ADDR_EXPR
8034 && TREE_CODE (dest) == ADDR_EXPR)
8036 tree src_base, dest_base, fn;
8037 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8038 HOST_WIDE_INT size = -1;
8039 HOST_WIDE_INT maxsize = -1;
8041 srcvar = TREE_OPERAND (src, 0);
8042 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8044 destvar = TREE_OPERAND (dest, 0);
8045 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8047 if (host_integerp (len, 1))
8048 maxsize = tree_low_cst (len, 1);
8051 src_offset /= BITS_PER_UNIT;
8052 dest_offset /= BITS_PER_UNIT;
8053 if (SSA_VAR_P (src_base)
8054 && SSA_VAR_P (dest_base))
8056 if (operand_equal_p (src_base, dest_base, 0)
8057 && ranges_overlap_p (src_offset, maxsize,
8058 dest_offset, maxsize))
8061 else if (TREE_CODE (src_base) == MEM_REF
8062 && TREE_CODE (dest_base) == MEM_REF)
8065 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8066 TREE_OPERAND (dest_base, 0), 0))
8068 off = double_int_add (mem_ref_offset (src_base),
8069 shwi_to_double_int (src_offset));
8070 if (!double_int_fits_in_shwi_p (off))
8072 src_offset = off.low;
8073 off = double_int_add (mem_ref_offset (dest_base),
8074 shwi_to_double_int (dest_offset));
8075 if (!double_int_fits_in_shwi_p (off))
8077 dest_offset = off.low;
8078 if (ranges_overlap_p (src_offset, maxsize,
8079 dest_offset, maxsize))
8085 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8088 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8091 /* If the destination and source do not alias optimize into
8093 if ((is_gimple_min_invariant (dest)
8094 || TREE_CODE (dest) == SSA_NAME)
8095 && (is_gimple_min_invariant (src)
8096 || TREE_CODE (src) == SSA_NAME))
8099 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8100 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8101 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8104 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8107 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8114 if (!host_integerp (len, 0))
8117 This logic lose for arguments like (type *)malloc (sizeof (type)),
8118 since we strip the casts of up to VOID return value from malloc.
8119 Perhaps we ought to inherit type from non-VOID argument here? */
8122 if (!POINTER_TYPE_P (TREE_TYPE (src))
8123 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8125 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8126 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8128 tree tem = TREE_OPERAND (src, 0);
8130 if (tem != TREE_OPERAND (src, 0))
8131 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8133 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8135 tree tem = TREE_OPERAND (dest, 0);
8137 if (tem != TREE_OPERAND (dest, 0))
8138 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8140 srctype = TREE_TYPE (TREE_TYPE (src));
8141 if (TREE_CODE (srctype) == ARRAY_TYPE
8142 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8144 srctype = TREE_TYPE (srctype);
8146 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8148 desttype = TREE_TYPE (TREE_TYPE (dest));
8149 if (TREE_CODE (desttype) == ARRAY_TYPE
8150 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8152 desttype = TREE_TYPE (desttype);
8154 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8156 if (TREE_ADDRESSABLE (srctype)
8157 || TREE_ADDRESSABLE (desttype))
8160 src_align = get_pointer_alignment (src);
8161 dest_align = get_pointer_alignment (dest);
8162 if (dest_align < TYPE_ALIGN (desttype)
8163 || src_align < TYPE_ALIGN (srctype))
8167 dest = builtin_save_expr (dest);
8169 /* Build accesses at offset zero with a ref-all character type. */
8170 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8171 ptr_mode, true), 0);
8174 STRIP_NOPS (destvar);
8175 if (TREE_CODE (destvar) == ADDR_EXPR
8176 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8177 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8178 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8180 destvar = NULL_TREE;
8183 STRIP_NOPS (srcvar);
8184 if (TREE_CODE (srcvar) == ADDR_EXPR
8185 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8186 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8189 || src_align >= TYPE_ALIGN (desttype))
8190 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8192 else if (!STRICT_ALIGNMENT)
8194 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8196 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8204 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8207 if (srcvar == NULL_TREE)
8210 if (src_align >= TYPE_ALIGN (desttype))
8211 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8214 if (STRICT_ALIGNMENT)
8216 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8218 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8221 else if (destvar == NULL_TREE)
8224 if (dest_align >= TYPE_ALIGN (srctype))
8225 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8228 if (STRICT_ALIGNMENT)
8230 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8232 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8236 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8242 if (endp == 0 || endp == 3)
8243 return omit_one_operand_loc (loc, type, dest, expr);
8249 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8252 dest = fold_build_pointer_plus_loc (loc, dest, len);
8253 dest = fold_convert_loc (loc, type, dest);
8255 dest = omit_one_operand_loc (loc, type, dest, expr);
8259 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8260 If LEN is not NULL, it represents the length of the string to be
8261 copied. Return NULL_TREE if no simplification can be made. */
8264 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8268 if (!validate_arg (dest, POINTER_TYPE)
8269 || !validate_arg (src, POINTER_TYPE))
8272 /* If SRC and DEST are the same (and not volatile), return DEST. */
8273 if (operand_equal_p (src, dest, 0))
8274 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8276 if (optimize_function_for_size_p (cfun))
8279 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8285 len = c_strlen (src, 1);
8286 if (! len || TREE_SIDE_EFFECTS (len))
8290 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8291 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8292 build_call_expr_loc (loc, fn, 3, dest, src, len));
8295 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8296 Return NULL_TREE if no simplification can be made. */
8299 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8301 tree fn, len, lenp1, call, type;
8303 if (!validate_arg (dest, POINTER_TYPE)
8304 || !validate_arg (src, POINTER_TYPE))
8307 len = c_strlen (src, 1);
8309 || TREE_CODE (len) != INTEGER_CST)
8312 if (optimize_function_for_size_p (cfun)
8313 /* If length is zero it's small enough. */
8314 && !integer_zerop (len))
8317 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8321 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8322 /* We use dest twice in building our expression. Save it from
8323 multiple expansions. */
8324 dest = builtin_save_expr (dest);
8325 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8327 type = TREE_TYPE (TREE_TYPE (fndecl));
8328 dest = fold_build_pointer_plus_loc (loc, dest, len);
8329 dest = fold_convert_loc (loc, type, dest);
8330 dest = omit_one_operand_loc (loc, type, dest, call);
8334 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8335 If SLEN is not NULL, it represents the length of the source string.
8336 Return NULL_TREE if no simplification can be made. */
8339 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8340 tree src, tree len, tree slen)
8344 if (!validate_arg (dest, POINTER_TYPE)
8345 || !validate_arg (src, POINTER_TYPE)
8346 || !validate_arg (len, INTEGER_TYPE))
8349 /* If the LEN parameter is zero, return DEST. */
8350 if (integer_zerop (len))
8351 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8353 /* We can't compare slen with len as constants below if len is not a
8355 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8359 slen = c_strlen (src, 1);
8361 /* Now, we must be passed a constant src ptr parameter. */
8362 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8365 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8367 /* We do not support simplification of this case, though we do
8368 support it when expanding trees into RTL. */
8369 /* FIXME: generate a call to __builtin_memset. */
8370 if (tree_int_cst_lt (slen, len))
8373 /* OK transform into builtin memcpy. */
8374 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8377 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8378 build_call_expr_loc (loc, fn, 3, dest, src, len));
8381 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8382 arguments to the call, and TYPE is its return type.
8383 Return NULL_TREE if no simplification can be made. */
8386 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8388 if (!validate_arg (arg1, POINTER_TYPE)
8389 || !validate_arg (arg2, INTEGER_TYPE)
8390 || !validate_arg (len, INTEGER_TYPE))
8396 if (TREE_CODE (arg2) != INTEGER_CST
8397 || !host_integerp (len, 1))
8400 p1 = c_getstr (arg1);
8401 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8407 if (target_char_cast (arg2, &c))
8410 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8413 return build_int_cst (TREE_TYPE (arg1), 0);
8415 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8416 return fold_convert_loc (loc, type, tem);
8422 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8423 Return NULL_TREE if no simplification can be made. */
8426 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8428 const char *p1, *p2;
8430 if (!validate_arg (arg1, POINTER_TYPE)
8431 || !validate_arg (arg2, POINTER_TYPE)
8432 || !validate_arg (len, INTEGER_TYPE))
8435 /* If the LEN parameter is zero, return zero. */
8436 if (integer_zerop (len))
8437 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8440 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8441 if (operand_equal_p (arg1, arg2, 0))
8442 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8444 p1 = c_getstr (arg1);
8445 p2 = c_getstr (arg2);
8447 /* If all arguments are constant, and the value of len is not greater
8448 than the lengths of arg1 and arg2, evaluate at compile-time. */
8449 if (host_integerp (len, 1) && p1 && p2
8450 && compare_tree_int (len, strlen (p1) + 1) <= 0
8451 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8453 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8456 return integer_one_node;
8458 return integer_minus_one_node;
8460 return integer_zero_node;
8463 /* If len parameter is one, return an expression corresponding to
8464 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8465 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8467 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8468 tree cst_uchar_ptr_node
8469 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8472 = fold_convert_loc (loc, integer_type_node,
8473 build1 (INDIRECT_REF, cst_uchar_node,
8474 fold_convert_loc (loc,
8478 = fold_convert_loc (loc, integer_type_node,
8479 build1 (INDIRECT_REF, cst_uchar_node,
8480 fold_convert_loc (loc,
8483 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8489 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8490 Return NULL_TREE if no simplification can be made. */
8493 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8495 const char *p1, *p2;
8497 if (!validate_arg (arg1, POINTER_TYPE)
8498 || !validate_arg (arg2, POINTER_TYPE))
8501 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8502 if (operand_equal_p (arg1, arg2, 0))
8503 return integer_zero_node;
8505 p1 = c_getstr (arg1);
8506 p2 = c_getstr (arg2);
8510 const int i = strcmp (p1, p2);
8512 return integer_minus_one_node;
8514 return integer_one_node;
8516 return integer_zero_node;
8519 /* If the second arg is "", return *(const unsigned char*)arg1. */
8520 if (p2 && *p2 == '\0')
8522 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8523 tree cst_uchar_ptr_node
8524 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8526 return fold_convert_loc (loc, integer_type_node,
8527 build1 (INDIRECT_REF, cst_uchar_node,
8528 fold_convert_loc (loc,
8533 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8534 if (p1 && *p1 == '\0')
8536 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8537 tree cst_uchar_ptr_node
8538 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8541 = fold_convert_loc (loc, integer_type_node,
8542 build1 (INDIRECT_REF, cst_uchar_node,
8543 fold_convert_loc (loc,
8546 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8552 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8553 Return NULL_TREE if no simplification can be made. */
8556 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8558 const char *p1, *p2;
8560 if (!validate_arg (arg1, POINTER_TYPE)
8561 || !validate_arg (arg2, POINTER_TYPE)
8562 || !validate_arg (len, INTEGER_TYPE))
8565 /* If the LEN parameter is zero, return zero. */
8566 if (integer_zerop (len))
8567 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8570 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8571 if (operand_equal_p (arg1, arg2, 0))
8572 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8574 p1 = c_getstr (arg1);
8575 p2 = c_getstr (arg2);
8577 if (host_integerp (len, 1) && p1 && p2)
8579 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8581 return integer_one_node;
8583 return integer_minus_one_node;
8585 return integer_zero_node;
8588 /* If the second arg is "", and the length is greater than zero,
8589 return *(const unsigned char*)arg1. */
8590 if (p2 && *p2 == '\0'
8591 && TREE_CODE (len) == INTEGER_CST
8592 && tree_int_cst_sgn (len) == 1)
8594 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8595 tree cst_uchar_ptr_node
8596 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8598 return fold_convert_loc (loc, integer_type_node,
8599 build1 (INDIRECT_REF, cst_uchar_node,
8600 fold_convert_loc (loc,
8605 /* If the first arg is "", and the length is greater than zero,
8606 return -*(const unsigned char*)arg2. */
8607 if (p1 && *p1 == '\0'
8608 && TREE_CODE (len) == INTEGER_CST
8609 && tree_int_cst_sgn (len) == 1)
8611 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8612 tree cst_uchar_ptr_node
8613 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8615 tree temp = fold_convert_loc (loc, integer_type_node,
8616 build1 (INDIRECT_REF, cst_uchar_node,
8617 fold_convert_loc (loc,
8620 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8623 /* If len parameter is one, return an expression corresponding to
8624 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8625 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8627 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8628 tree cst_uchar_ptr_node
8629 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8631 tree ind1 = fold_convert_loc (loc, integer_type_node,
8632 build1 (INDIRECT_REF, cst_uchar_node,
8633 fold_convert_loc (loc,
8636 tree ind2 = fold_convert_loc (loc, integer_type_node,
8637 build1 (INDIRECT_REF, cst_uchar_node,
8638 fold_convert_loc (loc,
8641 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8647 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8648 ARG. Return NULL_TREE if no simplification can be made. */
8651 fold_builtin_signbit (location_t loc, tree arg, tree type)
8653 if (!validate_arg (arg, REAL_TYPE))
8656 /* If ARG is a compile-time constant, determine the result. */
8657 if (TREE_CODE (arg) == REAL_CST
8658 && !TREE_OVERFLOW (arg))
8662 c = TREE_REAL_CST (arg);
8663 return (REAL_VALUE_NEGATIVE (c)
8664 ? build_one_cst (type)
8665 : build_zero_cst (type));
8668 /* If ARG is non-negative, the result is always zero. */
8669 if (tree_expr_nonnegative_p (arg))
8670 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8672 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8673 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8674 return fold_convert (type,
8675 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8676 build_real (TREE_TYPE (arg), dconst0)));
8681 /* Fold function call to builtin copysign, copysignf or copysignl with
8682 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8686 fold_builtin_copysign (location_t loc, tree fndecl,
8687 tree arg1, tree arg2, tree type)
8691 if (!validate_arg (arg1, REAL_TYPE)
8692 || !validate_arg (arg2, REAL_TYPE))
8695 /* copysign(X,X) is X. */
8696 if (operand_equal_p (arg1, arg2, 0))
8697 return fold_convert_loc (loc, type, arg1);
8699 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8700 if (TREE_CODE (arg1) == REAL_CST
8701 && TREE_CODE (arg2) == REAL_CST
8702 && !TREE_OVERFLOW (arg1)
8703 && !TREE_OVERFLOW (arg2))
8705 REAL_VALUE_TYPE c1, c2;
8707 c1 = TREE_REAL_CST (arg1);
8708 c2 = TREE_REAL_CST (arg2);
8709 /* c1.sign := c2.sign. */
8710 real_copysign (&c1, &c2);
8711 return build_real (type, c1);
8714 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8715 Remember to evaluate Y for side-effects. */
8716 if (tree_expr_nonnegative_p (arg2))
8717 return omit_one_operand_loc (loc, type,
8718 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8721 /* Strip sign changing operations for the first argument. */
8722 tem = fold_strip_sign_ops (arg1);
8724 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8729 /* Fold a call to builtin isascii with argument ARG. */
8732 fold_builtin_isascii (location_t loc, tree arg)
8734 if (!validate_arg (arg, INTEGER_TYPE))
8738 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8739 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8740 build_int_cst (integer_type_node,
8741 ~ (unsigned HOST_WIDE_INT) 0x7f));
8742 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8743 arg, integer_zero_node);
8747 /* Fold a call to builtin toascii with argument ARG. */
8750 fold_builtin_toascii (location_t loc, tree arg)
8752 if (!validate_arg (arg, INTEGER_TYPE))
8755 /* Transform toascii(c) -> (c & 0x7f). */
8756 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8757 build_int_cst (integer_type_node, 0x7f));
8760 /* Fold a call to builtin isdigit with argument ARG. */
8763 fold_builtin_isdigit (location_t loc, tree arg)
8765 if (!validate_arg (arg, INTEGER_TYPE))
8769 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8770 /* According to the C standard, isdigit is unaffected by locale.
8771 However, it definitely is affected by the target character set. */
8772 unsigned HOST_WIDE_INT target_digit0
8773 = lang_hooks.to_target_charset ('0');
8775 if (target_digit0 == 0)
8778 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8779 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8780 build_int_cst (unsigned_type_node, target_digit0));
8781 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8782 build_int_cst (unsigned_type_node, 9));
8786 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8789 fold_builtin_fabs (location_t loc, tree arg, tree type)
8791 if (!validate_arg (arg, REAL_TYPE))
8794 arg = fold_convert_loc (loc, type, arg);
8795 if (TREE_CODE (arg) == REAL_CST)
8796 return fold_abs_const (arg, type);
8797 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8800 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8803 fold_builtin_abs (location_t loc, tree arg, tree type)
8805 if (!validate_arg (arg, INTEGER_TYPE))
8808 arg = fold_convert_loc (loc, type, arg);
8809 if (TREE_CODE (arg) == INTEGER_CST)
8810 return fold_abs_const (arg, type);
8811 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8814 /* Fold a fma operation with arguments ARG[012]. */
8817 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8818 tree type, tree arg0, tree arg1, tree arg2)
8820 if (TREE_CODE (arg0) == REAL_CST
8821 && TREE_CODE (arg1) == REAL_CST
8822 && TREE_CODE (arg2) == REAL_CST)
8823 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8828 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8831 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8833 if (validate_arg (arg0, REAL_TYPE)
8834 && validate_arg(arg1, REAL_TYPE)
8835 && validate_arg(arg2, REAL_TYPE))
8837 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8841 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8842 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8843 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8848 /* Fold a call to builtin fmin or fmax. */
8851 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8852 tree type, bool max)
8854 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8856 /* Calculate the result when the argument is a constant. */
8857 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8862 /* If either argument is NaN, return the other one. Avoid the
8863 transformation if we get (and honor) a signalling NaN. Using
8864 omit_one_operand() ensures we create a non-lvalue. */
8865 if (TREE_CODE (arg0) == REAL_CST
8866 && real_isnan (&TREE_REAL_CST (arg0))
8867 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8868 || ! TREE_REAL_CST (arg0).signalling))
8869 return omit_one_operand_loc (loc, type, arg1, arg0);
8870 if (TREE_CODE (arg1) == REAL_CST
8871 && real_isnan (&TREE_REAL_CST (arg1))
8872 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
8873 || ! TREE_REAL_CST (arg1).signalling))
8874 return omit_one_operand_loc (loc, type, arg0, arg1);
8876 /* Transform fmin/fmax(x,x) -> x. */
8877 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8878 return omit_one_operand_loc (loc, type, arg0, arg1);
8880 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8881 functions to return the numeric arg if the other one is NaN.
8882 These tree codes don't honor that, so only transform if
8883 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8884 handled, so we don't have to worry about it either. */
8885 if (flag_finite_math_only)
8886 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8887 fold_convert_loc (loc, type, arg0),
8888 fold_convert_loc (loc, type, arg1));
8893 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8896 fold_builtin_carg (location_t loc, tree arg, tree type)
8898 if (validate_arg (arg, COMPLEX_TYPE)
8899 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8901 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8905 tree new_arg = builtin_save_expr (arg);
8906 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8907 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8908 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8915 /* Fold a call to builtin logb/ilogb. */
8918 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8920 if (! validate_arg (arg, REAL_TYPE))
8925 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8927 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8933 /* If arg is Inf or NaN and we're logb, return it. */
8934 if (TREE_CODE (rettype) == REAL_TYPE)
8935 return fold_convert_loc (loc, rettype, arg);
8936 /* Fall through... */
8938 /* Zero may set errno and/or raise an exception for logb, also
8939 for ilogb we don't know FP_ILOGB0. */
8942 /* For normal numbers, proceed iff radix == 2. In GCC,
8943 normalized significands are in the range [0.5, 1.0). We
8944 want the exponent as if they were [1.0, 2.0) so get the
8945 exponent and subtract 1. */
8946 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8947 return fold_convert_loc (loc, rettype,
8948 build_int_cst (integer_type_node,
8949 REAL_EXP (value)-1));
8957 /* Fold a call to builtin significand, if radix == 2. */
8960 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8962 if (! validate_arg (arg, REAL_TYPE))
8967 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8969 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8976 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8977 return fold_convert_loc (loc, rettype, arg);
8979 /* For normal numbers, proceed iff radix == 2. */
8980 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8982 REAL_VALUE_TYPE result = *value;
8983 /* In GCC, normalized significands are in the range [0.5,
8984 1.0). We want them to be [1.0, 2.0) so set the
8986 SET_REAL_EXP (&result, 1);
8987 return build_real (rettype, result);
8996 /* Fold a call to builtin frexp, we can assume the base is 2. */
8999 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9001 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9006 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9009 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9011 /* Proceed if a valid pointer type was passed in. */
9012 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9014 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9020 /* For +-0, return (*exp = 0, +-0). */
9021 exp = integer_zero_node;
9026 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9027 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9030 /* Since the frexp function always expects base 2, and in
9031 GCC normalized significands are already in the range
9032 [0.5, 1.0), we have exactly what frexp wants. */
9033 REAL_VALUE_TYPE frac_rvt = *value;
9034 SET_REAL_EXP (&frac_rvt, 0);
9035 frac = build_real (rettype, frac_rvt);
9036 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9043 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9044 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9045 TREE_SIDE_EFFECTS (arg1) = 1;
9046 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9052 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9053 then we can assume the base is two. If it's false, then we have to
9054 check the mode of the TYPE parameter in certain cases. */
9057 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9058 tree type, bool ldexp)
9060 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9065 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9066 if (real_zerop (arg0) || integer_zerop (arg1)
9067 || (TREE_CODE (arg0) == REAL_CST
9068 && !real_isfinite (&TREE_REAL_CST (arg0))))
9069 return omit_one_operand_loc (loc, type, arg0, arg1);
9071 /* If both arguments are constant, then try to evaluate it. */
9072 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9073 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9074 && host_integerp (arg1, 0))
9076 /* Bound the maximum adjustment to twice the range of the
9077 mode's valid exponents. Use abs to ensure the range is
9078 positive as a sanity check. */
9079 const long max_exp_adj = 2 *
9080 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9081 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9083 /* Get the user-requested adjustment. */
9084 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9086 /* The requested adjustment must be inside this range. This
9087 is a preliminary cap to avoid things like overflow, we
9088 may still fail to compute the result for other reasons. */
9089 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9091 REAL_VALUE_TYPE initial_result;
9093 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9095 /* Ensure we didn't overflow. */
9096 if (! real_isinf (&initial_result))
9098 const REAL_VALUE_TYPE trunc_result
9099 = real_value_truncate (TYPE_MODE (type), initial_result);
9101 /* Only proceed if the target mode can hold the
9103 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9104 return build_real (type, trunc_result);
9113 /* Fold a call to builtin modf. */
9116 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9118 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9123 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9126 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9128 /* Proceed if a valid pointer type was passed in. */
9129 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9131 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9132 REAL_VALUE_TYPE trunc, frac;
9138 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9139 trunc = frac = *value;
9142 /* For +-Inf, return (*arg1 = arg0, +-0). */
9144 frac.sign = value->sign;
9148 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9149 real_trunc (&trunc, VOIDmode, value);
9150 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9151 /* If the original number was negative and already
9152 integral, then the fractional part is -0.0. */
9153 if (value->sign && frac.cl == rvc_zero)
9154 frac.sign = value->sign;
9158 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9159 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9160 build_real (rettype, trunc));
9161 TREE_SIDE_EFFECTS (arg1) = 1;
9162 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9163 build_real (rettype, frac));
9169 /* Given a location LOC, an interclass builtin function decl FNDECL
9170 and its single argument ARG, return an folded expression computing
9171 the same, or NULL_TREE if we either couldn't or didn't want to fold
9172 (the latter happen if there's an RTL instruction available). */
9175 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9177 enum machine_mode mode;
9179 if (!validate_arg (arg, REAL_TYPE))
9182 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9185 mode = TYPE_MODE (TREE_TYPE (arg));
9187 /* If there is no optab, try generic code. */
9188 switch (DECL_FUNCTION_CODE (fndecl))
9192 CASE_FLT_FN (BUILT_IN_ISINF):
9194 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9195 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9196 tree const type = TREE_TYPE (arg);
9200 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9201 real_from_string (&r, buf);
9202 result = build_call_expr (isgr_fn, 2,
9203 fold_build1_loc (loc, ABS_EXPR, type, arg),
9204 build_real (type, r));
9207 CASE_FLT_FN (BUILT_IN_FINITE):
9208 case BUILT_IN_ISFINITE:
9210 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9211 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9212 tree const type = TREE_TYPE (arg);
9216 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9217 real_from_string (&r, buf);
9218 result = build_call_expr (isle_fn, 2,
9219 fold_build1_loc (loc, ABS_EXPR, type, arg),
9220 build_real (type, r));
9221 /*result = fold_build2_loc (loc, UNGT_EXPR,
9222 TREE_TYPE (TREE_TYPE (fndecl)),
9223 fold_build1_loc (loc, ABS_EXPR, type, arg),
9224 build_real (type, r));
9225 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9226 TREE_TYPE (TREE_TYPE (fndecl)),
9230 case BUILT_IN_ISNORMAL:
9232 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9233 islessequal(fabs(x),DBL_MAX). */
9234 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9235 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9236 tree const type = TREE_TYPE (arg);
9237 REAL_VALUE_TYPE rmax, rmin;
9240 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9241 real_from_string (&rmax, buf);
9242 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9243 real_from_string (&rmin, buf);
9244 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9245 result = build_call_expr (isle_fn, 2, arg,
9246 build_real (type, rmax));
9247 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9248 build_call_expr (isge_fn, 2, arg,
9249 build_real (type, rmin)));
9259 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9260 ARG is the argument for the call. */
9263 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9265 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9268 if (!validate_arg (arg, REAL_TYPE))
9271 switch (builtin_index)
9273 case BUILT_IN_ISINF:
9274 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9275 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9277 if (TREE_CODE (arg) == REAL_CST)
9279 r = TREE_REAL_CST (arg);
9280 if (real_isinf (&r))
9281 return real_compare (GT_EXPR, &r, &dconst0)
9282 ? integer_one_node : integer_minus_one_node;
9284 return integer_zero_node;
9289 case BUILT_IN_ISINF_SIGN:
9291 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9292 /* In a boolean context, GCC will fold the inner COND_EXPR to
9293 1. So e.g. "if (isinf_sign(x))" would be folded to just
9294 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9295 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9296 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9297 tree tmp = NULL_TREE;
9299 arg = builtin_save_expr (arg);
9301 if (signbit_fn && isinf_fn)
9303 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9304 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9306 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9307 signbit_call, integer_zero_node);
9308 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9309 isinf_call, integer_zero_node);
9311 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9312 integer_minus_one_node, integer_one_node);
9313 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9321 case BUILT_IN_ISFINITE:
9322 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9323 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9324 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9326 if (TREE_CODE (arg) == REAL_CST)
9328 r = TREE_REAL_CST (arg);
9329 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9334 case BUILT_IN_ISNAN:
9335 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9336 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9338 if (TREE_CODE (arg) == REAL_CST)
9340 r = TREE_REAL_CST (arg);
9341 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9344 arg = builtin_save_expr (arg);
9345 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9352 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9353 This builtin will generate code to return the appropriate floating
9354 point classification depending on the value of the floating point
9355 number passed in. The possible return values must be supplied as
9356 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9357 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9358 one floating point argument which is "type generic". */
9361 fold_builtin_fpclassify (location_t loc, tree exp)
9363 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9364 arg, type, res, tmp;
9365 enum machine_mode mode;
9369 /* Verify the required arguments in the original call. */
9370 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9371 INTEGER_TYPE, INTEGER_TYPE,
9372 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9375 fp_nan = CALL_EXPR_ARG (exp, 0);
9376 fp_infinite = CALL_EXPR_ARG (exp, 1);
9377 fp_normal = CALL_EXPR_ARG (exp, 2);
9378 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9379 fp_zero = CALL_EXPR_ARG (exp, 4);
9380 arg = CALL_EXPR_ARG (exp, 5);
9381 type = TREE_TYPE (arg);
9382 mode = TYPE_MODE (type);
9383 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9387 (fabs(x) == Inf ? FP_INFINITE :
9388 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9389 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9391 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9392 build_real (type, dconst0));
9393 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9394 tmp, fp_zero, fp_subnormal);
9396 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9397 real_from_string (&r, buf);
9398 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9399 arg, build_real (type, r));
9400 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9402 if (HONOR_INFINITIES (mode))
9405 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9406 build_real (type, r));
9407 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9411 if (HONOR_NANS (mode))
9413 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9414 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9420 /* Fold a call to an unordered comparison function such as
9421 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9422 being called and ARG0 and ARG1 are the arguments for the call.
9423 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9424 the opposite of the desired result. UNORDERED_CODE is used
9425 for modes that can hold NaNs and ORDERED_CODE is used for
9429 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9430 enum tree_code unordered_code,
9431 enum tree_code ordered_code)
9433 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9434 enum tree_code code;
9436 enum tree_code code0, code1;
9437 tree cmp_type = NULL_TREE;
9439 type0 = TREE_TYPE (arg0);
9440 type1 = TREE_TYPE (arg1);
9442 code0 = TREE_CODE (type0);
9443 code1 = TREE_CODE (type1);
9445 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9446 /* Choose the wider of two real types. */
9447 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9449 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9451 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9454 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9455 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9457 if (unordered_code == UNORDERED_EXPR)
9459 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9460 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9461 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9464 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9466 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9467 fold_build2_loc (loc, code, type, arg0, arg1));
9470 /* Fold a call to built-in function FNDECL with 0 arguments.
9471 IGNORE is true if the result of the function call is ignored. This
9472 function returns NULL_TREE if no simplification was possible. */
9475 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9477 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9478 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9481 CASE_FLT_FN (BUILT_IN_INF):
9482 case BUILT_IN_INFD32:
9483 case BUILT_IN_INFD64:
9484 case BUILT_IN_INFD128:
9485 return fold_builtin_inf (loc, type, true);
9487 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9488 return fold_builtin_inf (loc, type, false);
9490 case BUILT_IN_CLASSIFY_TYPE:
9491 return fold_builtin_classify_type (NULL_TREE);
9499 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9500 IGNORE is true if the result of the function call is ignored. This
9501 function returns NULL_TREE if no simplification was possible. */
9504 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9506 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9507 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9510 case BUILT_IN_CONSTANT_P:
9512 tree val = fold_builtin_constant_p (arg0);
9514 /* Gimplification will pull the CALL_EXPR for the builtin out of
9515 an if condition. When not optimizing, we'll not CSE it back.
9516 To avoid link error types of regressions, return false now. */
9517 if (!val && !optimize)
9518 val = integer_zero_node;
9523 case BUILT_IN_CLASSIFY_TYPE:
9524 return fold_builtin_classify_type (arg0);
9526 case BUILT_IN_STRLEN:
9527 return fold_builtin_strlen (loc, type, arg0);
9529 CASE_FLT_FN (BUILT_IN_FABS):
9530 return fold_builtin_fabs (loc, arg0, type);
9534 case BUILT_IN_LLABS:
9535 case BUILT_IN_IMAXABS:
9536 return fold_builtin_abs (loc, arg0, type);
9538 CASE_FLT_FN (BUILT_IN_CONJ):
9539 if (validate_arg (arg0, COMPLEX_TYPE)
9540 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9541 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9544 CASE_FLT_FN (BUILT_IN_CREAL):
9545 if (validate_arg (arg0, COMPLEX_TYPE)
9546 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9547 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9550 CASE_FLT_FN (BUILT_IN_CIMAG):
9551 if (validate_arg (arg0, COMPLEX_TYPE)
9552 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9553 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9556 CASE_FLT_FN (BUILT_IN_CCOS):
9557 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9559 CASE_FLT_FN (BUILT_IN_CCOSH):
9560 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9562 CASE_FLT_FN (BUILT_IN_CPROJ):
9563 return fold_builtin_cproj(loc, arg0, type);
9565 CASE_FLT_FN (BUILT_IN_CSIN):
9566 if (validate_arg (arg0, COMPLEX_TYPE)
9567 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9568 return do_mpc_arg1 (arg0, type, mpc_sin);
9571 CASE_FLT_FN (BUILT_IN_CSINH):
9572 if (validate_arg (arg0, COMPLEX_TYPE)
9573 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9574 return do_mpc_arg1 (arg0, type, mpc_sinh);
9577 CASE_FLT_FN (BUILT_IN_CTAN):
9578 if (validate_arg (arg0, COMPLEX_TYPE)
9579 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9580 return do_mpc_arg1 (arg0, type, mpc_tan);
9583 CASE_FLT_FN (BUILT_IN_CTANH):
9584 if (validate_arg (arg0, COMPLEX_TYPE)
9585 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9586 return do_mpc_arg1 (arg0, type, mpc_tanh);
9589 CASE_FLT_FN (BUILT_IN_CLOG):
9590 if (validate_arg (arg0, COMPLEX_TYPE)
9591 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9592 return do_mpc_arg1 (arg0, type, mpc_log);
9595 CASE_FLT_FN (BUILT_IN_CSQRT):
9596 if (validate_arg (arg0, COMPLEX_TYPE)
9597 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9598 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9601 CASE_FLT_FN (BUILT_IN_CASIN):
9602 if (validate_arg (arg0, COMPLEX_TYPE)
9603 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9604 return do_mpc_arg1 (arg0, type, mpc_asin);
9607 CASE_FLT_FN (BUILT_IN_CACOS):
9608 if (validate_arg (arg0, COMPLEX_TYPE)
9609 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9610 return do_mpc_arg1 (arg0, type, mpc_acos);
9613 CASE_FLT_FN (BUILT_IN_CATAN):
9614 if (validate_arg (arg0, COMPLEX_TYPE)
9615 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9616 return do_mpc_arg1 (arg0, type, mpc_atan);
9619 CASE_FLT_FN (BUILT_IN_CASINH):
9620 if (validate_arg (arg0, COMPLEX_TYPE)
9621 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9622 return do_mpc_arg1 (arg0, type, mpc_asinh);
9625 CASE_FLT_FN (BUILT_IN_CACOSH):
9626 if (validate_arg (arg0, COMPLEX_TYPE)
9627 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9628 return do_mpc_arg1 (arg0, type, mpc_acosh);
9631 CASE_FLT_FN (BUILT_IN_CATANH):
9632 if (validate_arg (arg0, COMPLEX_TYPE)
9633 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9634 return do_mpc_arg1 (arg0, type, mpc_atanh);
9637 CASE_FLT_FN (BUILT_IN_CABS):
9638 return fold_builtin_cabs (loc, arg0, type, fndecl);
9640 CASE_FLT_FN (BUILT_IN_CARG):
9641 return fold_builtin_carg (loc, arg0, type);
9643 CASE_FLT_FN (BUILT_IN_SQRT):
9644 return fold_builtin_sqrt (loc, arg0, type);
9646 CASE_FLT_FN (BUILT_IN_CBRT):
9647 return fold_builtin_cbrt (loc, arg0, type);
9649 CASE_FLT_FN (BUILT_IN_ASIN):
9650 if (validate_arg (arg0, REAL_TYPE))
9651 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9652 &dconstm1, &dconst1, true);
9655 CASE_FLT_FN (BUILT_IN_ACOS):
9656 if (validate_arg (arg0, REAL_TYPE))
9657 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9658 &dconstm1, &dconst1, true);
9661 CASE_FLT_FN (BUILT_IN_ATAN):
9662 if (validate_arg (arg0, REAL_TYPE))
9663 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9666 CASE_FLT_FN (BUILT_IN_ASINH):
9667 if (validate_arg (arg0, REAL_TYPE))
9668 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9671 CASE_FLT_FN (BUILT_IN_ACOSH):
9672 if (validate_arg (arg0, REAL_TYPE))
9673 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9674 &dconst1, NULL, true);
9677 CASE_FLT_FN (BUILT_IN_ATANH):
9678 if (validate_arg (arg0, REAL_TYPE))
9679 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9680 &dconstm1, &dconst1, false);
9683 CASE_FLT_FN (BUILT_IN_SIN):
9684 if (validate_arg (arg0, REAL_TYPE))
9685 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9688 CASE_FLT_FN (BUILT_IN_COS):
9689 return fold_builtin_cos (loc, arg0, type, fndecl);
9691 CASE_FLT_FN (BUILT_IN_TAN):
9692 return fold_builtin_tan (arg0, type);
9694 CASE_FLT_FN (BUILT_IN_CEXP):
9695 return fold_builtin_cexp (loc, arg0, type);
9697 CASE_FLT_FN (BUILT_IN_CEXPI):
9698 if (validate_arg (arg0, REAL_TYPE))
9699 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9702 CASE_FLT_FN (BUILT_IN_SINH):
9703 if (validate_arg (arg0, REAL_TYPE))
9704 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9707 CASE_FLT_FN (BUILT_IN_COSH):
9708 return fold_builtin_cosh (loc, arg0, type, fndecl);
9710 CASE_FLT_FN (BUILT_IN_TANH):
9711 if (validate_arg (arg0, REAL_TYPE))
9712 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9715 CASE_FLT_FN (BUILT_IN_ERF):
9716 if (validate_arg (arg0, REAL_TYPE))
9717 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9720 CASE_FLT_FN (BUILT_IN_ERFC):
9721 if (validate_arg (arg0, REAL_TYPE))
9722 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9725 CASE_FLT_FN (BUILT_IN_TGAMMA):
9726 if (validate_arg (arg0, REAL_TYPE))
9727 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9730 CASE_FLT_FN (BUILT_IN_EXP):
9731 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9733 CASE_FLT_FN (BUILT_IN_EXP2):
9734 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9736 CASE_FLT_FN (BUILT_IN_EXP10):
9737 CASE_FLT_FN (BUILT_IN_POW10):
9738 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9740 CASE_FLT_FN (BUILT_IN_EXPM1):
9741 if (validate_arg (arg0, REAL_TYPE))
9742 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9745 CASE_FLT_FN (BUILT_IN_LOG):
9746 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9748 CASE_FLT_FN (BUILT_IN_LOG2):
9749 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9751 CASE_FLT_FN (BUILT_IN_LOG10):
9752 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9754 CASE_FLT_FN (BUILT_IN_LOG1P):
9755 if (validate_arg (arg0, REAL_TYPE))
9756 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9757 &dconstm1, NULL, false);
9760 CASE_FLT_FN (BUILT_IN_J0):
9761 if (validate_arg (arg0, REAL_TYPE))
9762 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9766 CASE_FLT_FN (BUILT_IN_J1):
9767 if (validate_arg (arg0, REAL_TYPE))
9768 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9772 CASE_FLT_FN (BUILT_IN_Y0):
9773 if (validate_arg (arg0, REAL_TYPE))
9774 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9775 &dconst0, NULL, false);
9778 CASE_FLT_FN (BUILT_IN_Y1):
9779 if (validate_arg (arg0, REAL_TYPE))
9780 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9781 &dconst0, NULL, false);
9784 CASE_FLT_FN (BUILT_IN_NAN):
9785 case BUILT_IN_NAND32:
9786 case BUILT_IN_NAND64:
9787 case BUILT_IN_NAND128:
9788 return fold_builtin_nan (arg0, type, true);
9790 CASE_FLT_FN (BUILT_IN_NANS):
9791 return fold_builtin_nan (arg0, type, false);
9793 CASE_FLT_FN (BUILT_IN_FLOOR):
9794 return fold_builtin_floor (loc, fndecl, arg0);
9796 CASE_FLT_FN (BUILT_IN_CEIL):
9797 return fold_builtin_ceil (loc, fndecl, arg0);
9799 CASE_FLT_FN (BUILT_IN_TRUNC):
9800 return fold_builtin_trunc (loc, fndecl, arg0);
9802 CASE_FLT_FN (BUILT_IN_ROUND):
9803 return fold_builtin_round (loc, fndecl, arg0);
9805 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9806 CASE_FLT_FN (BUILT_IN_RINT):
9807 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9809 CASE_FLT_FN (BUILT_IN_ICEIL):
9810 CASE_FLT_FN (BUILT_IN_LCEIL):
9811 CASE_FLT_FN (BUILT_IN_LLCEIL):
9812 CASE_FLT_FN (BUILT_IN_LFLOOR):
9813 CASE_FLT_FN (BUILT_IN_IFLOOR):
9814 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9815 CASE_FLT_FN (BUILT_IN_IROUND):
9816 CASE_FLT_FN (BUILT_IN_LROUND):
9817 CASE_FLT_FN (BUILT_IN_LLROUND):
9818 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9820 CASE_FLT_FN (BUILT_IN_IRINT):
9821 CASE_FLT_FN (BUILT_IN_LRINT):
9822 CASE_FLT_FN (BUILT_IN_LLRINT):
9823 return fold_fixed_mathfn (loc, fndecl, arg0);
9825 case BUILT_IN_BSWAP32:
9826 case BUILT_IN_BSWAP64:
9827 return fold_builtin_bswap (fndecl, arg0);
9829 CASE_INT_FN (BUILT_IN_FFS):
9830 CASE_INT_FN (BUILT_IN_CLZ):
9831 CASE_INT_FN (BUILT_IN_CTZ):
9832 CASE_INT_FN (BUILT_IN_CLRSB):
9833 CASE_INT_FN (BUILT_IN_POPCOUNT):
9834 CASE_INT_FN (BUILT_IN_PARITY):
9835 return fold_builtin_bitop (fndecl, arg0);
9837 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9838 return fold_builtin_signbit (loc, arg0, type);
9840 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9841 return fold_builtin_significand (loc, arg0, type);
9843 CASE_FLT_FN (BUILT_IN_ILOGB):
9844 CASE_FLT_FN (BUILT_IN_LOGB):
9845 return fold_builtin_logb (loc, arg0, type);
9847 case BUILT_IN_ISASCII:
9848 return fold_builtin_isascii (loc, arg0);
9850 case BUILT_IN_TOASCII:
9851 return fold_builtin_toascii (loc, arg0);
9853 case BUILT_IN_ISDIGIT:
9854 return fold_builtin_isdigit (loc, arg0);
9856 CASE_FLT_FN (BUILT_IN_FINITE):
9857 case BUILT_IN_FINITED32:
9858 case BUILT_IN_FINITED64:
9859 case BUILT_IN_FINITED128:
9860 case BUILT_IN_ISFINITE:
9862 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9865 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9868 CASE_FLT_FN (BUILT_IN_ISINF):
9869 case BUILT_IN_ISINFD32:
9870 case BUILT_IN_ISINFD64:
9871 case BUILT_IN_ISINFD128:
9873 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9876 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9879 case BUILT_IN_ISNORMAL:
9880 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9882 case BUILT_IN_ISINF_SIGN:
9883 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9885 CASE_FLT_FN (BUILT_IN_ISNAN):
9886 case BUILT_IN_ISNAND32:
9887 case BUILT_IN_ISNAND64:
9888 case BUILT_IN_ISNAND128:
9889 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9891 case BUILT_IN_PRINTF:
9892 case BUILT_IN_PRINTF_UNLOCKED:
9893 case BUILT_IN_VPRINTF:
9894 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
9897 if (integer_zerop (arg0))
9898 return build_empty_stmt (loc);
9909 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9910 IGNORE is true if the result of the function call is ignored. This
9911 function returns NULL_TREE if no simplification was possible. */
9914 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
9916 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9917 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9921 CASE_FLT_FN (BUILT_IN_JN):
9922 if (validate_arg (arg0, INTEGER_TYPE)
9923 && validate_arg (arg1, REAL_TYPE))
9924 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9927 CASE_FLT_FN (BUILT_IN_YN):
9928 if (validate_arg (arg0, INTEGER_TYPE)
9929 && validate_arg (arg1, REAL_TYPE))
9930 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9934 CASE_FLT_FN (BUILT_IN_DREM):
9935 CASE_FLT_FN (BUILT_IN_REMAINDER):
9936 if (validate_arg (arg0, REAL_TYPE)
9937 && validate_arg(arg1, REAL_TYPE))
9938 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9941 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9942 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9943 if (validate_arg (arg0, REAL_TYPE)
9944 && validate_arg(arg1, POINTER_TYPE))
9945 return do_mpfr_lgamma_r (arg0, arg1, type);
9948 CASE_FLT_FN (BUILT_IN_ATAN2):
9949 if (validate_arg (arg0, REAL_TYPE)
9950 && validate_arg(arg1, REAL_TYPE))
9951 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9954 CASE_FLT_FN (BUILT_IN_FDIM):
9955 if (validate_arg (arg0, REAL_TYPE)
9956 && validate_arg(arg1, REAL_TYPE))
9957 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9960 CASE_FLT_FN (BUILT_IN_HYPOT):
9961 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
9963 CASE_FLT_FN (BUILT_IN_CPOW):
9964 if (validate_arg (arg0, COMPLEX_TYPE)
9965 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9966 && validate_arg (arg1, COMPLEX_TYPE)
9967 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9968 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9971 CASE_FLT_FN (BUILT_IN_LDEXP):
9972 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9973 CASE_FLT_FN (BUILT_IN_SCALBN):
9974 CASE_FLT_FN (BUILT_IN_SCALBLN):
9975 return fold_builtin_load_exponent (loc, arg0, arg1,
9976 type, /*ldexp=*/false);
9978 CASE_FLT_FN (BUILT_IN_FREXP):
9979 return fold_builtin_frexp (loc, arg0, arg1, type);
9981 CASE_FLT_FN (BUILT_IN_MODF):
9982 return fold_builtin_modf (loc, arg0, arg1, type);
9984 case BUILT_IN_BZERO:
9985 return fold_builtin_bzero (loc, arg0, arg1, ignore);
9987 case BUILT_IN_FPUTS:
9988 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
9990 case BUILT_IN_FPUTS_UNLOCKED:
9991 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
9993 case BUILT_IN_STRSTR:
9994 return fold_builtin_strstr (loc, arg0, arg1, type);
9996 case BUILT_IN_STRCAT:
9997 return fold_builtin_strcat (loc, arg0, arg1);
9999 case BUILT_IN_STRSPN:
10000 return fold_builtin_strspn (loc, arg0, arg1);
10002 case BUILT_IN_STRCSPN:
10003 return fold_builtin_strcspn (loc, arg0, arg1);
10005 case BUILT_IN_STRCHR:
10006 case BUILT_IN_INDEX:
10007 return fold_builtin_strchr (loc, arg0, arg1, type);
10009 case BUILT_IN_STRRCHR:
10010 case BUILT_IN_RINDEX:
10011 return fold_builtin_strrchr (loc, arg0, arg1, type);
10013 case BUILT_IN_STRCPY:
10014 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10016 case BUILT_IN_STPCPY:
10019 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10023 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10026 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10029 case BUILT_IN_STRCMP:
10030 return fold_builtin_strcmp (loc, arg0, arg1);
10032 case BUILT_IN_STRPBRK:
10033 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10035 case BUILT_IN_EXPECT:
10036 return fold_builtin_expect (loc, arg0, arg1);
10038 CASE_FLT_FN (BUILT_IN_POW):
10039 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10041 CASE_FLT_FN (BUILT_IN_POWI):
10042 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10044 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10045 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10047 CASE_FLT_FN (BUILT_IN_FMIN):
10048 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10050 CASE_FLT_FN (BUILT_IN_FMAX):
10051 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10053 case BUILT_IN_ISGREATER:
10054 return fold_builtin_unordered_cmp (loc, fndecl,
10055 arg0, arg1, UNLE_EXPR, LE_EXPR);
10056 case BUILT_IN_ISGREATEREQUAL:
10057 return fold_builtin_unordered_cmp (loc, fndecl,
10058 arg0, arg1, UNLT_EXPR, LT_EXPR);
10059 case BUILT_IN_ISLESS:
10060 return fold_builtin_unordered_cmp (loc, fndecl,
10061 arg0, arg1, UNGE_EXPR, GE_EXPR);
10062 case BUILT_IN_ISLESSEQUAL:
10063 return fold_builtin_unordered_cmp (loc, fndecl,
10064 arg0, arg1, UNGT_EXPR, GT_EXPR);
10065 case BUILT_IN_ISLESSGREATER:
10066 return fold_builtin_unordered_cmp (loc, fndecl,
10067 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10068 case BUILT_IN_ISUNORDERED:
10069 return fold_builtin_unordered_cmp (loc, fndecl,
10070 arg0, arg1, UNORDERED_EXPR,
10073 /* We do the folding for va_start in the expander. */
10074 case BUILT_IN_VA_START:
10077 case BUILT_IN_SPRINTF:
10078 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10080 case BUILT_IN_OBJECT_SIZE:
10081 return fold_builtin_object_size (arg0, arg1);
10083 case BUILT_IN_PRINTF:
10084 case BUILT_IN_PRINTF_UNLOCKED:
10085 case BUILT_IN_VPRINTF:
10086 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10088 case BUILT_IN_PRINTF_CHK:
10089 case BUILT_IN_VPRINTF_CHK:
10090 if (!validate_arg (arg0, INTEGER_TYPE)
10091 || TREE_SIDE_EFFECTS (arg0))
10094 return fold_builtin_printf (loc, fndecl,
10095 arg1, NULL_TREE, ignore, fcode);
10098 case BUILT_IN_FPRINTF:
10099 case BUILT_IN_FPRINTF_UNLOCKED:
10100 case BUILT_IN_VFPRINTF:
10101 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10110 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10111 and ARG2. IGNORE is true if the result of the function call is ignored.
10112 This function returns NULL_TREE if no simplification was possible. */
10115 fold_builtin_3 (location_t loc, tree fndecl,
10116 tree arg0, tree arg1, tree arg2, bool ignore)
10118 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10119 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10123 CASE_FLT_FN (BUILT_IN_SINCOS):
10124 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10126 CASE_FLT_FN (BUILT_IN_FMA):
10127 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10130 CASE_FLT_FN (BUILT_IN_REMQUO):
10131 if (validate_arg (arg0, REAL_TYPE)
10132 && validate_arg(arg1, REAL_TYPE)
10133 && validate_arg(arg2, POINTER_TYPE))
10134 return do_mpfr_remquo (arg0, arg1, arg2);
10137 case BUILT_IN_MEMSET:
10138 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10140 case BUILT_IN_BCOPY:
10141 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10142 void_type_node, true, /*endp=*/3);
10144 case BUILT_IN_MEMCPY:
10145 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10146 type, ignore, /*endp=*/0);
10148 case BUILT_IN_MEMPCPY:
10149 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10150 type, ignore, /*endp=*/1);
10152 case BUILT_IN_MEMMOVE:
10153 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10154 type, ignore, /*endp=*/3);
10156 case BUILT_IN_STRNCAT:
10157 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10159 case BUILT_IN_STRNCPY:
10160 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10162 case BUILT_IN_STRNCMP:
10163 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10165 case BUILT_IN_MEMCHR:
10166 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10168 case BUILT_IN_BCMP:
10169 case BUILT_IN_MEMCMP:
10170 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10172 case BUILT_IN_SPRINTF:
10173 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10175 case BUILT_IN_SNPRINTF:
10176 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10178 case BUILT_IN_STRCPY_CHK:
10179 case BUILT_IN_STPCPY_CHK:
10180 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10183 case BUILT_IN_STRCAT_CHK:
10184 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10186 case BUILT_IN_PRINTF_CHK:
10187 case BUILT_IN_VPRINTF_CHK:
10188 if (!validate_arg (arg0, INTEGER_TYPE)
10189 || TREE_SIDE_EFFECTS (arg0))
10192 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10195 case BUILT_IN_FPRINTF:
10196 case BUILT_IN_FPRINTF_UNLOCKED:
10197 case BUILT_IN_VFPRINTF:
10198 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10201 case BUILT_IN_FPRINTF_CHK:
10202 case BUILT_IN_VFPRINTF_CHK:
10203 if (!validate_arg (arg1, INTEGER_TYPE)
10204 || TREE_SIDE_EFFECTS (arg1))
10207 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10216 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10217 ARG2, and ARG3. IGNORE is true if the result of the function call is
10218 ignored. This function returns NULL_TREE if no simplification was
10222 fold_builtin_4 (location_t loc, tree fndecl,
10223 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10225 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10229 case BUILT_IN_MEMCPY_CHK:
10230 case BUILT_IN_MEMPCPY_CHK:
10231 case BUILT_IN_MEMMOVE_CHK:
10232 case BUILT_IN_MEMSET_CHK:
10233 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10235 DECL_FUNCTION_CODE (fndecl));
10237 case BUILT_IN_STRNCPY_CHK:
10238 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10240 case BUILT_IN_STRNCAT_CHK:
10241 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10243 case BUILT_IN_SNPRINTF:
10244 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10246 case BUILT_IN_FPRINTF_CHK:
10247 case BUILT_IN_VFPRINTF_CHK:
10248 if (!validate_arg (arg1, INTEGER_TYPE)
10249 || TREE_SIDE_EFFECTS (arg1))
10252 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10262 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10263 arguments, where NARGS <= 4. IGNORE is true if the result of the
10264 function call is ignored. This function returns NULL_TREE if no
10265 simplification was possible. Note that this only folds builtins with
10266 fixed argument patterns. Foldings that do varargs-to-varargs
10267 transformations, or that match calls with more than 4 arguments,
10268 need to be handled with fold_builtin_varargs instead. */
10270 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10273 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10275 tree ret = NULL_TREE;
10280 ret = fold_builtin_0 (loc, fndecl, ignore);
10283 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10286 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10289 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10292 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10300 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10301 SET_EXPR_LOCATION (ret, loc);
10302 TREE_NO_WARNING (ret) = 1;
10308 /* Builtins with folding operations that operate on "..." arguments
10309 need special handling; we need to store the arguments in a convenient
10310 data structure before attempting any folding. Fortunately there are
10311 only a few builtins that fall into this category. FNDECL is the
10312 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10313 result of the function call is ignored. */
10316 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10317 bool ignore ATTRIBUTE_UNUSED)
10319 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10320 tree ret = NULL_TREE;
10324 case BUILT_IN_SPRINTF_CHK:
10325 case BUILT_IN_VSPRINTF_CHK:
10326 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10329 case BUILT_IN_SNPRINTF_CHK:
10330 case BUILT_IN_VSNPRINTF_CHK:
10331 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10334 case BUILT_IN_FPCLASSIFY:
10335 ret = fold_builtin_fpclassify (loc, exp);
10343 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10344 SET_EXPR_LOCATION (ret, loc);
10345 TREE_NO_WARNING (ret) = 1;
10351 /* Return true if FNDECL shouldn't be folded right now.
10352 If a built-in function has an inline attribute always_inline
10353 wrapper, defer folding it after always_inline functions have
10354 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10355 might not be performed. */
10358 avoid_folding_inline_builtin (tree fndecl)
10360 return (DECL_DECLARED_INLINE_P (fndecl)
10361 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10363 && !cfun->always_inline_functions_inlined
10364 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10367 /* A wrapper function for builtin folding that prevents warnings for
10368 "statement without effect" and the like, caused by removing the
10369 call node earlier than the warning is generated. */
10372 fold_call_expr (location_t loc, tree exp, bool ignore)
10374 tree ret = NULL_TREE;
10375 tree fndecl = get_callee_fndecl (exp);
10377 && TREE_CODE (fndecl) == FUNCTION_DECL
10378 && DECL_BUILT_IN (fndecl)
10379 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10380 yet. Defer folding until we see all the arguments
10381 (after inlining). */
10382 && !CALL_EXPR_VA_ARG_PACK (exp))
10384 int nargs = call_expr_nargs (exp);
10386 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10387 instead last argument is __builtin_va_arg_pack (). Defer folding
10388 even in that case, until arguments are finalized. */
10389 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10391 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10393 && TREE_CODE (fndecl2) == FUNCTION_DECL
10394 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10395 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10399 if (avoid_folding_inline_builtin (fndecl))
10402 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10403 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10404 CALL_EXPR_ARGP (exp), ignore);
10407 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10409 tree *args = CALL_EXPR_ARGP (exp);
10410 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10413 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10421 /* Conveniently construct a function call expression. FNDECL names the
10422 function to be called and N arguments are passed in the array
10426 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10428 tree fntype = TREE_TYPE (fndecl);
10429 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10431 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10434 /* Conveniently construct a function call expression. FNDECL names the
10435 function to be called and the arguments are passed in the vector
10439 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10441 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10442 VEC_address (tree, vec));
10446 /* Conveniently construct a function call expression. FNDECL names the
10447 function to be called, N is the number of arguments, and the "..."
10448 parameters are the argument expressions. */
10451 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10454 tree *argarray = XALLOCAVEC (tree, n);
10458 for (i = 0; i < n; i++)
10459 argarray[i] = va_arg (ap, tree);
10461 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10464 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10465 varargs macros aren't supported by all bootstrap compilers. */
10468 build_call_expr (tree fndecl, int n, ...)
10471 tree *argarray = XALLOCAVEC (tree, n);
10475 for (i = 0; i < n; i++)
10476 argarray[i] = va_arg (ap, tree);
10478 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10481 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10482 N arguments are passed in the array ARGARRAY. */
10485 fold_builtin_call_array (location_t loc, tree type,
10490 tree ret = NULL_TREE;
10493 if (TREE_CODE (fn) == ADDR_EXPR)
10495 tree fndecl = TREE_OPERAND (fn, 0);
10496 if (TREE_CODE (fndecl) == FUNCTION_DECL
10497 && DECL_BUILT_IN (fndecl))
10499 /* If last argument is __builtin_va_arg_pack (), arguments to this
10500 function are not finalized yet. Defer folding until they are. */
10501 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10503 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10505 && TREE_CODE (fndecl2) == FUNCTION_DECL
10506 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10507 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10508 return build_call_array_loc (loc, type, fn, n, argarray);
10510 if (avoid_folding_inline_builtin (fndecl))
10511 return build_call_array_loc (loc, type, fn, n, argarray);
10512 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10514 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10518 return build_call_array_loc (loc, type, fn, n, argarray);
10520 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10522 /* First try the transformations that don't require consing up
10524 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10529 /* If we got this far, we need to build an exp. */
10530 exp = build_call_array_loc (loc, type, fn, n, argarray);
10531 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10532 return ret ? ret : exp;
10536 return build_call_array_loc (loc, type, fn, n, argarray);
10539 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10540 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10541 of arguments in ARGS to be omitted. OLDNARGS is the number of
10542 elements in ARGS. */
10545 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10546 int skip, tree fndecl, int n, va_list newargs)
10548 int nargs = oldnargs - skip + n;
10555 buffer = XALLOCAVEC (tree, nargs);
10556 for (i = 0; i < n; i++)
10557 buffer[i] = va_arg (newargs, tree);
10558 for (j = skip; j < oldnargs; j++, i++)
10559 buffer[i] = args[j];
10562 buffer = args + skip;
10564 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10567 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10568 list ARGS along with N new arguments specified as the "..."
10569 parameters. SKIP is the number of arguments in ARGS to be omitted.
10570 OLDNARGS is the number of elements in ARGS. */
10573 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10574 int skip, tree fndecl, int n, ...)
10580 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10586 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10587 along with N new arguments specified as the "..." parameters. SKIP
10588 is the number of arguments in EXP to be omitted. This function is used
10589 to do varargs-to-varargs transformations. */
10592 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10598 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10599 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10605 /* Validate a single argument ARG against a tree code CODE representing
10609 validate_arg (const_tree arg, enum tree_code code)
10613 else if (code == POINTER_TYPE)
10614 return POINTER_TYPE_P (TREE_TYPE (arg));
10615 else if (code == INTEGER_TYPE)
10616 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10617 return code == TREE_CODE (TREE_TYPE (arg));
10620 /* This function validates the types of a function call argument list
10621 against a specified list of tree_codes. If the last specifier is a 0,
10622 that represents an ellipses, otherwise the last specifier must be a
10625 This is the GIMPLE version of validate_arglist. Eventually we want to
10626 completely convert builtins.c to work from GIMPLEs and the tree based
10627 validate_arglist will then be removed. */
10630 validate_gimple_arglist (const_gimple call, ...)
10632 enum tree_code code;
10638 va_start (ap, call);
10643 code = (enum tree_code) va_arg (ap, int);
10647 /* This signifies an ellipses, any further arguments are all ok. */
10651 /* This signifies an endlink, if no arguments remain, return
10652 true, otherwise return false. */
10653 res = (i == gimple_call_num_args (call));
10656 /* If no parameters remain or the parameter's code does not
10657 match the specified code, return false. Otherwise continue
10658 checking any remaining arguments. */
10659 arg = gimple_call_arg (call, i++);
10660 if (!validate_arg (arg, code))
10667 /* We need gotos here since we can only have one VA_CLOSE in a
10675 /* This function validates the types of a function call argument list
10676 against a specified list of tree_codes. If the last specifier is a 0,
10677 that represents an ellipses, otherwise the last specifier must be a
10681 validate_arglist (const_tree callexpr, ...)
10683 enum tree_code code;
10686 const_call_expr_arg_iterator iter;
10689 va_start (ap, callexpr);
10690 init_const_call_expr_arg_iterator (callexpr, &iter);
10694 code = (enum tree_code) va_arg (ap, int);
10698 /* This signifies an ellipses, any further arguments are all ok. */
10702 /* This signifies an endlink, if no arguments remain, return
10703 true, otherwise return false. */
10704 res = !more_const_call_expr_args_p (&iter);
10707 /* If no parameters remain or the parameter's code does not
10708 match the specified code, return false. Otherwise continue
10709 checking any remaining arguments. */
10710 arg = next_const_call_expr_arg (&iter);
10711 if (!validate_arg (arg, code))
10718 /* We need gotos here since we can only have one VA_CLOSE in a
10726 /* Default target-specific builtin expander that does nothing. */
10729 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10730 rtx target ATTRIBUTE_UNUSED,
10731 rtx subtarget ATTRIBUTE_UNUSED,
10732 enum machine_mode mode ATTRIBUTE_UNUSED,
10733 int ignore ATTRIBUTE_UNUSED)
10738 /* Returns true is EXP represents data that would potentially reside
10739 in a readonly section. */
10742 readonly_data_expr (tree exp)
10746 if (TREE_CODE (exp) != ADDR_EXPR)
10749 exp = get_base_address (TREE_OPERAND (exp, 0));
10753 /* Make sure we call decl_readonly_section only for trees it
10754 can handle (since it returns true for everything it doesn't
10756 if (TREE_CODE (exp) == STRING_CST
10757 || TREE_CODE (exp) == CONSTRUCTOR
10758 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10759 return decl_readonly_section (exp, 0);
10764 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10765 to the call, and TYPE is its return type.
10767 Return NULL_TREE if no simplification was possible, otherwise return the
10768 simplified form of the call as a tree.
10770 The simplified form may be a constant or other expression which
10771 computes the same value, but in a more efficient manner (including
10772 calls to other builtin functions).
10774 The call may contain arguments which need to be evaluated, but
10775 which are not useful to determine the result of the call. In
10776 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10777 COMPOUND_EXPR will be an argument which must be evaluated.
10778 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10779 COMPOUND_EXPR in the chain will contain the tree for the simplified
10780 form of the builtin function call. */
10783 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10785 if (!validate_arg (s1, POINTER_TYPE)
10786 || !validate_arg (s2, POINTER_TYPE))
10791 const char *p1, *p2;
10793 p2 = c_getstr (s2);
10797 p1 = c_getstr (s1);
10800 const char *r = strstr (p1, p2);
10804 return build_int_cst (TREE_TYPE (s1), 0);
10806 /* Return an offset into the constant string argument. */
10807 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10808 return fold_convert_loc (loc, type, tem);
10811 /* The argument is const char *, and the result is char *, so we need
10812 a type conversion here to avoid a warning. */
10814 return fold_convert_loc (loc, type, s1);
10819 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10823 /* New argument list transforming strstr(s1, s2) to
10824 strchr(s1, s2[0]). */
10825 return build_call_expr_loc (loc, fn, 2, s1,
10826 build_int_cst (integer_type_node, p2[0]));
10830 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10831 the call, and TYPE is its return type.
10833 Return NULL_TREE if no simplification was possible, otherwise return the
10834 simplified form of the call as a tree.
10836 The simplified form may be a constant or other expression which
10837 computes the same value, but in a more efficient manner (including
10838 calls to other builtin functions).
10840 The call may contain arguments which need to be evaluated, but
10841 which are not useful to determine the result of the call. In
10842 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10843 COMPOUND_EXPR will be an argument which must be evaluated.
10844 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10845 COMPOUND_EXPR in the chain will contain the tree for the simplified
10846 form of the builtin function call. */
10849 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10851 if (!validate_arg (s1, POINTER_TYPE)
10852 || !validate_arg (s2, INTEGER_TYPE))
10858 if (TREE_CODE (s2) != INTEGER_CST)
10861 p1 = c_getstr (s1);
10868 if (target_char_cast (s2, &c))
10871 r = strchr (p1, c);
10874 return build_int_cst (TREE_TYPE (s1), 0);
10876 /* Return an offset into the constant string argument. */
10877 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10878 return fold_convert_loc (loc, type, tem);
10884 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10885 the call, and TYPE is its return type.
10887 Return NULL_TREE if no simplification was possible, otherwise return the
10888 simplified form of the call as a tree.
10890 The simplified form may be a constant or other expression which
10891 computes the same value, but in a more efficient manner (including
10892 calls to other builtin functions).
10894 The call may contain arguments which need to be evaluated, but
10895 which are not useful to determine the result of the call. In
10896 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10897 COMPOUND_EXPR will be an argument which must be evaluated.
10898 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10899 COMPOUND_EXPR in the chain will contain the tree for the simplified
10900 form of the builtin function call. */
10903 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10905 if (!validate_arg (s1, POINTER_TYPE)
10906 || !validate_arg (s2, INTEGER_TYPE))
10913 if (TREE_CODE (s2) != INTEGER_CST)
10916 p1 = c_getstr (s1);
10923 if (target_char_cast (s2, &c))
10926 r = strrchr (p1, c);
10929 return build_int_cst (TREE_TYPE (s1), 0);
10931 /* Return an offset into the constant string argument. */
10932 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10933 return fold_convert_loc (loc, type, tem);
10936 if (! integer_zerop (s2))
10939 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10943 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10944 return build_call_expr_loc (loc, fn, 2, s1, s2);
10948 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10949 to the call, and TYPE is its return type.
10951 Return NULL_TREE if no simplification was possible, otherwise return the
10952 simplified form of the call as a tree.
10954 The simplified form may be a constant or other expression which
10955 computes the same value, but in a more efficient manner (including
10956 calls to other builtin functions).
10958 The call may contain arguments which need to be evaluated, but
10959 which are not useful to determine the result of the call. In
10960 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10961 COMPOUND_EXPR will be an argument which must be evaluated.
10962 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10963 COMPOUND_EXPR in the chain will contain the tree for the simplified
10964 form of the builtin function call. */
10967 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10969 if (!validate_arg (s1, POINTER_TYPE)
10970 || !validate_arg (s2, POINTER_TYPE))
10975 const char *p1, *p2;
10977 p2 = c_getstr (s2);
10981 p1 = c_getstr (s1);
10984 const char *r = strpbrk (p1, p2);
10988 return build_int_cst (TREE_TYPE (s1), 0);
10990 /* Return an offset into the constant string argument. */
10991 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10992 return fold_convert_loc (loc, type, tem);
10996 /* strpbrk(x, "") == NULL.
10997 Evaluate and ignore s1 in case it had side-effects. */
10998 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11001 return NULL_TREE; /* Really call strpbrk. */
11003 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11007 /* New argument list transforming strpbrk(s1, s2) to
11008 strchr(s1, s2[0]). */
11009 return build_call_expr_loc (loc, fn, 2, s1,
11010 build_int_cst (integer_type_node, p2[0]));
11014 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11017 Return NULL_TREE if no simplification was possible, otherwise return the
11018 simplified form of the call as a tree.
11020 The simplified form may be a constant or other expression which
11021 computes the same value, but in a more efficient manner (including
11022 calls to other builtin functions).
11024 The call may contain arguments which need to be evaluated, but
11025 which are not useful to determine the result of the call. In
11026 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11027 COMPOUND_EXPR will be an argument which must be evaluated.
11028 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11029 COMPOUND_EXPR in the chain will contain the tree for the simplified
11030 form of the builtin function call. */
11033 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11035 if (!validate_arg (dst, POINTER_TYPE)
11036 || !validate_arg (src, POINTER_TYPE))
11040 const char *p = c_getstr (src);
11042 /* If the string length is zero, return the dst parameter. */
11043 if (p && *p == '\0')
11046 if (optimize_insn_for_speed_p ())
11048 /* See if we can store by pieces into (dst + strlen(dst)). */
11050 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11051 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11053 if (!strlen_fn || !strcpy_fn)
11056 /* If we don't have a movstr we don't want to emit an strcpy
11057 call. We have to do that if the length of the source string
11058 isn't computable (in that case we can use memcpy probably
11059 later expanding to a sequence of mov instructions). If we
11060 have movstr instructions we can emit strcpy calls. */
11063 tree len = c_strlen (src, 1);
11064 if (! len || TREE_SIDE_EFFECTS (len))
11068 /* Stabilize the argument list. */
11069 dst = builtin_save_expr (dst);
11071 /* Create strlen (dst). */
11072 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11073 /* Create (dst p+ strlen (dst)). */
11075 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11076 newdst = builtin_save_expr (newdst);
11078 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11079 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11085 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11086 arguments to the call.
11088 Return NULL_TREE if no simplification was possible, otherwise return the
11089 simplified form of the call as a tree.
11091 The simplified form may be a constant or other expression which
11092 computes the same value, but in a more efficient manner (including
11093 calls to other builtin functions).
11095 The call may contain arguments which need to be evaluated, but
11096 which are not useful to determine the result of the call. In
11097 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11098 COMPOUND_EXPR will be an argument which must be evaluated.
11099 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11100 COMPOUND_EXPR in the chain will contain the tree for the simplified
11101 form of the builtin function call. */
11104 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11106 if (!validate_arg (dst, POINTER_TYPE)
11107 || !validate_arg (src, POINTER_TYPE)
11108 || !validate_arg (len, INTEGER_TYPE))
11112 const char *p = c_getstr (src);
11114 /* If the requested length is zero, or the src parameter string
11115 length is zero, return the dst parameter. */
11116 if (integer_zerop (len) || (p && *p == '\0'))
11117 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11119 /* If the requested len is greater than or equal to the string
11120 length, call strcat. */
11121 if (TREE_CODE (len) == INTEGER_CST && p
11122 && compare_tree_int (len, strlen (p)) >= 0)
11124 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11126 /* If the replacement _DECL isn't initialized, don't do the
11131 return build_call_expr_loc (loc, fn, 2, dst, src);
11137 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11140 Return NULL_TREE if no simplification was possible, otherwise return the
11141 simplified form of the call as a tree.
11143 The simplified form may be a constant or other expression which
11144 computes the same value, but in a more efficient manner (including
11145 calls to other builtin functions).
11147 The call may contain arguments which need to be evaluated, but
11148 which are not useful to determine the result of the call. In
11149 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11150 COMPOUND_EXPR will be an argument which must be evaluated.
11151 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11152 COMPOUND_EXPR in the chain will contain the tree for the simplified
11153 form of the builtin function call. */
11156 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11158 if (!validate_arg (s1, POINTER_TYPE)
11159 || !validate_arg (s2, POINTER_TYPE))
11163 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11165 /* If both arguments are constants, evaluate at compile-time. */
11168 const size_t r = strspn (p1, p2);
11169 return size_int (r);
11172 /* If either argument is "", return NULL_TREE. */
11173 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11174 /* Evaluate and ignore both arguments in case either one has
11176 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11182 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11185 Return NULL_TREE if no simplification was possible, otherwise return the
11186 simplified form of the call as a tree.
11188 The simplified form may be a constant or other expression which
11189 computes the same value, but in a more efficient manner (including
11190 calls to other builtin functions).
11192 The call may contain arguments which need to be evaluated, but
11193 which are not useful to determine the result of the call. In
11194 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11195 COMPOUND_EXPR will be an argument which must be evaluated.
11196 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11197 COMPOUND_EXPR in the chain will contain the tree for the simplified
11198 form of the builtin function call. */
11201 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11203 if (!validate_arg (s1, POINTER_TYPE)
11204 || !validate_arg (s2, POINTER_TYPE))
11208 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11210 /* If both arguments are constants, evaluate at compile-time. */
11213 const size_t r = strcspn (p1, p2);
11214 return size_int (r);
11217 /* If the first argument is "", return NULL_TREE. */
11218 if (p1 && *p1 == '\0')
11220 /* Evaluate and ignore argument s2 in case it has
11222 return omit_one_operand_loc (loc, size_type_node,
11223 size_zero_node, s2);
11226 /* If the second argument is "", return __builtin_strlen(s1). */
11227 if (p2 && *p2 == '\0')
11229 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11231 /* If the replacement _DECL isn't initialized, don't do the
11236 return build_call_expr_loc (loc, fn, 1, s1);
11242 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11243 to the call. IGNORE is true if the value returned
11244 by the builtin will be ignored. UNLOCKED is true is true if this
11245 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11246 the known length of the string. Return NULL_TREE if no simplification
11250 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11251 bool ignore, bool unlocked, tree len)
11253 /* If we're using an unlocked function, assume the other unlocked
11254 functions exist explicitly. */
11255 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11256 : implicit_built_in_decls[BUILT_IN_FPUTC];
11257 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11258 : implicit_built_in_decls[BUILT_IN_FWRITE];
11260 /* If the return value is used, don't do the transformation. */
11264 /* Verify the arguments in the original call. */
11265 if (!validate_arg (arg0, POINTER_TYPE)
11266 || !validate_arg (arg1, POINTER_TYPE))
11270 len = c_strlen (arg0, 0);
11272 /* Get the length of the string passed to fputs. If the length
11273 can't be determined, punt. */
11275 || TREE_CODE (len) != INTEGER_CST)
11278 switch (compare_tree_int (len, 1))
11280 case -1: /* length is 0, delete the call entirely . */
11281 return omit_one_operand_loc (loc, integer_type_node,
11282 integer_zero_node, arg1);;
11284 case 0: /* length is 1, call fputc. */
11286 const char *p = c_getstr (arg0);
11291 return build_call_expr_loc (loc, fn_fputc, 2,
11293 (integer_type_node, p[0]), arg1);
11299 case 1: /* length is greater than 1, call fwrite. */
11301 /* If optimizing for size keep fputs. */
11302 if (optimize_function_for_size_p (cfun))
11304 /* New argument list transforming fputs(string, stream) to
11305 fwrite(string, 1, len, stream). */
11307 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11308 size_one_node, len, arg1);
11313 gcc_unreachable ();
11318 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11319 produced. False otherwise. This is done so that we don't output the error
11320 or warning twice or three times. */
11323 fold_builtin_next_arg (tree exp, bool va_start_p)
11325 tree fntype = TREE_TYPE (current_function_decl);
11326 int nargs = call_expr_nargs (exp);
11329 if (!stdarg_p (fntype))
11331 error ("%<va_start%> used in function with fixed args");
11337 if (va_start_p && (nargs != 2))
11339 error ("wrong number of arguments to function %<va_start%>");
11342 arg = CALL_EXPR_ARG (exp, 1);
11344 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11345 when we checked the arguments and if needed issued a warning. */
11350 /* Evidently an out of date version of <stdarg.h>; can't validate
11351 va_start's second argument, but can still work as intended. */
11352 warning (0, "%<__builtin_next_arg%> called without an argument");
11355 else if (nargs > 1)
11357 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11360 arg = CALL_EXPR_ARG (exp, 0);
11363 if (TREE_CODE (arg) == SSA_NAME)
11364 arg = SSA_NAME_VAR (arg);
11366 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11367 or __builtin_next_arg (0) the first time we see it, after checking
11368 the arguments and if needed issuing a warning. */
11369 if (!integer_zerop (arg))
11371 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11373 /* Strip off all nops for the sake of the comparison. This
11374 is not quite the same as STRIP_NOPS. It does more.
11375 We must also strip off INDIRECT_EXPR for C++ reference
11377 while (CONVERT_EXPR_P (arg)
11378 || TREE_CODE (arg) == INDIRECT_REF)
11379 arg = TREE_OPERAND (arg, 0);
11380 if (arg != last_parm)
11382 /* FIXME: Sometimes with the tree optimizers we can get the
11383 not the last argument even though the user used the last
11384 argument. We just warn and set the arg to be the last
11385 argument so that we will get wrong-code because of
11387 warning (0, "second parameter of %<va_start%> not last named argument");
11390 /* Undefined by C99 7.15.1.4p4 (va_start):
11391 "If the parameter parmN is declared with the register storage
11392 class, with a function or array type, or with a type that is
11393 not compatible with the type that results after application of
11394 the default argument promotions, the behavior is undefined."
11396 else if (DECL_REGISTER (arg))
11397 warning (0, "undefined behaviour when second parameter of "
11398 "%<va_start%> is declared with %<register%> storage");
11400 /* We want to verify the second parameter just once before the tree
11401 optimizers are run and then avoid keeping it in the tree,
11402 as otherwise we could warn even for correct code like:
11403 void foo (int i, ...)
11404 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11406 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11408 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11414 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11415 ORIG may be null if this is a 2-argument call. We don't attempt to
11416 simplify calls with more than 3 arguments.
11418 Return NULL_TREE if no simplification was possible, otherwise return the
11419 simplified form of the call as a tree. If IGNORED is true, it means that
11420 the caller does not use the returned value of the function. */
11423 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11424 tree orig, int ignored)
11427 const char *fmt_str = NULL;
11429 /* Verify the required arguments in the original call. We deal with two
11430 types of sprintf() calls: 'sprintf (str, fmt)' and
11431 'sprintf (dest, "%s", orig)'. */
11432 if (!validate_arg (dest, POINTER_TYPE)
11433 || !validate_arg (fmt, POINTER_TYPE))
11435 if (orig && !validate_arg (orig, POINTER_TYPE))
11438 /* Check whether the format is a literal string constant. */
11439 fmt_str = c_getstr (fmt);
11440 if (fmt_str == NULL)
11444 retval = NULL_TREE;
11446 if (!init_target_chars ())
11449 /* If the format doesn't contain % args or %%, use strcpy. */
11450 if (strchr (fmt_str, target_percent) == NULL)
11452 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11457 /* Don't optimize sprintf (buf, "abc", ptr++). */
11461 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11462 'format' is known to contain no % formats. */
11463 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11465 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11468 /* If the format is "%s", use strcpy if the result isn't used. */
11469 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11472 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11477 /* Don't crash on sprintf (str1, "%s"). */
11481 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11484 retval = c_strlen (orig, 1);
11485 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11488 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11491 if (call && retval)
11493 retval = fold_convert_loc
11494 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11496 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11502 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11503 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11504 attempt to simplify calls with more than 4 arguments.
11506 Return NULL_TREE if no simplification was possible, otherwise return the
11507 simplified form of the call as a tree. If IGNORED is true, it means that
11508 the caller does not use the returned value of the function. */
11511 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
11512 tree orig, int ignored)
11515 const char *fmt_str = NULL;
11516 unsigned HOST_WIDE_INT destlen;
11518 /* Verify the required arguments in the original call. We deal with two
11519 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
11520 'snprintf (dest, cst, "%s", orig)'. */
11521 if (!validate_arg (dest, POINTER_TYPE)
11522 || !validate_arg (destsize, INTEGER_TYPE)
11523 || !validate_arg (fmt, POINTER_TYPE))
11525 if (orig && !validate_arg (orig, POINTER_TYPE))
11528 if (!host_integerp (destsize, 1))
11531 /* Check whether the format is a literal string constant. */
11532 fmt_str = c_getstr (fmt);
11533 if (fmt_str == NULL)
11537 retval = NULL_TREE;
11539 if (!init_target_chars ())
11542 destlen = tree_low_cst (destsize, 1);
11544 /* If the format doesn't contain % args or %%, use strcpy. */
11545 if (strchr (fmt_str, target_percent) == NULL)
11547 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11548 size_t len = strlen (fmt_str);
11550 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
11554 /* We could expand this as
11555 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
11557 memcpy (str, fmt_with_nul_at_cstm1, cst);
11558 but in the former case that might increase code size
11559 and in the latter case grow .rodata section too much.
11560 So punt for now. */
11561 if (len >= destlen)
11567 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
11568 'format' is known to contain no % formats and
11569 strlen (fmt) < cst. */
11570 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11573 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11576 /* If the format is "%s", use strcpy if the result isn't used. */
11577 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11579 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11580 unsigned HOST_WIDE_INT origlen;
11582 /* Don't crash on snprintf (str1, cst, "%s"). */
11586 retval = c_strlen (orig, 1);
11587 if (!retval || !host_integerp (retval, 1))
11590 origlen = tree_low_cst (retval, 1);
11591 /* We could expand this as
11592 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
11594 memcpy (str1, str2_with_nul_at_cstm1, cst);
11595 but in the former case that might increase code size
11596 and in the latter case grow .rodata section too much.
11597 So punt for now. */
11598 if (origlen >= destlen)
11601 /* Convert snprintf (str1, cst, "%s", str2) into
11602 strcpy (str1, str2) if strlen (str2) < cst. */
11606 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11609 retval = NULL_TREE;
11612 if (call && retval)
11614 tree fn = built_in_decls[BUILT_IN_SNPRINTF];
11615 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
11616 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11622 /* Expand a call EXP to __builtin_object_size. */
11625 expand_builtin_object_size (tree exp)
11628 int object_size_type;
11629 tree fndecl = get_callee_fndecl (exp);
11631 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11633 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11635 expand_builtin_trap ();
11639 ost = CALL_EXPR_ARG (exp, 1);
11642 if (TREE_CODE (ost) != INTEGER_CST
11643 || tree_int_cst_sgn (ost) < 0
11644 || compare_tree_int (ost, 3) > 0)
11646 error ("%Klast argument of %D is not integer constant between 0 and 3",
11648 expand_builtin_trap ();
11652 object_size_type = tree_low_cst (ost, 0);
11654 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11657 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11658 FCODE is the BUILT_IN_* to use.
11659 Return NULL_RTX if we failed; the caller should emit a normal call,
11660 otherwise try to get the result in TARGET, if convenient (and in
11661 mode MODE if that's convenient). */
11664 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11665 enum built_in_function fcode)
11667 tree dest, src, len, size;
11669 if (!validate_arglist (exp,
11671 fcode == BUILT_IN_MEMSET_CHK
11672 ? INTEGER_TYPE : POINTER_TYPE,
11673 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11676 dest = CALL_EXPR_ARG (exp, 0);
11677 src = CALL_EXPR_ARG (exp, 1);
11678 len = CALL_EXPR_ARG (exp, 2);
11679 size = CALL_EXPR_ARG (exp, 3);
11681 if (! host_integerp (size, 1))
11684 if (host_integerp (len, 1) || integer_all_onesp (size))
11688 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11690 warning_at (tree_nonartificial_location (exp),
11691 0, "%Kcall to %D will always overflow destination buffer",
11692 exp, get_callee_fndecl (exp));
11697 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11698 mem{cpy,pcpy,move,set} is available. */
11701 case BUILT_IN_MEMCPY_CHK:
11702 fn = built_in_decls[BUILT_IN_MEMCPY];
11704 case BUILT_IN_MEMPCPY_CHK:
11705 fn = built_in_decls[BUILT_IN_MEMPCPY];
11707 case BUILT_IN_MEMMOVE_CHK:
11708 fn = built_in_decls[BUILT_IN_MEMMOVE];
11710 case BUILT_IN_MEMSET_CHK:
11711 fn = built_in_decls[BUILT_IN_MEMSET];
11720 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11721 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11722 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11723 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11725 else if (fcode == BUILT_IN_MEMSET_CHK)
11729 unsigned int dest_align = get_pointer_alignment (dest);
11731 /* If DEST is not a pointer type, call the normal function. */
11732 if (dest_align == 0)
11735 /* If SRC and DEST are the same (and not volatile), do nothing. */
11736 if (operand_equal_p (src, dest, 0))
11740 if (fcode != BUILT_IN_MEMPCPY_CHK)
11742 /* Evaluate and ignore LEN in case it has side-effects. */
11743 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11744 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11747 expr = fold_build_pointer_plus (dest, len);
11748 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11751 /* __memmove_chk special case. */
11752 if (fcode == BUILT_IN_MEMMOVE_CHK)
11754 unsigned int src_align = get_pointer_alignment (src);
11756 if (src_align == 0)
11759 /* If src is categorized for a readonly section we can use
11760 normal __memcpy_chk. */
11761 if (readonly_data_expr (src))
11763 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11766 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11767 dest, src, len, size);
11768 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11769 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11770 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11777 /* Emit warning if a buffer overflow is detected at compile time. */
11780 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11784 location_t loc = tree_nonartificial_location (exp);
11788 case BUILT_IN_STRCPY_CHK:
11789 case BUILT_IN_STPCPY_CHK:
11790 /* For __strcat_chk the warning will be emitted only if overflowing
11791 by at least strlen (dest) + 1 bytes. */
11792 case BUILT_IN_STRCAT_CHK:
11793 len = CALL_EXPR_ARG (exp, 1);
11794 size = CALL_EXPR_ARG (exp, 2);
11797 case BUILT_IN_STRNCAT_CHK:
11798 case BUILT_IN_STRNCPY_CHK:
11799 len = CALL_EXPR_ARG (exp, 2);
11800 size = CALL_EXPR_ARG (exp, 3);
11802 case BUILT_IN_SNPRINTF_CHK:
11803 case BUILT_IN_VSNPRINTF_CHK:
11804 len = CALL_EXPR_ARG (exp, 1);
11805 size = CALL_EXPR_ARG (exp, 3);
11808 gcc_unreachable ();
11814 if (! host_integerp (size, 1) || integer_all_onesp (size))
11819 len = c_strlen (len, 1);
11820 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11823 else if (fcode == BUILT_IN_STRNCAT_CHK)
11825 tree src = CALL_EXPR_ARG (exp, 1);
11826 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11828 src = c_strlen (src, 1);
11829 if (! src || ! host_integerp (src, 1))
11831 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11832 exp, get_callee_fndecl (exp));
11835 else if (tree_int_cst_lt (src, size))
11838 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11841 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11842 exp, get_callee_fndecl (exp));
11845 /* Emit warning if a buffer overflow is detected at compile time
11846 in __sprintf_chk/__vsprintf_chk calls. */
11849 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11851 tree size, len, fmt;
11852 const char *fmt_str;
11853 int nargs = call_expr_nargs (exp);
11855 /* Verify the required arguments in the original call. */
11859 size = CALL_EXPR_ARG (exp, 2);
11860 fmt = CALL_EXPR_ARG (exp, 3);
11862 if (! host_integerp (size, 1) || integer_all_onesp (size))
11865 /* Check whether the format is a literal string constant. */
11866 fmt_str = c_getstr (fmt);
11867 if (fmt_str == NULL)
11870 if (!init_target_chars ())
11873 /* If the format doesn't contain % args or %%, we know its size. */
11874 if (strchr (fmt_str, target_percent) == 0)
11875 len = build_int_cstu (size_type_node, strlen (fmt_str));
11876 /* If the format is "%s" and first ... argument is a string literal,
11878 else if (fcode == BUILT_IN_SPRINTF_CHK
11879 && strcmp (fmt_str, target_percent_s) == 0)
11885 arg = CALL_EXPR_ARG (exp, 4);
11886 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11889 len = c_strlen (arg, 1);
11890 if (!len || ! host_integerp (len, 1))
11896 if (! tree_int_cst_lt (len, size))
11897 warning_at (tree_nonartificial_location (exp),
11898 0, "%Kcall to %D will always overflow destination buffer",
11899 exp, get_callee_fndecl (exp));
11902 /* Emit warning if a free is called with address of a variable. */
11905 maybe_emit_free_warning (tree exp)
11907 tree arg = CALL_EXPR_ARG (exp, 0);
11910 if (TREE_CODE (arg) != ADDR_EXPR)
11913 arg = get_base_address (TREE_OPERAND (arg, 0));
11914 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11917 if (SSA_VAR_P (arg))
11918 warning_at (tree_nonartificial_location (exp),
11919 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11921 warning_at (tree_nonartificial_location (exp),
11922 0, "%Kattempt to free a non-heap object", exp);
11925 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11929 fold_builtin_object_size (tree ptr, tree ost)
11931 unsigned HOST_WIDE_INT bytes;
11932 int object_size_type;
11934 if (!validate_arg (ptr, POINTER_TYPE)
11935 || !validate_arg (ost, INTEGER_TYPE))
11940 if (TREE_CODE (ost) != INTEGER_CST
11941 || tree_int_cst_sgn (ost) < 0
11942 || compare_tree_int (ost, 3) > 0)
11945 object_size_type = tree_low_cst (ost, 0);
11947 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11948 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11949 and (size_t) 0 for types 2 and 3. */
11950 if (TREE_SIDE_EFFECTS (ptr))
11951 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11953 if (TREE_CODE (ptr) == ADDR_EXPR)
11955 bytes = compute_builtin_object_size (ptr, object_size_type);
11956 if (double_int_fits_to_tree_p (size_type_node,
11957 uhwi_to_double_int (bytes)))
11958 return build_int_cstu (size_type_node, bytes);
11960 else if (TREE_CODE (ptr) == SSA_NAME)
11962 /* If object size is not known yet, delay folding until
11963 later. Maybe subsequent passes will help determining
11965 bytes = compute_builtin_object_size (ptr, object_size_type);
11966 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11967 && double_int_fits_to_tree_p (size_type_node,
11968 uhwi_to_double_int (bytes)))
11969 return build_int_cstu (size_type_node, bytes);
11975 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11976 DEST, SRC, LEN, and SIZE are the arguments to the call.
11977 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11978 code of the builtin. If MAXLEN is not NULL, it is maximum length
11979 passed as third argument. */
11982 fold_builtin_memory_chk (location_t loc, tree fndecl,
11983 tree dest, tree src, tree len, tree size,
11984 tree maxlen, bool ignore,
11985 enum built_in_function fcode)
11989 if (!validate_arg (dest, POINTER_TYPE)
11990 || !validate_arg (src,
11991 (fcode == BUILT_IN_MEMSET_CHK
11992 ? INTEGER_TYPE : POINTER_TYPE))
11993 || !validate_arg (len, INTEGER_TYPE)
11994 || !validate_arg (size, INTEGER_TYPE))
11997 /* If SRC and DEST are the same (and not volatile), return DEST
11998 (resp. DEST+LEN for __mempcpy_chk). */
11999 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12001 if (fcode != BUILT_IN_MEMPCPY_CHK)
12002 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12006 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12007 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12011 if (! host_integerp (size, 1))
12014 if (! integer_all_onesp (size))
12016 if (! host_integerp (len, 1))
12018 /* If LEN is not constant, try MAXLEN too.
12019 For MAXLEN only allow optimizing into non-_ocs function
12020 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12021 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12023 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12025 /* (void) __mempcpy_chk () can be optimized into
12026 (void) __memcpy_chk (). */
12027 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12031 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12039 if (tree_int_cst_lt (size, maxlen))
12044 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12045 mem{cpy,pcpy,move,set} is available. */
12048 case BUILT_IN_MEMCPY_CHK:
12049 fn = built_in_decls[BUILT_IN_MEMCPY];
12051 case BUILT_IN_MEMPCPY_CHK:
12052 fn = built_in_decls[BUILT_IN_MEMPCPY];
12054 case BUILT_IN_MEMMOVE_CHK:
12055 fn = built_in_decls[BUILT_IN_MEMMOVE];
12057 case BUILT_IN_MEMSET_CHK:
12058 fn = built_in_decls[BUILT_IN_MEMSET];
12067 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12070 /* Fold a call to the __st[rp]cpy_chk builtin.
12071 DEST, SRC, and SIZE are the arguments to the call.
12072 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12073 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12074 strings passed as second argument. */
12077 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12078 tree src, tree size,
12079 tree maxlen, bool ignore,
12080 enum built_in_function fcode)
12084 if (!validate_arg (dest, POINTER_TYPE)
12085 || !validate_arg (src, POINTER_TYPE)
12086 || !validate_arg (size, INTEGER_TYPE))
12089 /* If SRC and DEST are the same (and not volatile), return DEST. */
12090 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12091 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12093 if (! host_integerp (size, 1))
12096 if (! integer_all_onesp (size))
12098 len = c_strlen (src, 1);
12099 if (! len || ! host_integerp (len, 1))
12101 /* If LEN is not constant, try MAXLEN too.
12102 For MAXLEN only allow optimizing into non-_ocs function
12103 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12104 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12106 if (fcode == BUILT_IN_STPCPY_CHK)
12111 /* If return value of __stpcpy_chk is ignored,
12112 optimize into __strcpy_chk. */
12113 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12117 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12120 if (! len || TREE_SIDE_EFFECTS (len))
12123 /* If c_strlen returned something, but not a constant,
12124 transform __strcpy_chk into __memcpy_chk. */
12125 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12129 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12130 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12131 build_call_expr_loc (loc, fn, 4,
12132 dest, src, len, size));
12138 if (! tree_int_cst_lt (maxlen, size))
12142 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12143 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12144 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12148 return build_call_expr_loc (loc, fn, 2, dest, src);
12151 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12152 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12153 length passed as third argument. */
12156 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12157 tree len, tree size, tree maxlen)
12161 if (!validate_arg (dest, POINTER_TYPE)
12162 || !validate_arg (src, POINTER_TYPE)
12163 || !validate_arg (len, INTEGER_TYPE)
12164 || !validate_arg (size, INTEGER_TYPE))
12167 if (! host_integerp (size, 1))
12170 if (! integer_all_onesp (size))
12172 if (! host_integerp (len, 1))
12174 /* If LEN is not constant, try MAXLEN too.
12175 For MAXLEN only allow optimizing into non-_ocs function
12176 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12177 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12183 if (tree_int_cst_lt (size, maxlen))
12187 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12188 fn = built_in_decls[BUILT_IN_STRNCPY];
12192 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12195 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12196 are the arguments to the call. */
12199 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12200 tree src, tree size)
12205 if (!validate_arg (dest, POINTER_TYPE)
12206 || !validate_arg (src, POINTER_TYPE)
12207 || !validate_arg (size, INTEGER_TYPE))
12210 p = c_getstr (src);
12211 /* If the SRC parameter is "", return DEST. */
12212 if (p && *p == '\0')
12213 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12215 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12218 /* If __builtin_strcat_chk is used, assume strcat is available. */
12219 fn = built_in_decls[BUILT_IN_STRCAT];
12223 return build_call_expr_loc (loc, fn, 2, dest, src);
12226 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12230 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12231 tree dest, tree src, tree len, tree size)
12236 if (!validate_arg (dest, POINTER_TYPE)
12237 || !validate_arg (src, POINTER_TYPE)
12238 || !validate_arg (size, INTEGER_TYPE)
12239 || !validate_arg (size, INTEGER_TYPE))
12242 p = c_getstr (src);
12243 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12244 if (p && *p == '\0')
12245 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12246 else if (integer_zerop (len))
12247 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12249 if (! host_integerp (size, 1))
12252 if (! integer_all_onesp (size))
12254 tree src_len = c_strlen (src, 1);
12256 && host_integerp (src_len, 1)
12257 && host_integerp (len, 1)
12258 && ! tree_int_cst_lt (len, src_len))
12260 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12261 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12265 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12270 /* If __builtin_strncat_chk is used, assume strncat is available. */
12271 fn = built_in_decls[BUILT_IN_STRNCAT];
12275 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12278 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12279 Return NULL_TREE if a normal call should be emitted rather than
12280 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12281 or BUILT_IN_VSPRINTF_CHK. */
12284 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12285 enum built_in_function fcode)
12287 tree dest, size, len, fn, fmt, flag;
12288 const char *fmt_str;
12290 /* Verify the required arguments in the original call. */
12294 if (!validate_arg (dest, POINTER_TYPE))
12297 if (!validate_arg (flag, INTEGER_TYPE))
12300 if (!validate_arg (size, INTEGER_TYPE))
12303 if (!validate_arg (fmt, POINTER_TYPE))
12306 if (! host_integerp (size, 1))
12311 if (!init_target_chars ())
12314 /* Check whether the format is a literal string constant. */
12315 fmt_str = c_getstr (fmt);
12316 if (fmt_str != NULL)
12318 /* If the format doesn't contain % args or %%, we know the size. */
12319 if (strchr (fmt_str, target_percent) == 0)
12321 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12322 len = build_int_cstu (size_type_node, strlen (fmt_str));
12324 /* If the format is "%s" and first ... argument is a string literal,
12325 we know the size too. */
12326 else if (fcode == BUILT_IN_SPRINTF_CHK
12327 && strcmp (fmt_str, target_percent_s) == 0)
12334 if (validate_arg (arg, POINTER_TYPE))
12336 len = c_strlen (arg, 1);
12337 if (! len || ! host_integerp (len, 1))
12344 if (! integer_all_onesp (size))
12346 if (! len || ! tree_int_cst_lt (len, size))
12350 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12351 or if format doesn't contain % chars or is "%s". */
12352 if (! integer_zerop (flag))
12354 if (fmt_str == NULL)
12356 if (strchr (fmt_str, target_percent) != NULL
12357 && strcmp (fmt_str, target_percent_s))
12361 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12362 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12363 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12367 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12370 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12371 a normal call should be emitted rather than expanding the function
12372 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12375 fold_builtin_sprintf_chk (location_t loc, tree exp,
12376 enum built_in_function fcode)
12378 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12379 CALL_EXPR_ARGP (exp), fcode);
12382 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12383 NULL_TREE if a normal call should be emitted rather than expanding
12384 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12385 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12386 passed as second argument. */
12389 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12390 tree maxlen, enum built_in_function fcode)
12392 tree dest, size, len, fn, fmt, flag;
12393 const char *fmt_str;
12395 /* Verify the required arguments in the original call. */
12399 if (!validate_arg (dest, POINTER_TYPE))
12402 if (!validate_arg (len, INTEGER_TYPE))
12405 if (!validate_arg (flag, INTEGER_TYPE))
12408 if (!validate_arg (size, INTEGER_TYPE))
12411 if (!validate_arg (fmt, POINTER_TYPE))
12414 if (! host_integerp (size, 1))
12417 if (! integer_all_onesp (size))
12419 if (! host_integerp (len, 1))
12421 /* If LEN is not constant, try MAXLEN too.
12422 For MAXLEN only allow optimizing into non-_ocs function
12423 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12424 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12430 if (tree_int_cst_lt (size, maxlen))
12434 if (!init_target_chars ())
12437 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12438 or if format doesn't contain % chars or is "%s". */
12439 if (! integer_zerop (flag))
12441 fmt_str = c_getstr (fmt);
12442 if (fmt_str == NULL)
12444 if (strchr (fmt_str, target_percent) != NULL
12445 && strcmp (fmt_str, target_percent_s))
12449 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12451 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12452 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12456 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12459 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12460 a normal call should be emitted rather than expanding the function
12461 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12462 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12463 passed as second argument. */
12466 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12467 enum built_in_function fcode)
12469 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12470 CALL_EXPR_ARGP (exp), maxlen, fcode);
12473 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12474 FMT and ARG are the arguments to the call; we don't fold cases with
12475 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12477 Return NULL_TREE if no simplification was possible, otherwise return the
12478 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12479 code of the function to be simplified. */
12482 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12483 tree arg, bool ignore,
12484 enum built_in_function fcode)
12486 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12487 const char *fmt_str = NULL;
12489 /* If the return value is used, don't do the transformation. */
12493 /* Verify the required arguments in the original call. */
12494 if (!validate_arg (fmt, POINTER_TYPE))
12497 /* Check whether the format is a literal string constant. */
12498 fmt_str = c_getstr (fmt);
12499 if (fmt_str == NULL)
12502 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12504 /* If we're using an unlocked function, assume the other
12505 unlocked functions exist explicitly. */
12506 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12507 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12511 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12512 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12515 if (!init_target_chars ())
12518 if (strcmp (fmt_str, target_percent_s) == 0
12519 || strchr (fmt_str, target_percent) == NULL)
12523 if (strcmp (fmt_str, target_percent_s) == 0)
12525 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12528 if (!arg || !validate_arg (arg, POINTER_TYPE))
12531 str = c_getstr (arg);
12537 /* The format specifier doesn't contain any '%' characters. */
12538 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12544 /* If the string was "", printf does nothing. */
12545 if (str[0] == '\0')
12546 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12548 /* If the string has length of 1, call putchar. */
12549 if (str[1] == '\0')
12551 /* Given printf("c"), (where c is any one character,)
12552 convert "c"[0] to an int and pass that to the replacement
12554 newarg = build_int_cst (integer_type_node, str[0]);
12556 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12560 /* If the string was "string\n", call puts("string"). */
12561 size_t len = strlen (str);
12562 if ((unsigned char)str[len - 1] == target_newline
12563 && (size_t) (int) len == len
12567 tree offset_node, string_cst;
12569 /* Create a NUL-terminated string that's one char shorter
12570 than the original, stripping off the trailing '\n'. */
12571 newarg = build_string_literal (len, str);
12572 string_cst = string_constant (newarg, &offset_node);
12573 gcc_checking_assert (string_cst
12574 && (TREE_STRING_LENGTH (string_cst)
12576 && integer_zerop (offset_node)
12578 TREE_STRING_POINTER (string_cst)[len - 1]
12579 == target_newline);
12580 /* build_string_literal creates a new STRING_CST,
12581 modify it in place to avoid double copying. */
12582 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12583 newstr[len - 1] = '\0';
12585 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12588 /* We'd like to arrange to call fputs(string,stdout) here,
12589 but we need stdout and don't have a way to get it yet. */
12594 /* The other optimizations can be done only on the non-va_list variants. */
12595 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12598 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12599 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12601 if (!arg || !validate_arg (arg, POINTER_TYPE))
12604 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12607 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12608 else if (strcmp (fmt_str, target_percent_c) == 0)
12610 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12613 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12619 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12622 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12623 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12624 more than 3 arguments, and ARG may be null in the 2-argument case.
12626 Return NULL_TREE if no simplification was possible, otherwise return the
12627 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12628 code of the function to be simplified. */
12631 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12632 tree fmt, tree arg, bool ignore,
12633 enum built_in_function fcode)
12635 tree fn_fputc, fn_fputs, call = NULL_TREE;
12636 const char *fmt_str = NULL;
12638 /* If the return value is used, don't do the transformation. */
12642 /* Verify the required arguments in the original call. */
12643 if (!validate_arg (fp, POINTER_TYPE))
12645 if (!validate_arg (fmt, POINTER_TYPE))
12648 /* Check whether the format is a literal string constant. */
12649 fmt_str = c_getstr (fmt);
12650 if (fmt_str == NULL)
12653 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12655 /* If we're using an unlocked function, assume the other
12656 unlocked functions exist explicitly. */
12657 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12658 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12662 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12663 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12666 if (!init_target_chars ())
12669 /* If the format doesn't contain % args or %%, use strcpy. */
12670 if (strchr (fmt_str, target_percent) == NULL)
12672 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12676 /* If the format specifier was "", fprintf does nothing. */
12677 if (fmt_str[0] == '\0')
12679 /* If FP has side-effects, just wait until gimplification is
12681 if (TREE_SIDE_EFFECTS (fp))
12684 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12687 /* When "string" doesn't contain %, replace all cases of
12688 fprintf (fp, string) with fputs (string, fp). The fputs
12689 builtin will take care of special cases like length == 1. */
12691 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12694 /* The other optimizations can be done only on the non-va_list variants. */
12695 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12698 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12699 else if (strcmp (fmt_str, target_percent_s) == 0)
12701 if (!arg || !validate_arg (arg, POINTER_TYPE))
12704 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12707 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12708 else if (strcmp (fmt_str, target_percent_c) == 0)
12710 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12713 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12718 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12721 /* Initialize format string characters in the target charset. */
12724 init_target_chars (void)
12729 target_newline = lang_hooks.to_target_charset ('\n');
12730 target_percent = lang_hooks.to_target_charset ('%');
12731 target_c = lang_hooks.to_target_charset ('c');
12732 target_s = lang_hooks.to_target_charset ('s');
12733 if (target_newline == 0 || target_percent == 0 || target_c == 0
12737 target_percent_c[0] = target_percent;
12738 target_percent_c[1] = target_c;
12739 target_percent_c[2] = '\0';
12741 target_percent_s[0] = target_percent;
12742 target_percent_s[1] = target_s;
12743 target_percent_s[2] = '\0';
12745 target_percent_s_newline[0] = target_percent;
12746 target_percent_s_newline[1] = target_s;
12747 target_percent_s_newline[2] = target_newline;
12748 target_percent_s_newline[3] = '\0';
12755 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12756 and no overflow/underflow occurred. INEXACT is true if M was not
12757 exactly calculated. TYPE is the tree type for the result. This
12758 function assumes that you cleared the MPFR flags and then
12759 calculated M to see if anything subsequently set a flag prior to
12760 entering this function. Return NULL_TREE if any checks fail. */
12763 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12765 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12766 overflow/underflow occurred. If -frounding-math, proceed iff the
12767 result of calling FUNC was exact. */
12768 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12769 && (!flag_rounding_math || !inexact))
12771 REAL_VALUE_TYPE rr;
12773 real_from_mpfr (&rr, m, type, GMP_RNDN);
12774 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12775 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12776 but the mpft_t is not, then we underflowed in the
12778 if (real_isfinite (&rr)
12779 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12781 REAL_VALUE_TYPE rmode;
12783 real_convert (&rmode, TYPE_MODE (type), &rr);
12784 /* Proceed iff the specified mode can hold the value. */
12785 if (real_identical (&rmode, &rr))
12786 return build_real (type, rmode);
12792 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12793 number and no overflow/underflow occurred. INEXACT is true if M
12794 was not exactly calculated. TYPE is the tree type for the result.
12795 This function assumes that you cleared the MPFR flags and then
12796 calculated M to see if anything subsequently set a flag prior to
12797 entering this function. Return NULL_TREE if any checks fail, if
12798 FORCE_CONVERT is true, then bypass the checks. */
12801 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12803 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12804 overflow/underflow occurred. If -frounding-math, proceed iff the
12805 result of calling FUNC was exact. */
12807 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12808 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12809 && (!flag_rounding_math || !inexact)))
12811 REAL_VALUE_TYPE re, im;
12813 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12814 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12815 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12816 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12817 but the mpft_t is not, then we underflowed in the
12820 || (real_isfinite (&re) && real_isfinite (&im)
12821 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12822 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12824 REAL_VALUE_TYPE re_mode, im_mode;
12826 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12827 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12828 /* Proceed iff the specified mode can hold the value. */
12830 || (real_identical (&re_mode, &re)
12831 && real_identical (&im_mode, &im)))
12832 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12833 build_real (TREE_TYPE (type), im_mode));
12839 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12840 FUNC on it and return the resulting value as a tree with type TYPE.
12841 If MIN and/or MAX are not NULL, then the supplied ARG must be
12842 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12843 acceptable values, otherwise they are not. The mpfr precision is
12844 set to the precision of TYPE. We assume that function FUNC returns
12845 zero if the result could be calculated exactly within the requested
12849 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12850 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12853 tree result = NULL_TREE;
12857 /* To proceed, MPFR must exactly represent the target floating point
12858 format, which only happens when the target base equals two. */
12859 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12860 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12862 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12864 if (real_isfinite (ra)
12865 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12866 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12868 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12869 const int prec = fmt->p;
12870 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12874 mpfr_init2 (m, prec);
12875 mpfr_from_real (m, ra, GMP_RNDN);
12876 mpfr_clear_flags ();
12877 inexact = func (m, m, rnd);
12878 result = do_mpfr_ckconv (m, type, inexact);
12886 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12887 FUNC on it and return the resulting value as a tree with type TYPE.
12888 The mpfr precision is set to the precision of TYPE. We assume that
12889 function FUNC returns zero if the result could be calculated
12890 exactly within the requested precision. */
12893 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12894 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12896 tree result = NULL_TREE;
12901 /* To proceed, MPFR must exactly represent the target floating point
12902 format, which only happens when the target base equals two. */
12903 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12904 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12905 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12907 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12908 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12910 if (real_isfinite (ra1) && real_isfinite (ra2))
12912 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12913 const int prec = fmt->p;
12914 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12918 mpfr_inits2 (prec, m1, m2, NULL);
12919 mpfr_from_real (m1, ra1, GMP_RNDN);
12920 mpfr_from_real (m2, ra2, GMP_RNDN);
12921 mpfr_clear_flags ();
12922 inexact = func (m1, m1, m2, rnd);
12923 result = do_mpfr_ckconv (m1, type, inexact);
12924 mpfr_clears (m1, m2, NULL);
12931 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12932 FUNC on it and return the resulting value as a tree with type TYPE.
12933 The mpfr precision is set to the precision of TYPE. We assume that
12934 function FUNC returns zero if the result could be calculated
12935 exactly within the requested precision. */
12938 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12939 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12941 tree result = NULL_TREE;
12947 /* To proceed, MPFR must exactly represent the target floating point
12948 format, which only happens when the target base equals two. */
12949 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12950 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12951 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12952 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12954 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12955 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12956 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12958 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12960 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12961 const int prec = fmt->p;
12962 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12966 mpfr_inits2 (prec, m1, m2, m3, NULL);
12967 mpfr_from_real (m1, ra1, GMP_RNDN);
12968 mpfr_from_real (m2, ra2, GMP_RNDN);
12969 mpfr_from_real (m3, ra3, GMP_RNDN);
12970 mpfr_clear_flags ();
12971 inexact = func (m1, m1, m2, m3, rnd);
12972 result = do_mpfr_ckconv (m1, type, inexact);
12973 mpfr_clears (m1, m2, m3, NULL);
12980 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12981 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12982 If ARG_SINP and ARG_COSP are NULL then the result is returned
12983 as a complex value.
12984 The type is taken from the type of ARG and is used for setting the
12985 precision of the calculation and results. */
12988 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12990 tree const type = TREE_TYPE (arg);
12991 tree result = NULL_TREE;
12995 /* To proceed, MPFR must exactly represent the target floating point
12996 format, which only happens when the target base equals two. */
12997 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12998 && TREE_CODE (arg) == REAL_CST
12999 && !TREE_OVERFLOW (arg))
13001 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13003 if (real_isfinite (ra))
13005 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13006 const int prec = fmt->p;
13007 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13008 tree result_s, result_c;
13012 mpfr_inits2 (prec, m, ms, mc, NULL);
13013 mpfr_from_real (m, ra, GMP_RNDN);
13014 mpfr_clear_flags ();
13015 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13016 result_s = do_mpfr_ckconv (ms, type, inexact);
13017 result_c = do_mpfr_ckconv (mc, type, inexact);
13018 mpfr_clears (m, ms, mc, NULL);
13019 if (result_s && result_c)
13021 /* If we are to return in a complex value do so. */
13022 if (!arg_sinp && !arg_cosp)
13023 return build_complex (build_complex_type (type),
13024 result_c, result_s);
13026 /* Dereference the sin/cos pointer arguments. */
13027 arg_sinp = build_fold_indirect_ref (arg_sinp);
13028 arg_cosp = build_fold_indirect_ref (arg_cosp);
13029 /* Proceed if valid pointer type were passed in. */
13030 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13031 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13033 /* Set the values. */
13034 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13036 TREE_SIDE_EFFECTS (result_s) = 1;
13037 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13039 TREE_SIDE_EFFECTS (result_c) = 1;
13040 /* Combine the assignments into a compound expr. */
13041 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13042 result_s, result_c));
13050 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13051 two-argument mpfr order N Bessel function FUNC on them and return
13052 the resulting value as a tree with type TYPE. The mpfr precision
13053 is set to the precision of TYPE. We assume that function FUNC
13054 returns zero if the result could be calculated exactly within the
13055 requested precision. */
13057 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13058 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13059 const REAL_VALUE_TYPE *min, bool inclusive)
13061 tree result = NULL_TREE;
13066 /* To proceed, MPFR must exactly represent the target floating point
13067 format, which only happens when the target base equals two. */
13068 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13069 && host_integerp (arg1, 0)
13070 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13072 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13073 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13076 && real_isfinite (ra)
13077 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13079 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13080 const int prec = fmt->p;
13081 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13085 mpfr_init2 (m, prec);
13086 mpfr_from_real (m, ra, GMP_RNDN);
13087 mpfr_clear_flags ();
13088 inexact = func (m, n, m, rnd);
13089 result = do_mpfr_ckconv (m, type, inexact);
13097 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13098 the pointer *(ARG_QUO) and return the result. The type is taken
13099 from the type of ARG0 and is used for setting the precision of the
13100 calculation and results. */
13103 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13105 tree const type = TREE_TYPE (arg0);
13106 tree result = NULL_TREE;
13111 /* To proceed, MPFR must exactly represent the target floating point
13112 format, which only happens when the target base equals two. */
13113 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13114 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13115 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13117 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13118 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13120 if (real_isfinite (ra0) && real_isfinite (ra1))
13122 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13123 const int prec = fmt->p;
13124 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13129 mpfr_inits2 (prec, m0, m1, NULL);
13130 mpfr_from_real (m0, ra0, GMP_RNDN);
13131 mpfr_from_real (m1, ra1, GMP_RNDN);
13132 mpfr_clear_flags ();
13133 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13134 /* Remquo is independent of the rounding mode, so pass
13135 inexact=0 to do_mpfr_ckconv(). */
13136 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13137 mpfr_clears (m0, m1, NULL);
13140 /* MPFR calculates quo in the host's long so it may
13141 return more bits in quo than the target int can hold
13142 if sizeof(host long) > sizeof(target int). This can
13143 happen even for native compilers in LP64 mode. In
13144 these cases, modulo the quo value with the largest
13145 number that the target int can hold while leaving one
13146 bit for the sign. */
13147 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13148 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13150 /* Dereference the quo pointer argument. */
13151 arg_quo = build_fold_indirect_ref (arg_quo);
13152 /* Proceed iff a valid pointer type was passed in. */
13153 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13155 /* Set the value. */
13157 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13158 build_int_cst (TREE_TYPE (arg_quo),
13160 TREE_SIDE_EFFECTS (result_quo) = 1;
13161 /* Combine the quo assignment with the rem. */
13162 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13163 result_quo, result_rem));
13171 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13172 resulting value as a tree with type TYPE. The mpfr precision is
13173 set to the precision of TYPE. We assume that this mpfr function
13174 returns zero if the result could be calculated exactly within the
13175 requested precision. In addition, the integer pointer represented
13176 by ARG_SG will be dereferenced and set to the appropriate signgam
13180 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13182 tree result = NULL_TREE;
13186 /* To proceed, MPFR must exactly represent the target floating point
13187 format, which only happens when the target base equals two. Also
13188 verify ARG is a constant and that ARG_SG is an int pointer. */
13189 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13190 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13191 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13192 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13194 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13196 /* In addition to NaN and Inf, the argument cannot be zero or a
13197 negative integer. */
13198 if (real_isfinite (ra)
13199 && ra->cl != rvc_zero
13200 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13202 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13203 const int prec = fmt->p;
13204 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13209 mpfr_init2 (m, prec);
13210 mpfr_from_real (m, ra, GMP_RNDN);
13211 mpfr_clear_flags ();
13212 inexact = mpfr_lgamma (m, &sg, m, rnd);
13213 result_lg = do_mpfr_ckconv (m, type, inexact);
13219 /* Dereference the arg_sg pointer argument. */
13220 arg_sg = build_fold_indirect_ref (arg_sg);
13221 /* Assign the signgam value into *arg_sg. */
13222 result_sg = fold_build2 (MODIFY_EXPR,
13223 TREE_TYPE (arg_sg), arg_sg,
13224 build_int_cst (TREE_TYPE (arg_sg), sg));
13225 TREE_SIDE_EFFECTS (result_sg) = 1;
13226 /* Combine the signgam assignment with the lgamma result. */
13227 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13228 result_sg, result_lg));
13236 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13237 function FUNC on it and return the resulting value as a tree with
13238 type TYPE. The mpfr precision is set to the precision of TYPE. We
13239 assume that function FUNC returns zero if the result could be
13240 calculated exactly within the requested precision. */
13243 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13245 tree result = NULL_TREE;
13249 /* To proceed, MPFR must exactly represent the target floating point
13250 format, which only happens when the target base equals two. */
13251 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13252 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13253 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13255 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13256 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13258 if (real_isfinite (re) && real_isfinite (im))
13260 const struct real_format *const fmt =
13261 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13262 const int prec = fmt->p;
13263 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13264 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13268 mpc_init2 (m, prec);
13269 mpfr_from_real (mpc_realref(m), re, rnd);
13270 mpfr_from_real (mpc_imagref(m), im, rnd);
13271 mpfr_clear_flags ();
13272 inexact = func (m, m, crnd);
13273 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13281 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13282 mpc function FUNC on it and return the resulting value as a tree
13283 with type TYPE. The mpfr precision is set to the precision of
13284 TYPE. We assume that function FUNC returns zero if the result
13285 could be calculated exactly within the requested precision. If
13286 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13287 in the arguments and/or results. */
13290 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13291 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13293 tree result = NULL_TREE;
13298 /* To proceed, MPFR must exactly represent the target floating point
13299 format, which only happens when the target base equals two. */
13300 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13301 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13302 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13303 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13304 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13306 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13307 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13308 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13309 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13312 || (real_isfinite (re0) && real_isfinite (im0)
13313 && real_isfinite (re1) && real_isfinite (im1)))
13315 const struct real_format *const fmt =
13316 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13317 const int prec = fmt->p;
13318 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13319 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13323 mpc_init2 (m0, prec);
13324 mpc_init2 (m1, prec);
13325 mpfr_from_real (mpc_realref(m0), re0, rnd);
13326 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13327 mpfr_from_real (mpc_realref(m1), re1, rnd);
13328 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13329 mpfr_clear_flags ();
13330 inexact = func (m0, m0, m1, crnd);
13331 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13340 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13341 a normal call should be emitted rather than expanding the function
13342 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13345 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13347 int nargs = gimple_call_num_args (stmt);
13349 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13351 ? gimple_call_arg_ptr (stmt, 0)
13352 : &error_mark_node), fcode);
13355 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13356 a normal call should be emitted rather than expanding the function
13357 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13358 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13359 passed as second argument. */
13362 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13363 enum built_in_function fcode)
13365 int nargs = gimple_call_num_args (stmt);
13367 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13369 ? gimple_call_arg_ptr (stmt, 0)
13370 : &error_mark_node), maxlen, fcode);
13373 /* Builtins with folding operations that operate on "..." arguments
13374 need special handling; we need to store the arguments in a convenient
13375 data structure before attempting any folding. Fortunately there are
13376 only a few builtins that fall into this category. FNDECL is the
13377 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13378 result of the function call is ignored. */
13381 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13382 bool ignore ATTRIBUTE_UNUSED)
13384 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13385 tree ret = NULL_TREE;
13389 case BUILT_IN_SPRINTF_CHK:
13390 case BUILT_IN_VSPRINTF_CHK:
13391 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13394 case BUILT_IN_SNPRINTF_CHK:
13395 case BUILT_IN_VSNPRINTF_CHK:
13396 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13403 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13404 TREE_NO_WARNING (ret) = 1;
13410 /* A wrapper function for builtin folding that prevents warnings for
13411 "statement without effect" and the like, caused by removing the
13412 call node earlier than the warning is generated. */
13415 fold_call_stmt (gimple stmt, bool ignore)
13417 tree ret = NULL_TREE;
13418 tree fndecl = gimple_call_fndecl (stmt);
13419 location_t loc = gimple_location (stmt);
13421 && TREE_CODE (fndecl) == FUNCTION_DECL
13422 && DECL_BUILT_IN (fndecl)
13423 && !gimple_call_va_arg_pack_p (stmt))
13425 int nargs = gimple_call_num_args (stmt);
13426 tree *args = (nargs > 0
13427 ? gimple_call_arg_ptr (stmt, 0)
13428 : &error_mark_node);
13430 if (avoid_folding_inline_builtin (fndecl))
13432 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13434 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13438 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13439 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13441 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13444 /* Propagate location information from original call to
13445 expansion of builtin. Otherwise things like
13446 maybe_emit_chk_warning, that operate on the expansion
13447 of a builtin, will use the wrong location information. */
13448 if (gimple_has_location (stmt))
13450 tree realret = ret;
13451 if (TREE_CODE (ret) == NOP_EXPR)
13452 realret = TREE_OPERAND (ret, 0);
13453 if (CAN_HAVE_LOCATION_P (realret)
13454 && !EXPR_HAS_LOCATION (realret))
13455 SET_EXPR_LOCATION (realret, loc);
13465 /* Look up the function in built_in_decls that corresponds to DECL
13466 and set ASMSPEC as its user assembler name. DECL must be a
13467 function decl that declares a builtin. */
13470 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13473 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13474 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13477 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13478 set_user_assembler_name (builtin, asmspec);
13479 switch (DECL_FUNCTION_CODE (decl))
13481 case BUILT_IN_MEMCPY:
13482 init_block_move_fn (asmspec);
13483 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13485 case BUILT_IN_MEMSET:
13486 init_block_clear_fn (asmspec);
13487 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13489 case BUILT_IN_MEMMOVE:
13490 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13492 case BUILT_IN_MEMCMP:
13493 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13495 case BUILT_IN_ABORT:
13496 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13499 if (INT_TYPE_SIZE < BITS_PER_WORD)
13501 set_user_assembler_libfunc ("ffs", asmspec);
13502 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13503 MODE_INT, 0), "ffs");
13511 /* Return true if DECL is a builtin that expands to a constant or similarly
13514 is_simple_builtin (tree decl)
13516 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13517 switch (DECL_FUNCTION_CODE (decl))
13519 /* Builtins that expand to constants. */
13520 case BUILT_IN_CONSTANT_P:
13521 case BUILT_IN_EXPECT:
13522 case BUILT_IN_OBJECT_SIZE:
13523 case BUILT_IN_UNREACHABLE:
13524 /* Simple register moves or loads from stack. */
13525 case BUILT_IN_ASSUME_ALIGNED:
13526 case BUILT_IN_RETURN_ADDRESS:
13527 case BUILT_IN_EXTRACT_RETURN_ADDR:
13528 case BUILT_IN_FROB_RETURN_ADDR:
13529 case BUILT_IN_RETURN:
13530 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13531 case BUILT_IN_FRAME_ADDRESS:
13532 case BUILT_IN_VA_END:
13533 case BUILT_IN_STACK_SAVE:
13534 case BUILT_IN_STACK_RESTORE:
13535 /* Exception state returns or moves registers around. */
13536 case BUILT_IN_EH_FILTER:
13537 case BUILT_IN_EH_POINTER:
13538 case BUILT_IN_EH_COPY_VALUES:
13548 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13549 most probably expanded inline into reasonably simple code. This is a
13550 superset of is_simple_builtin. */
13552 is_inexpensive_builtin (tree decl)
13556 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13558 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13559 switch (DECL_FUNCTION_CODE (decl))
13562 case BUILT_IN_ALLOCA:
13563 case BUILT_IN_BSWAP32:
13564 case BUILT_IN_BSWAP64:
13566 case BUILT_IN_CLZIMAX:
13567 case BUILT_IN_CLZL:
13568 case BUILT_IN_CLZLL:
13570 case BUILT_IN_CTZIMAX:
13571 case BUILT_IN_CTZL:
13572 case BUILT_IN_CTZLL:
13574 case BUILT_IN_FFSIMAX:
13575 case BUILT_IN_FFSL:
13576 case BUILT_IN_FFSLL:
13577 case BUILT_IN_IMAXABS:
13578 case BUILT_IN_FINITE:
13579 case BUILT_IN_FINITEF:
13580 case BUILT_IN_FINITEL:
13581 case BUILT_IN_FINITED32:
13582 case BUILT_IN_FINITED64:
13583 case BUILT_IN_FINITED128:
13584 case BUILT_IN_FPCLASSIFY:
13585 case BUILT_IN_ISFINITE:
13586 case BUILT_IN_ISINF_SIGN:
13587 case BUILT_IN_ISINF:
13588 case BUILT_IN_ISINFF:
13589 case BUILT_IN_ISINFL:
13590 case BUILT_IN_ISINFD32:
13591 case BUILT_IN_ISINFD64:
13592 case BUILT_IN_ISINFD128:
13593 case BUILT_IN_ISNAN:
13594 case BUILT_IN_ISNANF:
13595 case BUILT_IN_ISNANL:
13596 case BUILT_IN_ISNAND32:
13597 case BUILT_IN_ISNAND64:
13598 case BUILT_IN_ISNAND128:
13599 case BUILT_IN_ISNORMAL:
13600 case BUILT_IN_ISGREATER:
13601 case BUILT_IN_ISGREATEREQUAL:
13602 case BUILT_IN_ISLESS:
13603 case BUILT_IN_ISLESSEQUAL:
13604 case BUILT_IN_ISLESSGREATER:
13605 case BUILT_IN_ISUNORDERED:
13606 case BUILT_IN_VA_ARG_PACK:
13607 case BUILT_IN_VA_ARG_PACK_LEN:
13608 case BUILT_IN_VA_COPY:
13609 case BUILT_IN_TRAP:
13610 case BUILT_IN_SAVEREGS:
13611 case BUILT_IN_POPCOUNTL:
13612 case BUILT_IN_POPCOUNTLL:
13613 case BUILT_IN_POPCOUNTIMAX:
13614 case BUILT_IN_POPCOUNT:
13615 case BUILT_IN_PARITYL:
13616 case BUILT_IN_PARITYLL:
13617 case BUILT_IN_PARITYIMAX:
13618 case BUILT_IN_PARITY:
13619 case BUILT_IN_LABS:
13620 case BUILT_IN_LLABS:
13621 case BUILT_IN_PREFETCH:
13625 return is_simple_builtin (decl);