1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
62 struct target_builtins *this_target_builtins = &default_target_builtins;
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 builtin_info_type builtin_info;
80 static const char *c_getstr (tree);
81 static rtx c_readstr (const char *, enum machine_mode);
82 static int target_char_cast (tree, char *);
83 static rtx get_memory_rtx (tree, tree);
84 static int apply_args_size (void);
85 static int apply_result_size (void);
86 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
87 static rtx result_vector (int, rtx);
89 static void expand_builtin_update_setjmp_buf (rtx);
90 static void expand_builtin_prefetch (tree);
91 static rtx expand_builtin_apply_args (void);
92 static rtx expand_builtin_apply_args_1 (void);
93 static rtx expand_builtin_apply (rtx, rtx, rtx);
94 static void expand_builtin_return (rtx);
95 static enum type_class type_to_class (tree);
96 static rtx expand_builtin_classify_type (tree);
97 static void expand_errno_check (tree, rtx);
98 static rtx expand_builtin_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
100 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
102 static rtx expand_builtin_interclass_mathfn (tree, rtx);
103 static rtx expand_builtin_sincos (tree);
104 static rtx expand_builtin_cexpi (tree, rtx);
105 static rtx expand_builtin_int_roundingfn (tree, rtx);
106 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
107 static rtx expand_builtin_next_arg (void);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_memcpy (tree, rtx);
116 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
118 enum machine_mode, int);
119 static rtx expand_builtin_strcpy (tree, rtx);
120 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
121 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strncpy (tree, rtx);
123 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
124 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
126 static rtx expand_builtin_bzero (tree);
127 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_alloca (tree, bool);
129 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130 static rtx expand_builtin_frame_address (tree, tree);
131 static tree stabilize_va_list_loc (location_t, tree, int);
132 static rtx expand_builtin_expect (tree, rtx);
133 static tree fold_builtin_constant_p (tree);
134 static tree fold_builtin_expect (location_t, tree, tree);
135 static tree fold_builtin_classify_type (tree);
136 static tree fold_builtin_strlen (location_t, tree, tree);
137 static tree fold_builtin_inf (location_t, tree, int);
138 static tree fold_builtin_nan (tree, tree, int);
139 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
140 static bool validate_arg (const_tree, enum tree_code code);
141 static bool integer_valued_real_p (tree);
142 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
143 static bool readonly_data_expr (tree);
144 static rtx expand_builtin_fabs (tree, rtx, rtx);
145 static rtx expand_builtin_signbit (tree, rtx);
146 static tree fold_builtin_sqrt (location_t, tree, tree);
147 static tree fold_builtin_cbrt (location_t, tree, tree);
148 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
149 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_cos (location_t, tree, tree, tree);
151 static tree fold_builtin_cosh (location_t, tree, tree, tree);
152 static tree fold_builtin_tan (tree, tree);
153 static tree fold_builtin_trunc (location_t, tree, tree);
154 static tree fold_builtin_floor (location_t, tree, tree);
155 static tree fold_builtin_ceil (location_t, tree, tree);
156 static tree fold_builtin_round (location_t, tree, tree);
157 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
158 static tree fold_builtin_bitop (tree, tree);
159 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
160 static tree fold_builtin_strchr (location_t, tree, tree, tree);
161 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_strcmp (location_t, tree, tree);
164 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
165 static tree fold_builtin_signbit (location_t, tree, tree);
166 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
167 static tree fold_builtin_isascii (location_t, tree);
168 static tree fold_builtin_toascii (location_t, tree);
169 static tree fold_builtin_isdigit (location_t, tree);
170 static tree fold_builtin_fabs (location_t, tree, tree);
171 static tree fold_builtin_abs (location_t, tree, tree);
172 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
174 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
175 static tree fold_builtin_0 (location_t, tree, bool);
176 static tree fold_builtin_1 (location_t, tree, tree, bool);
177 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
178 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
179 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
180 static tree fold_builtin_varargs (location_t, tree, tree, bool);
182 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
183 static tree fold_builtin_strstr (location_t, tree, tree, tree);
184 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
185 static tree fold_builtin_strcat (location_t, tree, tree);
186 static tree fold_builtin_strncat (location_t, tree, tree, tree);
187 static tree fold_builtin_strspn (location_t, tree, tree);
188 static tree fold_builtin_strcspn (location_t, tree, tree);
189 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
190 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
192 static rtx expand_builtin_object_size (tree);
193 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
194 enum built_in_function);
195 static void maybe_emit_chk_warning (tree, enum built_in_function);
196 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_free_warning (tree);
198 static tree fold_builtin_object_size (tree, tree);
199 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
200 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
201 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
202 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
203 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
204 enum built_in_function);
205 static bool init_target_chars (void);
207 static unsigned HOST_WIDE_INT target_newline;
208 static unsigned HOST_WIDE_INT target_percent;
209 static unsigned HOST_WIDE_INT target_c;
210 static unsigned HOST_WIDE_INT target_s;
211 static char target_percent_c[3];
212 static char target_percent_s[3];
213 static char target_percent_s_newline[4];
214 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
215 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
216 static tree do_mpfr_arg2 (tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
218 static tree do_mpfr_arg3 (tree, tree, tree, tree,
219 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
220 static tree do_mpfr_sincos (tree, tree, tree);
221 static tree do_mpfr_bessel_n (tree, tree, tree,
222 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, bool);
224 static tree do_mpfr_remquo (tree, tree, tree);
225 static tree do_mpfr_lgamma_r (tree, tree, tree);
226 static void expand_builtin_sync_synchronize (void);
228 /* Return true if NAME starts with __builtin_ or __sync_. */
231 is_builtin_name (const char *name)
233 if (strncmp (name, "__builtin_", 10) == 0)
235 if (strncmp (name, "__sync_", 7) == 0)
237 if (strncmp (name, "__atomic_", 9) == 0)
243 /* Return true if DECL is a function symbol representing a built-in. */
246 is_builtin_fn (tree decl)
248 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252 /* Return true if NODE should be considered for inline expansion regardless
253 of the optimization level. This means whenever a function is invoked with
254 its "internal" name, which normally contains the prefix "__builtin". */
257 called_as_built_in (tree node)
259 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
260 we want the name used to call the function, not the name it
262 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
263 return is_builtin_name (name);
266 /* Compute values M and N such that M divides (address of EXP - N) and such
267 that N < M. If these numbers can be determined, store M in alignp and N in
268 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
269 *alignp and any bit-offset to *bitposp.
271 Note that the address (and thus the alignment) computed here is based
272 on the address to which a symbol resolves, whereas DECL_ALIGN is based
273 on the address at which an object is actually located. These two
274 addresses are not always the same. For example, on ARM targets,
275 the address &foo of a Thumb function foo() has the lowest bit set,
276 whereas foo() itself starts on an even address. */
279 get_object_alignment_1 (tree exp, unsigned int *alignp,
280 unsigned HOST_WIDE_INT *bitposp)
282 HOST_WIDE_INT bitsize, bitpos;
284 enum machine_mode mode;
285 int unsignedp, volatilep;
286 unsigned int inner, align = BITS_PER_UNIT;
287 bool known_alignment = false;
289 /* Get the innermost object and the constant (bitpos) and possibly
290 variable (offset) offset of the access. */
291 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
292 &mode, &unsignedp, &volatilep, true);
294 /* Extract alignment information from the innermost object and
295 possibly adjust bitpos and offset. */
296 if (TREE_CODE (exp) == CONST_DECL)
297 exp = DECL_INITIAL (exp);
299 && TREE_CODE (exp) != LABEL_DECL)
301 if (TREE_CODE (exp) == FUNCTION_DECL)
303 /* Function addresses can encode extra information besides their
304 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
305 allows the low bit to be used as a virtual bit, we know
306 that the address itself must be 2-byte aligned. */
307 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
309 known_alignment = true;
310 align = 2 * BITS_PER_UNIT;
315 known_alignment = true;
316 align = DECL_ALIGN (exp);
319 else if (CONSTANT_CLASS_P (exp))
321 known_alignment = true;
322 align = TYPE_ALIGN (TREE_TYPE (exp));
323 #ifdef CONSTANT_ALIGNMENT
324 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
327 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
329 known_alignment = true;
330 align = TYPE_ALIGN (TREE_TYPE (exp));
332 else if (TREE_CODE (exp) == INDIRECT_REF)
334 known_alignment = true;
335 align = TYPE_ALIGN (TREE_TYPE (exp));
337 else if (TREE_CODE (exp) == MEM_REF)
339 tree addr = TREE_OPERAND (exp, 0);
341 unsigned HOST_WIDE_INT ptr_bitpos;
343 if (TREE_CODE (addr) == BIT_AND_EXPR
344 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
346 known_alignment = true;
347 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
348 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
349 align *= BITS_PER_UNIT;
350 addr = TREE_OPERAND (addr, 0);
353 if (get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos))
355 known_alignment = true;
356 bitpos += ptr_bitpos & ~(align - 1);
357 align = MAX (ptr_align, align);
360 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
362 else if (TREE_CODE (exp) == TARGET_MEM_REF)
365 unsigned HOST_WIDE_INT ptr_bitpos;
366 tree addr = TMR_BASE (exp);
368 if (TREE_CODE (addr) == BIT_AND_EXPR
369 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
371 known_alignment = true;
372 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
373 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
374 align *= BITS_PER_UNIT;
375 addr = TREE_OPERAND (addr, 0);
378 if (get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos))
380 known_alignment = true;
381 bitpos += ptr_bitpos & ~(align - 1);
382 align = MAX (ptr_align, align);
385 if (TMR_OFFSET (exp))
386 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
387 if (TMR_INDEX (exp) && TMR_STEP (exp))
389 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
390 align = MIN (align, (step & -step) * BITS_PER_UNIT);
391 known_alignment = true;
393 else if (TMR_INDEX (exp))
394 known_alignment = false;
396 if (TMR_INDEX2 (exp))
397 known_alignment = false;
400 /* If there is a non-constant offset part extract the maximum
401 alignment that can prevail. */
407 if (TREE_CODE (offset) == PLUS_EXPR)
409 next_offset = TREE_OPERAND (offset, 0);
410 offset = TREE_OPERAND (offset, 1);
414 if (host_integerp (offset, 1))
416 /* Any overflow in calculating offset_bits won't change
419 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
422 inner = MIN (inner, (offset_bits & -offset_bits));
424 else if (TREE_CODE (offset) == MULT_EXPR
425 && host_integerp (TREE_OPERAND (offset, 1), 1))
427 /* Any overflow in calculating offset_factor won't change
429 unsigned offset_factor
430 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
434 inner = MIN (inner, (offset_factor & -offset_factor));
438 known_alignment = false;
441 offset = next_offset;
446 /* Alignment is innermost object alignment adjusted by the constant
447 and non-constant offset parts. */
448 align = MIN (align, inner);
449 bitpos = bitpos & (align - 1);
454 bitpos = bitpos & (BITS_PER_UNIT - 1);
455 *alignp = BITS_PER_UNIT;
458 return known_alignment;
461 /* Return the alignment in bits of EXP, an object. */
464 get_object_alignment (tree exp)
466 unsigned HOST_WIDE_INT bitpos = 0;
469 get_object_alignment_1 (exp, &align, &bitpos);
471 /* align and bitpos now specify known low bits of the pointer.
472 ptr & (align - 1) == bitpos. */
475 align = (bitpos & -bitpos);
479 /* Return the alignment of object EXP, also considering its type when we do
480 not know of explicit misalignment. Only handle MEM_REF and TARGET_MEM_REF.
482 ??? Note that, in the general case, the type of an expression is not kept
483 consistent with misalignment information by the front-end, for example when
484 taking the address of a member of a packed structure. However, in most of
485 the cases, expressions have the alignment of their type so we optimistically
486 fall back to this alignment when we cannot compute a misalignment. */
489 get_object_or_type_alignment (tree exp)
491 unsigned HOST_WIDE_INT misalign;
493 bool known_alignment;
495 gcc_assert (TREE_CODE (exp) == MEM_REF || TREE_CODE (exp) == TARGET_MEM_REF);
496 known_alignment = get_object_alignment_1 (exp, &align, &misalign);
498 align = (misalign & -misalign);
499 else if (!known_alignment)
500 align = TYPE_ALIGN (TREE_TYPE (exp));
505 /* For a pointer valued expression EXP compute values M and N such that M
506 divides (EXP - N) and such that N < M. If these numbers can be determined,
507 store M in alignp and N in *BITPOSP and return true. Otherwise return false
508 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.
510 If EXP is not a pointer, false is returned too. */
513 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
514 unsigned HOST_WIDE_INT *bitposp)
518 if (TREE_CODE (exp) == ADDR_EXPR)
519 return get_object_alignment_1 (TREE_OPERAND (exp, 0), alignp, bitposp);
520 else if (TREE_CODE (exp) == SSA_NAME
521 && POINTER_TYPE_P (TREE_TYPE (exp)))
523 unsigned int ptr_align, ptr_misalign;
524 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
526 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
528 *bitposp = ptr_misalign * BITS_PER_UNIT;
529 *alignp = ptr_align * BITS_PER_UNIT;
535 *alignp = BITS_PER_UNIT;
541 *alignp = BITS_PER_UNIT;
545 /* Return the alignment in bits of EXP, a pointer valued expression.
546 The alignment returned is, by default, the alignment of the thing that
547 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
549 Otherwise, look at the expression to see if we can do better, i.e., if the
550 expression is actually pointing at an object whose alignment is tighter. */
553 get_pointer_alignment (tree exp)
555 unsigned HOST_WIDE_INT bitpos = 0;
558 get_pointer_alignment_1 (exp, &align, &bitpos);
560 /* align and bitpos now specify known low bits of the pointer.
561 ptr & (align - 1) == bitpos. */
564 align = (bitpos & -bitpos);
569 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
570 way, because it could contain a zero byte in the middle.
571 TREE_STRING_LENGTH is the size of the character array, not the string.
573 ONLY_VALUE should be nonzero if the result is not going to be emitted
574 into the instruction stream and zero if it is going to be expanded.
575 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
576 is returned, otherwise NULL, since
577 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
578 evaluate the side-effects.
580 The value returned is of type `ssizetype'.
582 Unfortunately, string_constant can't access the values of const char
583 arrays with initializers, so neither can we do so here. */
586 c_strlen (tree src, int only_value)
589 HOST_WIDE_INT offset;
595 if (TREE_CODE (src) == COND_EXPR
596 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
600 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
601 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
602 if (tree_int_cst_equal (len1, len2))
606 if (TREE_CODE (src) == COMPOUND_EXPR
607 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
608 return c_strlen (TREE_OPERAND (src, 1), only_value);
610 loc = EXPR_LOC_OR_HERE (src);
612 src = string_constant (src, &offset_node);
616 max = TREE_STRING_LENGTH (src) - 1;
617 ptr = TREE_STRING_POINTER (src);
619 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
621 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
622 compute the offset to the following null if we don't know where to
623 start searching for it. */
626 for (i = 0; i < max; i++)
630 /* We don't know the starting offset, but we do know that the string
631 has no internal zero bytes. We can assume that the offset falls
632 within the bounds of the string; otherwise, the programmer deserves
633 what he gets. Subtract the offset from the length of the string,
634 and return that. This would perhaps not be valid if we were dealing
635 with named arrays in addition to literal string constants. */
637 return size_diffop_loc (loc, size_int (max), offset_node);
640 /* We have a known offset into the string. Start searching there for
641 a null character if we can represent it as a single HOST_WIDE_INT. */
642 if (offset_node == 0)
644 else if (! host_integerp (offset_node, 0))
647 offset = tree_low_cst (offset_node, 0);
649 /* If the offset is known to be out of bounds, warn, and call strlen at
651 if (offset < 0 || offset > max)
653 /* Suppress multiple warnings for propagated constant strings. */
654 if (! TREE_NO_WARNING (src))
656 warning_at (loc, 0, "offset outside bounds of constant string");
657 TREE_NO_WARNING (src) = 1;
662 /* Use strlen to search for the first zero byte. Since any strings
663 constructed with build_string will have nulls appended, we win even
664 if we get handed something like (char[4])"abcd".
666 Since OFFSET is our starting index into the string, no further
667 calculation is needed. */
668 return ssize_int (strlen (ptr + offset));
671 /* Return a char pointer for a C string if it is a string constant
672 or sum of string constant and integer constant. */
679 src = string_constant (src, &offset_node);
683 if (offset_node == 0)
684 return TREE_STRING_POINTER (src);
685 else if (!host_integerp (offset_node, 1)
686 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
689 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
692 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
693 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
696 c_readstr (const char *str, enum machine_mode mode)
702 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
707 for (i = 0; i < GET_MODE_SIZE (mode); i++)
710 if (WORDS_BIG_ENDIAN)
711 j = GET_MODE_SIZE (mode) - i - 1;
712 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
713 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
714 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
716 gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);
719 ch = (unsigned char) str[i];
720 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
722 return immed_double_const (c[0], c[1], mode);
725 /* Cast a target constant CST to target CHAR and if that value fits into
726 host char type, return zero and put that value into variable pointed to by
730 target_char_cast (tree cst, char *p)
732 unsigned HOST_WIDE_INT val, hostval;
734 if (TREE_CODE (cst) != INTEGER_CST
735 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
738 val = TREE_INT_CST_LOW (cst);
739 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
740 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
743 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
744 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
753 /* Similar to save_expr, but assumes that arbitrary code is not executed
754 in between the multiple evaluations. In particular, we assume that a
755 non-addressable local variable will not be modified. */
758 builtin_save_expr (tree exp)
760 if (TREE_CODE (exp) == SSA_NAME
761 || (TREE_ADDRESSABLE (exp) == 0
762 && (TREE_CODE (exp) == PARM_DECL
763 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
766 return save_expr (exp);
769 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
770 times to get the address of either a higher stack frame, or a return
771 address located within it (depending on FNDECL_CODE). */
774 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
778 #ifdef INITIAL_FRAME_ADDRESS_RTX
779 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
783 /* For a zero count with __builtin_return_address, we don't care what
784 frame address we return, because target-specific definitions will
785 override us. Therefore frame pointer elimination is OK, and using
786 the soft frame pointer is OK.
788 For a nonzero count, or a zero count with __builtin_frame_address,
789 we require a stable offset from the current frame pointer to the
790 previous one, so we must use the hard frame pointer, and
791 we must disable frame pointer elimination. */
792 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
793 tem = frame_pointer_rtx;
796 tem = hard_frame_pointer_rtx;
798 /* Tell reload not to eliminate the frame pointer. */
799 crtl->accesses_prior_frames = 1;
803 /* Some machines need special handling before we can access
804 arbitrary frames. For example, on the SPARC, we must first flush
805 all register windows to the stack. */
806 #ifdef SETUP_FRAME_ADDRESSES
808 SETUP_FRAME_ADDRESSES ();
811 /* On the SPARC, the return address is not in the frame, it is in a
812 register. There is no way to access it off of the current frame
813 pointer, but it can be accessed off the previous frame pointer by
814 reading the value from the register window save area. */
815 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
816 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
820 /* Scan back COUNT frames to the specified frame. */
821 for (i = 0; i < count; i++)
823 /* Assume the dynamic chain pointer is in the word that the
824 frame address points to, unless otherwise specified. */
825 #ifdef DYNAMIC_CHAIN_ADDRESS
826 tem = DYNAMIC_CHAIN_ADDRESS (tem);
828 tem = memory_address (Pmode, tem);
829 tem = gen_frame_mem (Pmode, tem);
830 tem = copy_to_reg (tem);
833 /* For __builtin_frame_address, return what we've got. But, on
834 the SPARC for example, we may have to add a bias. */
835 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
836 #ifdef FRAME_ADDR_RTX
837 return FRAME_ADDR_RTX (tem);
842 /* For __builtin_return_address, get the return address from that frame. */
843 #ifdef RETURN_ADDR_RTX
844 tem = RETURN_ADDR_RTX (count, tem);
846 tem = memory_address (Pmode,
847 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
848 tem = gen_frame_mem (Pmode, tem);
853 /* Alias set used for setjmp buffer. */
854 static alias_set_type setjmp_alias_set = -1;
856 /* Construct the leading half of a __builtin_setjmp call. Control will
857 return to RECEIVER_LABEL. This is also called directly by the SJLJ
858 exception handling code. */
861 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
863 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
867 if (setjmp_alias_set == -1)
868 setjmp_alias_set = new_alias_set ();
870 buf_addr = convert_memory_address (Pmode, buf_addr);
872 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
874 /* We store the frame pointer and the address of receiver_label in
875 the buffer and use the rest of it for the stack save area, which
876 is machine-dependent. */
878 mem = gen_rtx_MEM (Pmode, buf_addr);
879 set_mem_alias_set (mem, setjmp_alias_set);
880 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
882 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
883 GET_MODE_SIZE (Pmode))),
884 set_mem_alias_set (mem, setjmp_alias_set);
886 emit_move_insn (validize_mem (mem),
887 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
889 stack_save = gen_rtx_MEM (sa_mode,
890 plus_constant (Pmode, buf_addr,
891 2 * GET_MODE_SIZE (Pmode)));
892 set_mem_alias_set (stack_save, setjmp_alias_set);
893 emit_stack_save (SAVE_NONLOCAL, &stack_save);
895 /* If there is further processing to do, do it. */
896 #ifdef HAVE_builtin_setjmp_setup
897 if (HAVE_builtin_setjmp_setup)
898 emit_insn (gen_builtin_setjmp_setup (buf_addr));
901 /* We have a nonlocal label. */
902 cfun->has_nonlocal_label = 1;
905 /* Construct the trailing part of a __builtin_setjmp call. This is
906 also called directly by the SJLJ exception handling code. */
909 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
913 /* Clobber the FP when we get here, so we have to make sure it's
914 marked as used by this function. */
915 emit_use (hard_frame_pointer_rtx);
917 /* Mark the static chain as clobbered here so life information
918 doesn't get messed up for it. */
919 chain = targetm.calls.static_chain (current_function_decl, true);
920 if (chain && REG_P (chain))
921 emit_clobber (chain);
923 /* Now put in the code to restore the frame pointer, and argument
924 pointer, if needed. */
925 #ifdef HAVE_nonlocal_goto
926 if (! HAVE_nonlocal_goto)
929 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
930 /* This might change the hard frame pointer in ways that aren't
931 apparent to early optimization passes, so force a clobber. */
932 emit_clobber (hard_frame_pointer_rtx);
935 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
936 if (fixed_regs[ARG_POINTER_REGNUM])
938 #ifdef ELIMINABLE_REGS
940 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
942 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
943 if (elim_regs[i].from == ARG_POINTER_REGNUM
944 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
947 if (i == ARRAY_SIZE (elim_regs))
950 /* Now restore our arg pointer from the address at which it
951 was saved in our stack frame. */
952 emit_move_insn (crtl->args.internal_arg_pointer,
953 copy_to_reg (get_arg_pointer_save_area ()));
958 #ifdef HAVE_builtin_setjmp_receiver
959 if (HAVE_builtin_setjmp_receiver)
960 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
963 #ifdef HAVE_nonlocal_goto_receiver
964 if (HAVE_nonlocal_goto_receiver)
965 emit_insn (gen_nonlocal_goto_receiver ());
970 /* We must not allow the code we just generated to be reordered by
971 scheduling. Specifically, the update of the frame pointer must
972 happen immediately, not later. */
973 emit_insn (gen_blockage ());
976 /* __builtin_longjmp is passed a pointer to an array of five words (not
977 all will be used on all machines). It operates similarly to the C
978 library function of the same name, but is more efficient. Much of
979 the code below is copied from the handling of non-local gotos. */
982 expand_builtin_longjmp (rtx buf_addr, rtx value)
984 rtx fp, lab, stack, insn, last;
985 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
987 /* DRAP is needed for stack realign if longjmp is expanded to current
989 if (SUPPORTS_STACK_ALIGNMENT)
990 crtl->need_drap = true;
992 if (setjmp_alias_set == -1)
993 setjmp_alias_set = new_alias_set ();
995 buf_addr = convert_memory_address (Pmode, buf_addr);
997 buf_addr = force_reg (Pmode, buf_addr);
999 /* We require that the user must pass a second argument of 1, because
1000 that is what builtin_setjmp will return. */
1001 gcc_assert (value == const1_rtx);
1003 last = get_last_insn ();
1004 #ifdef HAVE_builtin_longjmp
1005 if (HAVE_builtin_longjmp)
1006 emit_insn (gen_builtin_longjmp (buf_addr));
1010 fp = gen_rtx_MEM (Pmode, buf_addr);
1011 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1012 GET_MODE_SIZE (Pmode)));
1014 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1015 2 * GET_MODE_SIZE (Pmode)));
1016 set_mem_alias_set (fp, setjmp_alias_set);
1017 set_mem_alias_set (lab, setjmp_alias_set);
1018 set_mem_alias_set (stack, setjmp_alias_set);
1020 /* Pick up FP, label, and SP from the block and jump. This code is
1021 from expand_goto in stmt.c; see there for detailed comments. */
1022 #ifdef HAVE_nonlocal_goto
1023 if (HAVE_nonlocal_goto)
1024 /* We have to pass a value to the nonlocal_goto pattern that will
1025 get copied into the static_chain pointer, but it does not matter
1026 what that value is, because builtin_setjmp does not use it. */
1027 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
1031 lab = copy_to_reg (lab);
1033 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1034 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1036 emit_move_insn (hard_frame_pointer_rtx, fp);
1037 emit_stack_restore (SAVE_NONLOCAL, stack);
1039 emit_use (hard_frame_pointer_rtx);
1040 emit_use (stack_pointer_rtx);
1041 emit_indirect_jump (lab);
1045 /* Search backwards and mark the jump insn as a non-local goto.
1046 Note that this precludes the use of __builtin_longjmp to a
1047 __builtin_setjmp target in the same function. However, we've
1048 already cautioned the user that these functions are for
1049 internal exception handling use only. */
1050 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1052 gcc_assert (insn != last);
1056 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1059 else if (CALL_P (insn))
1064 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1065 and the address of the save area. */
1068 expand_builtin_nonlocal_goto (tree exp)
1070 tree t_label, t_save_area;
1071 rtx r_label, r_save_area, r_fp, r_sp, insn;
1073 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1076 t_label = CALL_EXPR_ARG (exp, 0);
1077 t_save_area = CALL_EXPR_ARG (exp, 1);
1079 r_label = expand_normal (t_label);
1080 r_label = convert_memory_address (Pmode, r_label);
1081 r_save_area = expand_normal (t_save_area);
1082 r_save_area = convert_memory_address (Pmode, r_save_area);
1083 /* Copy the address of the save location to a register just in case it was
1084 based on the frame pointer. */
1085 r_save_area = copy_to_reg (r_save_area);
1086 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1087 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1088 plus_constant (Pmode, r_save_area,
1089 GET_MODE_SIZE (Pmode)));
1091 crtl->has_nonlocal_goto = 1;
1093 #ifdef HAVE_nonlocal_goto
1094 /* ??? We no longer need to pass the static chain value, afaik. */
1095 if (HAVE_nonlocal_goto)
1096 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1100 r_label = copy_to_reg (r_label);
1102 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1103 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1105 /* Restore frame pointer for containing function. */
1106 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1107 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1109 /* USE of hard_frame_pointer_rtx added for consistency;
1110 not clear if really needed. */
1111 emit_use (hard_frame_pointer_rtx);
1112 emit_use (stack_pointer_rtx);
1114 /* If the architecture is using a GP register, we must
1115 conservatively assume that the target function makes use of it.
1116 The prologue of functions with nonlocal gotos must therefore
1117 initialize the GP register to the appropriate value, and we
1118 must then make sure that this value is live at the point
1119 of the jump. (Note that this doesn't necessarily apply
1120 to targets with a nonlocal_goto pattern; they are free
1121 to implement it in their own way. Note also that this is
1122 a no-op if the GP register is a global invariant.) */
1123 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1124 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1125 emit_use (pic_offset_table_rtx);
1127 emit_indirect_jump (r_label);
1130 /* Search backwards to the jump insn and mark it as a
1132 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1136 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1139 else if (CALL_P (insn))
1146 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1147 (not all will be used on all machines) that was passed to __builtin_setjmp.
1148 It updates the stack pointer in that block to correspond to the current
1152 expand_builtin_update_setjmp_buf (rtx buf_addr)
1154 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1156 = gen_rtx_MEM (sa_mode,
1159 plus_constant (Pmode, buf_addr,
1160 2 * GET_MODE_SIZE (Pmode))));
1162 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1165 /* Expand a call to __builtin_prefetch. For a target that does not support
1166 data prefetch, evaluate the memory address argument in case it has side
1170 expand_builtin_prefetch (tree exp)
1172 tree arg0, arg1, arg2;
1176 if (!validate_arglist (exp, POINTER_TYPE, 0))
1179 arg0 = CALL_EXPR_ARG (exp, 0);
1181 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1182 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1184 nargs = call_expr_nargs (exp);
1186 arg1 = CALL_EXPR_ARG (exp, 1);
1188 arg1 = integer_zero_node;
1190 arg2 = CALL_EXPR_ARG (exp, 2);
1192 arg2 = integer_three_node;
1194 /* Argument 0 is an address. */
1195 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1197 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1198 if (TREE_CODE (arg1) != INTEGER_CST)
1200 error ("second argument to %<__builtin_prefetch%> must be a constant");
1201 arg1 = integer_zero_node;
1203 op1 = expand_normal (arg1);
1204 /* Argument 1 must be either zero or one. */
1205 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1207 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1212 /* Argument 2 (locality) must be a compile-time constant int. */
1213 if (TREE_CODE (arg2) != INTEGER_CST)
1215 error ("third argument to %<__builtin_prefetch%> must be a constant");
1216 arg2 = integer_zero_node;
1218 op2 = expand_normal (arg2);
1219 /* Argument 2 must be 0, 1, 2, or 3. */
1220 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1222 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1226 #ifdef HAVE_prefetch
1229 struct expand_operand ops[3];
1231 create_address_operand (&ops[0], op0);
1232 create_integer_operand (&ops[1], INTVAL (op1));
1233 create_integer_operand (&ops[2], INTVAL (op2));
1234 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1239 /* Don't do anything with direct references to volatile memory, but
1240 generate code to handle other side effects. */
1241 if (!MEM_P (op0) && side_effects_p (op0))
1245 /* Get a MEM rtx for expression EXP which is the address of an operand
1246 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1247 the maximum length of the block of memory that might be accessed or
1251 get_memory_rtx (tree exp, tree len)
1253 tree orig_exp = exp;
1257 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1258 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1259 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1260 exp = TREE_OPERAND (exp, 0);
1262 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1263 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1265 /* Get an expression we can use to find the attributes to assign to MEM.
1266 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1267 we can. First remove any nops. */
1268 while (CONVERT_EXPR_P (exp)
1269 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1270 exp = TREE_OPERAND (exp, 0);
1273 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1274 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1275 && host_integerp (TREE_OPERAND (exp, 1), 0)
1276 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1277 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1278 else if (TREE_CODE (exp) == ADDR_EXPR)
1279 exp = TREE_OPERAND (exp, 0);
1280 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1281 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1285 /* Honor attributes derived from exp, except for the alias set
1286 (as builtin stringops may alias with anything) and the size
1287 (as stringops may access multiple array elements). */
1290 set_mem_attributes (mem, exp, 0);
1293 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1295 /* Allow the string and memory builtins to overflow from one
1296 field into another, see http://gcc.gnu.org/PR23561.
1297 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1298 memory accessed by the string or memory builtin will fit
1299 within the field. */
1300 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1302 tree mem_expr = MEM_EXPR (mem);
1303 HOST_WIDE_INT offset = -1, length = -1;
1306 while (TREE_CODE (inner) == ARRAY_REF
1307 || CONVERT_EXPR_P (inner)
1308 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1309 || TREE_CODE (inner) == SAVE_EXPR)
1310 inner = TREE_OPERAND (inner, 0);
1312 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1314 if (MEM_OFFSET_KNOWN_P (mem))
1315 offset = MEM_OFFSET (mem);
1317 if (offset >= 0 && len && host_integerp (len, 0))
1318 length = tree_low_cst (len, 0);
1320 while (TREE_CODE (inner) == COMPONENT_REF)
1322 tree field = TREE_OPERAND (inner, 1);
1323 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1324 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1326 /* Bitfields are generally not byte-addressable. */
1327 gcc_assert (!DECL_BIT_FIELD (field)
1328 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1329 % BITS_PER_UNIT) == 0
1330 && host_integerp (DECL_SIZE (field), 0)
1331 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1332 % BITS_PER_UNIT) == 0));
1334 /* If we can prove that the memory starting at XEXP (mem, 0) and
1335 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1336 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1337 fields without DECL_SIZE_UNIT like flexible array members. */
1339 && DECL_SIZE_UNIT (field)
1340 && host_integerp (DECL_SIZE_UNIT (field), 0))
1343 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1346 && offset + length <= size)
1351 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1352 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1353 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1361 mem_expr = TREE_OPERAND (mem_expr, 0);
1362 inner = TREE_OPERAND (inner, 0);
1365 if (mem_expr == NULL)
1367 if (mem_expr != MEM_EXPR (mem))
1369 set_mem_expr (mem, mem_expr);
1371 set_mem_offset (mem, offset);
1373 clear_mem_offset (mem);
1376 set_mem_alias_set (mem, 0);
1377 clear_mem_size (mem);
1383 /* Built-in functions to perform an untyped call and return. */
1385 #define apply_args_mode \
1386 (this_target_builtins->x_apply_args_mode)
1387 #define apply_result_mode \
1388 (this_target_builtins->x_apply_result_mode)
1390 /* Return the size required for the block returned by __builtin_apply_args,
1391 and initialize apply_args_mode. */
1394 apply_args_size (void)
1396 static int size = -1;
1399 enum machine_mode mode;
1401 /* The values computed by this function never change. */
1404 /* The first value is the incoming arg-pointer. */
1405 size = GET_MODE_SIZE (Pmode);
1407 /* The second value is the structure value address unless this is
1408 passed as an "invisible" first argument. */
1409 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1410 size += GET_MODE_SIZE (Pmode);
1412 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1413 if (FUNCTION_ARG_REGNO_P (regno))
1415 mode = targetm.calls.get_raw_arg_mode (regno);
1417 gcc_assert (mode != VOIDmode);
1419 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1420 if (size % align != 0)
1421 size = CEIL (size, align) * align;
1422 size += GET_MODE_SIZE (mode);
1423 apply_args_mode[regno] = mode;
1427 apply_args_mode[regno] = VOIDmode;
1433 /* Return the size required for the block returned by __builtin_apply,
1434 and initialize apply_result_mode. */
1437 apply_result_size (void)
1439 static int size = -1;
1441 enum machine_mode mode;
1443 /* The values computed by this function never change. */
1448 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1449 if (targetm.calls.function_value_regno_p (regno))
1451 mode = targetm.calls.get_raw_result_mode (regno);
1453 gcc_assert (mode != VOIDmode);
1455 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1456 if (size % align != 0)
1457 size = CEIL (size, align) * align;
1458 size += GET_MODE_SIZE (mode);
1459 apply_result_mode[regno] = mode;
1462 apply_result_mode[regno] = VOIDmode;
1464 /* Allow targets that use untyped_call and untyped_return to override
1465 the size so that machine-specific information can be stored here. */
1466 #ifdef APPLY_RESULT_SIZE
1467 size = APPLY_RESULT_SIZE;
1473 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1474 /* Create a vector describing the result block RESULT. If SAVEP is true,
1475 the result block is used to save the values; otherwise it is used to
1476 restore the values. */
1479 result_vector (int savep, rtx result)
1481 int regno, size, align, nelts;
1482 enum machine_mode mode;
1484 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1487 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1488 if ((mode = apply_result_mode[regno]) != VOIDmode)
1490 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1491 if (size % align != 0)
1492 size = CEIL (size, align) * align;
1493 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1494 mem = adjust_address (result, mode, size);
1495 savevec[nelts++] = (savep
1496 ? gen_rtx_SET (VOIDmode, mem, reg)
1497 : gen_rtx_SET (VOIDmode, reg, mem));
1498 size += GET_MODE_SIZE (mode);
1500 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1502 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1504 /* Save the state required to perform an untyped call with the same
1505 arguments as were passed to the current function. */
1508 expand_builtin_apply_args_1 (void)
1511 int size, align, regno;
1512 enum machine_mode mode;
1513 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1515 /* Create a block where the arg-pointer, structure value address,
1516 and argument registers can be saved. */
1517 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1519 /* Walk past the arg-pointer and structure value address. */
1520 size = GET_MODE_SIZE (Pmode);
1521 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1522 size += GET_MODE_SIZE (Pmode);
1524 /* Save each register used in calling a function to the block. */
1525 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1526 if ((mode = apply_args_mode[regno]) != VOIDmode)
1528 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1529 if (size % align != 0)
1530 size = CEIL (size, align) * align;
1532 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1534 emit_move_insn (adjust_address (registers, mode, size), tem);
1535 size += GET_MODE_SIZE (mode);
1538 /* Save the arg pointer to the block. */
1539 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1540 #ifdef STACK_GROWS_DOWNWARD
1541 /* We need the pointer as the caller actually passed them to us, not
1542 as we might have pretended they were passed. Make sure it's a valid
1543 operand, as emit_move_insn isn't expected to handle a PLUS. */
1545 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1548 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1550 size = GET_MODE_SIZE (Pmode);
1552 /* Save the structure value address unless this is passed as an
1553 "invisible" first argument. */
1554 if (struct_incoming_value)
1556 emit_move_insn (adjust_address (registers, Pmode, size),
1557 copy_to_reg (struct_incoming_value));
1558 size += GET_MODE_SIZE (Pmode);
1561 /* Return the address of the block. */
1562 return copy_addr_to_reg (XEXP (registers, 0));
1565 /* __builtin_apply_args returns block of memory allocated on
1566 the stack into which is stored the arg pointer, structure
1567 value address, static chain, and all the registers that might
1568 possibly be used in performing a function call. The code is
1569 moved to the start of the function so the incoming values are
1573 expand_builtin_apply_args (void)
1575 /* Don't do __builtin_apply_args more than once in a function.
1576 Save the result of the first call and reuse it. */
1577 if (apply_args_value != 0)
1578 return apply_args_value;
1580 /* When this function is called, it means that registers must be
1581 saved on entry to this function. So we migrate the
1582 call to the first insn of this function. */
1587 temp = expand_builtin_apply_args_1 ();
1591 apply_args_value = temp;
1593 /* Put the insns after the NOTE that starts the function.
1594 If this is inside a start_sequence, make the outer-level insn
1595 chain current, so the code is placed at the start of the
1596 function. If internal_arg_pointer is a non-virtual pseudo,
1597 it needs to be placed after the function that initializes
1599 push_topmost_sequence ();
1600 if (REG_P (crtl->args.internal_arg_pointer)
1601 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1602 emit_insn_before (seq, parm_birth_insn);
1604 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1605 pop_topmost_sequence ();
1610 /* Perform an untyped call and save the state required to perform an
1611 untyped return of whatever value was returned by the given function. */
1614 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1616 int size, align, regno;
1617 enum machine_mode mode;
1618 rtx incoming_args, result, reg, dest, src, call_insn;
1619 rtx old_stack_level = 0;
1620 rtx call_fusage = 0;
1621 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1623 arguments = convert_memory_address (Pmode, arguments);
1625 /* Create a block where the return registers can be saved. */
1626 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1628 /* Fetch the arg pointer from the ARGUMENTS block. */
1629 incoming_args = gen_reg_rtx (Pmode);
1630 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1631 #ifndef STACK_GROWS_DOWNWARD
1632 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1633 incoming_args, 0, OPTAB_LIB_WIDEN);
1636 /* Push a new argument block and copy the arguments. Do not allow
1637 the (potential) memcpy call below to interfere with our stack
1639 do_pending_stack_adjust ();
1642 /* Save the stack with nonlocal if available. */
1643 #ifdef HAVE_save_stack_nonlocal
1644 if (HAVE_save_stack_nonlocal)
1645 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1648 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1650 /* Allocate a block of memory onto the stack and copy the memory
1651 arguments to the outgoing arguments address. We can pass TRUE
1652 as the 4th argument because we just saved the stack pointer
1653 and will restore it right after the call. */
1654 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1656 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1657 may have already set current_function_calls_alloca to true.
1658 current_function_calls_alloca won't be set if argsize is zero,
1659 so we have to guarantee need_drap is true here. */
1660 if (SUPPORTS_STACK_ALIGNMENT)
1661 crtl->need_drap = true;
1663 dest = virtual_outgoing_args_rtx;
1664 #ifndef STACK_GROWS_DOWNWARD
1665 if (CONST_INT_P (argsize))
1666 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1668 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1670 dest = gen_rtx_MEM (BLKmode, dest);
1671 set_mem_align (dest, PARM_BOUNDARY);
1672 src = gen_rtx_MEM (BLKmode, incoming_args);
1673 set_mem_align (src, PARM_BOUNDARY);
1674 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1676 /* Refer to the argument block. */
1678 arguments = gen_rtx_MEM (BLKmode, arguments);
1679 set_mem_align (arguments, PARM_BOUNDARY);
1681 /* Walk past the arg-pointer and structure value address. */
1682 size = GET_MODE_SIZE (Pmode);
1684 size += GET_MODE_SIZE (Pmode);
1686 /* Restore each of the registers previously saved. Make USE insns
1687 for each of these registers for use in making the call. */
1688 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1689 if ((mode = apply_args_mode[regno]) != VOIDmode)
1691 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1692 if (size % align != 0)
1693 size = CEIL (size, align) * align;
1694 reg = gen_rtx_REG (mode, regno);
1695 emit_move_insn (reg, adjust_address (arguments, mode, size));
1696 use_reg (&call_fusage, reg);
1697 size += GET_MODE_SIZE (mode);
1700 /* Restore the structure value address unless this is passed as an
1701 "invisible" first argument. */
1702 size = GET_MODE_SIZE (Pmode);
1705 rtx value = gen_reg_rtx (Pmode);
1706 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1707 emit_move_insn (struct_value, value);
1708 if (REG_P (struct_value))
1709 use_reg (&call_fusage, struct_value);
1710 size += GET_MODE_SIZE (Pmode);
1713 /* All arguments and registers used for the call are set up by now! */
1714 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1716 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1717 and we don't want to load it into a register as an optimization,
1718 because prepare_call_address already did it if it should be done. */
1719 if (GET_CODE (function) != SYMBOL_REF)
1720 function = memory_address (FUNCTION_MODE, function);
1722 /* Generate the actual call instruction and save the return value. */
1723 #ifdef HAVE_untyped_call
1724 if (HAVE_untyped_call)
1725 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1726 result, result_vector (1, result)));
1729 #ifdef HAVE_call_value
1730 if (HAVE_call_value)
1734 /* Locate the unique return register. It is not possible to
1735 express a call that sets more than one return register using
1736 call_value; use untyped_call for that. In fact, untyped_call
1737 only needs to save the return registers in the given block. */
1738 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1739 if ((mode = apply_result_mode[regno]) != VOIDmode)
1741 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1743 valreg = gen_rtx_REG (mode, regno);
1746 emit_call_insn (GEN_CALL_VALUE (valreg,
1747 gen_rtx_MEM (FUNCTION_MODE, function),
1748 const0_rtx, NULL_RTX, const0_rtx));
1750 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1756 /* Find the CALL insn we just emitted, and attach the register usage
1758 call_insn = last_call_insn ();
1759 add_function_usage_to (call_insn, call_fusage);
1761 /* Restore the stack. */
1762 #ifdef HAVE_save_stack_nonlocal
1763 if (HAVE_save_stack_nonlocal)
1764 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1767 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1768 fixup_args_size_notes (call_insn, get_last_insn(), 0);
1772 /* Return the address of the result block. */
1773 result = copy_addr_to_reg (XEXP (result, 0));
1774 return convert_memory_address (ptr_mode, result);
1777 /* Perform an untyped return. */
1780 expand_builtin_return (rtx result)
1782 int size, align, regno;
1783 enum machine_mode mode;
1785 rtx call_fusage = 0;
1787 result = convert_memory_address (Pmode, result);
1789 apply_result_size ();
1790 result = gen_rtx_MEM (BLKmode, result);
1792 #ifdef HAVE_untyped_return
1793 if (HAVE_untyped_return)
1795 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1801 /* Restore the return value and note that each value is used. */
1803 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1804 if ((mode = apply_result_mode[regno]) != VOIDmode)
1806 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1807 if (size % align != 0)
1808 size = CEIL (size, align) * align;
1809 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1810 emit_move_insn (reg, adjust_address (result, mode, size));
1812 push_to_sequence (call_fusage);
1814 call_fusage = get_insns ();
1816 size += GET_MODE_SIZE (mode);
1819 /* Put the USE insns before the return. */
1820 emit_insn (call_fusage);
1822 /* Return whatever values was restored by jumping directly to the end
1824 expand_naked_return ();
1827 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1829 static enum type_class
1830 type_to_class (tree type)
1832 switch (TREE_CODE (type))
1834 case VOID_TYPE: return void_type_class;
1835 case INTEGER_TYPE: return integer_type_class;
1836 case ENUMERAL_TYPE: return enumeral_type_class;
1837 case BOOLEAN_TYPE: return boolean_type_class;
1838 case POINTER_TYPE: return pointer_type_class;
1839 case REFERENCE_TYPE: return reference_type_class;
1840 case OFFSET_TYPE: return offset_type_class;
1841 case REAL_TYPE: return real_type_class;
1842 case COMPLEX_TYPE: return complex_type_class;
1843 case FUNCTION_TYPE: return function_type_class;
1844 case METHOD_TYPE: return method_type_class;
1845 case RECORD_TYPE: return record_type_class;
1847 case QUAL_UNION_TYPE: return union_type_class;
1848 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1849 ? string_type_class : array_type_class);
1850 case LANG_TYPE: return lang_type_class;
1851 default: return no_type_class;
1855 /* Expand a call EXP to __builtin_classify_type. */
1858 expand_builtin_classify_type (tree exp)
1860 if (call_expr_nargs (exp))
1861 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1862 return GEN_INT (no_type_class);
1865 /* This helper macro, meant to be used in mathfn_built_in below,
1866 determines which among a set of three builtin math functions is
1867 appropriate for a given type mode. The `F' and `L' cases are
1868 automatically generated from the `double' case. */
1869 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1870 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1871 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1872 fcodel = BUILT_IN_MATHFN##L ; break;
1873 /* Similar to above, but appends _R after any F/L suffix. */
1874 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1875 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1876 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1877 fcodel = BUILT_IN_MATHFN##L_R ; break;
1879 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1880 if available. If IMPLICIT is true use the implicit builtin declaration,
1881 otherwise use the explicit declaration. If we can't do the conversion,
1885 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1887 enum built_in_function fcode, fcodef, fcodel, fcode2;
1891 CASE_MATHFN (BUILT_IN_ACOS)
1892 CASE_MATHFN (BUILT_IN_ACOSH)
1893 CASE_MATHFN (BUILT_IN_ASIN)
1894 CASE_MATHFN (BUILT_IN_ASINH)
1895 CASE_MATHFN (BUILT_IN_ATAN)
1896 CASE_MATHFN (BUILT_IN_ATAN2)
1897 CASE_MATHFN (BUILT_IN_ATANH)
1898 CASE_MATHFN (BUILT_IN_CBRT)
1899 CASE_MATHFN (BUILT_IN_CEIL)
1900 CASE_MATHFN (BUILT_IN_CEXPI)
1901 CASE_MATHFN (BUILT_IN_COPYSIGN)
1902 CASE_MATHFN (BUILT_IN_COS)
1903 CASE_MATHFN (BUILT_IN_COSH)
1904 CASE_MATHFN (BUILT_IN_DREM)
1905 CASE_MATHFN (BUILT_IN_ERF)
1906 CASE_MATHFN (BUILT_IN_ERFC)
1907 CASE_MATHFN (BUILT_IN_EXP)
1908 CASE_MATHFN (BUILT_IN_EXP10)
1909 CASE_MATHFN (BUILT_IN_EXP2)
1910 CASE_MATHFN (BUILT_IN_EXPM1)
1911 CASE_MATHFN (BUILT_IN_FABS)
1912 CASE_MATHFN (BUILT_IN_FDIM)
1913 CASE_MATHFN (BUILT_IN_FLOOR)
1914 CASE_MATHFN (BUILT_IN_FMA)
1915 CASE_MATHFN (BUILT_IN_FMAX)
1916 CASE_MATHFN (BUILT_IN_FMIN)
1917 CASE_MATHFN (BUILT_IN_FMOD)
1918 CASE_MATHFN (BUILT_IN_FREXP)
1919 CASE_MATHFN (BUILT_IN_GAMMA)
1920 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1921 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1922 CASE_MATHFN (BUILT_IN_HYPOT)
1923 CASE_MATHFN (BUILT_IN_ILOGB)
1924 CASE_MATHFN (BUILT_IN_ICEIL)
1925 CASE_MATHFN (BUILT_IN_IFLOOR)
1926 CASE_MATHFN (BUILT_IN_INF)
1927 CASE_MATHFN (BUILT_IN_IRINT)
1928 CASE_MATHFN (BUILT_IN_IROUND)
1929 CASE_MATHFN (BUILT_IN_ISINF)
1930 CASE_MATHFN (BUILT_IN_J0)
1931 CASE_MATHFN (BUILT_IN_J1)
1932 CASE_MATHFN (BUILT_IN_JN)
1933 CASE_MATHFN (BUILT_IN_LCEIL)
1934 CASE_MATHFN (BUILT_IN_LDEXP)
1935 CASE_MATHFN (BUILT_IN_LFLOOR)
1936 CASE_MATHFN (BUILT_IN_LGAMMA)
1937 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1938 CASE_MATHFN (BUILT_IN_LLCEIL)
1939 CASE_MATHFN (BUILT_IN_LLFLOOR)
1940 CASE_MATHFN (BUILT_IN_LLRINT)
1941 CASE_MATHFN (BUILT_IN_LLROUND)
1942 CASE_MATHFN (BUILT_IN_LOG)
1943 CASE_MATHFN (BUILT_IN_LOG10)
1944 CASE_MATHFN (BUILT_IN_LOG1P)
1945 CASE_MATHFN (BUILT_IN_LOG2)
1946 CASE_MATHFN (BUILT_IN_LOGB)
1947 CASE_MATHFN (BUILT_IN_LRINT)
1948 CASE_MATHFN (BUILT_IN_LROUND)
1949 CASE_MATHFN (BUILT_IN_MODF)
1950 CASE_MATHFN (BUILT_IN_NAN)
1951 CASE_MATHFN (BUILT_IN_NANS)
1952 CASE_MATHFN (BUILT_IN_NEARBYINT)
1953 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1954 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1955 CASE_MATHFN (BUILT_IN_POW)
1956 CASE_MATHFN (BUILT_IN_POWI)
1957 CASE_MATHFN (BUILT_IN_POW10)
1958 CASE_MATHFN (BUILT_IN_REMAINDER)
1959 CASE_MATHFN (BUILT_IN_REMQUO)
1960 CASE_MATHFN (BUILT_IN_RINT)
1961 CASE_MATHFN (BUILT_IN_ROUND)
1962 CASE_MATHFN (BUILT_IN_SCALB)
1963 CASE_MATHFN (BUILT_IN_SCALBLN)
1964 CASE_MATHFN (BUILT_IN_SCALBN)
1965 CASE_MATHFN (BUILT_IN_SIGNBIT)
1966 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1967 CASE_MATHFN (BUILT_IN_SIN)
1968 CASE_MATHFN (BUILT_IN_SINCOS)
1969 CASE_MATHFN (BUILT_IN_SINH)
1970 CASE_MATHFN (BUILT_IN_SQRT)
1971 CASE_MATHFN (BUILT_IN_TAN)
1972 CASE_MATHFN (BUILT_IN_TANH)
1973 CASE_MATHFN (BUILT_IN_TGAMMA)
1974 CASE_MATHFN (BUILT_IN_TRUNC)
1975 CASE_MATHFN (BUILT_IN_Y0)
1976 CASE_MATHFN (BUILT_IN_Y1)
1977 CASE_MATHFN (BUILT_IN_YN)
1983 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1985 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1987 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1992 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1995 return builtin_decl_explicit (fcode2);
1998 /* Like mathfn_built_in_1(), but always use the implicit array. */
2001 mathfn_built_in (tree type, enum built_in_function fn)
2003 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2006 /* If errno must be maintained, expand the RTL to check if the result,
2007 TARGET, of a built-in function call, EXP, is NaN, and if so set
2011 expand_errno_check (tree exp, rtx target)
2013 rtx lab = gen_label_rtx ();
2015 /* Test the result; if it is NaN, set errno=EDOM because
2016 the argument was not in the domain. */
2017 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
2018 NULL_RTX, NULL_RTX, lab,
2019 /* The jump is very likely. */
2020 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
2023 /* If this built-in doesn't throw an exception, set errno directly. */
2024 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2026 #ifdef GEN_ERRNO_RTX
2027 rtx errno_rtx = GEN_ERRNO_RTX;
2030 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2032 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
2038 /* Make sure the library call isn't expanded as a tail call. */
2039 CALL_EXPR_TAILCALL (exp) = 0;
2041 /* We can't set errno=EDOM directly; let the library call do it.
2042 Pop the arguments right away in case the call gets deleted. */
2044 expand_call (exp, target, 0);
2049 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2050 Return NULL_RTX if a normal call should be emitted rather than expanding
2051 the function in-line. EXP is the expression that is a call to the builtin
2052 function; if convenient, the result should be placed in TARGET.
2053 SUBTARGET may be used as the target for computing one of EXP's operands. */
2056 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2058 optab builtin_optab;
2060 tree fndecl = get_callee_fndecl (exp);
2061 enum machine_mode mode;
2062 bool errno_set = false;
2065 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2068 arg = CALL_EXPR_ARG (exp, 0);
2070 switch (DECL_FUNCTION_CODE (fndecl))
2072 CASE_FLT_FN (BUILT_IN_SQRT):
2073 errno_set = ! tree_expr_nonnegative_p (arg);
2074 builtin_optab = sqrt_optab;
2076 CASE_FLT_FN (BUILT_IN_EXP):
2077 errno_set = true; builtin_optab = exp_optab; break;
2078 CASE_FLT_FN (BUILT_IN_EXP10):
2079 CASE_FLT_FN (BUILT_IN_POW10):
2080 errno_set = true; builtin_optab = exp10_optab; break;
2081 CASE_FLT_FN (BUILT_IN_EXP2):
2082 errno_set = true; builtin_optab = exp2_optab; break;
2083 CASE_FLT_FN (BUILT_IN_EXPM1):
2084 errno_set = true; builtin_optab = expm1_optab; break;
2085 CASE_FLT_FN (BUILT_IN_LOGB):
2086 errno_set = true; builtin_optab = logb_optab; break;
2087 CASE_FLT_FN (BUILT_IN_LOG):
2088 errno_set = true; builtin_optab = log_optab; break;
2089 CASE_FLT_FN (BUILT_IN_LOG10):
2090 errno_set = true; builtin_optab = log10_optab; break;
2091 CASE_FLT_FN (BUILT_IN_LOG2):
2092 errno_set = true; builtin_optab = log2_optab; break;
2093 CASE_FLT_FN (BUILT_IN_LOG1P):
2094 errno_set = true; builtin_optab = log1p_optab; break;
2095 CASE_FLT_FN (BUILT_IN_ASIN):
2096 builtin_optab = asin_optab; break;
2097 CASE_FLT_FN (BUILT_IN_ACOS):
2098 builtin_optab = acos_optab; break;
2099 CASE_FLT_FN (BUILT_IN_TAN):
2100 builtin_optab = tan_optab; break;
2101 CASE_FLT_FN (BUILT_IN_ATAN):
2102 builtin_optab = atan_optab; break;
2103 CASE_FLT_FN (BUILT_IN_FLOOR):
2104 builtin_optab = floor_optab; break;
2105 CASE_FLT_FN (BUILT_IN_CEIL):
2106 builtin_optab = ceil_optab; break;
2107 CASE_FLT_FN (BUILT_IN_TRUNC):
2108 builtin_optab = btrunc_optab; break;
2109 CASE_FLT_FN (BUILT_IN_ROUND):
2110 builtin_optab = round_optab; break;
2111 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2112 builtin_optab = nearbyint_optab;
2113 if (flag_trapping_math)
2115 /* Else fallthrough and expand as rint. */
2116 CASE_FLT_FN (BUILT_IN_RINT):
2117 builtin_optab = rint_optab; break;
2118 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2119 builtin_optab = significand_optab; break;
2124 /* Make a suitable register to place result in. */
2125 mode = TYPE_MODE (TREE_TYPE (exp));
2127 if (! flag_errno_math || ! HONOR_NANS (mode))
2130 /* Before working hard, check whether the instruction is available. */
2131 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2132 && (!errno_set || !optimize_insn_for_size_p ()))
2134 target = gen_reg_rtx (mode);
2136 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2137 need to expand the argument again. This way, we will not perform
2138 side-effects more the once. */
2139 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2141 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2145 /* Compute into TARGET.
2146 Set TARGET to wherever the result comes back. */
2147 target = expand_unop (mode, builtin_optab, op0, target, 0);
2152 expand_errno_check (exp, target);
2154 /* Output the entire sequence. */
2155 insns = get_insns ();
2161 /* If we were unable to expand via the builtin, stop the sequence
2162 (without outputting the insns) and call to the library function
2163 with the stabilized argument list. */
2167 return expand_call (exp, target, target == const0_rtx);
2170 /* Expand a call to the builtin binary math functions (pow and atan2).
2171 Return NULL_RTX if a normal call should be emitted rather than expanding the
2172 function in-line. EXP is the expression that is a call to the builtin
2173 function; if convenient, the result should be placed in TARGET.
2174 SUBTARGET may be used as the target for computing one of EXP's
2178 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2180 optab builtin_optab;
2181 rtx op0, op1, insns;
2182 int op1_type = REAL_TYPE;
2183 tree fndecl = get_callee_fndecl (exp);
2185 enum machine_mode mode;
2186 bool errno_set = true;
2188 switch (DECL_FUNCTION_CODE (fndecl))
2190 CASE_FLT_FN (BUILT_IN_SCALBN):
2191 CASE_FLT_FN (BUILT_IN_SCALBLN):
2192 CASE_FLT_FN (BUILT_IN_LDEXP):
2193 op1_type = INTEGER_TYPE;
2198 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2201 arg0 = CALL_EXPR_ARG (exp, 0);
2202 arg1 = CALL_EXPR_ARG (exp, 1);
2204 switch (DECL_FUNCTION_CODE (fndecl))
2206 CASE_FLT_FN (BUILT_IN_POW):
2207 builtin_optab = pow_optab; break;
2208 CASE_FLT_FN (BUILT_IN_ATAN2):
2209 builtin_optab = atan2_optab; break;
2210 CASE_FLT_FN (BUILT_IN_SCALB):
2211 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2213 builtin_optab = scalb_optab; break;
2214 CASE_FLT_FN (BUILT_IN_SCALBN):
2215 CASE_FLT_FN (BUILT_IN_SCALBLN):
2216 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2218 /* Fall through... */
2219 CASE_FLT_FN (BUILT_IN_LDEXP):
2220 builtin_optab = ldexp_optab; break;
2221 CASE_FLT_FN (BUILT_IN_FMOD):
2222 builtin_optab = fmod_optab; break;
2223 CASE_FLT_FN (BUILT_IN_REMAINDER):
2224 CASE_FLT_FN (BUILT_IN_DREM):
2225 builtin_optab = remainder_optab; break;
2230 /* Make a suitable register to place result in. */
2231 mode = TYPE_MODE (TREE_TYPE (exp));
2233 /* Before working hard, check whether the instruction is available. */
2234 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2237 target = gen_reg_rtx (mode);
2239 if (! flag_errno_math || ! HONOR_NANS (mode))
2242 if (errno_set && optimize_insn_for_size_p ())
2245 /* Always stabilize the argument list. */
2246 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2247 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2249 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2250 op1 = expand_normal (arg1);
2254 /* Compute into TARGET.
2255 Set TARGET to wherever the result comes back. */
2256 target = expand_binop (mode, builtin_optab, op0, op1,
2257 target, 0, OPTAB_DIRECT);
2259 /* If we were unable to expand via the builtin, stop the sequence
2260 (without outputting the insns) and call to the library function
2261 with the stabilized argument list. */
2265 return expand_call (exp, target, target == const0_rtx);
2269 expand_errno_check (exp, target);
2271 /* Output the entire sequence. */
2272 insns = get_insns ();
2279 /* Expand a call to the builtin trinary math functions (fma).
2280 Return NULL_RTX if a normal call should be emitted rather than expanding the
2281 function in-line. EXP is the expression that is a call to the builtin
2282 function; if convenient, the result should be placed in TARGET.
2283 SUBTARGET may be used as the target for computing one of EXP's
2287 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2289 optab builtin_optab;
2290 rtx op0, op1, op2, insns;
2291 tree fndecl = get_callee_fndecl (exp);
2292 tree arg0, arg1, arg2;
2293 enum machine_mode mode;
2295 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2298 arg0 = CALL_EXPR_ARG (exp, 0);
2299 arg1 = CALL_EXPR_ARG (exp, 1);
2300 arg2 = CALL_EXPR_ARG (exp, 2);
2302 switch (DECL_FUNCTION_CODE (fndecl))
2304 CASE_FLT_FN (BUILT_IN_FMA):
2305 builtin_optab = fma_optab; break;
2310 /* Make a suitable register to place result in. */
2311 mode = TYPE_MODE (TREE_TYPE (exp));
2313 /* Before working hard, check whether the instruction is available. */
2314 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2317 target = gen_reg_rtx (mode);
2319 /* Always stabilize the argument list. */
2320 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2321 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2322 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2324 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2325 op1 = expand_normal (arg1);
2326 op2 = expand_normal (arg2);
2330 /* Compute into TARGET.
2331 Set TARGET to wherever the result comes back. */
2332 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2335 /* If we were unable to expand via the builtin, stop the sequence
2336 (without outputting the insns) and call to the library function
2337 with the stabilized argument list. */
2341 return expand_call (exp, target, target == const0_rtx);
2344 /* Output the entire sequence. */
2345 insns = get_insns ();
2352 /* Expand a call to the builtin sin and cos math functions.
2353 Return NULL_RTX if a normal call should be emitted rather than expanding the
2354 function in-line. EXP is the expression that is a call to the builtin
2355 function; if convenient, the result should be placed in TARGET.
2356 SUBTARGET may be used as the target for computing one of EXP's
2360 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2362 optab builtin_optab;
2364 tree fndecl = get_callee_fndecl (exp);
2365 enum machine_mode mode;
2368 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2371 arg = CALL_EXPR_ARG (exp, 0);
2373 switch (DECL_FUNCTION_CODE (fndecl))
2375 CASE_FLT_FN (BUILT_IN_SIN):
2376 CASE_FLT_FN (BUILT_IN_COS):
2377 builtin_optab = sincos_optab; break;
2382 /* Make a suitable register to place result in. */
2383 mode = TYPE_MODE (TREE_TYPE (exp));
2385 /* Check if sincos insn is available, otherwise fallback
2386 to sin or cos insn. */
2387 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2388 switch (DECL_FUNCTION_CODE (fndecl))
2390 CASE_FLT_FN (BUILT_IN_SIN):
2391 builtin_optab = sin_optab; break;
2392 CASE_FLT_FN (BUILT_IN_COS):
2393 builtin_optab = cos_optab; break;
2398 /* Before working hard, check whether the instruction is available. */
2399 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2401 target = gen_reg_rtx (mode);
2403 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2404 need to expand the argument again. This way, we will not perform
2405 side-effects more the once. */
2406 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2408 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2412 /* Compute into TARGET.
2413 Set TARGET to wherever the result comes back. */
2414 if (builtin_optab == sincos_optab)
2418 switch (DECL_FUNCTION_CODE (fndecl))
2420 CASE_FLT_FN (BUILT_IN_SIN):
2421 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2423 CASE_FLT_FN (BUILT_IN_COS):
2424 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2429 gcc_assert (result);
2433 target = expand_unop (mode, builtin_optab, op0, target, 0);
2438 /* Output the entire sequence. */
2439 insns = get_insns ();
2445 /* If we were unable to expand via the builtin, stop the sequence
2446 (without outputting the insns) and call to the library function
2447 with the stabilized argument list. */
2451 target = expand_call (exp, target, target == const0_rtx);
2456 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2457 return an RTL instruction code that implements the functionality.
2458 If that isn't possible or available return CODE_FOR_nothing. */
2460 static enum insn_code
2461 interclass_mathfn_icode (tree arg, tree fndecl)
2463 bool errno_set = false;
2464 optab builtin_optab = 0;
2465 enum machine_mode mode;
2467 switch (DECL_FUNCTION_CODE (fndecl))
2469 CASE_FLT_FN (BUILT_IN_ILOGB):
2470 errno_set = true; builtin_optab = ilogb_optab; break;
2471 CASE_FLT_FN (BUILT_IN_ISINF):
2472 builtin_optab = isinf_optab; break;
2473 case BUILT_IN_ISNORMAL:
2474 case BUILT_IN_ISFINITE:
2475 CASE_FLT_FN (BUILT_IN_FINITE):
2476 case BUILT_IN_FINITED32:
2477 case BUILT_IN_FINITED64:
2478 case BUILT_IN_FINITED128:
2479 case BUILT_IN_ISINFD32:
2480 case BUILT_IN_ISINFD64:
2481 case BUILT_IN_ISINFD128:
2482 /* These builtins have no optabs (yet). */
2488 /* There's no easy way to detect the case we need to set EDOM. */
2489 if (flag_errno_math && errno_set)
2490 return CODE_FOR_nothing;
2492 /* Optab mode depends on the mode of the input argument. */
2493 mode = TYPE_MODE (TREE_TYPE (arg));
2496 return optab_handler (builtin_optab, mode);
2497 return CODE_FOR_nothing;
2500 /* Expand a call to one of the builtin math functions that operate on
2501 floating point argument and output an integer result (ilogb, isinf,
2503 Return 0 if a normal call should be emitted rather than expanding the
2504 function in-line. EXP is the expression that is a call to the builtin
2505 function; if convenient, the result should be placed in TARGET. */
2508 expand_builtin_interclass_mathfn (tree exp, rtx target)
2510 enum insn_code icode = CODE_FOR_nothing;
2512 tree fndecl = get_callee_fndecl (exp);
2513 enum machine_mode mode;
2516 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2519 arg = CALL_EXPR_ARG (exp, 0);
2520 icode = interclass_mathfn_icode (arg, fndecl);
2521 mode = TYPE_MODE (TREE_TYPE (arg));
2523 if (icode != CODE_FOR_nothing)
2525 struct expand_operand ops[1];
2526 rtx last = get_last_insn ();
2527 tree orig_arg = arg;
2529 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2530 need to expand the argument again. This way, we will not perform
2531 side-effects more the once. */
2532 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2534 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2536 if (mode != GET_MODE (op0))
2537 op0 = convert_to_mode (mode, op0, 0);
2539 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2540 if (maybe_legitimize_operands (icode, 0, 1, ops)
2541 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2542 return ops[0].value;
2544 delete_insns_since (last);
2545 CALL_EXPR_ARG (exp, 0) = orig_arg;
2551 /* Expand a call to the builtin sincos math function.
2552 Return NULL_RTX if a normal call should be emitted rather than expanding the
2553 function in-line. EXP is the expression that is a call to the builtin
2557 expand_builtin_sincos (tree exp)
2559 rtx op0, op1, op2, target1, target2;
2560 enum machine_mode mode;
2561 tree arg, sinp, cosp;
2563 location_t loc = EXPR_LOCATION (exp);
2564 tree alias_type, alias_off;
2566 if (!validate_arglist (exp, REAL_TYPE,
2567 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2570 arg = CALL_EXPR_ARG (exp, 0);
2571 sinp = CALL_EXPR_ARG (exp, 1);
2572 cosp = CALL_EXPR_ARG (exp, 2);
2574 /* Make a suitable register to place result in. */
2575 mode = TYPE_MODE (TREE_TYPE (arg));
2577 /* Check if sincos insn is available, otherwise emit the call. */
2578 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2581 target1 = gen_reg_rtx (mode);
2582 target2 = gen_reg_rtx (mode);
2584 op0 = expand_normal (arg);
2585 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2586 alias_off = build_int_cst (alias_type, 0);
2587 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2589 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2592 /* Compute into target1 and target2.
2593 Set TARGET to wherever the result comes back. */
2594 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2595 gcc_assert (result);
2597 /* Move target1 and target2 to the memory locations indicated
2599 emit_move_insn (op1, target1);
2600 emit_move_insn (op2, target2);
2605 /* Expand a call to the internal cexpi builtin to the sincos math function.
2606 EXP is the expression that is a call to the builtin function; if convenient,
2607 the result should be placed in TARGET. */
2610 expand_builtin_cexpi (tree exp, rtx target)
2612 tree fndecl = get_callee_fndecl (exp);
2614 enum machine_mode mode;
2616 location_t loc = EXPR_LOCATION (exp);
2618 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2621 arg = CALL_EXPR_ARG (exp, 0);
2622 type = TREE_TYPE (arg);
2623 mode = TYPE_MODE (TREE_TYPE (arg));
2625 /* Try expanding via a sincos optab, fall back to emitting a libcall
2626 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2627 is only generated from sincos, cexp or if we have either of them. */
2628 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2630 op1 = gen_reg_rtx (mode);
2631 op2 = gen_reg_rtx (mode);
2633 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2635 /* Compute into op1 and op2. */
2636 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2638 else if (TARGET_HAS_SINCOS)
2640 tree call, fn = NULL_TREE;
2644 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2645 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2646 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2647 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2648 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2649 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2653 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2654 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2655 op1a = copy_addr_to_reg (XEXP (op1, 0));
2656 op2a = copy_addr_to_reg (XEXP (op2, 0));
2657 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2658 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2660 /* Make sure not to fold the sincos call again. */
2661 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2662 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2663 call, 3, arg, top1, top2));
2667 tree call, fn = NULL_TREE, narg;
2668 tree ctype = build_complex_type (type);
2670 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2671 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2672 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2673 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2674 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2675 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2679 /* If we don't have a decl for cexp create one. This is the
2680 friendliest fallback if the user calls __builtin_cexpi
2681 without full target C99 function support. */
2682 if (fn == NULL_TREE)
2685 const char *name = NULL;
2687 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2689 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2691 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2694 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2695 fn = build_fn_decl (name, fntype);
2698 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2699 build_real (type, dconst0), arg);
2701 /* Make sure not to fold the cexp call again. */
2702 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2703 return expand_expr (build_call_nary (ctype, call, 1, narg),
2704 target, VOIDmode, EXPAND_NORMAL);
2707 /* Now build the proper return type. */
2708 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2709 make_tree (TREE_TYPE (arg), op2),
2710 make_tree (TREE_TYPE (arg), op1)),
2711 target, VOIDmode, EXPAND_NORMAL);
2714 /* Conveniently construct a function call expression. FNDECL names the
2715 function to be called, N is the number of arguments, and the "..."
2716 parameters are the argument expressions. Unlike build_call_exr
2717 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2720 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2723 tree fntype = TREE_TYPE (fndecl);
2724 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2727 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2729 SET_EXPR_LOCATION (fn, loc);
2733 /* Expand a call to one of the builtin rounding functions gcc defines
2734 as an extension (lfloor and lceil). As these are gcc extensions we
2735 do not need to worry about setting errno to EDOM.
2736 If expanding via optab fails, lower expression to (int)(floor(x)).
2737 EXP is the expression that is a call to the builtin function;
2738 if convenient, the result should be placed in TARGET. */
2741 expand_builtin_int_roundingfn (tree exp, rtx target)
2743 convert_optab builtin_optab;
2744 rtx op0, insns, tmp;
2745 tree fndecl = get_callee_fndecl (exp);
2746 enum built_in_function fallback_fn;
2747 tree fallback_fndecl;
2748 enum machine_mode mode;
2751 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2754 arg = CALL_EXPR_ARG (exp, 0);
2756 switch (DECL_FUNCTION_CODE (fndecl))
2758 CASE_FLT_FN (BUILT_IN_ICEIL):
2759 CASE_FLT_FN (BUILT_IN_LCEIL):
2760 CASE_FLT_FN (BUILT_IN_LLCEIL):
2761 builtin_optab = lceil_optab;
2762 fallback_fn = BUILT_IN_CEIL;
2765 CASE_FLT_FN (BUILT_IN_IFLOOR):
2766 CASE_FLT_FN (BUILT_IN_LFLOOR):
2767 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2768 builtin_optab = lfloor_optab;
2769 fallback_fn = BUILT_IN_FLOOR;
2776 /* Make a suitable register to place result in. */
2777 mode = TYPE_MODE (TREE_TYPE (exp));
2779 target = gen_reg_rtx (mode);
2781 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2782 need to expand the argument again. This way, we will not perform
2783 side-effects more the once. */
2784 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2786 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2790 /* Compute into TARGET. */
2791 if (expand_sfix_optab (target, op0, builtin_optab))
2793 /* Output the entire sequence. */
2794 insns = get_insns ();
2800 /* If we were unable to expand via the builtin, stop the sequence
2801 (without outputting the insns). */
2804 /* Fall back to floating point rounding optab. */
2805 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2807 /* For non-C99 targets we may end up without a fallback fndecl here
2808 if the user called __builtin_lfloor directly. In this case emit
2809 a call to the floor/ceil variants nevertheless. This should result
2810 in the best user experience for not full C99 targets. */
2811 if (fallback_fndecl == NULL_TREE)
2814 const char *name = NULL;
2816 switch (DECL_FUNCTION_CODE (fndecl))
2818 case BUILT_IN_ICEIL:
2819 case BUILT_IN_LCEIL:
2820 case BUILT_IN_LLCEIL:
2823 case BUILT_IN_ICEILF:
2824 case BUILT_IN_LCEILF:
2825 case BUILT_IN_LLCEILF:
2828 case BUILT_IN_ICEILL:
2829 case BUILT_IN_LCEILL:
2830 case BUILT_IN_LLCEILL:
2833 case BUILT_IN_IFLOOR:
2834 case BUILT_IN_LFLOOR:
2835 case BUILT_IN_LLFLOOR:
2838 case BUILT_IN_IFLOORF:
2839 case BUILT_IN_LFLOORF:
2840 case BUILT_IN_LLFLOORF:
2843 case BUILT_IN_IFLOORL:
2844 case BUILT_IN_LFLOORL:
2845 case BUILT_IN_LLFLOORL:
2852 fntype = build_function_type_list (TREE_TYPE (arg),
2853 TREE_TYPE (arg), NULL_TREE);
2854 fallback_fndecl = build_fn_decl (name, fntype);
2857 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2859 tmp = expand_normal (exp);
2861 /* Truncate the result of floating point optab to integer
2862 via expand_fix (). */
2863 target = gen_reg_rtx (mode);
2864 expand_fix (target, tmp, 0);
2869 /* Expand a call to one of the builtin math functions doing integer
2871 Return 0 if a normal call should be emitted rather than expanding the
2872 function in-line. EXP is the expression that is a call to the builtin
2873 function; if convenient, the result should be placed in TARGET. */
2876 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2878 convert_optab builtin_optab;
2880 tree fndecl = get_callee_fndecl (exp);
2882 enum machine_mode mode;
2883 enum built_in_function fallback_fn = BUILT_IN_NONE;
2885 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2888 arg = CALL_EXPR_ARG (exp, 0);
2890 switch (DECL_FUNCTION_CODE (fndecl))
2892 CASE_FLT_FN (BUILT_IN_IRINT):
2893 fallback_fn = BUILT_IN_LRINT;
2895 CASE_FLT_FN (BUILT_IN_LRINT):
2896 CASE_FLT_FN (BUILT_IN_LLRINT):
2897 builtin_optab = lrint_optab;
2900 CASE_FLT_FN (BUILT_IN_IROUND):
2901 fallback_fn = BUILT_IN_LROUND;
2903 CASE_FLT_FN (BUILT_IN_LROUND):
2904 CASE_FLT_FN (BUILT_IN_LLROUND):
2905 builtin_optab = lround_optab;
2912 /* There's no easy way to detect the case we need to set EDOM. */
2913 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2916 /* Make a suitable register to place result in. */
2917 mode = TYPE_MODE (TREE_TYPE (exp));
2919 /* There's no easy way to detect the case we need to set EDOM. */
2920 if (!flag_errno_math)
2922 target = gen_reg_rtx (mode);
2924 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2925 need to expand the argument again. This way, we will not perform
2926 side-effects more the once. */
2927 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2929 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2933 if (expand_sfix_optab (target, op0, builtin_optab))
2935 /* Output the entire sequence. */
2936 insns = get_insns ();
2942 /* If we were unable to expand via the builtin, stop the sequence
2943 (without outputting the insns) and call to the library function
2944 with the stabilized argument list. */
2948 if (fallback_fn != BUILT_IN_NONE)
2950 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2951 targets, (int) round (x) should never be transformed into
2952 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2953 a call to lround in the hope that the target provides at least some
2954 C99 functions. This should result in the best user experience for
2955 not full C99 targets. */
2956 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2959 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2960 fallback_fndecl, 1, arg);
2962 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2963 return convert_to_mode (mode, target, 0);
2966 target = expand_call (exp, target, target == const0_rtx);
2971 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2972 a normal call should be emitted rather than expanding the function
2973 in-line. EXP is the expression that is a call to the builtin
2974 function; if convenient, the result should be placed in TARGET. */
2977 expand_builtin_powi (tree exp, rtx target)
2981 enum machine_mode mode;
2982 enum machine_mode mode2;
2984 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2987 arg0 = CALL_EXPR_ARG (exp, 0);
2988 arg1 = CALL_EXPR_ARG (exp, 1);
2989 mode = TYPE_MODE (TREE_TYPE (exp));
2991 /* Emit a libcall to libgcc. */
2993 /* Mode of the 2nd argument must match that of an int. */
2994 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2996 if (target == NULL_RTX)
2997 target = gen_reg_rtx (mode);
2999 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3000 if (GET_MODE (op0) != mode)
3001 op0 = convert_to_mode (mode, op0, 0);
3002 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3003 if (GET_MODE (op1) != mode2)
3004 op1 = convert_to_mode (mode2, op1, 0);
3006 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3007 target, LCT_CONST, mode, 2,
3008 op0, mode, op1, mode2);
3013 /* Expand expression EXP which is a call to the strlen builtin. Return
3014 NULL_RTX if we failed the caller should emit a normal call, otherwise
3015 try to get the result in TARGET, if convenient. */
3018 expand_builtin_strlen (tree exp, rtx target,
3019 enum machine_mode target_mode)
3021 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3025 struct expand_operand ops[4];
3028 tree src = CALL_EXPR_ARG (exp, 0);
3029 rtx src_reg, before_strlen;
3030 enum machine_mode insn_mode = target_mode;
3031 enum insn_code icode = CODE_FOR_nothing;
3034 /* If the length can be computed at compile-time, return it. */
3035 len = c_strlen (src, 0);
3037 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3039 /* If the length can be computed at compile-time and is constant
3040 integer, but there are side-effects in src, evaluate
3041 src for side-effects, then return len.
3042 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3043 can be optimized into: i++; x = 3; */
3044 len = c_strlen (src, 1);
3045 if (len && TREE_CODE (len) == INTEGER_CST)
3047 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3048 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3051 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3053 /* If SRC is not a pointer type, don't do this operation inline. */
3057 /* Bail out if we can't compute strlen in the right mode. */
3058 while (insn_mode != VOIDmode)
3060 icode = optab_handler (strlen_optab, insn_mode);
3061 if (icode != CODE_FOR_nothing)
3064 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3066 if (insn_mode == VOIDmode)
3069 /* Make a place to hold the source address. We will not expand
3070 the actual source until we are sure that the expansion will
3071 not fail -- there are trees that cannot be expanded twice. */
3072 src_reg = gen_reg_rtx (Pmode);
3074 /* Mark the beginning of the strlen sequence so we can emit the
3075 source operand later. */
3076 before_strlen = get_last_insn ();
3078 create_output_operand (&ops[0], target, insn_mode);
3079 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3080 create_integer_operand (&ops[2], 0);
3081 create_integer_operand (&ops[3], align);
3082 if (!maybe_expand_insn (icode, 4, ops))
3085 /* Now that we are assured of success, expand the source. */
3087 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3090 #ifdef POINTERS_EXTEND_UNSIGNED
3091 if (GET_MODE (pat) != Pmode)
3092 pat = convert_to_mode (Pmode, pat,
3093 POINTERS_EXTEND_UNSIGNED);
3095 emit_move_insn (src_reg, pat);
3101 emit_insn_after (pat, before_strlen);
3103 emit_insn_before (pat, get_insns ());
3105 /* Return the value in the proper mode for this function. */
3106 if (GET_MODE (ops[0].value) == target_mode)
3107 target = ops[0].value;
3108 else if (target != 0)
3109 convert_move (target, ops[0].value, 0);
3111 target = convert_to_mode (target_mode, ops[0].value, 0);
3117 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3118 bytes from constant string DATA + OFFSET and return it as target
3122 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3123 enum machine_mode mode)
3125 const char *str = (const char *) data;
3127 gcc_assert (offset >= 0
3128 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3129 <= strlen (str) + 1));
3131 return c_readstr (str + offset, mode);
3134 /* Expand a call EXP to the memcpy builtin.
3135 Return NULL_RTX if we failed, the caller should emit a normal call,
3136 otherwise try to get the result in TARGET, if convenient (and in
3137 mode MODE if that's convenient). */
3140 expand_builtin_memcpy (tree exp, rtx target)
3142 if (!validate_arglist (exp,
3143 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3147 tree dest = CALL_EXPR_ARG (exp, 0);
3148 tree src = CALL_EXPR_ARG (exp, 1);
3149 tree len = CALL_EXPR_ARG (exp, 2);
3150 const char *src_str;
3151 unsigned int src_align = get_pointer_alignment (src);
3152 unsigned int dest_align = get_pointer_alignment (dest);
3153 rtx dest_mem, src_mem, dest_addr, len_rtx;
3154 HOST_WIDE_INT expected_size = -1;
3155 unsigned int expected_align = 0;
3157 /* If DEST is not a pointer type, call the normal function. */
3158 if (dest_align == 0)
3161 /* If either SRC is not a pointer type, don't do this
3162 operation in-line. */
3166 if (currently_expanding_gimple_stmt)
3167 stringop_block_profile (currently_expanding_gimple_stmt,
3168 &expected_align, &expected_size);
3170 if (expected_align < dest_align)
3171 expected_align = dest_align;
3172 dest_mem = get_memory_rtx (dest, len);
3173 set_mem_align (dest_mem, dest_align);
3174 len_rtx = expand_normal (len);
3175 src_str = c_getstr (src);
3177 /* If SRC is a string constant and block move would be done
3178 by pieces, we can avoid loading the string from memory
3179 and only stored the computed constants. */
3181 && CONST_INT_P (len_rtx)
3182 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3183 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3184 CONST_CAST (char *, src_str),
3187 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3188 builtin_memcpy_read_str,
3189 CONST_CAST (char *, src_str),
3190 dest_align, false, 0);
3191 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3192 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3196 src_mem = get_memory_rtx (src, len);
3197 set_mem_align (src_mem, src_align);
3199 /* Copy word part most expediently. */
3200 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3201 CALL_EXPR_TAILCALL (exp)
3202 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3203 expected_align, expected_size);
3207 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3208 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3214 /* Expand a call EXP to the mempcpy builtin.
3215 Return NULL_RTX if we failed; the caller should emit a normal call,
3216 otherwise try to get the result in TARGET, if convenient (and in
3217 mode MODE if that's convenient). If ENDP is 0 return the
3218 destination pointer, if ENDP is 1 return the end pointer ala
3219 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3223 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3225 if (!validate_arglist (exp,
3226 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3230 tree dest = CALL_EXPR_ARG (exp, 0);
3231 tree src = CALL_EXPR_ARG (exp, 1);
3232 tree len = CALL_EXPR_ARG (exp, 2);
3233 return expand_builtin_mempcpy_args (dest, src, len,
3234 target, mode, /*endp=*/ 1);
3238 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3239 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3240 so that this can also be called without constructing an actual CALL_EXPR.
3241 The other arguments and return value are the same as for
3242 expand_builtin_mempcpy. */
3245 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3246 rtx target, enum machine_mode mode, int endp)
3248 /* If return value is ignored, transform mempcpy into memcpy. */
3249 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3251 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3252 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3254 return expand_expr (result, target, mode, EXPAND_NORMAL);
3258 const char *src_str;
3259 unsigned int src_align = get_pointer_alignment (src);
3260 unsigned int dest_align = get_pointer_alignment (dest);
3261 rtx dest_mem, src_mem, len_rtx;
3263 /* If either SRC or DEST is not a pointer type, don't do this
3264 operation in-line. */
3265 if (dest_align == 0 || src_align == 0)
3268 /* If LEN is not constant, call the normal function. */
3269 if (! host_integerp (len, 1))
3272 len_rtx = expand_normal (len);
3273 src_str = c_getstr (src);
3275 /* If SRC is a string constant and block move would be done
3276 by pieces, we can avoid loading the string from memory
3277 and only stored the computed constants. */
3279 && CONST_INT_P (len_rtx)
3280 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3281 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3282 CONST_CAST (char *, src_str),
3285 dest_mem = get_memory_rtx (dest, len);
3286 set_mem_align (dest_mem, dest_align);
3287 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3288 builtin_memcpy_read_str,
3289 CONST_CAST (char *, src_str),
3290 dest_align, false, endp);
3291 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3292 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3296 if (CONST_INT_P (len_rtx)
3297 && can_move_by_pieces (INTVAL (len_rtx),
3298 MIN (dest_align, src_align)))
3300 dest_mem = get_memory_rtx (dest, len);
3301 set_mem_align (dest_mem, dest_align);
3302 src_mem = get_memory_rtx (src, len);
3303 set_mem_align (src_mem, src_align);
3304 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3305 MIN (dest_align, src_align), endp);
3306 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3307 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3316 # define HAVE_movstr 0
3317 # define CODE_FOR_movstr CODE_FOR_nothing
3320 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3321 we failed, the caller should emit a normal call, otherwise try to
3322 get the result in TARGET, if convenient. If ENDP is 0 return the
3323 destination pointer, if ENDP is 1 return the end pointer ala
3324 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3328 expand_movstr (tree dest, tree src, rtx target, int endp)
3330 struct expand_operand ops[3];
3337 dest_mem = get_memory_rtx (dest, NULL);
3338 src_mem = get_memory_rtx (src, NULL);
3341 target = force_reg (Pmode, XEXP (dest_mem, 0));
3342 dest_mem = replace_equiv_address (dest_mem, target);
3345 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3346 create_fixed_operand (&ops[1], dest_mem);
3347 create_fixed_operand (&ops[2], src_mem);
3348 expand_insn (CODE_FOR_movstr, 3, ops);
3350 if (endp && target != const0_rtx)
3352 target = ops[0].value;
3353 /* movstr is supposed to set end to the address of the NUL
3354 terminator. If the caller requested a mempcpy-like return value,
3358 rtx tem = plus_constant (GET_MODE (target),
3359 gen_lowpart (GET_MODE (target), target), 1);
3360 emit_move_insn (target, force_operand (tem, NULL_RTX));
3366 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3367 NULL_RTX if we failed the caller should emit a normal call, otherwise
3368 try to get the result in TARGET, if convenient (and in mode MODE if that's
3372 expand_builtin_strcpy (tree exp, rtx target)
3374 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3376 tree dest = CALL_EXPR_ARG (exp, 0);
3377 tree src = CALL_EXPR_ARG (exp, 1);
3378 return expand_builtin_strcpy_args (dest, src, target);
3383 /* Helper function to do the actual work for expand_builtin_strcpy. The
3384 arguments to the builtin_strcpy call DEST and SRC are broken out
3385 so that this can also be called without constructing an actual CALL_EXPR.
3386 The other arguments and return value are the same as for
3387 expand_builtin_strcpy. */
3390 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3392 return expand_movstr (dest, src, target, /*endp=*/0);
3395 /* Expand a call EXP to the stpcpy builtin.
3396 Return NULL_RTX if we failed the caller should emit a normal call,
3397 otherwise try to get the result in TARGET, if convenient (and in
3398 mode MODE if that's convenient). */
3401 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3404 location_t loc = EXPR_LOCATION (exp);
3406 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3409 dst = CALL_EXPR_ARG (exp, 0);
3410 src = CALL_EXPR_ARG (exp, 1);
3412 /* If return value is ignored, transform stpcpy into strcpy. */
3413 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3415 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3416 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3417 return expand_expr (result, target, mode, EXPAND_NORMAL);
3424 /* Ensure we get an actual string whose length can be evaluated at
3425 compile-time, not an expression containing a string. This is
3426 because the latter will potentially produce pessimized code
3427 when used to produce the return value. */
3428 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3429 return expand_movstr (dst, src, target, /*endp=*/2);
3431 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3432 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3433 target, mode, /*endp=*/2);
3438 if (TREE_CODE (len) == INTEGER_CST)
3440 rtx len_rtx = expand_normal (len);
3442 if (CONST_INT_P (len_rtx))
3444 ret = expand_builtin_strcpy_args (dst, src, target);
3450 if (mode != VOIDmode)
3451 target = gen_reg_rtx (mode);
3453 target = gen_reg_rtx (GET_MODE (ret));
3455 if (GET_MODE (target) != GET_MODE (ret))
3456 ret = gen_lowpart (GET_MODE (target), ret);
3458 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3459 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3467 return expand_movstr (dst, src, target, /*endp=*/2);
3471 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3472 bytes from constant string DATA + OFFSET and return it as target
3476 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3477 enum machine_mode mode)
3479 const char *str = (const char *) data;
3481 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3484 return c_readstr (str + offset, mode);
3487 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3488 NULL_RTX if we failed the caller should emit a normal call. */
3491 expand_builtin_strncpy (tree exp, rtx target)
3493 location_t loc = EXPR_LOCATION (exp);
3495 if (validate_arglist (exp,
3496 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3498 tree dest = CALL_EXPR_ARG (exp, 0);
3499 tree src = CALL_EXPR_ARG (exp, 1);
3500 tree len = CALL_EXPR_ARG (exp, 2);
3501 tree slen = c_strlen (src, 1);
3503 /* We must be passed a constant len and src parameter. */
3504 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3507 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3509 /* We're required to pad with trailing zeros if the requested
3510 len is greater than strlen(s2)+1. In that case try to
3511 use store_by_pieces, if it fails, punt. */
3512 if (tree_int_cst_lt (slen, len))
3514 unsigned int dest_align = get_pointer_alignment (dest);
3515 const char *p = c_getstr (src);
3518 if (!p || dest_align == 0 || !host_integerp (len, 1)
3519 || !can_store_by_pieces (tree_low_cst (len, 1),
3520 builtin_strncpy_read_str,
3521 CONST_CAST (char *, p),
3525 dest_mem = get_memory_rtx (dest, len);
3526 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3527 builtin_strncpy_read_str,
3528 CONST_CAST (char *, p), dest_align, false, 0);
3529 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3530 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3537 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3538 bytes from constant string DATA + OFFSET and return it as target
3542 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3543 enum machine_mode mode)
3545 const char *c = (const char *) data;
3546 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3548 memset (p, *c, GET_MODE_SIZE (mode));
3550 return c_readstr (p, mode);
3553 /* Callback routine for store_by_pieces. Return the RTL of a register
3554 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3555 char value given in the RTL register data. For example, if mode is
3556 4 bytes wide, return the RTL for 0x01010101*data. */
3559 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3560 enum machine_mode mode)
3566 size = GET_MODE_SIZE (mode);
3570 p = XALLOCAVEC (char, size);
3571 memset (p, 1, size);
3572 coeff = c_readstr (p, mode);
3574 target = convert_to_mode (mode, (rtx) data, 1);
3575 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3576 return force_reg (mode, target);
3579 /* Expand expression EXP, which is a call to the memset builtin. Return
3580 NULL_RTX if we failed the caller should emit a normal call, otherwise
3581 try to get the result in TARGET, if convenient (and in mode MODE if that's
3585 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3587 if (!validate_arglist (exp,
3588 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3592 tree dest = CALL_EXPR_ARG (exp, 0);
3593 tree val = CALL_EXPR_ARG (exp, 1);
3594 tree len = CALL_EXPR_ARG (exp, 2);
3595 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3599 /* Helper function to do the actual work for expand_builtin_memset. The
3600 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3601 so that this can also be called without constructing an actual CALL_EXPR.
3602 The other arguments and return value are the same as for
3603 expand_builtin_memset. */
3606 expand_builtin_memset_args (tree dest, tree val, tree len,
3607 rtx target, enum machine_mode mode, tree orig_exp)
3610 enum built_in_function fcode;
3611 enum machine_mode val_mode;
3613 unsigned int dest_align;
3614 rtx dest_mem, dest_addr, len_rtx;
3615 HOST_WIDE_INT expected_size = -1;
3616 unsigned int expected_align = 0;
3618 dest_align = get_pointer_alignment (dest);
3620 /* If DEST is not a pointer type, don't do this operation in-line. */
3621 if (dest_align == 0)
3624 if (currently_expanding_gimple_stmt)
3625 stringop_block_profile (currently_expanding_gimple_stmt,
3626 &expected_align, &expected_size);
3628 if (expected_align < dest_align)
3629 expected_align = dest_align;
3631 /* If the LEN parameter is zero, return DEST. */
3632 if (integer_zerop (len))
3634 /* Evaluate and ignore VAL in case it has side-effects. */
3635 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3636 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3639 /* Stabilize the arguments in case we fail. */
3640 dest = builtin_save_expr (dest);
3641 val = builtin_save_expr (val);
3642 len = builtin_save_expr (len);
3644 len_rtx = expand_normal (len);
3645 dest_mem = get_memory_rtx (dest, len);
3646 val_mode = TYPE_MODE (unsigned_char_type_node);
3648 if (TREE_CODE (val) != INTEGER_CST)
3652 val_rtx = expand_normal (val);
3653 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3655 /* Assume that we can memset by pieces if we can store
3656 * the coefficients by pieces (in the required modes).
3657 * We can't pass builtin_memset_gen_str as that emits RTL. */
3659 if (host_integerp (len, 1)
3660 && can_store_by_pieces (tree_low_cst (len, 1),
3661 builtin_memset_read_str, &c, dest_align,
3664 val_rtx = force_reg (val_mode, val_rtx);
3665 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3666 builtin_memset_gen_str, val_rtx, dest_align,
3669 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3670 dest_align, expected_align,
3674 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3675 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3679 if (target_char_cast (val, &c))
3684 if (host_integerp (len, 1)
3685 && can_store_by_pieces (tree_low_cst (len, 1),
3686 builtin_memset_read_str, &c, dest_align,
3688 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3689 builtin_memset_read_str, &c, dest_align, true, 0);
3690 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3691 gen_int_mode (c, val_mode),
3692 dest_align, expected_align,
3696 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3697 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3701 set_mem_align (dest_mem, dest_align);
3702 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3703 CALL_EXPR_TAILCALL (orig_exp)
3704 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3705 expected_align, expected_size);
3709 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3710 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3716 fndecl = get_callee_fndecl (orig_exp);
3717 fcode = DECL_FUNCTION_CODE (fndecl);
3718 if (fcode == BUILT_IN_MEMSET)
3719 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3721 else if (fcode == BUILT_IN_BZERO)
3722 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3726 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3727 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3728 return expand_call (fn, target, target == const0_rtx);
3731 /* Expand expression EXP, which is a call to the bzero builtin. Return
3732 NULL_RTX if we failed the caller should emit a normal call. */
3735 expand_builtin_bzero (tree exp)
3738 location_t loc = EXPR_LOCATION (exp);
3740 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3743 dest = CALL_EXPR_ARG (exp, 0);
3744 size = CALL_EXPR_ARG (exp, 1);
3746 /* New argument list transforming bzero(ptr x, int y) to
3747 memset(ptr x, int 0, size_t y). This is done this way
3748 so that if it isn't expanded inline, we fallback to
3749 calling bzero instead of memset. */
3751 return expand_builtin_memset_args (dest, integer_zero_node,
3752 fold_convert_loc (loc,
3753 size_type_node, size),
3754 const0_rtx, VOIDmode, exp);
3757 /* Expand expression EXP, which is a call to the memcmp built-in function.
3758 Return NULL_RTX if we failed and the caller should emit a normal call,
3759 otherwise try to get the result in TARGET, if convenient (and in mode
3760 MODE, if that's convenient). */
3763 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3764 ATTRIBUTE_UNUSED enum machine_mode mode)
3766 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3768 if (!validate_arglist (exp,
3769 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3772 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3773 implementing memcmp because it will stop if it encounters two
3775 #if defined HAVE_cmpmemsi
3777 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3780 tree arg1 = CALL_EXPR_ARG (exp, 0);
3781 tree arg2 = CALL_EXPR_ARG (exp, 1);
3782 tree len = CALL_EXPR_ARG (exp, 2);
3784 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3785 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3786 enum machine_mode insn_mode;
3789 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3793 /* If we don't have POINTER_TYPE, call the function. */
3794 if (arg1_align == 0 || arg2_align == 0)
3797 /* Make a place to write the result of the instruction. */
3800 && REG_P (result) && GET_MODE (result) == insn_mode
3801 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3802 result = gen_reg_rtx (insn_mode);
3804 arg1_rtx = get_memory_rtx (arg1, len);
3805 arg2_rtx = get_memory_rtx (arg2, len);
3806 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3808 /* Set MEM_SIZE as appropriate. */
3809 if (CONST_INT_P (arg3_rtx))
3811 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3812 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3816 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3817 GEN_INT (MIN (arg1_align, arg2_align)));
3824 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3825 TYPE_MODE (integer_type_node), 3,
3826 XEXP (arg1_rtx, 0), Pmode,
3827 XEXP (arg2_rtx, 0), Pmode,
3828 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3829 TYPE_UNSIGNED (sizetype)),
3830 TYPE_MODE (sizetype));
3832 /* Return the value in the proper mode for this function. */
3833 mode = TYPE_MODE (TREE_TYPE (exp));
3834 if (GET_MODE (result) == mode)
3836 else if (target != 0)
3838 convert_move (target, result, 0);
3842 return convert_to_mode (mode, result, 0);
3844 #endif /* HAVE_cmpmemsi. */
3849 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3850 if we failed the caller should emit a normal call, otherwise try to get
3851 the result in TARGET, if convenient. */
3854 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3856 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3859 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3860 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3861 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3863 rtx arg1_rtx, arg2_rtx;
3864 rtx result, insn = NULL_RTX;
3866 tree arg1 = CALL_EXPR_ARG (exp, 0);
3867 tree arg2 = CALL_EXPR_ARG (exp, 1);
3869 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3870 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3872 /* If we don't have POINTER_TYPE, call the function. */
3873 if (arg1_align == 0 || arg2_align == 0)
3876 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3877 arg1 = builtin_save_expr (arg1);
3878 arg2 = builtin_save_expr (arg2);
3880 arg1_rtx = get_memory_rtx (arg1, NULL);
3881 arg2_rtx = get_memory_rtx (arg2, NULL);
3883 #ifdef HAVE_cmpstrsi
3884 /* Try to call cmpstrsi. */
3887 enum machine_mode insn_mode
3888 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3890 /* Make a place to write the result of the instruction. */
3893 && REG_P (result) && GET_MODE (result) == insn_mode
3894 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3895 result = gen_reg_rtx (insn_mode);
3897 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3898 GEN_INT (MIN (arg1_align, arg2_align)));
3901 #ifdef HAVE_cmpstrnsi
3902 /* Try to determine at least one length and call cmpstrnsi. */
3903 if (!insn && HAVE_cmpstrnsi)
3908 enum machine_mode insn_mode
3909 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3910 tree len1 = c_strlen (arg1, 1);
3911 tree len2 = c_strlen (arg2, 1);
3914 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3916 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3918 /* If we don't have a constant length for the first, use the length
3919 of the second, if we know it. We don't require a constant for
3920 this case; some cost analysis could be done if both are available
3921 but neither is constant. For now, assume they're equally cheap,
3922 unless one has side effects. If both strings have constant lengths,
3929 else if (TREE_SIDE_EFFECTS (len1))
3931 else if (TREE_SIDE_EFFECTS (len2))
3933 else if (TREE_CODE (len1) != INTEGER_CST)
3935 else if (TREE_CODE (len2) != INTEGER_CST)
3937 else if (tree_int_cst_lt (len1, len2))
3942 /* If both arguments have side effects, we cannot optimize. */
3943 if (!len || TREE_SIDE_EFFECTS (len))
3946 arg3_rtx = expand_normal (len);
3948 /* Make a place to write the result of the instruction. */
3951 && REG_P (result) && GET_MODE (result) == insn_mode
3952 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3953 result = gen_reg_rtx (insn_mode);
3955 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3956 GEN_INT (MIN (arg1_align, arg2_align)));
3962 enum machine_mode mode;
3965 /* Return the value in the proper mode for this function. */
3966 mode = TYPE_MODE (TREE_TYPE (exp));
3967 if (GET_MODE (result) == mode)
3970 return convert_to_mode (mode, result, 0);
3971 convert_move (target, result, 0);
3975 /* Expand the library call ourselves using a stabilized argument
3976 list to avoid re-evaluating the function's arguments twice. */
3977 #ifdef HAVE_cmpstrnsi
3980 fndecl = get_callee_fndecl (exp);
3981 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3982 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3983 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3984 return expand_call (fn, target, target == const0_rtx);
3990 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3991 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3992 the result in TARGET, if convenient. */
3995 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3996 ATTRIBUTE_UNUSED enum machine_mode mode)
3998 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4000 if (!validate_arglist (exp,
4001 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4004 /* If c_strlen can determine an expression for one of the string
4005 lengths, and it doesn't have side effects, then emit cmpstrnsi
4006 using length MIN(strlen(string)+1, arg3). */
4007 #ifdef HAVE_cmpstrnsi
4010 tree len, len1, len2;
4011 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4014 tree arg1 = CALL_EXPR_ARG (exp, 0);
4015 tree arg2 = CALL_EXPR_ARG (exp, 1);
4016 tree arg3 = CALL_EXPR_ARG (exp, 2);
4018 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4019 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4020 enum machine_mode insn_mode
4021 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4023 len1 = c_strlen (arg1, 1);
4024 len2 = c_strlen (arg2, 1);
4027 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4029 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4031 /* If we don't have a constant length for the first, use the length
4032 of the second, if we know it. We don't require a constant for
4033 this case; some cost analysis could be done if both are available
4034 but neither is constant. For now, assume they're equally cheap,
4035 unless one has side effects. If both strings have constant lengths,
4042 else if (TREE_SIDE_EFFECTS (len1))
4044 else if (TREE_SIDE_EFFECTS (len2))
4046 else if (TREE_CODE (len1) != INTEGER_CST)
4048 else if (TREE_CODE (len2) != INTEGER_CST)
4050 else if (tree_int_cst_lt (len1, len2))
4055 /* If both arguments have side effects, we cannot optimize. */
4056 if (!len || TREE_SIDE_EFFECTS (len))
4059 /* The actual new length parameter is MIN(len,arg3). */
4060 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4061 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4063 /* If we don't have POINTER_TYPE, call the function. */
4064 if (arg1_align == 0 || arg2_align == 0)
4067 /* Make a place to write the result of the instruction. */
4070 && REG_P (result) && GET_MODE (result) == insn_mode
4071 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4072 result = gen_reg_rtx (insn_mode);
4074 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4075 arg1 = builtin_save_expr (arg1);
4076 arg2 = builtin_save_expr (arg2);
4077 len = builtin_save_expr (len);
4079 arg1_rtx = get_memory_rtx (arg1, len);
4080 arg2_rtx = get_memory_rtx (arg2, len);
4081 arg3_rtx = expand_normal (len);
4082 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4083 GEN_INT (MIN (arg1_align, arg2_align)));
4088 /* Return the value in the proper mode for this function. */
4089 mode = TYPE_MODE (TREE_TYPE (exp));
4090 if (GET_MODE (result) == mode)
4093 return convert_to_mode (mode, result, 0);
4094 convert_move (target, result, 0);
4098 /* Expand the library call ourselves using a stabilized argument
4099 list to avoid re-evaluating the function's arguments twice. */
4100 fndecl = get_callee_fndecl (exp);
4101 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4103 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4104 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4105 return expand_call (fn, target, target == const0_rtx);
4111 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4112 if that's convenient. */
4115 expand_builtin_saveregs (void)
4119 /* Don't do __builtin_saveregs more than once in a function.
4120 Save the result of the first call and reuse it. */
4121 if (saveregs_value != 0)
4122 return saveregs_value;
4124 /* When this function is called, it means that registers must be
4125 saved on entry to this function. So we migrate the call to the
4126 first insn of this function. */
4130 /* Do whatever the machine needs done in this case. */
4131 val = targetm.calls.expand_builtin_saveregs ();
4136 saveregs_value = val;
4138 /* Put the insns after the NOTE that starts the function. If this
4139 is inside a start_sequence, make the outer-level insn chain current, so
4140 the code is placed at the start of the function. */
4141 push_topmost_sequence ();
4142 emit_insn_after (seq, entry_of_function ());
4143 pop_topmost_sequence ();
4148 /* Expand a call to __builtin_next_arg. */
4151 expand_builtin_next_arg (void)
4153 /* Checking arguments is already done in fold_builtin_next_arg
4154 that must be called before this function. */
4155 return expand_binop (ptr_mode, add_optab,
4156 crtl->args.internal_arg_pointer,
4157 crtl->args.arg_offset_rtx,
4158 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4161 /* Make it easier for the backends by protecting the valist argument
4162 from multiple evaluations. */
4165 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4167 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4169 /* The current way of determining the type of valist is completely
4170 bogus. We should have the information on the va builtin instead. */
4172 vatype = targetm.fn_abi_va_list (cfun->decl);
4174 if (TREE_CODE (vatype) == ARRAY_TYPE)
4176 if (TREE_SIDE_EFFECTS (valist))
4177 valist = save_expr (valist);
4179 /* For this case, the backends will be expecting a pointer to
4180 vatype, but it's possible we've actually been given an array
4181 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4183 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4185 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4186 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4191 tree pt = build_pointer_type (vatype);
4195 if (! TREE_SIDE_EFFECTS (valist))
4198 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4199 TREE_SIDE_EFFECTS (valist) = 1;
4202 if (TREE_SIDE_EFFECTS (valist))
4203 valist = save_expr (valist);
4204 valist = fold_build2_loc (loc, MEM_REF,
4205 vatype, valist, build_int_cst (pt, 0));
4211 /* The "standard" definition of va_list is void*. */
4214 std_build_builtin_va_list (void)
4216 return ptr_type_node;
4219 /* The "standard" abi va_list is va_list_type_node. */
4222 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4224 return va_list_type_node;
4227 /* The "standard" type of va_list is va_list_type_node. */
4230 std_canonical_va_list_type (tree type)
4234 if (INDIRECT_REF_P (type))
4235 type = TREE_TYPE (type);
4236 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4237 type = TREE_TYPE (type);
4238 wtype = va_list_type_node;
4240 /* Treat structure va_list types. */
4241 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4242 htype = TREE_TYPE (htype);
4243 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4245 /* If va_list is an array type, the argument may have decayed
4246 to a pointer type, e.g. by being passed to another function.
4247 In that case, unwrap both types so that we can compare the
4248 underlying records. */
4249 if (TREE_CODE (htype) == ARRAY_TYPE
4250 || POINTER_TYPE_P (htype))
4252 wtype = TREE_TYPE (wtype);
4253 htype = TREE_TYPE (htype);
4256 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4257 return va_list_type_node;
4262 /* The "standard" implementation of va_start: just assign `nextarg' to
4266 std_expand_builtin_va_start (tree valist, rtx nextarg)
4268 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4269 convert_move (va_r, nextarg, 0);
4272 /* Expand EXP, a call to __builtin_va_start. */
4275 expand_builtin_va_start (tree exp)
4279 location_t loc = EXPR_LOCATION (exp);
4281 if (call_expr_nargs (exp) < 2)
4283 error_at (loc, "too few arguments to function %<va_start%>");
4287 if (fold_builtin_next_arg (exp, true))
4290 nextarg = expand_builtin_next_arg ();
4291 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4293 if (targetm.expand_builtin_va_start)
4294 targetm.expand_builtin_va_start (valist, nextarg);
4296 std_expand_builtin_va_start (valist, nextarg);
4301 /* The "standard" implementation of va_arg: read the value from the
4302 current (padded) address and increment by the (padded) size. */
4305 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4308 tree addr, t, type_size, rounded_size, valist_tmp;
4309 unsigned HOST_WIDE_INT align, boundary;
4312 #ifdef ARGS_GROW_DOWNWARD
4313 /* All of the alignment and movement below is for args-grow-up machines.
4314 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4315 implement their own specialized gimplify_va_arg_expr routines. */
4319 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4321 type = build_pointer_type (type);
4323 align = PARM_BOUNDARY / BITS_PER_UNIT;
4324 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4326 /* When we align parameter on stack for caller, if the parameter
4327 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4328 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4329 here with caller. */
4330 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4331 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4333 boundary /= BITS_PER_UNIT;
4335 /* Hoist the valist value into a temporary for the moment. */
4336 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4338 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4339 requires greater alignment, we must perform dynamic alignment. */
4340 if (boundary > align
4341 && !integer_zerop (TYPE_SIZE (type)))
4343 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4344 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4345 gimplify_and_add (t, pre_p);
4347 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4348 fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
4350 build_int_cst (TREE_TYPE (valist), -boundary)));
4351 gimplify_and_add (t, pre_p);
4356 /* If the actual alignment is less than the alignment of the type,
4357 adjust the type accordingly so that we don't assume strict alignment
4358 when dereferencing the pointer. */
4359 boundary *= BITS_PER_UNIT;
4360 if (boundary < TYPE_ALIGN (type))
4362 type = build_variant_type_copy (type);
4363 TYPE_ALIGN (type) = boundary;
4366 /* Compute the rounded size of the type. */
4367 type_size = size_in_bytes (type);
4368 rounded_size = round_up (type_size, align);
4370 /* Reduce rounded_size so it's sharable with the postqueue. */
4371 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4375 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4377 /* Small args are padded downward. */
4378 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4379 rounded_size, size_int (align));
4380 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4381 size_binop (MINUS_EXPR, rounded_size, type_size));
4382 addr = fold_build_pointer_plus (addr, t);
4385 /* Compute new value for AP. */
4386 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4387 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4388 gimplify_and_add (t, pre_p);
4390 addr = fold_convert (build_pointer_type (type), addr);
4393 addr = build_va_arg_indirect_ref (addr);
4395 return build_va_arg_indirect_ref (addr);
4398 /* Build an indirect-ref expression over the given TREE, which represents a
4399 piece of a va_arg() expansion. */
4401 build_va_arg_indirect_ref (tree addr)
4403 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4405 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4411 /* Return a dummy expression of type TYPE in order to keep going after an
4415 dummy_object (tree type)
4417 tree t = build_int_cst (build_pointer_type (type), 0);
4418 return build2 (MEM_REF, type, t, t);
4421 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4422 builtin function, but a very special sort of operator. */
4424 enum gimplify_status
4425 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4427 tree promoted_type, have_va_type;
4428 tree valist = TREE_OPERAND (*expr_p, 0);
4429 tree type = TREE_TYPE (*expr_p);
4431 location_t loc = EXPR_LOCATION (*expr_p);
4433 /* Verify that valist is of the proper type. */
4434 have_va_type = TREE_TYPE (valist);
4435 if (have_va_type == error_mark_node)
4437 have_va_type = targetm.canonical_va_list_type (have_va_type);
4439 if (have_va_type == NULL_TREE)
4441 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4445 /* Generate a diagnostic for requesting data of a type that cannot
4446 be passed through `...' due to type promotion at the call site. */
4447 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4450 static bool gave_help;
4453 /* Unfortunately, this is merely undefined, rather than a constraint
4454 violation, so we cannot make this an error. If this call is never
4455 executed, the program is still strictly conforming. */
4456 warned = warning_at (loc, 0,
4457 "%qT is promoted to %qT when passed through %<...%>",
4458 type, promoted_type);
4459 if (!gave_help && warned)
4462 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4463 promoted_type, type);
4466 /* We can, however, treat "undefined" any way we please.
4467 Call abort to encourage the user to fix the program. */
4469 inform (loc, "if this code is reached, the program will abort");
4470 /* Before the abort, allow the evaluation of the va_list
4471 expression to exit or longjmp. */
4472 gimplify_and_add (valist, pre_p);
4473 t = build_call_expr_loc (loc,
4474 builtin_decl_implicit (BUILT_IN_TRAP), 0);
4475 gimplify_and_add (t, pre_p);
4477 /* This is dead code, but go ahead and finish so that the
4478 mode of the result comes out right. */
4479 *expr_p = dummy_object (type);
4484 /* Make it easier for the backends by protecting the valist argument
4485 from multiple evaluations. */
4486 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4488 /* For this case, the backends will be expecting a pointer to
4489 TREE_TYPE (abi), but it's possible we've
4490 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4492 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4494 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4495 valist = fold_convert_loc (loc, p1,
4496 build_fold_addr_expr_loc (loc, valist));
4499 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4502 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4504 if (!targetm.gimplify_va_arg_expr)
4505 /* FIXME: Once most targets are converted we should merely
4506 assert this is non-null. */
4509 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4514 /* Expand EXP, a call to __builtin_va_end. */
4517 expand_builtin_va_end (tree exp)
4519 tree valist = CALL_EXPR_ARG (exp, 0);
4521 /* Evaluate for side effects, if needed. I hate macros that don't
4523 if (TREE_SIDE_EFFECTS (valist))
4524 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4529 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4530 builtin rather than just as an assignment in stdarg.h because of the
4531 nastiness of array-type va_list types. */
4534 expand_builtin_va_copy (tree exp)
4537 location_t loc = EXPR_LOCATION (exp);
4539 dst = CALL_EXPR_ARG (exp, 0);
4540 src = CALL_EXPR_ARG (exp, 1);
4542 dst = stabilize_va_list_loc (loc, dst, 1);
4543 src = stabilize_va_list_loc (loc, src, 0);
4545 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4547 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4549 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4550 TREE_SIDE_EFFECTS (t) = 1;
4551 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4555 rtx dstb, srcb, size;
4557 /* Evaluate to pointers. */
4558 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4559 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4560 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4561 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4563 dstb = convert_memory_address (Pmode, dstb);
4564 srcb = convert_memory_address (Pmode, srcb);
4566 /* "Dereference" to BLKmode memories. */
4567 dstb = gen_rtx_MEM (BLKmode, dstb);
4568 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4569 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4570 srcb = gen_rtx_MEM (BLKmode, srcb);
4571 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4572 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4575 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4581 /* Expand a call to one of the builtin functions __builtin_frame_address or
4582 __builtin_return_address. */
4585 expand_builtin_frame_address (tree fndecl, tree exp)
4587 /* The argument must be a nonnegative integer constant.
4588 It counts the number of frames to scan up the stack.
4589 The value is the return address saved in that frame. */
4590 if (call_expr_nargs (exp) == 0)
4591 /* Warning about missing arg was already issued. */
4593 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4595 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4596 error ("invalid argument to %<__builtin_frame_address%>");
4598 error ("invalid argument to %<__builtin_return_address%>");
4604 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4605 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4607 /* Some ports cannot access arbitrary stack frames. */
4610 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4611 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4613 warning (0, "unsupported argument to %<__builtin_return_address%>");
4617 /* For __builtin_frame_address, return what we've got. */
4618 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4622 && ! CONSTANT_P (tem))
4623 tem = copy_addr_to_reg (tem);
4628 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4629 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4630 is the same as for allocate_dynamic_stack_space. */
4633 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4639 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4640 == BUILT_IN_ALLOCA_WITH_ALIGN);
4642 /* Emit normal call if we use mudflap. */
4647 = (alloca_with_align
4648 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4649 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4654 /* Compute the argument. */
4655 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4657 /* Compute the alignment. */
4658 align = (alloca_with_align
4659 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4660 : BIGGEST_ALIGNMENT);
4662 /* Allocate the desired space. */
4663 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4664 result = convert_memory_address (ptr_mode, result);
4669 /* Expand a call to bswap builtin in EXP.
4670 Return NULL_RTX if a normal call should be emitted rather than expanding the
4671 function in-line. If convenient, the result should be placed in TARGET.
4672 SUBTARGET may be used as the target for computing one of EXP's operands. */
4675 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4681 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4684 arg = CALL_EXPR_ARG (exp, 0);
4685 op0 = expand_expr (arg,
4686 subtarget && GET_MODE (subtarget) == target_mode
4687 ? subtarget : NULL_RTX,
4688 target_mode, EXPAND_NORMAL);
4689 if (GET_MODE (op0) != target_mode)
4690 op0 = convert_to_mode (target_mode, op0, 1);
4692 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4694 gcc_assert (target);
4696 return convert_to_mode (target_mode, target, 1);
4699 /* Expand a call to a unary builtin in EXP.
4700 Return NULL_RTX if a normal call should be emitted rather than expanding the
4701 function in-line. If convenient, the result should be placed in TARGET.
4702 SUBTARGET may be used as the target for computing one of EXP's operands. */
4705 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4706 rtx subtarget, optab op_optab)
4710 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4713 /* Compute the argument. */
4714 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4716 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4717 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4718 VOIDmode, EXPAND_NORMAL);
4719 /* Compute op, into TARGET if possible.
4720 Set TARGET to wherever the result comes back. */
4721 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4722 op_optab, op0, target, op_optab != clrsb_optab);
4723 gcc_assert (target);
4725 return convert_to_mode (target_mode, target, 0);
4728 /* Expand a call to __builtin_expect. We just return our argument
4729 as the builtin_expect semantic should've been already executed by
4730 tree branch prediction pass. */
4733 expand_builtin_expect (tree exp, rtx target)
4737 if (call_expr_nargs (exp) < 2)
4739 arg = CALL_EXPR_ARG (exp, 0);
4741 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4742 /* When guessing was done, the hints should be already stripped away. */
4743 gcc_assert (!flag_guess_branch_prob
4744 || optimize == 0 || seen_error ());
4748 /* Expand a call to __builtin_assume_aligned. We just return our first
4749 argument as the builtin_assume_aligned semantic should've been already
4753 expand_builtin_assume_aligned (tree exp, rtx target)
4755 if (call_expr_nargs (exp) < 2)
4757 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4759 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4760 && (call_expr_nargs (exp) < 3
4761 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4766 expand_builtin_trap (void)
4770 emit_insn (gen_trap ());
4773 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4777 /* Expand a call to __builtin_unreachable. We do nothing except emit
4778 a barrier saying that control flow will not pass here.
4780 It is the responsibility of the program being compiled to ensure
4781 that control flow does never reach __builtin_unreachable. */
4783 expand_builtin_unreachable (void)
4788 /* Expand EXP, a call to fabs, fabsf or fabsl.
4789 Return NULL_RTX if a normal call should be emitted rather than expanding
4790 the function inline. If convenient, the result should be placed
4791 in TARGET. SUBTARGET may be used as the target for computing
4795 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4797 enum machine_mode mode;
4801 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4804 arg = CALL_EXPR_ARG (exp, 0);
4805 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4806 mode = TYPE_MODE (TREE_TYPE (arg));
4807 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4808 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4811 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4812 Return NULL is a normal call should be emitted rather than expanding the
4813 function inline. If convenient, the result should be placed in TARGET.
4814 SUBTARGET may be used as the target for computing the operand. */
4817 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4822 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4825 arg = CALL_EXPR_ARG (exp, 0);
4826 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4828 arg = CALL_EXPR_ARG (exp, 1);
4829 op1 = expand_normal (arg);
4831 return expand_copysign (op0, op1, target);
4834 /* Create a new constant string literal and return a char* pointer to it.
4835 The STRING_CST value is the LEN characters at STR. */
4837 build_string_literal (int len, const char *str)
4839 tree t, elem, index, type;
4841 t = build_string (len, str);
4842 elem = build_type_variant (char_type_node, 1, 0);
4843 index = build_index_type (size_int (len - 1));
4844 type = build_array_type (elem, index);
4845 TREE_TYPE (t) = type;
4846 TREE_CONSTANT (t) = 1;
4847 TREE_READONLY (t) = 1;
4848 TREE_STATIC (t) = 1;
4850 type = build_pointer_type (elem);
4851 t = build1 (ADDR_EXPR, type,
4852 build4 (ARRAY_REF, elem,
4853 t, integer_zero_node, NULL_TREE, NULL_TREE));
4857 /* Expand a call to __builtin___clear_cache. */
4860 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4862 #ifndef HAVE_clear_cache
4863 #ifdef CLEAR_INSN_CACHE
4864 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4865 does something. Just do the default expansion to a call to
4869 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4870 does nothing. There is no need to call it. Do nothing. */
4872 #endif /* CLEAR_INSN_CACHE */
4874 /* We have a "clear_cache" insn, and it will handle everything. */
4876 rtx begin_rtx, end_rtx;
4878 /* We must not expand to a library call. If we did, any
4879 fallback library function in libgcc that might contain a call to
4880 __builtin___clear_cache() would recurse infinitely. */
4881 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4883 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4887 if (HAVE_clear_cache)
4889 struct expand_operand ops[2];
4891 begin = CALL_EXPR_ARG (exp, 0);
4892 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4894 end = CALL_EXPR_ARG (exp, 1);
4895 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4897 create_address_operand (&ops[0], begin_rtx);
4898 create_address_operand (&ops[1], end_rtx);
4899 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4903 #endif /* HAVE_clear_cache */
4906 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4909 round_trampoline_addr (rtx tramp)
4911 rtx temp, addend, mask;
4913 /* If we don't need too much alignment, we'll have been guaranteed
4914 proper alignment by get_trampoline_type. */
4915 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4918 /* Round address up to desired boundary. */
4919 temp = gen_reg_rtx (Pmode);
4920 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4921 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4923 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4924 temp, 0, OPTAB_LIB_WIDEN);
4925 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4926 temp, 0, OPTAB_LIB_WIDEN);
4932 expand_builtin_init_trampoline (tree exp, bool onstack)
4934 tree t_tramp, t_func, t_chain;
4935 rtx m_tramp, r_tramp, r_chain, tmp;
4937 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4938 POINTER_TYPE, VOID_TYPE))
4941 t_tramp = CALL_EXPR_ARG (exp, 0);
4942 t_func = CALL_EXPR_ARG (exp, 1);
4943 t_chain = CALL_EXPR_ARG (exp, 2);
4945 r_tramp = expand_normal (t_tramp);
4946 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4947 MEM_NOTRAP_P (m_tramp) = 1;
4949 /* If ONSTACK, the TRAMP argument should be the address of a field
4950 within the local function's FRAME decl. Either way, let's see if
4951 we can fill in the MEM_ATTRs for this memory. */
4952 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4953 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4956 /* Creator of a heap trampoline is responsible for making sure the
4957 address is aligned to at least STACK_BOUNDARY. Normally malloc
4958 will ensure this anyhow. */
4959 tmp = round_trampoline_addr (r_tramp);
4962 m_tramp = change_address (m_tramp, BLKmode, tmp);
4963 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4964 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4967 /* The FUNC argument should be the address of the nested function.
4968 Extract the actual function decl to pass to the hook. */
4969 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4970 t_func = TREE_OPERAND (t_func, 0);
4971 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4973 r_chain = expand_normal (t_chain);
4975 /* Generate insns to initialize the trampoline. */
4976 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4980 trampolines_created = 1;
4982 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4983 "trampoline generated for nested function %qD", t_func);
4990 expand_builtin_adjust_trampoline (tree exp)
4994 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4997 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4998 tramp = round_trampoline_addr (tramp);
4999 if (targetm.calls.trampoline_adjust_address)
5000 tramp = targetm.calls.trampoline_adjust_address (tramp);
5005 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5006 function. The function first checks whether the back end provides
5007 an insn to implement signbit for the respective mode. If not, it
5008 checks whether the floating point format of the value is such that
5009 the sign bit can be extracted. If that is not the case, the
5010 function returns NULL_RTX to indicate that a normal call should be
5011 emitted rather than expanding the function in-line. EXP is the
5012 expression that is a call to the builtin function; if convenient,
5013 the result should be placed in TARGET. */
5015 expand_builtin_signbit (tree exp, rtx target)
5017 const struct real_format *fmt;
5018 enum machine_mode fmode, imode, rmode;
5021 enum insn_code icode;
5023 location_t loc = EXPR_LOCATION (exp);
5025 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5028 arg = CALL_EXPR_ARG (exp, 0);
5029 fmode = TYPE_MODE (TREE_TYPE (arg));
5030 rmode = TYPE_MODE (TREE_TYPE (exp));
5031 fmt = REAL_MODE_FORMAT (fmode);
5033 arg = builtin_save_expr (arg);
5035 /* Expand the argument yielding a RTX expression. */
5036 temp = expand_normal (arg);
5038 /* Check if the back end provides an insn that handles signbit for the
5040 icode = optab_handler (signbit_optab, fmode);
5041 if (icode != CODE_FOR_nothing)
5043 rtx last = get_last_insn ();
5044 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5045 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5047 delete_insns_since (last);
5050 /* For floating point formats without a sign bit, implement signbit
5052 bitpos = fmt->signbit_ro;
5055 /* But we can't do this if the format supports signed zero. */
5056 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5059 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5060 build_real (TREE_TYPE (arg), dconst0));
5061 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5064 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5066 imode = int_mode_for_mode (fmode);
5067 if (imode == BLKmode)
5069 temp = gen_lowpart (imode, temp);
5074 /* Handle targets with different FP word orders. */
5075 if (FLOAT_WORDS_BIG_ENDIAN)
5076 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5078 word = bitpos / BITS_PER_WORD;
5079 temp = operand_subword_force (temp, word, fmode);
5080 bitpos = bitpos % BITS_PER_WORD;
5083 /* Force the intermediate word_mode (or narrower) result into a
5084 register. This avoids attempting to create paradoxical SUBREGs
5085 of floating point modes below. */
5086 temp = force_reg (imode, temp);
5088 /* If the bitpos is within the "result mode" lowpart, the operation
5089 can be implement with a single bitwise AND. Otherwise, we need
5090 a right shift and an AND. */
5092 if (bitpos < GET_MODE_BITSIZE (rmode))
5094 double_int mask = double_int_setbit (double_int_zero, bitpos);
5096 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5097 temp = gen_lowpart (rmode, temp);
5098 temp = expand_binop (rmode, and_optab, temp,
5099 immed_double_int_const (mask, rmode),
5100 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5104 /* Perform a logical right shift to place the signbit in the least
5105 significant bit, then truncate the result to the desired mode
5106 and mask just this bit. */
5107 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5108 temp = gen_lowpart (rmode, temp);
5109 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5110 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5116 /* Expand fork or exec calls. TARGET is the desired target of the
5117 call. EXP is the call. FN is the
5118 identificator of the actual function. IGNORE is nonzero if the
5119 value is to be ignored. */
5122 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5127 /* If we are not profiling, just call the function. */
5128 if (!profile_arc_flag)
5131 /* Otherwise call the wrapper. This should be equivalent for the rest of
5132 compiler, so the code does not diverge, and the wrapper may run the
5133 code necessary for keeping the profiling sane. */
5135 switch (DECL_FUNCTION_CODE (fn))
5138 id = get_identifier ("__gcov_fork");
5141 case BUILT_IN_EXECL:
5142 id = get_identifier ("__gcov_execl");
5145 case BUILT_IN_EXECV:
5146 id = get_identifier ("__gcov_execv");
5149 case BUILT_IN_EXECLP:
5150 id = get_identifier ("__gcov_execlp");
5153 case BUILT_IN_EXECLE:
5154 id = get_identifier ("__gcov_execle");
5157 case BUILT_IN_EXECVP:
5158 id = get_identifier ("__gcov_execvp");
5161 case BUILT_IN_EXECVE:
5162 id = get_identifier ("__gcov_execve");
5169 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5170 FUNCTION_DECL, id, TREE_TYPE (fn));
5171 DECL_EXTERNAL (decl) = 1;
5172 TREE_PUBLIC (decl) = 1;
5173 DECL_ARTIFICIAL (decl) = 1;
5174 TREE_NOTHROW (decl) = 1;
5175 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5176 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5177 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5178 return expand_call (call, target, ignore);
5183 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5184 the pointer in these functions is void*, the tree optimizers may remove
5185 casts. The mode computed in expand_builtin isn't reliable either, due
5186 to __sync_bool_compare_and_swap.
5188 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5189 group of builtins. This gives us log2 of the mode size. */
5191 static inline enum machine_mode
5192 get_builtin_sync_mode (int fcode_diff)
5194 /* The size is not negotiable, so ask not to get BLKmode in return
5195 if the target indicates that a smaller size would be better. */
5196 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5199 /* Expand the memory expression LOC and return the appropriate memory operand
5200 for the builtin_sync operations. */
5203 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5207 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5208 addr = convert_memory_address (Pmode, addr);
5210 /* Note that we explicitly do not want any alias information for this
5211 memory, so that we kill all other live memories. Otherwise we don't
5212 satisfy the full barrier semantics of the intrinsic. */
5213 mem = validize_mem (gen_rtx_MEM (mode, addr));
5215 /* The alignment needs to be at least according to that of the mode. */
5216 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5217 get_pointer_alignment (loc)));
5218 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5219 MEM_VOLATILE_P (mem) = 1;
5224 /* Make sure an argument is in the right mode.
5225 EXP is the tree argument.
5226 MODE is the mode it should be in. */
5229 expand_expr_force_mode (tree exp, enum machine_mode mode)
5232 enum machine_mode old_mode;
5234 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5235 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5236 of CONST_INTs, where we know the old_mode only from the call argument. */
5238 old_mode = GET_MODE (val);
5239 if (old_mode == VOIDmode)
5240 old_mode = TYPE_MODE (TREE_TYPE (exp));
5241 val = convert_modes (mode, old_mode, val, 1);
5246 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5247 EXP is the CALL_EXPR. CODE is the rtx code
5248 that corresponds to the arithmetic or logical operation from the name;
5249 an exception here is that NOT actually means NAND. TARGET is an optional
5250 place for us to store the results; AFTER is true if this is the
5251 fetch_and_xxx form. */
5254 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5255 enum rtx_code code, bool after,
5259 location_t loc = EXPR_LOCATION (exp);
5261 if (code == NOT && warn_sync_nand)
5263 tree fndecl = get_callee_fndecl (exp);
5264 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5266 static bool warned_f_a_n, warned_n_a_f;
5270 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5271 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5272 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5273 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5274 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5278 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5279 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5280 warned_f_a_n = true;
5283 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5284 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5285 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5286 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5287 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5291 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5292 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5293 warned_n_a_f = true;
5301 /* Expand the operands. */
5302 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5303 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5305 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5309 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5310 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5311 true if this is the boolean form. TARGET is a place for us to store the
5312 results; this is NOT optional if IS_BOOL is true. */
5315 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5316 bool is_bool, rtx target)
5318 rtx old_val, new_val, mem;
5321 /* Expand the operands. */
5322 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5323 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5324 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5326 pbool = poval = NULL;
5327 if (target != const0_rtx)
5334 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5335 false, MEMMODEL_SEQ_CST,
5342 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5343 general form is actually an atomic exchange, and some targets only
5344 support a reduced form with the second argument being a constant 1.
5345 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5349 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5354 /* Expand the operands. */
5355 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5356 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5358 return expand_sync_lock_test_and_set (target, mem, val);
5361 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5364 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5368 /* Expand the operands. */
5369 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5371 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5374 /* Given an integer representing an ``enum memmodel'', verify its
5375 correctness and return the memory model enum. */
5377 static enum memmodel
5378 get_memmodel (tree exp)
5381 unsigned HOST_WIDE_INT val;
5383 /* If the parameter is not a constant, it's a run time value so we'll just
5384 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5385 if (TREE_CODE (exp) != INTEGER_CST)
5386 return MEMMODEL_SEQ_CST;
5388 op = expand_normal (exp);
5391 if (targetm.memmodel_check)
5392 val = targetm.memmodel_check (val);
5393 else if (val & ~MEMMODEL_MASK)
5395 warning (OPT_Winvalid_memory_model,
5396 "Unknown architecture specifier in memory model to builtin.");
5397 return MEMMODEL_SEQ_CST;
5400 if ((INTVAL(op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5402 warning (OPT_Winvalid_memory_model,
5403 "invalid memory model argument to builtin");
5404 return MEMMODEL_SEQ_CST;
5407 return (enum memmodel) val;
5410 /* Expand the __atomic_exchange intrinsic:
5411 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5412 EXP is the CALL_EXPR.
5413 TARGET is an optional place for us to store the results. */
5416 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5419 enum memmodel model;
5421 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5422 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5424 error ("invalid memory model for %<__atomic_exchange%>");
5428 if (!flag_inline_atomics)
5431 /* Expand the operands. */
5432 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5433 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5435 return expand_atomic_exchange (target, mem, val, model);
5438 /* Expand the __atomic_compare_exchange intrinsic:
5439 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5440 TYPE desired, BOOL weak,
5441 enum memmodel success,
5442 enum memmodel failure)
5443 EXP is the CALL_EXPR.
5444 TARGET is an optional place for us to store the results. */
5447 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5450 rtx expect, desired, mem, oldval;
5451 enum memmodel success, failure;
5455 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5456 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5458 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5459 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5461 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5465 if (failure > success)
5467 error ("failure memory model cannot be stronger than success "
5468 "memory model for %<__atomic_compare_exchange%>");
5472 if (!flag_inline_atomics)
5475 /* Expand the operands. */
5476 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5478 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5479 expect = convert_memory_address (Pmode, expect);
5480 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5482 weak = CALL_EXPR_ARG (exp, 3);
5484 if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
5487 oldval = copy_to_reg (gen_rtx_MEM (mode, expect));
5489 if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
5490 &oldval, mem, oldval, desired,
5491 is_weak, success, failure))
5494 emit_move_insn (gen_rtx_MEM (mode, expect), oldval);
5498 /* Expand the __atomic_load intrinsic:
5499 TYPE __atomic_load (TYPE *object, enum memmodel)
5500 EXP is the CALL_EXPR.
5501 TARGET is an optional place for us to store the results. */
5504 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5507 enum memmodel model;
5509 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5510 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5511 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5513 error ("invalid memory model for %<__atomic_load%>");
5517 if (!flag_inline_atomics)
5520 /* Expand the operand. */
5521 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5523 return expand_atomic_load (target, mem, model);
5527 /* Expand the __atomic_store intrinsic:
5528 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5529 EXP is the CALL_EXPR.
5530 TARGET is an optional place for us to store the results. */
5533 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5536 enum memmodel model;
5538 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5539 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5540 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5541 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5543 error ("invalid memory model for %<__atomic_store%>");
5547 if (!flag_inline_atomics)
5550 /* Expand the operands. */
5551 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5552 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5554 return expand_atomic_store (mem, val, model, false);
5557 /* Expand the __atomic_fetch_XXX intrinsic:
5558 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5559 EXP is the CALL_EXPR.
5560 TARGET is an optional place for us to store the results.
5561 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5562 FETCH_AFTER is true if returning the result of the operation.
5563 FETCH_AFTER is false if returning the value before the operation.
5564 IGNORE is true if the result is not used.
5565 EXT_CALL is the correct builtin for an external call if this cannot be
5566 resolved to an instruction sequence. */
5569 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5570 enum rtx_code code, bool fetch_after,
5571 bool ignore, enum built_in_function ext_call)
5574 enum memmodel model;
5578 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5580 /* Expand the operands. */
5581 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5582 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5584 /* Only try generating instructions if inlining is turned on. */
5585 if (flag_inline_atomics)
5587 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5592 /* Return if a different routine isn't needed for the library call. */
5593 if (ext_call == BUILT_IN_NONE)
5596 /* Change the call to the specified function. */
5597 fndecl = get_callee_fndecl (exp);
5598 addr = CALL_EXPR_FN (exp);
5601 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5602 TREE_OPERAND (addr, 0) = builtin_decl_explicit(ext_call);
5604 /* Expand the call here so we can emit trailing code. */
5605 ret = expand_call (exp, target, ignore);
5607 /* Replace the original function just in case it matters. */
5608 TREE_OPERAND (addr, 0) = fndecl;
5610 /* Then issue the arithmetic correction to return the right result. */
5615 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5617 ret = expand_simple_unop (mode, NOT, ret, target, true);
5620 ret = expand_simple_binop (mode, code, ret, val, target, true,
5627 #ifndef HAVE_atomic_clear
5628 # define HAVE_atomic_clear 0
5629 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5632 /* Expand an atomic clear operation.
5633 void _atomic_clear (BOOL *obj, enum memmodel)
5634 EXP is the call expression. */
5637 expand_builtin_atomic_clear (tree exp)
5639 enum machine_mode mode;
5641 enum memmodel model;
5643 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5644 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5645 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5647 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5648 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5650 error ("invalid memory model for %<__atomic_store%>");
5654 if (HAVE_atomic_clear)
5656 emit_insn (gen_atomic_clear (mem, model));
5660 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5661 Failing that, a store is issued by __atomic_store. The only way this can
5662 fail is if the bool type is larger than a word size. Unlikely, but
5663 handle it anyway for completeness. Assume a single threaded model since
5664 there is no atomic support in this case, and no barriers are required. */
5665 ret = expand_atomic_store (mem, const0_rtx, model, true);
5667 emit_move_insn (mem, const0_rtx);
5671 /* Expand an atomic test_and_set operation.
5672 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5673 EXP is the call expression. */
5676 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5679 enum memmodel model;
5680 enum machine_mode mode;
5682 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5683 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5684 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5686 return expand_atomic_test_and_set (target, mem, model);
5690 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5691 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5694 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5697 enum machine_mode mode;
5698 unsigned int mode_align, type_align;
5700 if (TREE_CODE (arg0) != INTEGER_CST)
5703 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5704 mode = mode_for_size (size, MODE_INT, 0);
5705 mode_align = GET_MODE_ALIGNMENT (mode);
5707 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5708 type_align = mode_align;
5711 tree ttype = TREE_TYPE (arg1);
5713 /* This function is usually invoked and folded immediately by the front
5714 end before anything else has a chance to look at it. The pointer
5715 parameter at this point is usually cast to a void *, so check for that
5716 and look past the cast. */
5717 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5718 && VOID_TYPE_P (TREE_TYPE (ttype)))
5719 arg1 = TREE_OPERAND (arg1, 0);
5721 ttype = TREE_TYPE (arg1);
5722 gcc_assert (POINTER_TYPE_P (ttype));
5724 /* Get the underlying type of the object. */
5725 ttype = TREE_TYPE (ttype);
5726 type_align = TYPE_ALIGN (ttype);
5729 /* If the object has smaller alignment, the the lock free routines cannot
5731 if (type_align < mode_align)
5732 return boolean_false_node;
5734 /* Check if a compare_and_swap pattern exists for the mode which represents
5735 the required size. The pattern is not allowed to fail, so the existence
5736 of the pattern indicates support is present. */
5737 if (can_compare_and_swap_p (mode, true))
5738 return boolean_true_node;
5740 return boolean_false_node;
5743 /* Return true if the parameters to call EXP represent an object which will
5744 always generate lock free instructions. The first argument represents the
5745 size of the object, and the second parameter is a pointer to the object
5746 itself. If NULL is passed for the object, then the result is based on
5747 typical alignment for an object of the specified size. Otherwise return
5751 expand_builtin_atomic_always_lock_free (tree exp)
5754 tree arg0 = CALL_EXPR_ARG (exp, 0);
5755 tree arg1 = CALL_EXPR_ARG (exp, 1);
5757 if (TREE_CODE (arg0) != INTEGER_CST)
5759 error ("non-constant argument 1 to __atomic_always_lock_free");
5763 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5764 if (size == boolean_true_node)
5769 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5770 is lock free on this architecture. */
5773 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5775 if (!flag_inline_atomics)
5778 /* If it isn't always lock free, don't generate a result. */
5779 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5780 return boolean_true_node;
5785 /* Return true if the parameters to call EXP represent an object which will
5786 always generate lock free instructions. The first argument represents the
5787 size of the object, and the second parameter is a pointer to the object
5788 itself. If NULL is passed for the object, then the result is based on
5789 typical alignment for an object of the specified size. Otherwise return
5793 expand_builtin_atomic_is_lock_free (tree exp)
5796 tree arg0 = CALL_EXPR_ARG (exp, 0);
5797 tree arg1 = CALL_EXPR_ARG (exp, 1);
5799 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5801 error ("non-integer argument 1 to __atomic_is_lock_free");
5805 if (!flag_inline_atomics)
5808 /* If the value is known at compile time, return the RTX for it. */
5809 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5810 if (size == boolean_true_node)
5816 /* Expand the __atomic_thread_fence intrinsic:
5817 void __atomic_thread_fence (enum memmodel)
5818 EXP is the CALL_EXPR. */
5821 expand_builtin_atomic_thread_fence (tree exp)
5823 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5824 expand_mem_thread_fence (model);
5827 /* Expand the __atomic_signal_fence intrinsic:
5828 void __atomic_signal_fence (enum memmodel)
5829 EXP is the CALL_EXPR. */
5832 expand_builtin_atomic_signal_fence (tree exp)
5834 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5835 expand_mem_signal_fence (model);
5838 /* Expand the __sync_synchronize intrinsic. */
5841 expand_builtin_sync_synchronize (void)
5843 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5847 /* Expand an expression EXP that calls a built-in function,
5848 with result going to TARGET if that's convenient
5849 (and in mode MODE if that's convenient).
5850 SUBTARGET may be used as the target for computing one of EXP's operands.
5851 IGNORE is nonzero if the value is to be ignored. */
5854 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5857 tree fndecl = get_callee_fndecl (exp);
5858 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5859 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5862 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5863 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5865 /* When not optimizing, generate calls to library functions for a certain
5868 && !called_as_built_in (fndecl)
5869 && fcode != BUILT_IN_ALLOCA
5870 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5871 && fcode != BUILT_IN_FREE)
5872 return expand_call (exp, target, ignore);
5874 /* The built-in function expanders test for target == const0_rtx
5875 to determine whether the function's result will be ignored. */
5877 target = const0_rtx;
5879 /* If the result of a pure or const built-in function is ignored, and
5880 none of its arguments are volatile, we can avoid expanding the
5881 built-in call and just evaluate the arguments for side-effects. */
5882 if (target == const0_rtx
5883 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5884 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5886 bool volatilep = false;
5888 call_expr_arg_iterator iter;
5890 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5891 if (TREE_THIS_VOLATILE (arg))
5899 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5900 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5907 CASE_FLT_FN (BUILT_IN_FABS):
5908 target = expand_builtin_fabs (exp, target, subtarget);
5913 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5914 target = expand_builtin_copysign (exp, target, subtarget);
5919 /* Just do a normal library call if we were unable to fold
5921 CASE_FLT_FN (BUILT_IN_CABS):
5924 CASE_FLT_FN (BUILT_IN_EXP):
5925 CASE_FLT_FN (BUILT_IN_EXP10):
5926 CASE_FLT_FN (BUILT_IN_POW10):
5927 CASE_FLT_FN (BUILT_IN_EXP2):
5928 CASE_FLT_FN (BUILT_IN_EXPM1):
5929 CASE_FLT_FN (BUILT_IN_LOGB):
5930 CASE_FLT_FN (BUILT_IN_LOG):
5931 CASE_FLT_FN (BUILT_IN_LOG10):
5932 CASE_FLT_FN (BUILT_IN_LOG2):
5933 CASE_FLT_FN (BUILT_IN_LOG1P):
5934 CASE_FLT_FN (BUILT_IN_TAN):
5935 CASE_FLT_FN (BUILT_IN_ASIN):
5936 CASE_FLT_FN (BUILT_IN_ACOS):
5937 CASE_FLT_FN (BUILT_IN_ATAN):
5938 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5939 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5940 because of possible accuracy problems. */
5941 if (! flag_unsafe_math_optimizations)
5943 CASE_FLT_FN (BUILT_IN_SQRT):
5944 CASE_FLT_FN (BUILT_IN_FLOOR):
5945 CASE_FLT_FN (BUILT_IN_CEIL):
5946 CASE_FLT_FN (BUILT_IN_TRUNC):
5947 CASE_FLT_FN (BUILT_IN_ROUND):
5948 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5949 CASE_FLT_FN (BUILT_IN_RINT):
5950 target = expand_builtin_mathfn (exp, target, subtarget);
5955 CASE_FLT_FN (BUILT_IN_FMA):
5956 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5961 CASE_FLT_FN (BUILT_IN_ILOGB):
5962 if (! flag_unsafe_math_optimizations)
5964 CASE_FLT_FN (BUILT_IN_ISINF):
5965 CASE_FLT_FN (BUILT_IN_FINITE):
5966 case BUILT_IN_ISFINITE:
5967 case BUILT_IN_ISNORMAL:
5968 target = expand_builtin_interclass_mathfn (exp, target);
5973 CASE_FLT_FN (BUILT_IN_ICEIL):
5974 CASE_FLT_FN (BUILT_IN_LCEIL):
5975 CASE_FLT_FN (BUILT_IN_LLCEIL):
5976 CASE_FLT_FN (BUILT_IN_LFLOOR):
5977 CASE_FLT_FN (BUILT_IN_IFLOOR):
5978 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5979 target = expand_builtin_int_roundingfn (exp, target);
5984 CASE_FLT_FN (BUILT_IN_IRINT):
5985 CASE_FLT_FN (BUILT_IN_LRINT):
5986 CASE_FLT_FN (BUILT_IN_LLRINT):
5987 CASE_FLT_FN (BUILT_IN_IROUND):
5988 CASE_FLT_FN (BUILT_IN_LROUND):
5989 CASE_FLT_FN (BUILT_IN_LLROUND):
5990 target = expand_builtin_int_roundingfn_2 (exp, target);
5995 CASE_FLT_FN (BUILT_IN_POWI):
5996 target = expand_builtin_powi (exp, target);
6001 CASE_FLT_FN (BUILT_IN_ATAN2):
6002 CASE_FLT_FN (BUILT_IN_LDEXP):
6003 CASE_FLT_FN (BUILT_IN_SCALB):
6004 CASE_FLT_FN (BUILT_IN_SCALBN):
6005 CASE_FLT_FN (BUILT_IN_SCALBLN):
6006 if (! flag_unsafe_math_optimizations)
6009 CASE_FLT_FN (BUILT_IN_FMOD):
6010 CASE_FLT_FN (BUILT_IN_REMAINDER):
6011 CASE_FLT_FN (BUILT_IN_DREM):
6012 CASE_FLT_FN (BUILT_IN_POW):
6013 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6018 CASE_FLT_FN (BUILT_IN_CEXPI):
6019 target = expand_builtin_cexpi (exp, target);
6020 gcc_assert (target);
6023 CASE_FLT_FN (BUILT_IN_SIN):
6024 CASE_FLT_FN (BUILT_IN_COS):
6025 if (! flag_unsafe_math_optimizations)
6027 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6032 CASE_FLT_FN (BUILT_IN_SINCOS):
6033 if (! flag_unsafe_math_optimizations)
6035 target = expand_builtin_sincos (exp);
6040 case BUILT_IN_APPLY_ARGS:
6041 return expand_builtin_apply_args ();
6043 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6044 FUNCTION with a copy of the parameters described by
6045 ARGUMENTS, and ARGSIZE. It returns a block of memory
6046 allocated on the stack into which is stored all the registers
6047 that might possibly be used for returning the result of a
6048 function. ARGUMENTS is the value returned by
6049 __builtin_apply_args. ARGSIZE is the number of bytes of
6050 arguments that must be copied. ??? How should this value be
6051 computed? We'll also need a safe worst case value for varargs
6053 case BUILT_IN_APPLY:
6054 if (!validate_arglist (exp, POINTER_TYPE,
6055 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6056 && !validate_arglist (exp, REFERENCE_TYPE,
6057 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6063 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6064 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6065 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6067 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6070 /* __builtin_return (RESULT) causes the function to return the
6071 value described by RESULT. RESULT is address of the block of
6072 memory returned by __builtin_apply. */
6073 case BUILT_IN_RETURN:
6074 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6075 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6078 case BUILT_IN_SAVEREGS:
6079 return expand_builtin_saveregs ();
6081 case BUILT_IN_VA_ARG_PACK:
6082 /* All valid uses of __builtin_va_arg_pack () are removed during
6084 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6087 case BUILT_IN_VA_ARG_PACK_LEN:
6088 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6090 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6093 /* Return the address of the first anonymous stack arg. */
6094 case BUILT_IN_NEXT_ARG:
6095 if (fold_builtin_next_arg (exp, false))
6097 return expand_builtin_next_arg ();
6099 case BUILT_IN_CLEAR_CACHE:
6100 target = expand_builtin___clear_cache (exp);
6105 case BUILT_IN_CLASSIFY_TYPE:
6106 return expand_builtin_classify_type (exp);
6108 case BUILT_IN_CONSTANT_P:
6111 case BUILT_IN_FRAME_ADDRESS:
6112 case BUILT_IN_RETURN_ADDRESS:
6113 return expand_builtin_frame_address (fndecl, exp);
6115 /* Returns the address of the area where the structure is returned.
6117 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6118 if (call_expr_nargs (exp) != 0
6119 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6120 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6123 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6125 case BUILT_IN_ALLOCA:
6126 case BUILT_IN_ALLOCA_WITH_ALIGN:
6127 /* If the allocation stems from the declaration of a variable-sized
6128 object, it cannot accumulate. */
6129 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6134 case BUILT_IN_STACK_SAVE:
6135 return expand_stack_save ();
6137 case BUILT_IN_STACK_RESTORE:
6138 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6141 case BUILT_IN_BSWAP16:
6142 case BUILT_IN_BSWAP32:
6143 case BUILT_IN_BSWAP64:
6144 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6149 CASE_INT_FN (BUILT_IN_FFS):
6150 case BUILT_IN_FFSIMAX:
6151 target = expand_builtin_unop (target_mode, exp, target,
6152 subtarget, ffs_optab);
6157 CASE_INT_FN (BUILT_IN_CLZ):
6158 case BUILT_IN_CLZIMAX:
6159 target = expand_builtin_unop (target_mode, exp, target,
6160 subtarget, clz_optab);
6165 CASE_INT_FN (BUILT_IN_CTZ):
6166 case BUILT_IN_CTZIMAX:
6167 target = expand_builtin_unop (target_mode, exp, target,
6168 subtarget, ctz_optab);
6173 CASE_INT_FN (BUILT_IN_CLRSB):
6174 case BUILT_IN_CLRSBIMAX:
6175 target = expand_builtin_unop (target_mode, exp, target,
6176 subtarget, clrsb_optab);
6181 CASE_INT_FN (BUILT_IN_POPCOUNT):
6182 case BUILT_IN_POPCOUNTIMAX:
6183 target = expand_builtin_unop (target_mode, exp, target,
6184 subtarget, popcount_optab);
6189 CASE_INT_FN (BUILT_IN_PARITY):
6190 case BUILT_IN_PARITYIMAX:
6191 target = expand_builtin_unop (target_mode, exp, target,
6192 subtarget, parity_optab);
6197 case BUILT_IN_STRLEN:
6198 target = expand_builtin_strlen (exp, target, target_mode);
6203 case BUILT_IN_STRCPY:
6204 target = expand_builtin_strcpy (exp, target);
6209 case BUILT_IN_STRNCPY:
6210 target = expand_builtin_strncpy (exp, target);
6215 case BUILT_IN_STPCPY:
6216 target = expand_builtin_stpcpy (exp, target, mode);
6221 case BUILT_IN_MEMCPY:
6222 target = expand_builtin_memcpy (exp, target);
6227 case BUILT_IN_MEMPCPY:
6228 target = expand_builtin_mempcpy (exp, target, mode);
6233 case BUILT_IN_MEMSET:
6234 target = expand_builtin_memset (exp, target, mode);
6239 case BUILT_IN_BZERO:
6240 target = expand_builtin_bzero (exp);
6245 case BUILT_IN_STRCMP:
6246 target = expand_builtin_strcmp (exp, target);
6251 case BUILT_IN_STRNCMP:
6252 target = expand_builtin_strncmp (exp, target, mode);
6258 case BUILT_IN_MEMCMP:
6259 target = expand_builtin_memcmp (exp, target, mode);
6264 case BUILT_IN_SETJMP:
6265 /* This should have been lowered to the builtins below. */
6268 case BUILT_IN_SETJMP_SETUP:
6269 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6270 and the receiver label. */
6271 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6273 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6274 VOIDmode, EXPAND_NORMAL);
6275 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6276 rtx label_r = label_rtx (label);
6278 /* This is copied from the handling of non-local gotos. */
6279 expand_builtin_setjmp_setup (buf_addr, label_r);
6280 nonlocal_goto_handler_labels
6281 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6282 nonlocal_goto_handler_labels);
6283 /* ??? Do not let expand_label treat us as such since we would
6284 not want to be both on the list of non-local labels and on
6285 the list of forced labels. */
6286 FORCED_LABEL (label) = 0;
6291 case BUILT_IN_SETJMP_DISPATCHER:
6292 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6293 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6295 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6296 rtx label_r = label_rtx (label);
6298 /* Remove the dispatcher label from the list of non-local labels
6299 since the receiver labels have been added to it above. */
6300 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6305 case BUILT_IN_SETJMP_RECEIVER:
6306 /* __builtin_setjmp_receiver is passed the receiver label. */
6307 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6309 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6310 rtx label_r = label_rtx (label);
6312 expand_builtin_setjmp_receiver (label_r);
6317 /* __builtin_longjmp is passed a pointer to an array of five words.
6318 It's similar to the C library longjmp function but works with
6319 __builtin_setjmp above. */
6320 case BUILT_IN_LONGJMP:
6321 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6323 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6324 VOIDmode, EXPAND_NORMAL);
6325 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6327 if (value != const1_rtx)
6329 error ("%<__builtin_longjmp%> second argument must be 1");
6333 expand_builtin_longjmp (buf_addr, value);
6338 case BUILT_IN_NONLOCAL_GOTO:
6339 target = expand_builtin_nonlocal_goto (exp);
6344 /* This updates the setjmp buffer that is its argument with the value
6345 of the current stack pointer. */
6346 case BUILT_IN_UPDATE_SETJMP_BUF:
6347 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6350 = expand_normal (CALL_EXPR_ARG (exp, 0));
6352 expand_builtin_update_setjmp_buf (buf_addr);
6358 expand_builtin_trap ();
6361 case BUILT_IN_UNREACHABLE:
6362 expand_builtin_unreachable ();
6365 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6366 case BUILT_IN_SIGNBITD32:
6367 case BUILT_IN_SIGNBITD64:
6368 case BUILT_IN_SIGNBITD128:
6369 target = expand_builtin_signbit (exp, target);
6374 /* Various hooks for the DWARF 2 __throw routine. */
6375 case BUILT_IN_UNWIND_INIT:
6376 expand_builtin_unwind_init ();
6378 case BUILT_IN_DWARF_CFA:
6379 return virtual_cfa_rtx;
6380 #ifdef DWARF2_UNWIND_INFO
6381 case BUILT_IN_DWARF_SP_COLUMN:
6382 return expand_builtin_dwarf_sp_column ();
6383 case BUILT_IN_INIT_DWARF_REG_SIZES:
6384 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6387 case BUILT_IN_FROB_RETURN_ADDR:
6388 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6389 case BUILT_IN_EXTRACT_RETURN_ADDR:
6390 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6391 case BUILT_IN_EH_RETURN:
6392 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6393 CALL_EXPR_ARG (exp, 1));
6395 #ifdef EH_RETURN_DATA_REGNO
6396 case BUILT_IN_EH_RETURN_DATA_REGNO:
6397 return expand_builtin_eh_return_data_regno (exp);
6399 case BUILT_IN_EXTEND_POINTER:
6400 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6401 case BUILT_IN_EH_POINTER:
6402 return expand_builtin_eh_pointer (exp);
6403 case BUILT_IN_EH_FILTER:
6404 return expand_builtin_eh_filter (exp);
6405 case BUILT_IN_EH_COPY_VALUES:
6406 return expand_builtin_eh_copy_values (exp);
6408 case BUILT_IN_VA_START:
6409 return expand_builtin_va_start (exp);
6410 case BUILT_IN_VA_END:
6411 return expand_builtin_va_end (exp);
6412 case BUILT_IN_VA_COPY:
6413 return expand_builtin_va_copy (exp);
6414 case BUILT_IN_EXPECT:
6415 return expand_builtin_expect (exp, target);
6416 case BUILT_IN_ASSUME_ALIGNED:
6417 return expand_builtin_assume_aligned (exp, target);
6418 case BUILT_IN_PREFETCH:
6419 expand_builtin_prefetch (exp);
6422 case BUILT_IN_INIT_TRAMPOLINE:
6423 return expand_builtin_init_trampoline (exp, true);
6424 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6425 return expand_builtin_init_trampoline (exp, false);
6426 case BUILT_IN_ADJUST_TRAMPOLINE:
6427 return expand_builtin_adjust_trampoline (exp);
6430 case BUILT_IN_EXECL:
6431 case BUILT_IN_EXECV:
6432 case BUILT_IN_EXECLP:
6433 case BUILT_IN_EXECLE:
6434 case BUILT_IN_EXECVP:
6435 case BUILT_IN_EXECVE:
6436 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6441 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6442 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6443 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6444 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6445 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6446 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6447 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6452 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6453 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6454 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6455 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6456 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6457 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6458 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6463 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6464 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6465 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6466 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6467 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6468 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6469 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6474 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6475 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6476 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6477 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6478 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6479 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6480 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6485 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6486 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6487 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6488 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6489 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6490 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6491 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6496 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6497 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6498 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6499 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6500 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6501 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6502 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6507 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6508 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6509 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6510 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6511 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6513 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6518 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6519 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6520 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6521 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6522 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6523 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6524 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6529 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6530 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6531 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6532 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6533 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6534 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6535 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6540 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6541 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6542 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6543 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6544 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6545 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6546 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6551 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6552 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6553 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6554 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6555 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6556 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6557 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6562 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6563 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6564 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6565 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6566 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6567 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6568 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6573 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6574 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6575 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6576 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6577 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6578 if (mode == VOIDmode)
6579 mode = TYPE_MODE (boolean_type_node);
6580 if (!target || !register_operand (target, mode))
6581 target = gen_reg_rtx (mode);
6583 mode = get_builtin_sync_mode
6584 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6585 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6590 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6591 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6593 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6595 mode = get_builtin_sync_mode
6596 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6597 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6603 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6604 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6605 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6606 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6607 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6608 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6613 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6614 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6615 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6616 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6617 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6618 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6619 expand_builtin_sync_lock_release (mode, exp);
6622 case BUILT_IN_SYNC_SYNCHRONIZE:
6623 expand_builtin_sync_synchronize ();
6626 case BUILT_IN_ATOMIC_EXCHANGE_1:
6627 case BUILT_IN_ATOMIC_EXCHANGE_2:
6628 case BUILT_IN_ATOMIC_EXCHANGE_4:
6629 case BUILT_IN_ATOMIC_EXCHANGE_8:
6630 case BUILT_IN_ATOMIC_EXCHANGE_16:
6631 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6632 target = expand_builtin_atomic_exchange (mode, exp, target);
6637 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6638 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6640 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6641 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6643 unsigned int nargs, z;
6647 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6648 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6652 /* If this is turned into an external library call, the weak parameter
6653 must be dropped to match the expected parameter list. */
6654 nargs = call_expr_nargs (exp);
6655 vec = VEC_alloc (tree, gc, nargs - 1);
6656 for (z = 0; z < 3; z++)
6657 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6658 /* Skip the boolean weak parameter. */
6659 for (z = 4; z < 6; z++)
6660 VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
6661 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6665 case BUILT_IN_ATOMIC_LOAD_1:
6666 case BUILT_IN_ATOMIC_LOAD_2:
6667 case BUILT_IN_ATOMIC_LOAD_4:
6668 case BUILT_IN_ATOMIC_LOAD_8:
6669 case BUILT_IN_ATOMIC_LOAD_16:
6670 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6671 target = expand_builtin_atomic_load (mode, exp, target);
6676 case BUILT_IN_ATOMIC_STORE_1:
6677 case BUILT_IN_ATOMIC_STORE_2:
6678 case BUILT_IN_ATOMIC_STORE_4:
6679 case BUILT_IN_ATOMIC_STORE_8:
6680 case BUILT_IN_ATOMIC_STORE_16:
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6682 target = expand_builtin_atomic_store (mode, exp);
6687 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6688 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6689 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6690 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6691 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6693 enum built_in_function lib;
6694 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6695 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6696 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6697 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6703 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6704 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6705 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6706 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6707 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6709 enum built_in_function lib;
6710 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6711 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6712 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6713 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6719 case BUILT_IN_ATOMIC_AND_FETCH_1:
6720 case BUILT_IN_ATOMIC_AND_FETCH_2:
6721 case BUILT_IN_ATOMIC_AND_FETCH_4:
6722 case BUILT_IN_ATOMIC_AND_FETCH_8:
6723 case BUILT_IN_ATOMIC_AND_FETCH_16:
6725 enum built_in_function lib;
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6727 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6728 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6729 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6735 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6736 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6737 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6738 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6739 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6741 enum built_in_function lib;
6742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6743 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6744 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6745 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6751 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6752 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6753 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6754 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6755 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6757 enum built_in_function lib;
6758 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6759 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6760 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6761 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6767 case BUILT_IN_ATOMIC_OR_FETCH_1:
6768 case BUILT_IN_ATOMIC_OR_FETCH_2:
6769 case BUILT_IN_ATOMIC_OR_FETCH_4:
6770 case BUILT_IN_ATOMIC_OR_FETCH_8:
6771 case BUILT_IN_ATOMIC_OR_FETCH_16:
6773 enum built_in_function lib;
6774 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6775 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6776 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6777 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6783 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6784 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6785 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6786 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6787 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6789 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6790 ignore, BUILT_IN_NONE);
6795 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6796 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6797 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6798 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6799 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6800 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6801 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6802 ignore, BUILT_IN_NONE);
6807 case BUILT_IN_ATOMIC_FETCH_AND_1:
6808 case BUILT_IN_ATOMIC_FETCH_AND_2:
6809 case BUILT_IN_ATOMIC_FETCH_AND_4:
6810 case BUILT_IN_ATOMIC_FETCH_AND_8:
6811 case BUILT_IN_ATOMIC_FETCH_AND_16:
6812 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6813 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6814 ignore, BUILT_IN_NONE);
6819 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6820 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6821 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6822 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6823 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6824 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6825 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6826 ignore, BUILT_IN_NONE);
6831 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6832 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6833 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6834 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6835 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6836 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6837 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6838 ignore, BUILT_IN_NONE);
6843 case BUILT_IN_ATOMIC_FETCH_OR_1:
6844 case BUILT_IN_ATOMIC_FETCH_OR_2:
6845 case BUILT_IN_ATOMIC_FETCH_OR_4:
6846 case BUILT_IN_ATOMIC_FETCH_OR_8:
6847 case BUILT_IN_ATOMIC_FETCH_OR_16:
6848 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6849 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6850 ignore, BUILT_IN_NONE);
6855 case BUILT_IN_ATOMIC_TEST_AND_SET:
6856 return expand_builtin_atomic_test_and_set (exp, target);
6858 case BUILT_IN_ATOMIC_CLEAR:
6859 return expand_builtin_atomic_clear (exp);
6861 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6862 return expand_builtin_atomic_always_lock_free (exp);
6864 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6865 target = expand_builtin_atomic_is_lock_free (exp);
6870 case BUILT_IN_ATOMIC_THREAD_FENCE:
6871 expand_builtin_atomic_thread_fence (exp);
6874 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6875 expand_builtin_atomic_signal_fence (exp);
6878 case BUILT_IN_OBJECT_SIZE:
6879 return expand_builtin_object_size (exp);
6881 case BUILT_IN_MEMCPY_CHK:
6882 case BUILT_IN_MEMPCPY_CHK:
6883 case BUILT_IN_MEMMOVE_CHK:
6884 case BUILT_IN_MEMSET_CHK:
6885 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6890 case BUILT_IN_STRCPY_CHK:
6891 case BUILT_IN_STPCPY_CHK:
6892 case BUILT_IN_STRNCPY_CHK:
6893 case BUILT_IN_STPNCPY_CHK:
6894 case BUILT_IN_STRCAT_CHK:
6895 case BUILT_IN_STRNCAT_CHK:
6896 case BUILT_IN_SNPRINTF_CHK:
6897 case BUILT_IN_VSNPRINTF_CHK:
6898 maybe_emit_chk_warning (exp, fcode);
6901 case BUILT_IN_SPRINTF_CHK:
6902 case BUILT_IN_VSPRINTF_CHK:
6903 maybe_emit_sprintf_chk_warning (exp, fcode);
6907 if (warn_free_nonheap_object)
6908 maybe_emit_free_warning (exp);
6911 default: /* just do library call, if unknown builtin */
6915 /* The switch statement above can drop through to cause the function
6916 to be called normally. */
6917 return expand_call (exp, target, ignore);
6920 /* Determine whether a tree node represents a call to a built-in
6921 function. If the tree T is a call to a built-in function with
6922 the right number of arguments of the appropriate types, return
6923 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6924 Otherwise the return value is END_BUILTINS. */
6926 enum built_in_function
6927 builtin_mathfn_code (const_tree t)
6929 const_tree fndecl, arg, parmlist;
6930 const_tree argtype, parmtype;
6931 const_call_expr_arg_iterator iter;
6933 if (TREE_CODE (t) != CALL_EXPR
6934 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6935 return END_BUILTINS;
6937 fndecl = get_callee_fndecl (t);
6938 if (fndecl == NULL_TREE
6939 || TREE_CODE (fndecl) != FUNCTION_DECL
6940 || ! DECL_BUILT_IN (fndecl)
6941 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6942 return END_BUILTINS;
6944 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6945 init_const_call_expr_arg_iterator (t, &iter);
6946 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6948 /* If a function doesn't take a variable number of arguments,
6949 the last element in the list will have type `void'. */
6950 parmtype = TREE_VALUE (parmlist);
6951 if (VOID_TYPE_P (parmtype))
6953 if (more_const_call_expr_args_p (&iter))
6954 return END_BUILTINS;
6955 return DECL_FUNCTION_CODE (fndecl);
6958 if (! more_const_call_expr_args_p (&iter))
6959 return END_BUILTINS;
6961 arg = next_const_call_expr_arg (&iter);
6962 argtype = TREE_TYPE (arg);
6964 if (SCALAR_FLOAT_TYPE_P (parmtype))
6966 if (! SCALAR_FLOAT_TYPE_P (argtype))
6967 return END_BUILTINS;
6969 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6971 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6972 return END_BUILTINS;
6974 else if (POINTER_TYPE_P (parmtype))
6976 if (! POINTER_TYPE_P (argtype))
6977 return END_BUILTINS;
6979 else if (INTEGRAL_TYPE_P (parmtype))
6981 if (! INTEGRAL_TYPE_P (argtype))
6982 return END_BUILTINS;
6985 return END_BUILTINS;
6988 /* Variable-length argument list. */
6989 return DECL_FUNCTION_CODE (fndecl);
6992 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6993 evaluate to a constant. */
6996 fold_builtin_constant_p (tree arg)
6998 /* We return 1 for a numeric type that's known to be a constant
6999 value at compile-time or for an aggregate type that's a
7000 literal constant. */
7003 /* If we know this is a constant, emit the constant of one. */
7004 if (CONSTANT_CLASS_P (arg)
7005 || (TREE_CODE (arg) == CONSTRUCTOR
7006 && TREE_CONSTANT (arg)))
7007 return integer_one_node;
7008 if (TREE_CODE (arg) == ADDR_EXPR)
7010 tree op = TREE_OPERAND (arg, 0);
7011 if (TREE_CODE (op) == STRING_CST
7012 || (TREE_CODE (op) == ARRAY_REF
7013 && integer_zerop (TREE_OPERAND (op, 1))
7014 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7015 return integer_one_node;
7018 /* If this expression has side effects, show we don't know it to be a
7019 constant. Likewise if it's a pointer or aggregate type since in
7020 those case we only want literals, since those are only optimized
7021 when generating RTL, not later.
7022 And finally, if we are compiling an initializer, not code, we
7023 need to return a definite result now; there's not going to be any
7024 more optimization done. */
7025 if (TREE_SIDE_EFFECTS (arg)
7026 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7027 || POINTER_TYPE_P (TREE_TYPE (arg))
7029 || folding_initializer)
7030 return integer_zero_node;
7035 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7036 return it as a truthvalue. */
7039 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
7041 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7043 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7044 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7045 ret_type = TREE_TYPE (TREE_TYPE (fn));
7046 pred_type = TREE_VALUE (arg_types);
7047 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7049 pred = fold_convert_loc (loc, pred_type, pred);
7050 expected = fold_convert_loc (loc, expected_type, expected);
7051 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
7053 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7054 build_int_cst (ret_type, 0));
7057 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7058 NULL_TREE if no simplification is possible. */
7061 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
7063 tree inner, fndecl, inner_arg0;
7064 enum tree_code code;
7066 /* Distribute the expected value over short-circuiting operators.
7067 See through the cast from truthvalue_type_node to long. */
7069 while (TREE_CODE (inner_arg0) == NOP_EXPR
7070 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7071 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7072 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7074 /* If this is a builtin_expect within a builtin_expect keep the
7075 inner one. See through a comparison against a constant. It
7076 might have been added to create a thruthvalue. */
7079 if (COMPARISON_CLASS_P (inner)
7080 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7081 inner = TREE_OPERAND (inner, 0);
7083 if (TREE_CODE (inner) == CALL_EXPR
7084 && (fndecl = get_callee_fndecl (inner))
7085 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7086 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7090 code = TREE_CODE (inner);
7091 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7093 tree op0 = TREE_OPERAND (inner, 0);
7094 tree op1 = TREE_OPERAND (inner, 1);
7096 op0 = build_builtin_expect_predicate (loc, op0, arg1);
7097 op1 = build_builtin_expect_predicate (loc, op1, arg1);
7098 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7100 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7103 /* If the argument isn't invariant then there's nothing else we can do. */
7104 if (!TREE_CONSTANT (inner_arg0))
7107 /* If we expect that a comparison against the argument will fold to
7108 a constant return the constant. In practice, this means a true
7109 constant or the address of a non-weak symbol. */
7112 if (TREE_CODE (inner) == ADDR_EXPR)
7116 inner = TREE_OPERAND (inner, 0);
7118 while (TREE_CODE (inner) == COMPONENT_REF
7119 || TREE_CODE (inner) == ARRAY_REF);
7120 if ((TREE_CODE (inner) == VAR_DECL
7121 || TREE_CODE (inner) == FUNCTION_DECL)
7122 && DECL_WEAK (inner))
7126 /* Otherwise, ARG0 already has the proper type for the return value. */
7130 /* Fold a call to __builtin_classify_type with argument ARG. */
7133 fold_builtin_classify_type (tree arg)
7136 return build_int_cst (integer_type_node, no_type_class);
7138 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7141 /* Fold a call to __builtin_strlen with argument ARG. */
7144 fold_builtin_strlen (location_t loc, tree type, tree arg)
7146 if (!validate_arg (arg, POINTER_TYPE))
7150 tree len = c_strlen (arg, 0);
7153 return fold_convert_loc (loc, type, len);
7159 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7162 fold_builtin_inf (location_t loc, tree type, int warn)
7164 REAL_VALUE_TYPE real;
7166 /* __builtin_inff is intended to be usable to define INFINITY on all
7167 targets. If an infinity is not available, INFINITY expands "to a
7168 positive constant of type float that overflows at translation
7169 time", footnote "In this case, using INFINITY will violate the
7170 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7171 Thus we pedwarn to ensure this constraint violation is
7173 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7174 pedwarn (loc, 0, "target format does not support infinity");
7177 return build_real (type, real);
7180 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7183 fold_builtin_nan (tree arg, tree type, int quiet)
7185 REAL_VALUE_TYPE real;
7188 if (!validate_arg (arg, POINTER_TYPE))
7190 str = c_getstr (arg);
7194 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7197 return build_real (type, real);
7200 /* Return true if the floating point expression T has an integer value.
7201 We also allow +Inf, -Inf and NaN to be considered integer values. */
7204 integer_valued_real_p (tree t)
7206 switch (TREE_CODE (t))
7213 return integer_valued_real_p (TREE_OPERAND (t, 0));
7218 return integer_valued_real_p (TREE_OPERAND (t, 1));
7225 return integer_valued_real_p (TREE_OPERAND (t, 0))
7226 && integer_valued_real_p (TREE_OPERAND (t, 1));
7229 return integer_valued_real_p (TREE_OPERAND (t, 1))
7230 && integer_valued_real_p (TREE_OPERAND (t, 2));
7233 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7237 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7238 if (TREE_CODE (type) == INTEGER_TYPE)
7240 if (TREE_CODE (type) == REAL_TYPE)
7241 return integer_valued_real_p (TREE_OPERAND (t, 0));
7246 switch (builtin_mathfn_code (t))
7248 CASE_FLT_FN (BUILT_IN_CEIL):
7249 CASE_FLT_FN (BUILT_IN_FLOOR):
7250 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7251 CASE_FLT_FN (BUILT_IN_RINT):
7252 CASE_FLT_FN (BUILT_IN_ROUND):
7253 CASE_FLT_FN (BUILT_IN_TRUNC):
7256 CASE_FLT_FN (BUILT_IN_FMIN):
7257 CASE_FLT_FN (BUILT_IN_FMAX):
7258 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7259 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7272 /* FNDECL is assumed to be a builtin where truncation can be propagated
7273 across (for instance floor((double)f) == (double)floorf (f).
7274 Do the transformation for a call with argument ARG. */
7277 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7279 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7281 if (!validate_arg (arg, REAL_TYPE))
7284 /* Integer rounding functions are idempotent. */
7285 if (fcode == builtin_mathfn_code (arg))
7288 /* If argument is already integer valued, and we don't need to worry
7289 about setting errno, there's no need to perform rounding. */
7290 if (! flag_errno_math && integer_valued_real_p (arg))
7295 tree arg0 = strip_float_extensions (arg);
7296 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7297 tree newtype = TREE_TYPE (arg0);
7300 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7301 && (decl = mathfn_built_in (newtype, fcode)))
7302 return fold_convert_loc (loc, ftype,
7303 build_call_expr_loc (loc, decl, 1,
7304 fold_convert_loc (loc,
7311 /* FNDECL is assumed to be builtin which can narrow the FP type of
7312 the argument, for instance lround((double)f) -> lroundf (f).
7313 Do the transformation for a call with argument ARG. */
7316 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7318 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7320 if (!validate_arg (arg, REAL_TYPE))
7323 /* If argument is already integer valued, and we don't need to worry
7324 about setting errno, there's no need to perform rounding. */
7325 if (! flag_errno_math && integer_valued_real_p (arg))
7326 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7327 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7331 tree ftype = TREE_TYPE (arg);
7332 tree arg0 = strip_float_extensions (arg);
7333 tree newtype = TREE_TYPE (arg0);
7336 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7337 && (decl = mathfn_built_in (newtype, fcode)))
7338 return build_call_expr_loc (loc, decl, 1,
7339 fold_convert_loc (loc, newtype, arg0));
7342 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7343 sizeof (int) == sizeof (long). */
7344 if (TYPE_PRECISION (integer_type_node)
7345 == TYPE_PRECISION (long_integer_type_node))
7347 tree newfn = NULL_TREE;
7350 CASE_FLT_FN (BUILT_IN_ICEIL):
7351 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7354 CASE_FLT_FN (BUILT_IN_IFLOOR):
7355 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7358 CASE_FLT_FN (BUILT_IN_IROUND):
7359 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7362 CASE_FLT_FN (BUILT_IN_IRINT):
7363 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7372 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7373 return fold_convert_loc (loc,
7374 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7378 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7379 sizeof (long long) == sizeof (long). */
7380 if (TYPE_PRECISION (long_long_integer_type_node)
7381 == TYPE_PRECISION (long_integer_type_node))
7383 tree newfn = NULL_TREE;
7386 CASE_FLT_FN (BUILT_IN_LLCEIL):
7387 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7390 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7391 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7394 CASE_FLT_FN (BUILT_IN_LLROUND):
7395 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7398 CASE_FLT_FN (BUILT_IN_LLRINT):
7399 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7408 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7409 return fold_convert_loc (loc,
7410 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7417 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7418 return type. Return NULL_TREE if no simplification can be made. */
7421 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7425 if (!validate_arg (arg, COMPLEX_TYPE)
7426 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7429 /* Calculate the result when the argument is a constant. */
7430 if (TREE_CODE (arg) == COMPLEX_CST
7431 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7435 if (TREE_CODE (arg) == COMPLEX_EXPR)
7437 tree real = TREE_OPERAND (arg, 0);
7438 tree imag = TREE_OPERAND (arg, 1);
7440 /* If either part is zero, cabs is fabs of the other. */
7441 if (real_zerop (real))
7442 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7443 if (real_zerop (imag))
7444 return fold_build1_loc (loc, ABS_EXPR, type, real);
7446 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7447 if (flag_unsafe_math_optimizations
7448 && operand_equal_p (real, imag, OEP_PURE_SAME))
7450 const REAL_VALUE_TYPE sqrt2_trunc
7451 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7453 return fold_build2_loc (loc, MULT_EXPR, type,
7454 fold_build1_loc (loc, ABS_EXPR, type, real),
7455 build_real (type, sqrt2_trunc));
7459 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7460 if (TREE_CODE (arg) == NEGATE_EXPR
7461 || TREE_CODE (arg) == CONJ_EXPR)
7462 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7464 /* Don't do this when optimizing for size. */
7465 if (flag_unsafe_math_optimizations
7466 && optimize && optimize_function_for_speed_p (cfun))
7468 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7470 if (sqrtfn != NULL_TREE)
7472 tree rpart, ipart, result;
7474 arg = builtin_save_expr (arg);
7476 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7477 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7479 rpart = builtin_save_expr (rpart);
7480 ipart = builtin_save_expr (ipart);
7482 result = fold_build2_loc (loc, PLUS_EXPR, type,
7483 fold_build2_loc (loc, MULT_EXPR, type,
7485 fold_build2_loc (loc, MULT_EXPR, type,
7488 return build_call_expr_loc (loc, sqrtfn, 1, result);
7495 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7496 complex tree type of the result. If NEG is true, the imaginary
7497 zero is negative. */
7500 build_complex_cproj (tree type, bool neg)
7502 REAL_VALUE_TYPE rinf, rzero = dconst0;
7506 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7507 build_real (TREE_TYPE (type), rzero));
7510 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7511 return type. Return NULL_TREE if no simplification can be made. */
7514 fold_builtin_cproj (location_t loc, tree arg, tree type)
7516 if (!validate_arg (arg, COMPLEX_TYPE)
7517 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7520 /* If there are no infinities, return arg. */
7521 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7522 return non_lvalue_loc (loc, arg);
7524 /* Calculate the result when the argument is a constant. */
7525 if (TREE_CODE (arg) == COMPLEX_CST)
7527 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7528 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7530 if (real_isinf (real) || real_isinf (imag))
7531 return build_complex_cproj (type, imag->sign);
7535 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7537 tree real = TREE_OPERAND (arg, 0);
7538 tree imag = TREE_OPERAND (arg, 1);
7543 /* If the real part is inf and the imag part is known to be
7544 nonnegative, return (inf + 0i). Remember side-effects are
7545 possible in the imag part. */
7546 if (TREE_CODE (real) == REAL_CST
7547 && real_isinf (TREE_REAL_CST_PTR (real))
7548 && tree_expr_nonnegative_p (imag))
7549 return omit_one_operand_loc (loc, type,
7550 build_complex_cproj (type, false),
7553 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7554 Remember side-effects are possible in the real part. */
7555 if (TREE_CODE (imag) == REAL_CST
7556 && real_isinf (TREE_REAL_CST_PTR (imag)))
7558 omit_one_operand_loc (loc, type,
7559 build_complex_cproj (type, TREE_REAL_CST_PTR
7560 (imag)->sign), arg);
7566 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7567 Return NULL_TREE if no simplification can be made. */
7570 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7573 enum built_in_function fcode;
7576 if (!validate_arg (arg, REAL_TYPE))
7579 /* Calculate the result when the argument is a constant. */
7580 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7583 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7584 fcode = builtin_mathfn_code (arg);
7585 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7587 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7588 arg = fold_build2_loc (loc, MULT_EXPR, type,
7589 CALL_EXPR_ARG (arg, 0),
7590 build_real (type, dconsthalf));
7591 return build_call_expr_loc (loc, expfn, 1, arg);
7594 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7595 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7597 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7601 tree arg0 = CALL_EXPR_ARG (arg, 0);
7603 /* The inner root was either sqrt or cbrt. */
7604 /* This was a conditional expression but it triggered a bug
7606 REAL_VALUE_TYPE dconstroot;
7607 if (BUILTIN_SQRT_P (fcode))
7608 dconstroot = dconsthalf;
7610 dconstroot = dconst_third ();
7612 /* Adjust for the outer root. */
7613 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7614 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7615 tree_root = build_real (type, dconstroot);
7616 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7620 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7621 if (flag_unsafe_math_optimizations
7622 && (fcode == BUILT_IN_POW
7623 || fcode == BUILT_IN_POWF
7624 || fcode == BUILT_IN_POWL))
7626 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7627 tree arg0 = CALL_EXPR_ARG (arg, 0);
7628 tree arg1 = CALL_EXPR_ARG (arg, 1);
7630 if (!tree_expr_nonnegative_p (arg0))
7631 arg0 = build1 (ABS_EXPR, type, arg0);
7632 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7633 build_real (type, dconsthalf));
7634 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7640 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7641 Return NULL_TREE if no simplification can be made. */
7644 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7646 const enum built_in_function fcode = builtin_mathfn_code (arg);
7649 if (!validate_arg (arg, REAL_TYPE))
7652 /* Calculate the result when the argument is a constant. */
7653 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7656 if (flag_unsafe_math_optimizations)
7658 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7659 if (BUILTIN_EXPONENT_P (fcode))
7661 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7662 const REAL_VALUE_TYPE third_trunc =
7663 real_value_truncate (TYPE_MODE (type), dconst_third ());
7664 arg = fold_build2_loc (loc, MULT_EXPR, type,
7665 CALL_EXPR_ARG (arg, 0),
7666 build_real (type, third_trunc));
7667 return build_call_expr_loc (loc, expfn, 1, arg);
7670 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7671 if (BUILTIN_SQRT_P (fcode))
7673 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7677 tree arg0 = CALL_EXPR_ARG (arg, 0);
7679 REAL_VALUE_TYPE dconstroot = dconst_third ();
7681 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7682 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7683 tree_root = build_real (type, dconstroot);
7684 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7688 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7689 if (BUILTIN_CBRT_P (fcode))
7691 tree arg0 = CALL_EXPR_ARG (arg, 0);
7692 if (tree_expr_nonnegative_p (arg0))
7694 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7699 REAL_VALUE_TYPE dconstroot;
7701 real_arithmetic (&dconstroot, MULT_EXPR,
7702 dconst_third_ptr (), dconst_third_ptr ());
7703 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7704 tree_root = build_real (type, dconstroot);
7705 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7710 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7711 if (fcode == BUILT_IN_POW
7712 || fcode == BUILT_IN_POWF
7713 || fcode == BUILT_IN_POWL)
7715 tree arg00 = CALL_EXPR_ARG (arg, 0);
7716 tree arg01 = CALL_EXPR_ARG (arg, 1);
7717 if (tree_expr_nonnegative_p (arg00))
7719 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7720 const REAL_VALUE_TYPE dconstroot
7721 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7722 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7723 build_real (type, dconstroot));
7724 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7731 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7732 TYPE is the type of the return value. Return NULL_TREE if no
7733 simplification can be made. */
7736 fold_builtin_cos (location_t loc,
7737 tree arg, tree type, tree fndecl)
7741 if (!validate_arg (arg, REAL_TYPE))
7744 /* Calculate the result when the argument is a constant. */
7745 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7748 /* Optimize cos(-x) into cos (x). */
7749 if ((narg = fold_strip_sign_ops (arg)))
7750 return build_call_expr_loc (loc, fndecl, 1, narg);
7755 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7756 Return NULL_TREE if no simplification can be made. */
7759 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7761 if (validate_arg (arg, REAL_TYPE))
7765 /* Calculate the result when the argument is a constant. */
7766 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7769 /* Optimize cosh(-x) into cosh (x). */
7770 if ((narg = fold_strip_sign_ops (arg)))
7771 return build_call_expr_loc (loc, fndecl, 1, narg);
7777 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7778 argument ARG. TYPE is the type of the return value. Return
7779 NULL_TREE if no simplification can be made. */
7782 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7785 if (validate_arg (arg, COMPLEX_TYPE)
7786 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7790 /* Calculate the result when the argument is a constant. */
7791 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7794 /* Optimize fn(-x) into fn(x). */
7795 if ((tmp = fold_strip_sign_ops (arg)))
7796 return build_call_expr_loc (loc, fndecl, 1, tmp);
7802 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7803 Return NULL_TREE if no simplification can be made. */
7806 fold_builtin_tan (tree arg, tree type)
7808 enum built_in_function fcode;
7811 if (!validate_arg (arg, REAL_TYPE))
7814 /* Calculate the result when the argument is a constant. */
7815 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7818 /* Optimize tan(atan(x)) = x. */
7819 fcode = builtin_mathfn_code (arg);
7820 if (flag_unsafe_math_optimizations
7821 && (fcode == BUILT_IN_ATAN
7822 || fcode == BUILT_IN_ATANF
7823 || fcode == BUILT_IN_ATANL))
7824 return CALL_EXPR_ARG (arg, 0);
7829 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7830 NULL_TREE if no simplification can be made. */
7833 fold_builtin_sincos (location_t loc,
7834 tree arg0, tree arg1, tree arg2)
7839 if (!validate_arg (arg0, REAL_TYPE)
7840 || !validate_arg (arg1, POINTER_TYPE)
7841 || !validate_arg (arg2, POINTER_TYPE))
7844 type = TREE_TYPE (arg0);
7846 /* Calculate the result when the argument is a constant. */
7847 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7850 /* Canonicalize sincos to cexpi. */
7851 if (!TARGET_C99_FUNCTIONS)
7853 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7857 call = build_call_expr_loc (loc, fn, 1, arg0);
7858 call = builtin_save_expr (call);
7860 return build2 (COMPOUND_EXPR, void_type_node,
7861 build2 (MODIFY_EXPR, void_type_node,
7862 build_fold_indirect_ref_loc (loc, arg1),
7863 build1 (IMAGPART_EXPR, type, call)),
7864 build2 (MODIFY_EXPR, void_type_node,
7865 build_fold_indirect_ref_loc (loc, arg2),
7866 build1 (REALPART_EXPR, type, call)));
7869 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7870 NULL_TREE if no simplification can be made. */
7873 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7876 tree realp, imagp, ifn;
7879 if (!validate_arg (arg0, COMPLEX_TYPE)
7880 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7883 /* Calculate the result when the argument is a constant. */
7884 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7887 rtype = TREE_TYPE (TREE_TYPE (arg0));
7889 /* In case we can figure out the real part of arg0 and it is constant zero
7891 if (!TARGET_C99_FUNCTIONS)
7893 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7897 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7898 && real_zerop (realp))
7900 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7901 return build_call_expr_loc (loc, ifn, 1, narg);
7904 /* In case we can easily decompose real and imaginary parts split cexp
7905 to exp (r) * cexpi (i). */
7906 if (flag_unsafe_math_optimizations
7909 tree rfn, rcall, icall;
7911 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7915 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7919 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7920 icall = builtin_save_expr (icall);
7921 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7922 rcall = builtin_save_expr (rcall);
7923 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7924 fold_build2_loc (loc, MULT_EXPR, rtype,
7926 fold_build1_loc (loc, REALPART_EXPR,
7928 fold_build2_loc (loc, MULT_EXPR, rtype,
7930 fold_build1_loc (loc, IMAGPART_EXPR,
7937 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7938 Return NULL_TREE if no simplification can be made. */
7941 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7943 if (!validate_arg (arg, REAL_TYPE))
7946 /* Optimize trunc of constant value. */
7947 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7949 REAL_VALUE_TYPE r, x;
7950 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7952 x = TREE_REAL_CST (arg);
7953 real_trunc (&r, TYPE_MODE (type), &x);
7954 return build_real (type, r);
7957 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7960 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7961 Return NULL_TREE if no simplification can be made. */
7964 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7966 if (!validate_arg (arg, REAL_TYPE))
7969 /* Optimize floor of constant value. */
7970 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7974 x = TREE_REAL_CST (arg);
7975 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7977 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7980 real_floor (&r, TYPE_MODE (type), &x);
7981 return build_real (type, r);
7985 /* Fold floor (x) where x is nonnegative to trunc (x). */
7986 if (tree_expr_nonnegative_p (arg))
7988 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7990 return build_call_expr_loc (loc, truncfn, 1, arg);
7993 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7996 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7997 Return NULL_TREE if no simplification can be made. */
8000 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
8002 if (!validate_arg (arg, REAL_TYPE))
8005 /* Optimize ceil of constant value. */
8006 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8010 x = TREE_REAL_CST (arg);
8011 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8013 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8016 real_ceil (&r, TYPE_MODE (type), &x);
8017 return build_real (type, r);
8021 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8024 /* Fold function call to builtin round, roundf or roundl with argument ARG.
8025 Return NULL_TREE if no simplification can be made. */
8028 fold_builtin_round (location_t loc, tree fndecl, tree arg)
8030 if (!validate_arg (arg, REAL_TYPE))
8033 /* Optimize round of constant value. */
8034 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8038 x = TREE_REAL_CST (arg);
8039 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8041 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8044 real_round (&r, TYPE_MODE (type), &x);
8045 return build_real (type, r);
8049 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
8052 /* Fold function call to builtin lround, lroundf or lroundl (or the
8053 corresponding long long versions) and other rounding functions. ARG
8054 is the argument to the call. Return NULL_TREE if no simplification
8058 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8060 if (!validate_arg (arg, REAL_TYPE))
8063 /* Optimize lround of constant value. */
8064 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8066 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8068 if (real_isfinite (&x))
8070 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8071 tree ftype = TREE_TYPE (arg);
8075 switch (DECL_FUNCTION_CODE (fndecl))
8077 CASE_FLT_FN (BUILT_IN_IFLOOR):
8078 CASE_FLT_FN (BUILT_IN_LFLOOR):
8079 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8080 real_floor (&r, TYPE_MODE (ftype), &x);
8083 CASE_FLT_FN (BUILT_IN_ICEIL):
8084 CASE_FLT_FN (BUILT_IN_LCEIL):
8085 CASE_FLT_FN (BUILT_IN_LLCEIL):
8086 real_ceil (&r, TYPE_MODE (ftype), &x);
8089 CASE_FLT_FN (BUILT_IN_IROUND):
8090 CASE_FLT_FN (BUILT_IN_LROUND):
8091 CASE_FLT_FN (BUILT_IN_LLROUND):
8092 real_round (&r, TYPE_MODE (ftype), &x);
8099 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8100 if (double_int_fits_to_tree_p (itype, val))
8101 return double_int_to_tree (itype, val);
8105 switch (DECL_FUNCTION_CODE (fndecl))
8107 CASE_FLT_FN (BUILT_IN_LFLOOR):
8108 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8109 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8110 if (tree_expr_nonnegative_p (arg))
8111 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8112 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8117 return fold_fixed_mathfn (loc, fndecl, arg);
8120 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8121 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8122 the argument to the call. Return NULL_TREE if no simplification can
8126 fold_builtin_bitop (tree fndecl, tree arg)
8128 if (!validate_arg (arg, INTEGER_TYPE))
8131 /* Optimize for constant argument. */
8132 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8134 HOST_WIDE_INT hi, width, result;
8135 unsigned HOST_WIDE_INT lo;
8138 type = TREE_TYPE (arg);
8139 width = TYPE_PRECISION (type);
8140 lo = TREE_INT_CST_LOW (arg);
8142 /* Clear all the bits that are beyond the type's precision. */
8143 if (width > HOST_BITS_PER_WIDE_INT)
8145 hi = TREE_INT_CST_HIGH (arg);
8146 if (width < HOST_BITS_PER_DOUBLE_INT)
8147 hi &= ~((unsigned HOST_WIDE_INT) (-1)
8148 << (width - HOST_BITS_PER_WIDE_INT));
8153 if (width < HOST_BITS_PER_WIDE_INT)
8154 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8157 switch (DECL_FUNCTION_CODE (fndecl))
8159 CASE_INT_FN (BUILT_IN_FFS):
8161 result = ffs_hwi (lo);
8163 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8168 CASE_INT_FN (BUILT_IN_CLZ):
8170 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8172 result = width - floor_log2 (lo) - 1;
8173 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8177 CASE_INT_FN (BUILT_IN_CTZ):
8179 result = ctz_hwi (lo);
8181 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8182 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8186 CASE_INT_FN (BUILT_IN_CLRSB):
8187 if (width > HOST_BITS_PER_WIDE_INT
8188 && (hi & ((unsigned HOST_WIDE_INT) 1
8189 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8191 hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
8192 << (width - HOST_BITS_PER_WIDE_INT - 1));
8195 else if (width <= HOST_BITS_PER_WIDE_INT
8196 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8197 lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
8199 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8201 result = width - floor_log2 (lo) - 2;
8206 CASE_INT_FN (BUILT_IN_POPCOUNT):
8209 result++, lo &= lo - 1;
8211 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8214 CASE_INT_FN (BUILT_IN_PARITY):
8217 result++, lo &= lo - 1;
8219 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8227 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8233 /* Fold function call to builtin_bswap and the short, long and long long
8234 variants. Return NULL_TREE if no simplification can be made. */
8236 fold_builtin_bswap (tree fndecl, tree arg)
8238 if (! validate_arg (arg, INTEGER_TYPE))
8241 /* Optimize constant value. */
8242 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8244 HOST_WIDE_INT hi, width, r_hi = 0;
8245 unsigned HOST_WIDE_INT lo, r_lo = 0;
8246 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8248 width = TYPE_PRECISION (type);
8249 lo = TREE_INT_CST_LOW (arg);
8250 hi = TREE_INT_CST_HIGH (arg);
8252 switch (DECL_FUNCTION_CODE (fndecl))
8254 case BUILT_IN_BSWAP16:
8255 case BUILT_IN_BSWAP32:
8256 case BUILT_IN_BSWAP64:
8260 for (s = 0; s < width; s += 8)
8262 int d = width - s - 8;
8263 unsigned HOST_WIDE_INT byte;
8265 if (s < HOST_BITS_PER_WIDE_INT)
8266 byte = (lo >> s) & 0xff;
8268 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8270 if (d < HOST_BITS_PER_WIDE_INT)
8273 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8283 if (width < HOST_BITS_PER_WIDE_INT)
8284 return build_int_cst (type, r_lo);
8286 return build_int_cst_wide (type, r_lo, r_hi);
8292 /* A subroutine of fold_builtin to fold the various logarithmic
8293 functions. Return NULL_TREE if no simplification can me made.
8294 FUNC is the corresponding MPFR logarithm function. */
8297 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8298 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8300 if (validate_arg (arg, REAL_TYPE))
8302 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8304 const enum built_in_function fcode = builtin_mathfn_code (arg);
8306 /* Calculate the result when the argument is a constant. */
8307 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8310 /* Special case, optimize logN(expN(x)) = x. */
8311 if (flag_unsafe_math_optimizations
8312 && ((func == mpfr_log
8313 && (fcode == BUILT_IN_EXP
8314 || fcode == BUILT_IN_EXPF
8315 || fcode == BUILT_IN_EXPL))
8316 || (func == mpfr_log2
8317 && (fcode == BUILT_IN_EXP2
8318 || fcode == BUILT_IN_EXP2F
8319 || fcode == BUILT_IN_EXP2L))
8320 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8321 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8323 /* Optimize logN(func()) for various exponential functions. We
8324 want to determine the value "x" and the power "exponent" in
8325 order to transform logN(x**exponent) into exponent*logN(x). */
8326 if (flag_unsafe_math_optimizations)
8328 tree exponent = 0, x = 0;
8332 CASE_FLT_FN (BUILT_IN_EXP):
8333 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8334 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8336 exponent = CALL_EXPR_ARG (arg, 0);
8338 CASE_FLT_FN (BUILT_IN_EXP2):
8339 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8340 x = build_real (type, dconst2);
8341 exponent = CALL_EXPR_ARG (arg, 0);
8343 CASE_FLT_FN (BUILT_IN_EXP10):
8344 CASE_FLT_FN (BUILT_IN_POW10):
8345 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8347 REAL_VALUE_TYPE dconst10;
8348 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8349 x = build_real (type, dconst10);
8351 exponent = CALL_EXPR_ARG (arg, 0);
8353 CASE_FLT_FN (BUILT_IN_SQRT):
8354 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8355 x = CALL_EXPR_ARG (arg, 0);
8356 exponent = build_real (type, dconsthalf);
8358 CASE_FLT_FN (BUILT_IN_CBRT):
8359 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8360 x = CALL_EXPR_ARG (arg, 0);
8361 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8364 CASE_FLT_FN (BUILT_IN_POW):
8365 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8366 x = CALL_EXPR_ARG (arg, 0);
8367 exponent = CALL_EXPR_ARG (arg, 1);
8373 /* Now perform the optimization. */
8376 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8377 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8385 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8386 NULL_TREE if no simplification can be made. */
8389 fold_builtin_hypot (location_t loc, tree fndecl,
8390 tree arg0, tree arg1, tree type)
8392 tree res, narg0, narg1;
8394 if (!validate_arg (arg0, REAL_TYPE)
8395 || !validate_arg (arg1, REAL_TYPE))
8398 /* Calculate the result when the argument is a constant. */
8399 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8402 /* If either argument to hypot has a negate or abs, strip that off.
8403 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8404 narg0 = fold_strip_sign_ops (arg0);
8405 narg1 = fold_strip_sign_ops (arg1);
8408 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8409 narg1 ? narg1 : arg1);
8412 /* If either argument is zero, hypot is fabs of the other. */
8413 if (real_zerop (arg0))
8414 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8415 else if (real_zerop (arg1))
8416 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8418 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8419 if (flag_unsafe_math_optimizations
8420 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8422 const REAL_VALUE_TYPE sqrt2_trunc
8423 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8424 return fold_build2_loc (loc, MULT_EXPR, type,
8425 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8426 build_real (type, sqrt2_trunc));
8433 /* Fold a builtin function call to pow, powf, or powl. Return
8434 NULL_TREE if no simplification can be made. */
8436 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8440 if (!validate_arg (arg0, REAL_TYPE)
8441 || !validate_arg (arg1, REAL_TYPE))
8444 /* Calculate the result when the argument is a constant. */
8445 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8448 /* Optimize pow(1.0,y) = 1.0. */
8449 if (real_onep (arg0))
8450 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8452 if (TREE_CODE (arg1) == REAL_CST
8453 && !TREE_OVERFLOW (arg1))
8455 REAL_VALUE_TYPE cint;
8459 c = TREE_REAL_CST (arg1);
8461 /* Optimize pow(x,0.0) = 1.0. */
8462 if (REAL_VALUES_EQUAL (c, dconst0))
8463 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8466 /* Optimize pow(x,1.0) = x. */
8467 if (REAL_VALUES_EQUAL (c, dconst1))
8470 /* Optimize pow(x,-1.0) = 1.0/x. */
8471 if (REAL_VALUES_EQUAL (c, dconstm1))
8472 return fold_build2_loc (loc, RDIV_EXPR, type,
8473 build_real (type, dconst1), arg0);
8475 /* Optimize pow(x,0.5) = sqrt(x). */
8476 if (flag_unsafe_math_optimizations
8477 && REAL_VALUES_EQUAL (c, dconsthalf))
8479 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8481 if (sqrtfn != NULL_TREE)
8482 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8485 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8486 if (flag_unsafe_math_optimizations)
8488 const REAL_VALUE_TYPE dconstroot
8489 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8491 if (REAL_VALUES_EQUAL (c, dconstroot))
8493 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8494 if (cbrtfn != NULL_TREE)
8495 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8499 /* Check for an integer exponent. */
8500 n = real_to_integer (&c);
8501 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8502 if (real_identical (&c, &cint))
8504 /* Attempt to evaluate pow at compile-time, unless this should
8505 raise an exception. */
8506 if (TREE_CODE (arg0) == REAL_CST
8507 && !TREE_OVERFLOW (arg0)
8509 || (!flag_trapping_math && !flag_errno_math)
8510 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8515 x = TREE_REAL_CST (arg0);
8516 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8517 if (flag_unsafe_math_optimizations || !inexact)
8518 return build_real (type, x);
8521 /* Strip sign ops from even integer powers. */
8522 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8524 tree narg0 = fold_strip_sign_ops (arg0);
8526 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8531 if (flag_unsafe_math_optimizations)
8533 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8535 /* Optimize pow(expN(x),y) = expN(x*y). */
8536 if (BUILTIN_EXPONENT_P (fcode))
8538 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8539 tree arg = CALL_EXPR_ARG (arg0, 0);
8540 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8541 return build_call_expr_loc (loc, expfn, 1, arg);
8544 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8545 if (BUILTIN_SQRT_P (fcode))
8547 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8548 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8549 build_real (type, dconsthalf));
8550 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8553 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8554 if (BUILTIN_CBRT_P (fcode))
8556 tree arg = CALL_EXPR_ARG (arg0, 0);
8557 if (tree_expr_nonnegative_p (arg))
8559 const REAL_VALUE_TYPE dconstroot
8560 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8561 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8562 build_real (type, dconstroot));
8563 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8567 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8568 if (fcode == BUILT_IN_POW
8569 || fcode == BUILT_IN_POWF
8570 || fcode == BUILT_IN_POWL)
8572 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8573 if (tree_expr_nonnegative_p (arg00))
8575 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8576 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8577 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8585 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8586 Return NULL_TREE if no simplification can be made. */
8588 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8589 tree arg0, tree arg1, tree type)
8591 if (!validate_arg (arg0, REAL_TYPE)
8592 || !validate_arg (arg1, INTEGER_TYPE))
8595 /* Optimize pow(1.0,y) = 1.0. */
8596 if (real_onep (arg0))
8597 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8599 if (host_integerp (arg1, 0))
8601 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8603 /* Evaluate powi at compile-time. */
8604 if (TREE_CODE (arg0) == REAL_CST
8605 && !TREE_OVERFLOW (arg0))
8608 x = TREE_REAL_CST (arg0);
8609 real_powi (&x, TYPE_MODE (type), &x, c);
8610 return build_real (type, x);
8613 /* Optimize pow(x,0) = 1.0. */
8615 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8618 /* Optimize pow(x,1) = x. */
8622 /* Optimize pow(x,-1) = 1.0/x. */
8624 return fold_build2_loc (loc, RDIV_EXPR, type,
8625 build_real (type, dconst1), arg0);
8631 /* A subroutine of fold_builtin to fold the various exponent
8632 functions. Return NULL_TREE if no simplification can be made.
8633 FUNC is the corresponding MPFR exponent function. */
8636 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8637 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8639 if (validate_arg (arg, REAL_TYPE))
8641 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8644 /* Calculate the result when the argument is a constant. */
8645 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8648 /* Optimize expN(logN(x)) = x. */
8649 if (flag_unsafe_math_optimizations)
8651 const enum built_in_function fcode = builtin_mathfn_code (arg);
8653 if ((func == mpfr_exp
8654 && (fcode == BUILT_IN_LOG
8655 || fcode == BUILT_IN_LOGF
8656 || fcode == BUILT_IN_LOGL))
8657 || (func == mpfr_exp2
8658 && (fcode == BUILT_IN_LOG2
8659 || fcode == BUILT_IN_LOG2F
8660 || fcode == BUILT_IN_LOG2L))
8661 || (func == mpfr_exp10
8662 && (fcode == BUILT_IN_LOG10
8663 || fcode == BUILT_IN_LOG10F
8664 || fcode == BUILT_IN_LOG10L)))
8665 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8672 /* Return true if VAR is a VAR_DECL or a component thereof. */
8675 var_decl_component_p (tree var)
8678 while (handled_component_p (inner))
8679 inner = TREE_OPERAND (inner, 0);
8680 return SSA_VAR_P (inner);
8683 /* Fold function call to builtin memset. Return
8684 NULL_TREE if no simplification can be made. */
8687 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8688 tree type, bool ignore)
8690 tree var, ret, etype;
8691 unsigned HOST_WIDE_INT length, cval;
8693 if (! validate_arg (dest, POINTER_TYPE)
8694 || ! validate_arg (c, INTEGER_TYPE)
8695 || ! validate_arg (len, INTEGER_TYPE))
8698 if (! host_integerp (len, 1))
8701 /* If the LEN parameter is zero, return DEST. */
8702 if (integer_zerop (len))
8703 return omit_one_operand_loc (loc, type, dest, c);
8705 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8710 if (TREE_CODE (var) != ADDR_EXPR)
8713 var = TREE_OPERAND (var, 0);
8714 if (TREE_THIS_VOLATILE (var))
8717 etype = TREE_TYPE (var);
8718 if (TREE_CODE (etype) == ARRAY_TYPE)
8719 etype = TREE_TYPE (etype);
8721 if (!INTEGRAL_TYPE_P (etype)
8722 && !POINTER_TYPE_P (etype))
8725 if (! var_decl_component_p (var))
8728 length = tree_low_cst (len, 1);
8729 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8730 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8733 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8736 if (integer_zerop (c))
8740 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8743 cval = TREE_INT_CST_LOW (c);
8747 cval |= (cval << 31) << 1;
8750 ret = build_int_cst_type (etype, cval);
8751 var = build_fold_indirect_ref_loc (loc,
8752 fold_convert_loc (loc,
8753 build_pointer_type (etype),
8755 ret = build2 (MODIFY_EXPR, etype, var, ret);
8759 return omit_one_operand_loc (loc, type, dest, ret);
8762 /* Fold function call to builtin memset. Return
8763 NULL_TREE if no simplification can be made. */
8766 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8768 if (! validate_arg (dest, POINTER_TYPE)
8769 || ! validate_arg (size, INTEGER_TYPE))
8775 /* New argument list transforming bzero(ptr x, int y) to
8776 memset(ptr x, int 0, size_t y). This is done this way
8777 so that if it isn't expanded inline, we fallback to
8778 calling bzero instead of memset. */
8780 return fold_builtin_memset (loc, dest, integer_zero_node,
8781 fold_convert_loc (loc, size_type_node, size),
8782 void_type_node, ignore);
8785 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8786 NULL_TREE if no simplification can be made.
8787 If ENDP is 0, return DEST (like memcpy).
8788 If ENDP is 1, return DEST+LEN (like mempcpy).
8789 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8790 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8794 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8795 tree len, tree type, bool ignore, int endp)
8797 tree destvar, srcvar, expr;
8799 if (! validate_arg (dest, POINTER_TYPE)
8800 || ! validate_arg (src, POINTER_TYPE)
8801 || ! validate_arg (len, INTEGER_TYPE))
8804 /* If the LEN parameter is zero, return DEST. */
8805 if (integer_zerop (len))
8806 return omit_one_operand_loc (loc, type, dest, src);
8808 /* If SRC and DEST are the same (and not volatile), return
8809 DEST{,+LEN,+LEN-1}. */
8810 if (operand_equal_p (src, dest, 0))
8814 tree srctype, desttype;
8815 unsigned int src_align, dest_align;
8820 src_align = get_pointer_alignment (src);
8821 dest_align = get_pointer_alignment (dest);
8823 /* Both DEST and SRC must be pointer types.
8824 ??? This is what old code did. Is the testing for pointer types
8827 If either SRC is readonly or length is 1, we can use memcpy. */
8828 if (!dest_align || !src_align)
8830 if (readonly_data_expr (src)
8831 || (host_integerp (len, 1)
8832 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8833 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8835 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8838 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8841 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8842 if (TREE_CODE (src) == ADDR_EXPR
8843 && TREE_CODE (dest) == ADDR_EXPR)
8845 tree src_base, dest_base, fn;
8846 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8847 HOST_WIDE_INT size = -1;
8848 HOST_WIDE_INT maxsize = -1;
8850 srcvar = TREE_OPERAND (src, 0);
8851 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8853 destvar = TREE_OPERAND (dest, 0);
8854 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8856 if (host_integerp (len, 1))
8857 maxsize = tree_low_cst (len, 1);
8860 src_offset /= BITS_PER_UNIT;
8861 dest_offset /= BITS_PER_UNIT;
8862 if (SSA_VAR_P (src_base)
8863 && SSA_VAR_P (dest_base))
8865 if (operand_equal_p (src_base, dest_base, 0)
8866 && ranges_overlap_p (src_offset, maxsize,
8867 dest_offset, maxsize))
8870 else if (TREE_CODE (src_base) == MEM_REF
8871 && TREE_CODE (dest_base) == MEM_REF)
8874 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8875 TREE_OPERAND (dest_base, 0), 0))
8877 off = double_int_add (mem_ref_offset (src_base),
8878 shwi_to_double_int (src_offset));
8879 if (!double_int_fits_in_shwi_p (off))
8881 src_offset = off.low;
8882 off = double_int_add (mem_ref_offset (dest_base),
8883 shwi_to_double_int (dest_offset));
8884 if (!double_int_fits_in_shwi_p (off))
8886 dest_offset = off.low;
8887 if (ranges_overlap_p (src_offset, maxsize,
8888 dest_offset, maxsize))
8894 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8897 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8900 /* If the destination and source do not alias optimize into
8902 if ((is_gimple_min_invariant (dest)
8903 || TREE_CODE (dest) == SSA_NAME)
8904 && (is_gimple_min_invariant (src)
8905 || TREE_CODE (src) == SSA_NAME))
8908 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8909 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8910 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8913 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8916 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8923 if (!host_integerp (len, 0))
8926 This logic lose for arguments like (type *)malloc (sizeof (type)),
8927 since we strip the casts of up to VOID return value from malloc.
8928 Perhaps we ought to inherit type from non-VOID argument here? */
8931 if (!POINTER_TYPE_P (TREE_TYPE (src))
8932 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8934 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8935 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8937 tree tem = TREE_OPERAND (src, 0);
8939 if (tem != TREE_OPERAND (src, 0))
8940 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8942 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8944 tree tem = TREE_OPERAND (dest, 0);
8946 if (tem != TREE_OPERAND (dest, 0))
8947 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8949 srctype = TREE_TYPE (TREE_TYPE (src));
8950 if (TREE_CODE (srctype) == ARRAY_TYPE
8951 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8953 srctype = TREE_TYPE (srctype);
8955 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8957 desttype = TREE_TYPE (TREE_TYPE (dest));
8958 if (TREE_CODE (desttype) == ARRAY_TYPE
8959 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8961 desttype = TREE_TYPE (desttype);
8963 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8965 if (TREE_ADDRESSABLE (srctype)
8966 || TREE_ADDRESSABLE (desttype))
8969 src_align = get_pointer_alignment (src);
8970 dest_align = get_pointer_alignment (dest);
8971 if (dest_align < TYPE_ALIGN (desttype)
8972 || src_align < TYPE_ALIGN (srctype))
8976 dest = builtin_save_expr (dest);
8978 /* Build accesses at offset zero with a ref-all character type. */
8979 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8980 ptr_mode, true), 0);
8983 STRIP_NOPS (destvar);
8984 if (TREE_CODE (destvar) == ADDR_EXPR
8985 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8986 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8987 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8989 destvar = NULL_TREE;
8992 STRIP_NOPS (srcvar);
8993 if (TREE_CODE (srcvar) == ADDR_EXPR
8994 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8995 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8998 || src_align >= TYPE_ALIGN (desttype))
8999 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
9001 else if (!STRICT_ALIGNMENT)
9003 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9005 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
9013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
9016 if (srcvar == NULL_TREE)
9019 if (src_align >= TYPE_ALIGN (desttype))
9020 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
9023 if (STRICT_ALIGNMENT)
9025 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9027 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9030 else if (destvar == NULL_TREE)
9033 if (dest_align >= TYPE_ALIGN (srctype))
9034 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9037 if (STRICT_ALIGNMENT)
9039 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9041 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9045 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9051 if (endp == 0 || endp == 3)
9052 return omit_one_operand_loc (loc, type, dest, expr);
9058 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9061 dest = fold_build_pointer_plus_loc (loc, dest, len);
9062 dest = fold_convert_loc (loc, type, dest);
9064 dest = omit_one_operand_loc (loc, type, dest, expr);
9068 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9069 If LEN is not NULL, it represents the length of the string to be
9070 copied. Return NULL_TREE if no simplification can be made. */
9073 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9077 if (!validate_arg (dest, POINTER_TYPE)
9078 || !validate_arg (src, POINTER_TYPE))
9081 /* If SRC and DEST are the same (and not volatile), return DEST. */
9082 if (operand_equal_p (src, dest, 0))
9083 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9085 if (optimize_function_for_size_p (cfun))
9088 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9094 len = c_strlen (src, 1);
9095 if (! len || TREE_SIDE_EFFECTS (len))
9099 len = fold_convert_loc (loc, size_type_node, len);
9100 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9101 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9102 build_call_expr_loc (loc, fn, 3, dest, src, len));
9105 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9106 Return NULL_TREE if no simplification can be made. */
9109 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9111 tree fn, len, lenp1, call, type;
9113 if (!validate_arg (dest, POINTER_TYPE)
9114 || !validate_arg (src, POINTER_TYPE))
9117 len = c_strlen (src, 1);
9119 || TREE_CODE (len) != INTEGER_CST)
9122 if (optimize_function_for_size_p (cfun)
9123 /* If length is zero it's small enough. */
9124 && !integer_zerop (len))
9127 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9131 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9132 fold_convert_loc (loc, size_type_node, len),
9133 build_int_cst (size_type_node, 1));
9134 /* We use dest twice in building our expression. Save it from
9135 multiple expansions. */
9136 dest = builtin_save_expr (dest);
9137 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9139 type = TREE_TYPE (TREE_TYPE (fndecl));
9140 dest = fold_build_pointer_plus_loc (loc, dest, len);
9141 dest = fold_convert_loc (loc, type, dest);
9142 dest = omit_one_operand_loc (loc, type, dest, call);
9146 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9147 If SLEN is not NULL, it represents the length of the source string.
9148 Return NULL_TREE if no simplification can be made. */
9151 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9152 tree src, tree len, tree slen)
9156 if (!validate_arg (dest, POINTER_TYPE)
9157 || !validate_arg (src, POINTER_TYPE)
9158 || !validate_arg (len, INTEGER_TYPE))
9161 /* If the LEN parameter is zero, return DEST. */
9162 if (integer_zerop (len))
9163 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9165 /* We can't compare slen with len as constants below if len is not a
9167 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9171 slen = c_strlen (src, 1);
9173 /* Now, we must be passed a constant src ptr parameter. */
9174 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9177 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9179 /* We do not support simplification of this case, though we do
9180 support it when expanding trees into RTL. */
9181 /* FIXME: generate a call to __builtin_memset. */
9182 if (tree_int_cst_lt (slen, len))
9185 /* OK transform into builtin memcpy. */
9186 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9190 len = fold_convert_loc (loc, size_type_node, len);
9191 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9192 build_call_expr_loc (loc, fn, 3, dest, src, len));
9195 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9196 arguments to the call, and TYPE is its return type.
9197 Return NULL_TREE if no simplification can be made. */
9200 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9202 if (!validate_arg (arg1, POINTER_TYPE)
9203 || !validate_arg (arg2, INTEGER_TYPE)
9204 || !validate_arg (len, INTEGER_TYPE))
9210 if (TREE_CODE (arg2) != INTEGER_CST
9211 || !host_integerp (len, 1))
9214 p1 = c_getstr (arg1);
9215 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9221 if (target_char_cast (arg2, &c))
9224 r = (const char *) memchr (p1, c, tree_low_cst (len, 1));
9227 return build_int_cst (TREE_TYPE (arg1), 0);
9229 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9230 return fold_convert_loc (loc, type, tem);
9236 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9237 Return NULL_TREE if no simplification can be made. */
9240 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9242 const char *p1, *p2;
9244 if (!validate_arg (arg1, POINTER_TYPE)
9245 || !validate_arg (arg2, POINTER_TYPE)
9246 || !validate_arg (len, INTEGER_TYPE))
9249 /* If the LEN parameter is zero, return zero. */
9250 if (integer_zerop (len))
9251 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9254 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9255 if (operand_equal_p (arg1, arg2, 0))
9256 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9258 p1 = c_getstr (arg1);
9259 p2 = c_getstr (arg2);
9261 /* If all arguments are constant, and the value of len is not greater
9262 than the lengths of arg1 and arg2, evaluate at compile-time. */
9263 if (host_integerp (len, 1) && p1 && p2
9264 && compare_tree_int (len, strlen (p1) + 1) <= 0
9265 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9267 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
9270 return integer_one_node;
9272 return integer_minus_one_node;
9274 return integer_zero_node;
9277 /* If len parameter is one, return an expression corresponding to
9278 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9279 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9281 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9282 tree cst_uchar_ptr_node
9283 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9286 = fold_convert_loc (loc, integer_type_node,
9287 build1 (INDIRECT_REF, cst_uchar_node,
9288 fold_convert_loc (loc,
9292 = fold_convert_loc (loc, integer_type_node,
9293 build1 (INDIRECT_REF, cst_uchar_node,
9294 fold_convert_loc (loc,
9297 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9303 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9304 Return NULL_TREE if no simplification can be made. */
9307 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9309 const char *p1, *p2;
9311 if (!validate_arg (arg1, POINTER_TYPE)
9312 || !validate_arg (arg2, POINTER_TYPE))
9315 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9316 if (operand_equal_p (arg1, arg2, 0))
9317 return integer_zero_node;
9319 p1 = c_getstr (arg1);
9320 p2 = c_getstr (arg2);
9324 const int i = strcmp (p1, p2);
9326 return integer_minus_one_node;
9328 return integer_one_node;
9330 return integer_zero_node;
9333 /* If the second arg is "", return *(const unsigned char*)arg1. */
9334 if (p2 && *p2 == '\0')
9336 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9337 tree cst_uchar_ptr_node
9338 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9340 return fold_convert_loc (loc, integer_type_node,
9341 build1 (INDIRECT_REF, cst_uchar_node,
9342 fold_convert_loc (loc,
9347 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9348 if (p1 && *p1 == '\0')
9350 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9351 tree cst_uchar_ptr_node
9352 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9355 = fold_convert_loc (loc, integer_type_node,
9356 build1 (INDIRECT_REF, cst_uchar_node,
9357 fold_convert_loc (loc,
9360 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9366 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9367 Return NULL_TREE if no simplification can be made. */
9370 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9372 const char *p1, *p2;
9374 if (!validate_arg (arg1, POINTER_TYPE)
9375 || !validate_arg (arg2, POINTER_TYPE)
9376 || !validate_arg (len, INTEGER_TYPE))
9379 /* If the LEN parameter is zero, return zero. */
9380 if (integer_zerop (len))
9381 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9384 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9385 if (operand_equal_p (arg1, arg2, 0))
9386 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9388 p1 = c_getstr (arg1);
9389 p2 = c_getstr (arg2);
9391 if (host_integerp (len, 1) && p1 && p2)
9393 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9395 return integer_one_node;
9397 return integer_minus_one_node;
9399 return integer_zero_node;
9402 /* If the second arg is "", and the length is greater than zero,
9403 return *(const unsigned char*)arg1. */
9404 if (p2 && *p2 == '\0'
9405 && TREE_CODE (len) == INTEGER_CST
9406 && tree_int_cst_sgn (len) == 1)
9408 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9409 tree cst_uchar_ptr_node
9410 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9412 return fold_convert_loc (loc, integer_type_node,
9413 build1 (INDIRECT_REF, cst_uchar_node,
9414 fold_convert_loc (loc,
9419 /* If the first arg is "", and the length is greater than zero,
9420 return -*(const unsigned char*)arg2. */
9421 if (p1 && *p1 == '\0'
9422 && TREE_CODE (len) == INTEGER_CST
9423 && tree_int_cst_sgn (len) == 1)
9425 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9426 tree cst_uchar_ptr_node
9427 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9429 tree temp = fold_convert_loc (loc, integer_type_node,
9430 build1 (INDIRECT_REF, cst_uchar_node,
9431 fold_convert_loc (loc,
9434 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9437 /* If len parameter is one, return an expression corresponding to
9438 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9439 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9441 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9442 tree cst_uchar_ptr_node
9443 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9445 tree ind1 = fold_convert_loc (loc, integer_type_node,
9446 build1 (INDIRECT_REF, cst_uchar_node,
9447 fold_convert_loc (loc,
9450 tree ind2 = fold_convert_loc (loc, integer_type_node,
9451 build1 (INDIRECT_REF, cst_uchar_node,
9452 fold_convert_loc (loc,
9455 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9461 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9462 ARG. Return NULL_TREE if no simplification can be made. */
9465 fold_builtin_signbit (location_t loc, tree arg, tree type)
9467 if (!validate_arg (arg, REAL_TYPE))
9470 /* If ARG is a compile-time constant, determine the result. */
9471 if (TREE_CODE (arg) == REAL_CST
9472 && !TREE_OVERFLOW (arg))
9476 c = TREE_REAL_CST (arg);
9477 return (REAL_VALUE_NEGATIVE (c)
9478 ? build_one_cst (type)
9479 : build_zero_cst (type));
9482 /* If ARG is non-negative, the result is always zero. */
9483 if (tree_expr_nonnegative_p (arg))
9484 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9486 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9487 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9488 return fold_convert (type,
9489 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9490 build_real (TREE_TYPE (arg), dconst0)));
9495 /* Fold function call to builtin copysign, copysignf or copysignl with
9496 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9500 fold_builtin_copysign (location_t loc, tree fndecl,
9501 tree arg1, tree arg2, tree type)
9505 if (!validate_arg (arg1, REAL_TYPE)
9506 || !validate_arg (arg2, REAL_TYPE))
9509 /* copysign(X,X) is X. */
9510 if (operand_equal_p (arg1, arg2, 0))
9511 return fold_convert_loc (loc, type, arg1);
9513 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9514 if (TREE_CODE (arg1) == REAL_CST
9515 && TREE_CODE (arg2) == REAL_CST
9516 && !TREE_OVERFLOW (arg1)
9517 && !TREE_OVERFLOW (arg2))
9519 REAL_VALUE_TYPE c1, c2;
9521 c1 = TREE_REAL_CST (arg1);
9522 c2 = TREE_REAL_CST (arg2);
9523 /* c1.sign := c2.sign. */
9524 real_copysign (&c1, &c2);
9525 return build_real (type, c1);
9528 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9529 Remember to evaluate Y for side-effects. */
9530 if (tree_expr_nonnegative_p (arg2))
9531 return omit_one_operand_loc (loc, type,
9532 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9535 /* Strip sign changing operations for the first argument. */
9536 tem = fold_strip_sign_ops (arg1);
9538 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9543 /* Fold a call to builtin isascii with argument ARG. */
9546 fold_builtin_isascii (location_t loc, tree arg)
9548 if (!validate_arg (arg, INTEGER_TYPE))
9552 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9553 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9554 build_int_cst (integer_type_node,
9555 ~ (unsigned HOST_WIDE_INT) 0x7f));
9556 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9557 arg, integer_zero_node);
9561 /* Fold a call to builtin toascii with argument ARG. */
9564 fold_builtin_toascii (location_t loc, tree arg)
9566 if (!validate_arg (arg, INTEGER_TYPE))
9569 /* Transform toascii(c) -> (c & 0x7f). */
9570 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9571 build_int_cst (integer_type_node, 0x7f));
9574 /* Fold a call to builtin isdigit with argument ARG. */
9577 fold_builtin_isdigit (location_t loc, tree arg)
9579 if (!validate_arg (arg, INTEGER_TYPE))
9583 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9584 /* According to the C standard, isdigit is unaffected by locale.
9585 However, it definitely is affected by the target character set. */
9586 unsigned HOST_WIDE_INT target_digit0
9587 = lang_hooks.to_target_charset ('0');
9589 if (target_digit0 == 0)
9592 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9593 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9594 build_int_cst (unsigned_type_node, target_digit0));
9595 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9596 build_int_cst (unsigned_type_node, 9));
9600 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9603 fold_builtin_fabs (location_t loc, tree arg, tree type)
9605 if (!validate_arg (arg, REAL_TYPE))
9608 arg = fold_convert_loc (loc, type, arg);
9609 if (TREE_CODE (arg) == REAL_CST)
9610 return fold_abs_const (arg, type);
9611 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9614 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9617 fold_builtin_abs (location_t loc, tree arg, tree type)
9619 if (!validate_arg (arg, INTEGER_TYPE))
9622 arg = fold_convert_loc (loc, type, arg);
9623 if (TREE_CODE (arg) == INTEGER_CST)
9624 return fold_abs_const (arg, type);
9625 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9628 /* Fold a fma operation with arguments ARG[012]. */
9631 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9632 tree type, tree arg0, tree arg1, tree arg2)
9634 if (TREE_CODE (arg0) == REAL_CST
9635 && TREE_CODE (arg1) == REAL_CST
9636 && TREE_CODE (arg2) == REAL_CST)
9637 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9642 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9645 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9647 if (validate_arg (arg0, REAL_TYPE)
9648 && validate_arg(arg1, REAL_TYPE)
9649 && validate_arg(arg2, REAL_TYPE))
9651 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9655 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9656 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9657 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9662 /* Fold a call to builtin fmin or fmax. */
9665 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9666 tree type, bool max)
9668 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9670 /* Calculate the result when the argument is a constant. */
9671 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9676 /* If either argument is NaN, return the other one. Avoid the
9677 transformation if we get (and honor) a signalling NaN. Using
9678 omit_one_operand() ensures we create a non-lvalue. */
9679 if (TREE_CODE (arg0) == REAL_CST
9680 && real_isnan (&TREE_REAL_CST (arg0))
9681 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9682 || ! TREE_REAL_CST (arg0).signalling))
9683 return omit_one_operand_loc (loc, type, arg1, arg0);
9684 if (TREE_CODE (arg1) == REAL_CST
9685 && real_isnan (&TREE_REAL_CST (arg1))
9686 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9687 || ! TREE_REAL_CST (arg1).signalling))
9688 return omit_one_operand_loc (loc, type, arg0, arg1);
9690 /* Transform fmin/fmax(x,x) -> x. */
9691 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9692 return omit_one_operand_loc (loc, type, arg0, arg1);
9694 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9695 functions to return the numeric arg if the other one is NaN.
9696 These tree codes don't honor that, so only transform if
9697 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9698 handled, so we don't have to worry about it either. */
9699 if (flag_finite_math_only)
9700 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9701 fold_convert_loc (loc, type, arg0),
9702 fold_convert_loc (loc, type, arg1));
9707 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9710 fold_builtin_carg (location_t loc, tree arg, tree type)
9712 if (validate_arg (arg, COMPLEX_TYPE)
9713 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9715 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9719 tree new_arg = builtin_save_expr (arg);
9720 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9721 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9722 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9729 /* Fold a call to builtin logb/ilogb. */
9732 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9734 if (! validate_arg (arg, REAL_TYPE))
9739 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9741 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9747 /* If arg is Inf or NaN and we're logb, return it. */
9748 if (TREE_CODE (rettype) == REAL_TYPE)
9749 return fold_convert_loc (loc, rettype, arg);
9750 /* Fall through... */
9752 /* Zero may set errno and/or raise an exception for logb, also
9753 for ilogb we don't know FP_ILOGB0. */
9756 /* For normal numbers, proceed iff radix == 2. In GCC,
9757 normalized significands are in the range [0.5, 1.0). We
9758 want the exponent as if they were [1.0, 2.0) so get the
9759 exponent and subtract 1. */
9760 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9761 return fold_convert_loc (loc, rettype,
9762 build_int_cst (integer_type_node,
9763 REAL_EXP (value)-1));
9771 /* Fold a call to builtin significand, if radix == 2. */
9774 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9776 if (! validate_arg (arg, REAL_TYPE))
9781 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9783 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9790 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9791 return fold_convert_loc (loc, rettype, arg);
9793 /* For normal numbers, proceed iff radix == 2. */
9794 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9796 REAL_VALUE_TYPE result = *value;
9797 /* In GCC, normalized significands are in the range [0.5,
9798 1.0). We want them to be [1.0, 2.0) so set the
9800 SET_REAL_EXP (&result, 1);
9801 return build_real (rettype, result);
9810 /* Fold a call to builtin frexp, we can assume the base is 2. */
9813 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9815 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9820 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9823 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9825 /* Proceed if a valid pointer type was passed in. */
9826 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9828 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9834 /* For +-0, return (*exp = 0, +-0). */
9835 exp = integer_zero_node;
9840 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9841 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9844 /* Since the frexp function always expects base 2, and in
9845 GCC normalized significands are already in the range
9846 [0.5, 1.0), we have exactly what frexp wants. */
9847 REAL_VALUE_TYPE frac_rvt = *value;
9848 SET_REAL_EXP (&frac_rvt, 0);
9849 frac = build_real (rettype, frac_rvt);
9850 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9857 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9858 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9859 TREE_SIDE_EFFECTS (arg1) = 1;
9860 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9866 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9867 then we can assume the base is two. If it's false, then we have to
9868 check the mode of the TYPE parameter in certain cases. */
9871 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9872 tree type, bool ldexp)
9874 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9879 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9880 if (real_zerop (arg0) || integer_zerop (arg1)
9881 || (TREE_CODE (arg0) == REAL_CST
9882 && !real_isfinite (&TREE_REAL_CST (arg0))))
9883 return omit_one_operand_loc (loc, type, arg0, arg1);
9885 /* If both arguments are constant, then try to evaluate it. */
9886 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9887 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9888 && host_integerp (arg1, 0))
9890 /* Bound the maximum adjustment to twice the range of the
9891 mode's valid exponents. Use abs to ensure the range is
9892 positive as a sanity check. */
9893 const long max_exp_adj = 2 *
9894 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9895 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9897 /* Get the user-requested adjustment. */
9898 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9900 /* The requested adjustment must be inside this range. This
9901 is a preliminary cap to avoid things like overflow, we
9902 may still fail to compute the result for other reasons. */
9903 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9905 REAL_VALUE_TYPE initial_result;
9907 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9909 /* Ensure we didn't overflow. */
9910 if (! real_isinf (&initial_result))
9912 const REAL_VALUE_TYPE trunc_result
9913 = real_value_truncate (TYPE_MODE (type), initial_result);
9915 /* Only proceed if the target mode can hold the
9917 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9918 return build_real (type, trunc_result);
9927 /* Fold a call to builtin modf. */
9930 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9932 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9937 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9940 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9942 /* Proceed if a valid pointer type was passed in. */
9943 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9945 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9946 REAL_VALUE_TYPE trunc, frac;
9952 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9953 trunc = frac = *value;
9956 /* For +-Inf, return (*arg1 = arg0, +-0). */
9958 frac.sign = value->sign;
9962 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9963 real_trunc (&trunc, VOIDmode, value);
9964 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9965 /* If the original number was negative and already
9966 integral, then the fractional part is -0.0. */
9967 if (value->sign && frac.cl == rvc_zero)
9968 frac.sign = value->sign;
9972 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9973 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9974 build_real (rettype, trunc));
9975 TREE_SIDE_EFFECTS (arg1) = 1;
9976 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9977 build_real (rettype, frac));
9983 /* Given a location LOC, an interclass builtin function decl FNDECL
9984 and its single argument ARG, return an folded expression computing
9985 the same, or NULL_TREE if we either couldn't or didn't want to fold
9986 (the latter happen if there's an RTL instruction available). */
9989 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9991 enum machine_mode mode;
9993 if (!validate_arg (arg, REAL_TYPE))
9996 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9999 mode = TYPE_MODE (TREE_TYPE (arg));
10001 /* If there is no optab, try generic code. */
10002 switch (DECL_FUNCTION_CODE (fndecl))
10006 CASE_FLT_FN (BUILT_IN_ISINF):
10008 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10009 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10010 tree const type = TREE_TYPE (arg);
10014 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10015 real_from_string (&r, buf);
10016 result = build_call_expr (isgr_fn, 2,
10017 fold_build1_loc (loc, ABS_EXPR, type, arg),
10018 build_real (type, r));
10021 CASE_FLT_FN (BUILT_IN_FINITE):
10022 case BUILT_IN_ISFINITE:
10024 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10025 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10026 tree const type = TREE_TYPE (arg);
10030 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10031 real_from_string (&r, buf);
10032 result = build_call_expr (isle_fn, 2,
10033 fold_build1_loc (loc, ABS_EXPR, type, arg),
10034 build_real (type, r));
10035 /*result = fold_build2_loc (loc, UNGT_EXPR,
10036 TREE_TYPE (TREE_TYPE (fndecl)),
10037 fold_build1_loc (loc, ABS_EXPR, type, arg),
10038 build_real (type, r));
10039 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10040 TREE_TYPE (TREE_TYPE (fndecl)),
10044 case BUILT_IN_ISNORMAL:
10046 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10047 islessequal(fabs(x),DBL_MAX). */
10048 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10049 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10050 tree const type = TREE_TYPE (arg);
10051 REAL_VALUE_TYPE rmax, rmin;
10054 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10055 real_from_string (&rmax, buf);
10056 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10057 real_from_string (&rmin, buf);
10058 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10059 result = build_call_expr (isle_fn, 2, arg,
10060 build_real (type, rmax));
10061 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10062 build_call_expr (isge_fn, 2, arg,
10063 build_real (type, rmin)));
10073 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
10074 ARG is the argument for the call. */
10077 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10079 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10082 if (!validate_arg (arg, REAL_TYPE))
10085 switch (builtin_index)
10087 case BUILT_IN_ISINF:
10088 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10089 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10091 if (TREE_CODE (arg) == REAL_CST)
10093 r = TREE_REAL_CST (arg);
10094 if (real_isinf (&r))
10095 return real_compare (GT_EXPR, &r, &dconst0)
10096 ? integer_one_node : integer_minus_one_node;
10098 return integer_zero_node;
10103 case BUILT_IN_ISINF_SIGN:
10105 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10106 /* In a boolean context, GCC will fold the inner COND_EXPR to
10107 1. So e.g. "if (isinf_sign(x))" would be folded to just
10108 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10109 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10110 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10111 tree tmp = NULL_TREE;
10113 arg = builtin_save_expr (arg);
10115 if (signbit_fn && isinf_fn)
10117 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10118 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10120 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10121 signbit_call, integer_zero_node);
10122 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10123 isinf_call, integer_zero_node);
10125 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10126 integer_minus_one_node, integer_one_node);
10127 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10129 integer_zero_node);
10135 case BUILT_IN_ISFINITE:
10136 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10137 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10138 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10140 if (TREE_CODE (arg) == REAL_CST)
10142 r = TREE_REAL_CST (arg);
10143 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10148 case BUILT_IN_ISNAN:
10149 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10150 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10152 if (TREE_CODE (arg) == REAL_CST)
10154 r = TREE_REAL_CST (arg);
10155 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10158 arg = builtin_save_expr (arg);
10159 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10162 gcc_unreachable ();
10166 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10167 This builtin will generate code to return the appropriate floating
10168 point classification depending on the value of the floating point
10169 number passed in. The possible return values must be supplied as
10170 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10171 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
10172 one floating point argument which is "type generic". */
10175 fold_builtin_fpclassify (location_t loc, tree exp)
10177 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10178 arg, type, res, tmp;
10179 enum machine_mode mode;
10183 /* Verify the required arguments in the original call. */
10184 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10185 INTEGER_TYPE, INTEGER_TYPE,
10186 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10189 fp_nan = CALL_EXPR_ARG (exp, 0);
10190 fp_infinite = CALL_EXPR_ARG (exp, 1);
10191 fp_normal = CALL_EXPR_ARG (exp, 2);
10192 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10193 fp_zero = CALL_EXPR_ARG (exp, 4);
10194 arg = CALL_EXPR_ARG (exp, 5);
10195 type = TREE_TYPE (arg);
10196 mode = TYPE_MODE (type);
10197 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10199 /* fpclassify(x) ->
10200 isnan(x) ? FP_NAN :
10201 (fabs(x) == Inf ? FP_INFINITE :
10202 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10203 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10205 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10206 build_real (type, dconst0));
10207 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10208 tmp, fp_zero, fp_subnormal);
10210 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10211 real_from_string (&r, buf);
10212 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10213 arg, build_real (type, r));
10214 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10216 if (HONOR_INFINITIES (mode))
10219 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10220 build_real (type, r));
10221 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10225 if (HONOR_NANS (mode))
10227 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10228 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10234 /* Fold a call to an unordered comparison function such as
10235 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10236 being called and ARG0 and ARG1 are the arguments for the call.
10237 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10238 the opposite of the desired result. UNORDERED_CODE is used
10239 for modes that can hold NaNs and ORDERED_CODE is used for
10243 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10244 enum tree_code unordered_code,
10245 enum tree_code ordered_code)
10247 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10248 enum tree_code code;
10250 enum tree_code code0, code1;
10251 tree cmp_type = NULL_TREE;
10253 type0 = TREE_TYPE (arg0);
10254 type1 = TREE_TYPE (arg1);
10256 code0 = TREE_CODE (type0);
10257 code1 = TREE_CODE (type1);
10259 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10260 /* Choose the wider of two real types. */
10261 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10263 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10265 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10268 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10269 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10271 if (unordered_code == UNORDERED_EXPR)
10273 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10274 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10275 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10278 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10280 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10281 fold_build2_loc (loc, code, type, arg0, arg1));
10284 /* Fold a call to built-in function FNDECL with 0 arguments.
10285 IGNORE is true if the result of the function call is ignored. This
10286 function returns NULL_TREE if no simplification was possible. */
10289 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10291 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10292 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10295 CASE_FLT_FN (BUILT_IN_INF):
10296 case BUILT_IN_INFD32:
10297 case BUILT_IN_INFD64:
10298 case BUILT_IN_INFD128:
10299 return fold_builtin_inf (loc, type, true);
10301 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10302 return fold_builtin_inf (loc, type, false);
10304 case BUILT_IN_CLASSIFY_TYPE:
10305 return fold_builtin_classify_type (NULL_TREE);
10313 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10314 IGNORE is true if the result of the function call is ignored. This
10315 function returns NULL_TREE if no simplification was possible. */
10318 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10320 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10321 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10324 case BUILT_IN_CONSTANT_P:
10326 tree val = fold_builtin_constant_p (arg0);
10328 /* Gimplification will pull the CALL_EXPR for the builtin out of
10329 an if condition. When not optimizing, we'll not CSE it back.
10330 To avoid link error types of regressions, return false now. */
10331 if (!val && !optimize)
10332 val = integer_zero_node;
10337 case BUILT_IN_CLASSIFY_TYPE:
10338 return fold_builtin_classify_type (arg0);
10340 case BUILT_IN_STRLEN:
10341 return fold_builtin_strlen (loc, type, arg0);
10343 CASE_FLT_FN (BUILT_IN_FABS):
10344 return fold_builtin_fabs (loc, arg0, type);
10347 case BUILT_IN_LABS:
10348 case BUILT_IN_LLABS:
10349 case BUILT_IN_IMAXABS:
10350 return fold_builtin_abs (loc, arg0, type);
10352 CASE_FLT_FN (BUILT_IN_CONJ):
10353 if (validate_arg (arg0, COMPLEX_TYPE)
10354 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10355 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10358 CASE_FLT_FN (BUILT_IN_CREAL):
10359 if (validate_arg (arg0, COMPLEX_TYPE)
10360 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10361 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
10364 CASE_FLT_FN (BUILT_IN_CIMAG):
10365 if (validate_arg (arg0, COMPLEX_TYPE)
10366 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10367 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10370 CASE_FLT_FN (BUILT_IN_CCOS):
10371 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
10373 CASE_FLT_FN (BUILT_IN_CCOSH):
10374 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
10376 CASE_FLT_FN (BUILT_IN_CPROJ):
10377 return fold_builtin_cproj(loc, arg0, type);
10379 CASE_FLT_FN (BUILT_IN_CSIN):
10380 if (validate_arg (arg0, COMPLEX_TYPE)
10381 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10382 return do_mpc_arg1 (arg0, type, mpc_sin);
10385 CASE_FLT_FN (BUILT_IN_CSINH):
10386 if (validate_arg (arg0, COMPLEX_TYPE)
10387 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10388 return do_mpc_arg1 (arg0, type, mpc_sinh);
10391 CASE_FLT_FN (BUILT_IN_CTAN):
10392 if (validate_arg (arg0, COMPLEX_TYPE)
10393 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10394 return do_mpc_arg1 (arg0, type, mpc_tan);
10397 CASE_FLT_FN (BUILT_IN_CTANH):
10398 if (validate_arg (arg0, COMPLEX_TYPE)
10399 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10400 return do_mpc_arg1 (arg0, type, mpc_tanh);
10403 CASE_FLT_FN (BUILT_IN_CLOG):
10404 if (validate_arg (arg0, COMPLEX_TYPE)
10405 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10406 return do_mpc_arg1 (arg0, type, mpc_log);
10409 CASE_FLT_FN (BUILT_IN_CSQRT):
10410 if (validate_arg (arg0, COMPLEX_TYPE)
10411 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10412 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10415 CASE_FLT_FN (BUILT_IN_CASIN):
10416 if (validate_arg (arg0, COMPLEX_TYPE)
10417 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10418 return do_mpc_arg1 (arg0, type, mpc_asin);
10421 CASE_FLT_FN (BUILT_IN_CACOS):
10422 if (validate_arg (arg0, COMPLEX_TYPE)
10423 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10424 return do_mpc_arg1 (arg0, type, mpc_acos);
10427 CASE_FLT_FN (BUILT_IN_CATAN):
10428 if (validate_arg (arg0, COMPLEX_TYPE)
10429 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10430 return do_mpc_arg1 (arg0, type, mpc_atan);
10433 CASE_FLT_FN (BUILT_IN_CASINH):
10434 if (validate_arg (arg0, COMPLEX_TYPE)
10435 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10436 return do_mpc_arg1 (arg0, type, mpc_asinh);
10439 CASE_FLT_FN (BUILT_IN_CACOSH):
10440 if (validate_arg (arg0, COMPLEX_TYPE)
10441 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10442 return do_mpc_arg1 (arg0, type, mpc_acosh);
10445 CASE_FLT_FN (BUILT_IN_CATANH):
10446 if (validate_arg (arg0, COMPLEX_TYPE)
10447 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10448 return do_mpc_arg1 (arg0, type, mpc_atanh);
10451 CASE_FLT_FN (BUILT_IN_CABS):
10452 return fold_builtin_cabs (loc, arg0, type, fndecl);
10454 CASE_FLT_FN (BUILT_IN_CARG):
10455 return fold_builtin_carg (loc, arg0, type);
10457 CASE_FLT_FN (BUILT_IN_SQRT):
10458 return fold_builtin_sqrt (loc, arg0, type);
10460 CASE_FLT_FN (BUILT_IN_CBRT):
10461 return fold_builtin_cbrt (loc, arg0, type);
10463 CASE_FLT_FN (BUILT_IN_ASIN):
10464 if (validate_arg (arg0, REAL_TYPE))
10465 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10466 &dconstm1, &dconst1, true);
10469 CASE_FLT_FN (BUILT_IN_ACOS):
10470 if (validate_arg (arg0, REAL_TYPE))
10471 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10472 &dconstm1, &dconst1, true);
10475 CASE_FLT_FN (BUILT_IN_ATAN):
10476 if (validate_arg (arg0, REAL_TYPE))
10477 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10480 CASE_FLT_FN (BUILT_IN_ASINH):
10481 if (validate_arg (arg0, REAL_TYPE))
10482 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10485 CASE_FLT_FN (BUILT_IN_ACOSH):
10486 if (validate_arg (arg0, REAL_TYPE))
10487 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10488 &dconst1, NULL, true);
10491 CASE_FLT_FN (BUILT_IN_ATANH):
10492 if (validate_arg (arg0, REAL_TYPE))
10493 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10494 &dconstm1, &dconst1, false);
10497 CASE_FLT_FN (BUILT_IN_SIN):
10498 if (validate_arg (arg0, REAL_TYPE))
10499 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10502 CASE_FLT_FN (BUILT_IN_COS):
10503 return fold_builtin_cos (loc, arg0, type, fndecl);
10505 CASE_FLT_FN (BUILT_IN_TAN):
10506 return fold_builtin_tan (arg0, type);
10508 CASE_FLT_FN (BUILT_IN_CEXP):
10509 return fold_builtin_cexp (loc, arg0, type);
10511 CASE_FLT_FN (BUILT_IN_CEXPI):
10512 if (validate_arg (arg0, REAL_TYPE))
10513 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10516 CASE_FLT_FN (BUILT_IN_SINH):
10517 if (validate_arg (arg0, REAL_TYPE))
10518 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10521 CASE_FLT_FN (BUILT_IN_COSH):
10522 return fold_builtin_cosh (loc, arg0, type, fndecl);
10524 CASE_FLT_FN (BUILT_IN_TANH):
10525 if (validate_arg (arg0, REAL_TYPE))
10526 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10529 CASE_FLT_FN (BUILT_IN_ERF):
10530 if (validate_arg (arg0, REAL_TYPE))
10531 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10534 CASE_FLT_FN (BUILT_IN_ERFC):
10535 if (validate_arg (arg0, REAL_TYPE))
10536 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10539 CASE_FLT_FN (BUILT_IN_TGAMMA):
10540 if (validate_arg (arg0, REAL_TYPE))
10541 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10544 CASE_FLT_FN (BUILT_IN_EXP):
10545 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10547 CASE_FLT_FN (BUILT_IN_EXP2):
10548 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10550 CASE_FLT_FN (BUILT_IN_EXP10):
10551 CASE_FLT_FN (BUILT_IN_POW10):
10552 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10554 CASE_FLT_FN (BUILT_IN_EXPM1):
10555 if (validate_arg (arg0, REAL_TYPE))
10556 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10559 CASE_FLT_FN (BUILT_IN_LOG):
10560 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10562 CASE_FLT_FN (BUILT_IN_LOG2):
10563 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10565 CASE_FLT_FN (BUILT_IN_LOG10):
10566 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10568 CASE_FLT_FN (BUILT_IN_LOG1P):
10569 if (validate_arg (arg0, REAL_TYPE))
10570 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10571 &dconstm1, NULL, false);
10574 CASE_FLT_FN (BUILT_IN_J0):
10575 if (validate_arg (arg0, REAL_TYPE))
10576 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10580 CASE_FLT_FN (BUILT_IN_J1):
10581 if (validate_arg (arg0, REAL_TYPE))
10582 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10586 CASE_FLT_FN (BUILT_IN_Y0):
10587 if (validate_arg (arg0, REAL_TYPE))
10588 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10589 &dconst0, NULL, false);
10592 CASE_FLT_FN (BUILT_IN_Y1):
10593 if (validate_arg (arg0, REAL_TYPE))
10594 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10595 &dconst0, NULL, false);
10598 CASE_FLT_FN (BUILT_IN_NAN):
10599 case BUILT_IN_NAND32:
10600 case BUILT_IN_NAND64:
10601 case BUILT_IN_NAND128:
10602 return fold_builtin_nan (arg0, type, true);
10604 CASE_FLT_FN (BUILT_IN_NANS):
10605 return fold_builtin_nan (arg0, type, false);
10607 CASE_FLT_FN (BUILT_IN_FLOOR):
10608 return fold_builtin_floor (loc, fndecl, arg0);
10610 CASE_FLT_FN (BUILT_IN_CEIL):
10611 return fold_builtin_ceil (loc, fndecl, arg0);
10613 CASE_FLT_FN (BUILT_IN_TRUNC):
10614 return fold_builtin_trunc (loc, fndecl, arg0);
10616 CASE_FLT_FN (BUILT_IN_ROUND):
10617 return fold_builtin_round (loc, fndecl, arg0);
10619 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10620 CASE_FLT_FN (BUILT_IN_RINT):
10621 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10623 CASE_FLT_FN (BUILT_IN_ICEIL):
10624 CASE_FLT_FN (BUILT_IN_LCEIL):
10625 CASE_FLT_FN (BUILT_IN_LLCEIL):
10626 CASE_FLT_FN (BUILT_IN_LFLOOR):
10627 CASE_FLT_FN (BUILT_IN_IFLOOR):
10628 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10629 CASE_FLT_FN (BUILT_IN_IROUND):
10630 CASE_FLT_FN (BUILT_IN_LROUND):
10631 CASE_FLT_FN (BUILT_IN_LLROUND):
10632 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10634 CASE_FLT_FN (BUILT_IN_IRINT):
10635 CASE_FLT_FN (BUILT_IN_LRINT):
10636 CASE_FLT_FN (BUILT_IN_LLRINT):
10637 return fold_fixed_mathfn (loc, fndecl, arg0);
10639 case BUILT_IN_BSWAP16:
10640 case BUILT_IN_BSWAP32:
10641 case BUILT_IN_BSWAP64:
10642 return fold_builtin_bswap (fndecl, arg0);
10644 CASE_INT_FN (BUILT_IN_FFS):
10645 CASE_INT_FN (BUILT_IN_CLZ):
10646 CASE_INT_FN (BUILT_IN_CTZ):
10647 CASE_INT_FN (BUILT_IN_CLRSB):
10648 CASE_INT_FN (BUILT_IN_POPCOUNT):
10649 CASE_INT_FN (BUILT_IN_PARITY):
10650 return fold_builtin_bitop (fndecl, arg0);
10652 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10653 return fold_builtin_signbit (loc, arg0, type);
10655 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10656 return fold_builtin_significand (loc, arg0, type);
10658 CASE_FLT_FN (BUILT_IN_ILOGB):
10659 CASE_FLT_FN (BUILT_IN_LOGB):
10660 return fold_builtin_logb (loc, arg0, type);
10662 case BUILT_IN_ISASCII:
10663 return fold_builtin_isascii (loc, arg0);
10665 case BUILT_IN_TOASCII:
10666 return fold_builtin_toascii (loc, arg0);
10668 case BUILT_IN_ISDIGIT:
10669 return fold_builtin_isdigit (loc, arg0);
10671 CASE_FLT_FN (BUILT_IN_FINITE):
10672 case BUILT_IN_FINITED32:
10673 case BUILT_IN_FINITED64:
10674 case BUILT_IN_FINITED128:
10675 case BUILT_IN_ISFINITE:
10677 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10680 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10683 CASE_FLT_FN (BUILT_IN_ISINF):
10684 case BUILT_IN_ISINFD32:
10685 case BUILT_IN_ISINFD64:
10686 case BUILT_IN_ISINFD128:
10688 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10691 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10694 case BUILT_IN_ISNORMAL:
10695 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10697 case BUILT_IN_ISINF_SIGN:
10698 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10700 CASE_FLT_FN (BUILT_IN_ISNAN):
10701 case BUILT_IN_ISNAND32:
10702 case BUILT_IN_ISNAND64:
10703 case BUILT_IN_ISNAND128:
10704 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10706 case BUILT_IN_PRINTF:
10707 case BUILT_IN_PRINTF_UNLOCKED:
10708 case BUILT_IN_VPRINTF:
10709 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10711 case BUILT_IN_FREE:
10712 if (integer_zerop (arg0))
10713 return build_empty_stmt (loc);
10724 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10725 IGNORE is true if the result of the function call is ignored. This
10726 function returns NULL_TREE if no simplification was possible. */
10729 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10731 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10732 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10736 CASE_FLT_FN (BUILT_IN_JN):
10737 if (validate_arg (arg0, INTEGER_TYPE)
10738 && validate_arg (arg1, REAL_TYPE))
10739 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10742 CASE_FLT_FN (BUILT_IN_YN):
10743 if (validate_arg (arg0, INTEGER_TYPE)
10744 && validate_arg (arg1, REAL_TYPE))
10745 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10749 CASE_FLT_FN (BUILT_IN_DREM):
10750 CASE_FLT_FN (BUILT_IN_REMAINDER):
10751 if (validate_arg (arg0, REAL_TYPE)
10752 && validate_arg(arg1, REAL_TYPE))
10753 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10756 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10757 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10758 if (validate_arg (arg0, REAL_TYPE)
10759 && validate_arg(arg1, POINTER_TYPE))
10760 return do_mpfr_lgamma_r (arg0, arg1, type);
10763 CASE_FLT_FN (BUILT_IN_ATAN2):
10764 if (validate_arg (arg0, REAL_TYPE)
10765 && validate_arg(arg1, REAL_TYPE))
10766 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10769 CASE_FLT_FN (BUILT_IN_FDIM):
10770 if (validate_arg (arg0, REAL_TYPE)
10771 && validate_arg(arg1, REAL_TYPE))
10772 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10775 CASE_FLT_FN (BUILT_IN_HYPOT):
10776 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10778 CASE_FLT_FN (BUILT_IN_CPOW):
10779 if (validate_arg (arg0, COMPLEX_TYPE)
10780 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10781 && validate_arg (arg1, COMPLEX_TYPE)
10782 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10783 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10786 CASE_FLT_FN (BUILT_IN_LDEXP):
10787 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10788 CASE_FLT_FN (BUILT_IN_SCALBN):
10789 CASE_FLT_FN (BUILT_IN_SCALBLN):
10790 return fold_builtin_load_exponent (loc, arg0, arg1,
10791 type, /*ldexp=*/false);
10793 CASE_FLT_FN (BUILT_IN_FREXP):
10794 return fold_builtin_frexp (loc, arg0, arg1, type);
10796 CASE_FLT_FN (BUILT_IN_MODF):
10797 return fold_builtin_modf (loc, arg0, arg1, type);
10799 case BUILT_IN_BZERO:
10800 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10802 case BUILT_IN_FPUTS:
10803 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10805 case BUILT_IN_FPUTS_UNLOCKED:
10806 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10808 case BUILT_IN_STRSTR:
10809 return fold_builtin_strstr (loc, arg0, arg1, type);
10811 case BUILT_IN_STRCAT:
10812 return fold_builtin_strcat (loc, arg0, arg1);
10814 case BUILT_IN_STRSPN:
10815 return fold_builtin_strspn (loc, arg0, arg1);
10817 case BUILT_IN_STRCSPN:
10818 return fold_builtin_strcspn (loc, arg0, arg1);
10820 case BUILT_IN_STRCHR:
10821 case BUILT_IN_INDEX:
10822 return fold_builtin_strchr (loc, arg0, arg1, type);
10824 case BUILT_IN_STRRCHR:
10825 case BUILT_IN_RINDEX:
10826 return fold_builtin_strrchr (loc, arg0, arg1, type);
10828 case BUILT_IN_STRCPY:
10829 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10831 case BUILT_IN_STPCPY:
10834 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10838 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10841 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10844 case BUILT_IN_STRCMP:
10845 return fold_builtin_strcmp (loc, arg0, arg1);
10847 case BUILT_IN_STRPBRK:
10848 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10850 case BUILT_IN_EXPECT:
10851 return fold_builtin_expect (loc, arg0, arg1);
10853 CASE_FLT_FN (BUILT_IN_POW):
10854 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10856 CASE_FLT_FN (BUILT_IN_POWI):
10857 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10859 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10860 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10862 CASE_FLT_FN (BUILT_IN_FMIN):
10863 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10865 CASE_FLT_FN (BUILT_IN_FMAX):
10866 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10868 case BUILT_IN_ISGREATER:
10869 return fold_builtin_unordered_cmp (loc, fndecl,
10870 arg0, arg1, UNLE_EXPR, LE_EXPR);
10871 case BUILT_IN_ISGREATEREQUAL:
10872 return fold_builtin_unordered_cmp (loc, fndecl,
10873 arg0, arg1, UNLT_EXPR, LT_EXPR);
10874 case BUILT_IN_ISLESS:
10875 return fold_builtin_unordered_cmp (loc, fndecl,
10876 arg0, arg1, UNGE_EXPR, GE_EXPR);
10877 case BUILT_IN_ISLESSEQUAL:
10878 return fold_builtin_unordered_cmp (loc, fndecl,
10879 arg0, arg1, UNGT_EXPR, GT_EXPR);
10880 case BUILT_IN_ISLESSGREATER:
10881 return fold_builtin_unordered_cmp (loc, fndecl,
10882 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10883 case BUILT_IN_ISUNORDERED:
10884 return fold_builtin_unordered_cmp (loc, fndecl,
10885 arg0, arg1, UNORDERED_EXPR,
10888 /* We do the folding for va_start in the expander. */
10889 case BUILT_IN_VA_START:
10892 case BUILT_IN_SPRINTF:
10893 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10895 case BUILT_IN_OBJECT_SIZE:
10896 return fold_builtin_object_size (arg0, arg1);
10898 case BUILT_IN_PRINTF:
10899 case BUILT_IN_PRINTF_UNLOCKED:
10900 case BUILT_IN_VPRINTF:
10901 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10903 case BUILT_IN_PRINTF_CHK:
10904 case BUILT_IN_VPRINTF_CHK:
10905 if (!validate_arg (arg0, INTEGER_TYPE)
10906 || TREE_SIDE_EFFECTS (arg0))
10909 return fold_builtin_printf (loc, fndecl,
10910 arg1, NULL_TREE, ignore, fcode);
10913 case BUILT_IN_FPRINTF:
10914 case BUILT_IN_FPRINTF_UNLOCKED:
10915 case BUILT_IN_VFPRINTF:
10916 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10919 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10920 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10922 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10923 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10931 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10932 and ARG2. IGNORE is true if the result of the function call is ignored.
10933 This function returns NULL_TREE if no simplification was possible. */
10936 fold_builtin_3 (location_t loc, tree fndecl,
10937 tree arg0, tree arg1, tree arg2, bool ignore)
10939 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10940 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10944 CASE_FLT_FN (BUILT_IN_SINCOS):
10945 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10947 CASE_FLT_FN (BUILT_IN_FMA):
10948 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10951 CASE_FLT_FN (BUILT_IN_REMQUO):
10952 if (validate_arg (arg0, REAL_TYPE)
10953 && validate_arg(arg1, REAL_TYPE)
10954 && validate_arg(arg2, POINTER_TYPE))
10955 return do_mpfr_remquo (arg0, arg1, arg2);
10958 case BUILT_IN_MEMSET:
10959 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10961 case BUILT_IN_BCOPY:
10962 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10963 void_type_node, true, /*endp=*/3);
10965 case BUILT_IN_MEMCPY:
10966 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10967 type, ignore, /*endp=*/0);
10969 case BUILT_IN_MEMPCPY:
10970 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10971 type, ignore, /*endp=*/1);
10973 case BUILT_IN_MEMMOVE:
10974 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10975 type, ignore, /*endp=*/3);
10977 case BUILT_IN_STRNCAT:
10978 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10980 case BUILT_IN_STRNCPY:
10981 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10983 case BUILT_IN_STRNCMP:
10984 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10986 case BUILT_IN_MEMCHR:
10987 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10989 case BUILT_IN_BCMP:
10990 case BUILT_IN_MEMCMP:
10991 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10993 case BUILT_IN_SPRINTF:
10994 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10996 case BUILT_IN_SNPRINTF:
10997 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10999 case BUILT_IN_STRCPY_CHK:
11000 case BUILT_IN_STPCPY_CHK:
11001 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11004 case BUILT_IN_STRCAT_CHK:
11005 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11007 case BUILT_IN_PRINTF_CHK:
11008 case BUILT_IN_VPRINTF_CHK:
11009 if (!validate_arg (arg0, INTEGER_TYPE)
11010 || TREE_SIDE_EFFECTS (arg0))
11013 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11016 case BUILT_IN_FPRINTF:
11017 case BUILT_IN_FPRINTF_UNLOCKED:
11018 case BUILT_IN_VFPRINTF:
11019 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11022 case BUILT_IN_FPRINTF_CHK:
11023 case BUILT_IN_VFPRINTF_CHK:
11024 if (!validate_arg (arg1, INTEGER_TYPE)
11025 || TREE_SIDE_EFFECTS (arg1))
11028 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11037 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11038 ARG2, and ARG3. IGNORE is true if the result of the function call is
11039 ignored. This function returns NULL_TREE if no simplification was
11043 fold_builtin_4 (location_t loc, tree fndecl,
11044 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11046 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11050 case BUILT_IN_MEMCPY_CHK:
11051 case BUILT_IN_MEMPCPY_CHK:
11052 case BUILT_IN_MEMMOVE_CHK:
11053 case BUILT_IN_MEMSET_CHK:
11054 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11056 DECL_FUNCTION_CODE (fndecl));
11058 case BUILT_IN_STRNCPY_CHK:
11059 case BUILT_IN_STPNCPY_CHK:
11060 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11063 case BUILT_IN_STRNCAT_CHK:
11064 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11066 case BUILT_IN_SNPRINTF:
11067 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11069 case BUILT_IN_FPRINTF_CHK:
11070 case BUILT_IN_VFPRINTF_CHK:
11071 if (!validate_arg (arg1, INTEGER_TYPE)
11072 || TREE_SIDE_EFFECTS (arg1))
11075 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11085 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11086 arguments, where NARGS <= 4. IGNORE is true if the result of the
11087 function call is ignored. This function returns NULL_TREE if no
11088 simplification was possible. Note that this only folds builtins with
11089 fixed argument patterns. Foldings that do varargs-to-varargs
11090 transformations, or that match calls with more than 4 arguments,
11091 need to be handled with fold_builtin_varargs instead. */
11093 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11096 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11098 tree ret = NULL_TREE;
11103 ret = fold_builtin_0 (loc, fndecl, ignore);
11106 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11109 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11112 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11115 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11123 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11124 SET_EXPR_LOCATION (ret, loc);
11125 TREE_NO_WARNING (ret) = 1;
11131 /* Builtins with folding operations that operate on "..." arguments
11132 need special handling; we need to store the arguments in a convenient
11133 data structure before attempting any folding. Fortunately there are
11134 only a few builtins that fall into this category. FNDECL is the
11135 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
11136 result of the function call is ignored. */
11139 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
11140 bool ignore ATTRIBUTE_UNUSED)
11142 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11143 tree ret = NULL_TREE;
11147 case BUILT_IN_SPRINTF_CHK:
11148 case BUILT_IN_VSPRINTF_CHK:
11149 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
11152 case BUILT_IN_SNPRINTF_CHK:
11153 case BUILT_IN_VSNPRINTF_CHK:
11154 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
11157 case BUILT_IN_FPCLASSIFY:
11158 ret = fold_builtin_fpclassify (loc, exp);
11166 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11167 SET_EXPR_LOCATION (ret, loc);
11168 TREE_NO_WARNING (ret) = 1;
11174 /* Return true if FNDECL shouldn't be folded right now.
11175 If a built-in function has an inline attribute always_inline
11176 wrapper, defer folding it after always_inline functions have
11177 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11178 might not be performed. */
11181 avoid_folding_inline_builtin (tree fndecl)
11183 return (DECL_DECLARED_INLINE_P (fndecl)
11184 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11186 && !cfun->always_inline_functions_inlined
11187 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11190 /* A wrapper function for builtin folding that prevents warnings for
11191 "statement without effect" and the like, caused by removing the
11192 call node earlier than the warning is generated. */
11195 fold_call_expr (location_t loc, tree exp, bool ignore)
11197 tree ret = NULL_TREE;
11198 tree fndecl = get_callee_fndecl (exp);
11200 && TREE_CODE (fndecl) == FUNCTION_DECL
11201 && DECL_BUILT_IN (fndecl)
11202 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11203 yet. Defer folding until we see all the arguments
11204 (after inlining). */
11205 && !CALL_EXPR_VA_ARG_PACK (exp))
11207 int nargs = call_expr_nargs (exp);
11209 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11210 instead last argument is __builtin_va_arg_pack (). Defer folding
11211 even in that case, until arguments are finalized. */
11212 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11214 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11216 && TREE_CODE (fndecl2) == FUNCTION_DECL
11217 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11218 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11222 if (avoid_folding_inline_builtin (fndecl))
11225 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11226 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11227 CALL_EXPR_ARGP (exp), ignore);
11230 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11232 tree *args = CALL_EXPR_ARGP (exp);
11233 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11236 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11244 /* Conveniently construct a function call expression. FNDECL names the
11245 function to be called and N arguments are passed in the array
11249 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11251 tree fntype = TREE_TYPE (fndecl);
11252 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11254 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11257 /* Conveniently construct a function call expression. FNDECL names the
11258 function to be called and the arguments are passed in the vector
11262 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
11264 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
11265 VEC_address (tree, vec));
11269 /* Conveniently construct a function call expression. FNDECL names the
11270 function to be called, N is the number of arguments, and the "..."
11271 parameters are the argument expressions. */
11274 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11277 tree *argarray = XALLOCAVEC (tree, n);
11281 for (i = 0; i < n; i++)
11282 argarray[i] = va_arg (ap, tree);
11284 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11287 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11288 varargs macros aren't supported by all bootstrap compilers. */
11291 build_call_expr (tree fndecl, int n, ...)
11294 tree *argarray = XALLOCAVEC (tree, n);
11298 for (i = 0; i < n; i++)
11299 argarray[i] = va_arg (ap, tree);
11301 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11304 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11305 N arguments are passed in the array ARGARRAY. */
11308 fold_builtin_call_array (location_t loc, tree type,
11313 tree ret = NULL_TREE;
11316 if (TREE_CODE (fn) == ADDR_EXPR)
11318 tree fndecl = TREE_OPERAND (fn, 0);
11319 if (TREE_CODE (fndecl) == FUNCTION_DECL
11320 && DECL_BUILT_IN (fndecl))
11322 /* If last argument is __builtin_va_arg_pack (), arguments to this
11323 function are not finalized yet. Defer folding until they are. */
11324 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11326 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11328 && TREE_CODE (fndecl2) == FUNCTION_DECL
11329 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11330 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11331 return build_call_array_loc (loc, type, fn, n, argarray);
11333 if (avoid_folding_inline_builtin (fndecl))
11334 return build_call_array_loc (loc, type, fn, n, argarray);
11335 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11337 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11341 return build_call_array_loc (loc, type, fn, n, argarray);
11343 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11345 /* First try the transformations that don't require consing up
11347 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11352 /* If we got this far, we need to build an exp. */
11353 exp = build_call_array_loc (loc, type, fn, n, argarray);
11354 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11355 return ret ? ret : exp;
11359 return build_call_array_loc (loc, type, fn, n, argarray);
11362 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11363 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11364 of arguments in ARGS to be omitted. OLDNARGS is the number of
11365 elements in ARGS. */
11368 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11369 int skip, tree fndecl, int n, va_list newargs)
11371 int nargs = oldnargs - skip + n;
11378 buffer = XALLOCAVEC (tree, nargs);
11379 for (i = 0; i < n; i++)
11380 buffer[i] = va_arg (newargs, tree);
11381 for (j = skip; j < oldnargs; j++, i++)
11382 buffer[i] = args[j];
11385 buffer = args + skip;
11387 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11390 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11391 list ARGS along with N new arguments specified as the "..."
11392 parameters. SKIP is the number of arguments in ARGS to be omitted.
11393 OLDNARGS is the number of elements in ARGS. */
11396 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11397 int skip, tree fndecl, int n, ...)
11403 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11409 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11410 along with N new arguments specified as the "..." parameters. SKIP
11411 is the number of arguments in EXP to be omitted. This function is used
11412 to do varargs-to-varargs transformations. */
11415 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11421 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11422 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11428 /* Validate a single argument ARG against a tree code CODE representing
11432 validate_arg (const_tree arg, enum tree_code code)
11436 else if (code == POINTER_TYPE)
11437 return POINTER_TYPE_P (TREE_TYPE (arg));
11438 else if (code == INTEGER_TYPE)
11439 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11440 return code == TREE_CODE (TREE_TYPE (arg));
11443 /* This function validates the types of a function call argument list
11444 against a specified list of tree_codes. If the last specifier is a 0,
11445 that represents an ellipses, otherwise the last specifier must be a
11448 This is the GIMPLE version of validate_arglist. Eventually we want to
11449 completely convert builtins.c to work from GIMPLEs and the tree based
11450 validate_arglist will then be removed. */
11453 validate_gimple_arglist (const_gimple call, ...)
11455 enum tree_code code;
11461 va_start (ap, call);
11466 code = (enum tree_code) va_arg (ap, int);
11470 /* This signifies an ellipses, any further arguments are all ok. */
11474 /* This signifies an endlink, if no arguments remain, return
11475 true, otherwise return false. */
11476 res = (i == gimple_call_num_args (call));
11479 /* If no parameters remain or the parameter's code does not
11480 match the specified code, return false. Otherwise continue
11481 checking any remaining arguments. */
11482 arg = gimple_call_arg (call, i++);
11483 if (!validate_arg (arg, code))
11490 /* We need gotos here since we can only have one VA_CLOSE in a
11498 /* This function validates the types of a function call argument list
11499 against a specified list of tree_codes. If the last specifier is a 0,
11500 that represents an ellipses, otherwise the last specifier must be a
11504 validate_arglist (const_tree callexpr, ...)
11506 enum tree_code code;
11509 const_call_expr_arg_iterator iter;
11512 va_start (ap, callexpr);
11513 init_const_call_expr_arg_iterator (callexpr, &iter);
11517 code = (enum tree_code) va_arg (ap, int);
11521 /* This signifies an ellipses, any further arguments are all ok. */
11525 /* This signifies an endlink, if no arguments remain, return
11526 true, otherwise return false. */
11527 res = !more_const_call_expr_args_p (&iter);
11530 /* If no parameters remain or the parameter's code does not
11531 match the specified code, return false. Otherwise continue
11532 checking any remaining arguments. */
11533 arg = next_const_call_expr_arg (&iter);
11534 if (!validate_arg (arg, code))
11541 /* We need gotos here since we can only have one VA_CLOSE in a
11549 /* Default target-specific builtin expander that does nothing. */
11552 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11553 rtx target ATTRIBUTE_UNUSED,
11554 rtx subtarget ATTRIBUTE_UNUSED,
11555 enum machine_mode mode ATTRIBUTE_UNUSED,
11556 int ignore ATTRIBUTE_UNUSED)
11561 /* Returns true is EXP represents data that would potentially reside
11562 in a readonly section. */
11565 readonly_data_expr (tree exp)
11569 if (TREE_CODE (exp) != ADDR_EXPR)
11572 exp = get_base_address (TREE_OPERAND (exp, 0));
11576 /* Make sure we call decl_readonly_section only for trees it
11577 can handle (since it returns true for everything it doesn't
11579 if (TREE_CODE (exp) == STRING_CST
11580 || TREE_CODE (exp) == CONSTRUCTOR
11581 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11582 return decl_readonly_section (exp, 0);
11587 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11588 to the call, and TYPE is its return type.
11590 Return NULL_TREE if no simplification was possible, otherwise return the
11591 simplified form of the call as a tree.
11593 The simplified form may be a constant or other expression which
11594 computes the same value, but in a more efficient manner (including
11595 calls to other builtin functions).
11597 The call may contain arguments which need to be evaluated, but
11598 which are not useful to determine the result of the call. In
11599 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11600 COMPOUND_EXPR will be an argument which must be evaluated.
11601 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11602 COMPOUND_EXPR in the chain will contain the tree for the simplified
11603 form of the builtin function call. */
11606 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11608 if (!validate_arg (s1, POINTER_TYPE)
11609 || !validate_arg (s2, POINTER_TYPE))
11614 const char *p1, *p2;
11616 p2 = c_getstr (s2);
11620 p1 = c_getstr (s1);
11623 const char *r = strstr (p1, p2);
11627 return build_int_cst (TREE_TYPE (s1), 0);
11629 /* Return an offset into the constant string argument. */
11630 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11631 return fold_convert_loc (loc, type, tem);
11634 /* The argument is const char *, and the result is char *, so we need
11635 a type conversion here to avoid a warning. */
11637 return fold_convert_loc (loc, type, s1);
11642 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11646 /* New argument list transforming strstr(s1, s2) to
11647 strchr(s1, s2[0]). */
11648 return build_call_expr_loc (loc, fn, 2, s1,
11649 build_int_cst (integer_type_node, p2[0]));
11653 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11654 the call, and TYPE is its return type.
11656 Return NULL_TREE if no simplification was possible, otherwise return the
11657 simplified form of the call as a tree.
11659 The simplified form may be a constant or other expression which
11660 computes the same value, but in a more efficient manner (including
11661 calls to other builtin functions).
11663 The call may contain arguments which need to be evaluated, but
11664 which are not useful to determine the result of the call. In
11665 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11666 COMPOUND_EXPR will be an argument which must be evaluated.
11667 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11668 COMPOUND_EXPR in the chain will contain the tree for the simplified
11669 form of the builtin function call. */
11672 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11674 if (!validate_arg (s1, POINTER_TYPE)
11675 || !validate_arg (s2, INTEGER_TYPE))
11681 if (TREE_CODE (s2) != INTEGER_CST)
11684 p1 = c_getstr (s1);
11691 if (target_char_cast (s2, &c))
11694 r = strchr (p1, c);
11697 return build_int_cst (TREE_TYPE (s1), 0);
11699 /* Return an offset into the constant string argument. */
11700 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11701 return fold_convert_loc (loc, type, tem);
11707 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11708 the call, and TYPE is its return type.
11710 Return NULL_TREE if no simplification was possible, otherwise return the
11711 simplified form of the call as a tree.
11713 The simplified form may be a constant or other expression which
11714 computes the same value, but in a more efficient manner (including
11715 calls to other builtin functions).
11717 The call may contain arguments which need to be evaluated, but
11718 which are not useful to determine the result of the call. In
11719 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11720 COMPOUND_EXPR will be an argument which must be evaluated.
11721 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11722 COMPOUND_EXPR in the chain will contain the tree for the simplified
11723 form of the builtin function call. */
11726 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11728 if (!validate_arg (s1, POINTER_TYPE)
11729 || !validate_arg (s2, INTEGER_TYPE))
11736 if (TREE_CODE (s2) != INTEGER_CST)
11739 p1 = c_getstr (s1);
11746 if (target_char_cast (s2, &c))
11749 r = strrchr (p1, c);
11752 return build_int_cst (TREE_TYPE (s1), 0);
11754 /* Return an offset into the constant string argument. */
11755 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11756 return fold_convert_loc (loc, type, tem);
11759 if (! integer_zerop (s2))
11762 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11766 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11767 return build_call_expr_loc (loc, fn, 2, s1, s2);
11771 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11772 to the call, and TYPE is its return type.
11774 Return NULL_TREE if no simplification was possible, otherwise return the
11775 simplified form of the call as a tree.
11777 The simplified form may be a constant or other expression which
11778 computes the same value, but in a more efficient manner (including
11779 calls to other builtin functions).
11781 The call may contain arguments which need to be evaluated, but
11782 which are not useful to determine the result of the call. In
11783 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11784 COMPOUND_EXPR will be an argument which must be evaluated.
11785 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11786 COMPOUND_EXPR in the chain will contain the tree for the simplified
11787 form of the builtin function call. */
11790 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11792 if (!validate_arg (s1, POINTER_TYPE)
11793 || !validate_arg (s2, POINTER_TYPE))
11798 const char *p1, *p2;
11800 p2 = c_getstr (s2);
11804 p1 = c_getstr (s1);
11807 const char *r = strpbrk (p1, p2);
11811 return build_int_cst (TREE_TYPE (s1), 0);
11813 /* Return an offset into the constant string argument. */
11814 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11815 return fold_convert_loc (loc, type, tem);
11819 /* strpbrk(x, "") == NULL.
11820 Evaluate and ignore s1 in case it had side-effects. */
11821 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11824 return NULL_TREE; /* Really call strpbrk. */
11826 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11830 /* New argument list transforming strpbrk(s1, s2) to
11831 strchr(s1, s2[0]). */
11832 return build_call_expr_loc (loc, fn, 2, s1,
11833 build_int_cst (integer_type_node, p2[0]));
11837 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11840 Return NULL_TREE if no simplification was possible, otherwise return the
11841 simplified form of the call as a tree.
11843 The simplified form may be a constant or other expression which
11844 computes the same value, but in a more efficient manner (including
11845 calls to other builtin functions).
11847 The call may contain arguments which need to be evaluated, but
11848 which are not useful to determine the result of the call. In
11849 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11850 COMPOUND_EXPR will be an argument which must be evaluated.
11851 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11852 COMPOUND_EXPR in the chain will contain the tree for the simplified
11853 form of the builtin function call. */
11856 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11858 if (!validate_arg (dst, POINTER_TYPE)
11859 || !validate_arg (src, POINTER_TYPE))
11863 const char *p = c_getstr (src);
11865 /* If the string length is zero, return the dst parameter. */
11866 if (p && *p == '\0')
11869 if (optimize_insn_for_speed_p ())
11871 /* See if we can store by pieces into (dst + strlen(dst)). */
11873 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11874 tree strcpy_fn = builtin_decl_implicit (BUILT_IN_STRCPY);
11876 if (!strlen_fn || !strcpy_fn)
11879 /* If we don't have a movstr we don't want to emit an strcpy
11880 call. We have to do that if the length of the source string
11881 isn't computable (in that case we can use memcpy probably
11882 later expanding to a sequence of mov instructions). If we
11883 have movstr instructions we can emit strcpy calls. */
11886 tree len = c_strlen (src, 1);
11887 if (! len || TREE_SIDE_EFFECTS (len))
11891 /* Stabilize the argument list. */
11892 dst = builtin_save_expr (dst);
11894 /* Create strlen (dst). */
11895 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11896 /* Create (dst p+ strlen (dst)). */
11898 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11899 newdst = builtin_save_expr (newdst);
11901 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11902 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11908 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11909 arguments to the call.
11911 Return NULL_TREE if no simplification was possible, otherwise return the
11912 simplified form of the call as a tree.
11914 The simplified form may be a constant or other expression which
11915 computes the same value, but in a more efficient manner (including
11916 calls to other builtin functions).
11918 The call may contain arguments which need to be evaluated, but
11919 which are not useful to determine the result of the call. In
11920 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11921 COMPOUND_EXPR will be an argument which must be evaluated.
11922 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11923 COMPOUND_EXPR in the chain will contain the tree for the simplified
11924 form of the builtin function call. */
11927 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11929 if (!validate_arg (dst, POINTER_TYPE)
11930 || !validate_arg (src, POINTER_TYPE)
11931 || !validate_arg (len, INTEGER_TYPE))
11935 const char *p = c_getstr (src);
11937 /* If the requested length is zero, or the src parameter string
11938 length is zero, return the dst parameter. */
11939 if (integer_zerop (len) || (p && *p == '\0'))
11940 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11942 /* If the requested len is greater than or equal to the string
11943 length, call strcat. */
11944 if (TREE_CODE (len) == INTEGER_CST && p
11945 && compare_tree_int (len, strlen (p)) >= 0)
11947 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11949 /* If the replacement _DECL isn't initialized, don't do the
11954 return build_call_expr_loc (loc, fn, 2, dst, src);
11960 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11963 Return NULL_TREE if no simplification was possible, otherwise return the
11964 simplified form of the call as a tree.
11966 The simplified form may be a constant or other expression which
11967 computes the same value, but in a more efficient manner (including
11968 calls to other builtin functions).
11970 The call may contain arguments which need to be evaluated, but
11971 which are not useful to determine the result of the call. In
11972 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11973 COMPOUND_EXPR will be an argument which must be evaluated.
11974 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11975 COMPOUND_EXPR in the chain will contain the tree for the simplified
11976 form of the builtin function call. */
11979 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11981 if (!validate_arg (s1, POINTER_TYPE)
11982 || !validate_arg (s2, POINTER_TYPE))
11986 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11988 /* If both arguments are constants, evaluate at compile-time. */
11991 const size_t r = strspn (p1, p2);
11992 return size_int (r);
11995 /* If either argument is "", return NULL_TREE. */
11996 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11997 /* Evaluate and ignore both arguments in case either one has
11999 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12005 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12008 Return NULL_TREE if no simplification was possible, otherwise return the
12009 simplified form of the call as a tree.
12011 The simplified form may be a constant or other expression which
12012 computes the same value, but in a more efficient manner (including
12013 calls to other builtin functions).
12015 The call may contain arguments which need to be evaluated, but
12016 which are not useful to determine the result of the call. In
12017 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12018 COMPOUND_EXPR will be an argument which must be evaluated.
12019 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12020 COMPOUND_EXPR in the chain will contain the tree for the simplified
12021 form of the builtin function call. */
12024 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
12026 if (!validate_arg (s1, POINTER_TYPE)
12027 || !validate_arg (s2, POINTER_TYPE))
12031 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12033 /* If both arguments are constants, evaluate at compile-time. */
12036 const size_t r = strcspn (p1, p2);
12037 return size_int (r);
12040 /* If the first argument is "", return NULL_TREE. */
12041 if (p1 && *p1 == '\0')
12043 /* Evaluate and ignore argument s2 in case it has
12045 return omit_one_operand_loc (loc, size_type_node,
12046 size_zero_node, s2);
12049 /* If the second argument is "", return __builtin_strlen(s1). */
12050 if (p2 && *p2 == '\0')
12052 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12054 /* If the replacement _DECL isn't initialized, don't do the
12059 return build_call_expr_loc (loc, fn, 1, s1);
12065 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
12066 to the call. IGNORE is true if the value returned
12067 by the builtin will be ignored. UNLOCKED is true is true if this
12068 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
12069 the known length of the string. Return NULL_TREE if no simplification
12073 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
12074 bool ignore, bool unlocked, tree len)
12076 /* If we're using an unlocked function, assume the other unlocked
12077 functions exist explicitly. */
12078 tree const fn_fputc = (unlocked
12079 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
12080 : builtin_decl_implicit (BUILT_IN_FPUTC));
12081 tree const fn_fwrite = (unlocked
12082 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
12083 : builtin_decl_implicit (BUILT_IN_FWRITE));
12085 /* If the return value is used, don't do the transformation. */
12089 /* Verify the arguments in the original call. */
12090 if (!validate_arg (arg0, POINTER_TYPE)
12091 || !validate_arg (arg1, POINTER_TYPE))
12095 len = c_strlen (arg0, 0);
12097 /* Get the length of the string passed to fputs. If the length
12098 can't be determined, punt. */
12100 || TREE_CODE (len) != INTEGER_CST)
12103 switch (compare_tree_int (len, 1))
12105 case -1: /* length is 0, delete the call entirely . */
12106 return omit_one_operand_loc (loc, integer_type_node,
12107 integer_zero_node, arg1);;
12109 case 0: /* length is 1, call fputc. */
12111 const char *p = c_getstr (arg0);
12116 return build_call_expr_loc (loc, fn_fputc, 2,
12118 (integer_type_node, p[0]), arg1);
12124 case 1: /* length is greater than 1, call fwrite. */
12126 /* If optimizing for size keep fputs. */
12127 if (optimize_function_for_size_p (cfun))
12129 /* New argument list transforming fputs(string, stream) to
12130 fwrite(string, 1, len, stream). */
12132 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12133 size_one_node, len, arg1);
12138 gcc_unreachable ();
12143 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12144 produced. False otherwise. This is done so that we don't output the error
12145 or warning twice or three times. */
12148 fold_builtin_next_arg (tree exp, bool va_start_p)
12150 tree fntype = TREE_TYPE (current_function_decl);
12151 int nargs = call_expr_nargs (exp);
12153 /* There is good chance the current input_location points inside the
12154 definition of the va_start macro (perhaps on the token for
12155 builtin) in a system header, so warnings will not be emitted.
12156 Use the location in real source code. */
12157 source_location current_location =
12158 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12161 if (!stdarg_p (fntype))
12163 error ("%<va_start%> used in function with fixed args");
12169 if (va_start_p && (nargs != 2))
12171 error ("wrong number of arguments to function %<va_start%>");
12174 arg = CALL_EXPR_ARG (exp, 1);
12176 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12177 when we checked the arguments and if needed issued a warning. */
12182 /* Evidently an out of date version of <stdarg.h>; can't validate
12183 va_start's second argument, but can still work as intended. */
12184 warning_at (current_location,
12186 "%<__builtin_next_arg%> called without an argument");
12189 else if (nargs > 1)
12191 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12194 arg = CALL_EXPR_ARG (exp, 0);
12197 if (TREE_CODE (arg) == SSA_NAME)
12198 arg = SSA_NAME_VAR (arg);
12200 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12201 or __builtin_next_arg (0) the first time we see it, after checking
12202 the arguments and if needed issuing a warning. */
12203 if (!integer_zerop (arg))
12205 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12207 /* Strip off all nops for the sake of the comparison. This
12208 is not quite the same as STRIP_NOPS. It does more.
12209 We must also strip off INDIRECT_EXPR for C++ reference
12211 while (CONVERT_EXPR_P (arg)
12212 || TREE_CODE (arg) == INDIRECT_REF)
12213 arg = TREE_OPERAND (arg, 0);
12214 if (arg != last_parm)
12216 /* FIXME: Sometimes with the tree optimizers we can get the
12217 not the last argument even though the user used the last
12218 argument. We just warn and set the arg to be the last
12219 argument so that we will get wrong-code because of
12221 warning_at (current_location,
12223 "second parameter of %<va_start%> not last named argument");
12226 /* Undefined by C99 7.15.1.4p4 (va_start):
12227 "If the parameter parmN is declared with the register storage
12228 class, with a function or array type, or with a type that is
12229 not compatible with the type that results after application of
12230 the default argument promotions, the behavior is undefined."
12232 else if (DECL_REGISTER (arg))
12234 warning_at (current_location,
12236 "undefined behaviour when second parameter of "
12237 "%<va_start%> is declared with %<register%> storage");
12240 /* We want to verify the second parameter just once before the tree
12241 optimizers are run and then avoid keeping it in the tree,
12242 as otherwise we could warn even for correct code like:
12243 void foo (int i, ...)
12244 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12246 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12248 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12254 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12255 ORIG may be null if this is a 2-argument call. We don't attempt to
12256 simplify calls with more than 3 arguments.
12258 Return NULL_TREE if no simplification was possible, otherwise return the
12259 simplified form of the call as a tree. If IGNORED is true, it means that
12260 the caller does not use the returned value of the function. */
12263 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12264 tree orig, int ignored)
12267 const char *fmt_str = NULL;
12269 /* Verify the required arguments in the original call. We deal with two
12270 types of sprintf() calls: 'sprintf (str, fmt)' and
12271 'sprintf (dest, "%s", orig)'. */
12272 if (!validate_arg (dest, POINTER_TYPE)
12273 || !validate_arg (fmt, POINTER_TYPE))
12275 if (orig && !validate_arg (orig, POINTER_TYPE))
12278 /* Check whether the format is a literal string constant. */
12279 fmt_str = c_getstr (fmt);
12280 if (fmt_str == NULL)
12284 retval = NULL_TREE;
12286 if (!init_target_chars ())
12289 /* If the format doesn't contain % args or %%, use strcpy. */
12290 if (strchr (fmt_str, target_percent) == NULL)
12292 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12297 /* Don't optimize sprintf (buf, "abc", ptr++). */
12301 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12302 'format' is known to contain no % formats. */
12303 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12305 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12308 /* If the format is "%s", use strcpy if the result isn't used. */
12309 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12312 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12317 /* Don't crash on sprintf (str1, "%s"). */
12321 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12324 retval = c_strlen (orig, 1);
12325 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12328 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12331 if (call && retval)
12333 retval = fold_convert_loc
12334 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12336 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12342 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12343 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12344 attempt to simplify calls with more than 4 arguments.
12346 Return NULL_TREE if no simplification was possible, otherwise return the
12347 simplified form of the call as a tree. If IGNORED is true, it means that
12348 the caller does not use the returned value of the function. */
12351 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12352 tree orig, int ignored)
12355 const char *fmt_str = NULL;
12356 unsigned HOST_WIDE_INT destlen;
12358 /* Verify the required arguments in the original call. We deal with two
12359 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12360 'snprintf (dest, cst, "%s", orig)'. */
12361 if (!validate_arg (dest, POINTER_TYPE)
12362 || !validate_arg (destsize, INTEGER_TYPE)
12363 || !validate_arg (fmt, POINTER_TYPE))
12365 if (orig && !validate_arg (orig, POINTER_TYPE))
12368 if (!host_integerp (destsize, 1))
12371 /* Check whether the format is a literal string constant. */
12372 fmt_str = c_getstr (fmt);
12373 if (fmt_str == NULL)
12377 retval = NULL_TREE;
12379 if (!init_target_chars ())
12382 destlen = tree_low_cst (destsize, 1);
12384 /* If the format doesn't contain % args or %%, use strcpy. */
12385 if (strchr (fmt_str, target_percent) == NULL)
12387 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12388 size_t len = strlen (fmt_str);
12390 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12394 /* We could expand this as
12395 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12397 memcpy (str, fmt_with_nul_at_cstm1, cst);
12398 but in the former case that might increase code size
12399 and in the latter case grow .rodata section too much.
12400 So punt for now. */
12401 if (len >= destlen)
12407 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12408 'format' is known to contain no % formats and
12409 strlen (fmt) < cst. */
12410 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12413 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12416 /* If the format is "%s", use strcpy if the result isn't used. */
12417 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12419 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12420 unsigned HOST_WIDE_INT origlen;
12422 /* Don't crash on snprintf (str1, cst, "%s"). */
12426 retval = c_strlen (orig, 1);
12427 if (!retval || !host_integerp (retval, 1))
12430 origlen = tree_low_cst (retval, 1);
12431 /* We could expand this as
12432 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12434 memcpy (str1, str2_with_nul_at_cstm1, cst);
12435 but in the former case that might increase code size
12436 and in the latter case grow .rodata section too much.
12437 So punt for now. */
12438 if (origlen >= destlen)
12441 /* Convert snprintf (str1, cst, "%s", str2) into
12442 strcpy (str1, str2) if strlen (str2) < cst. */
12446 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12449 retval = NULL_TREE;
12452 if (call && retval)
12454 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12455 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12456 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12462 /* Expand a call EXP to __builtin_object_size. */
12465 expand_builtin_object_size (tree exp)
12468 int object_size_type;
12469 tree fndecl = get_callee_fndecl (exp);
12471 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12473 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12475 expand_builtin_trap ();
12479 ost = CALL_EXPR_ARG (exp, 1);
12482 if (TREE_CODE (ost) != INTEGER_CST
12483 || tree_int_cst_sgn (ost) < 0
12484 || compare_tree_int (ost, 3) > 0)
12486 error ("%Klast argument of %D is not integer constant between 0 and 3",
12488 expand_builtin_trap ();
12492 object_size_type = tree_low_cst (ost, 0);
12494 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12497 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12498 FCODE is the BUILT_IN_* to use.
12499 Return NULL_RTX if we failed; the caller should emit a normal call,
12500 otherwise try to get the result in TARGET, if convenient (and in
12501 mode MODE if that's convenient). */
12504 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12505 enum built_in_function fcode)
12507 tree dest, src, len, size;
12509 if (!validate_arglist (exp,
12511 fcode == BUILT_IN_MEMSET_CHK
12512 ? INTEGER_TYPE : POINTER_TYPE,
12513 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12516 dest = CALL_EXPR_ARG (exp, 0);
12517 src = CALL_EXPR_ARG (exp, 1);
12518 len = CALL_EXPR_ARG (exp, 2);
12519 size = CALL_EXPR_ARG (exp, 3);
12521 if (! host_integerp (size, 1))
12524 if (host_integerp (len, 1) || integer_all_onesp (size))
12528 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12530 warning_at (tree_nonartificial_location (exp),
12531 0, "%Kcall to %D will always overflow destination buffer",
12532 exp, get_callee_fndecl (exp));
12537 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12538 mem{cpy,pcpy,move,set} is available. */
12541 case BUILT_IN_MEMCPY_CHK:
12542 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12544 case BUILT_IN_MEMPCPY_CHK:
12545 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12547 case BUILT_IN_MEMMOVE_CHK:
12548 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12550 case BUILT_IN_MEMSET_CHK:
12551 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12560 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12561 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12562 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12563 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12565 else if (fcode == BUILT_IN_MEMSET_CHK)
12569 unsigned int dest_align = get_pointer_alignment (dest);
12571 /* If DEST is not a pointer type, call the normal function. */
12572 if (dest_align == 0)
12575 /* If SRC and DEST are the same (and not volatile), do nothing. */
12576 if (operand_equal_p (src, dest, 0))
12580 if (fcode != BUILT_IN_MEMPCPY_CHK)
12582 /* Evaluate and ignore LEN in case it has side-effects. */
12583 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12584 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12587 expr = fold_build_pointer_plus (dest, len);
12588 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12591 /* __memmove_chk special case. */
12592 if (fcode == BUILT_IN_MEMMOVE_CHK)
12594 unsigned int src_align = get_pointer_alignment (src);
12596 if (src_align == 0)
12599 /* If src is categorized for a readonly section we can use
12600 normal __memcpy_chk. */
12601 if (readonly_data_expr (src))
12603 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12606 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12607 dest, src, len, size);
12608 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12609 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12610 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12617 /* Emit warning if a buffer overflow is detected at compile time. */
12620 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12624 location_t loc = tree_nonartificial_location (exp);
12628 case BUILT_IN_STRCPY_CHK:
12629 case BUILT_IN_STPCPY_CHK:
12630 /* For __strcat_chk the warning will be emitted only if overflowing
12631 by at least strlen (dest) + 1 bytes. */
12632 case BUILT_IN_STRCAT_CHK:
12633 len = CALL_EXPR_ARG (exp, 1);
12634 size = CALL_EXPR_ARG (exp, 2);
12637 case BUILT_IN_STRNCAT_CHK:
12638 case BUILT_IN_STRNCPY_CHK:
12639 case BUILT_IN_STPNCPY_CHK:
12640 len = CALL_EXPR_ARG (exp, 2);
12641 size = CALL_EXPR_ARG (exp, 3);
12643 case BUILT_IN_SNPRINTF_CHK:
12644 case BUILT_IN_VSNPRINTF_CHK:
12645 len = CALL_EXPR_ARG (exp, 1);
12646 size = CALL_EXPR_ARG (exp, 3);
12649 gcc_unreachable ();
12655 if (! host_integerp (size, 1) || integer_all_onesp (size))
12660 len = c_strlen (len, 1);
12661 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12664 else if (fcode == BUILT_IN_STRNCAT_CHK)
12666 tree src = CALL_EXPR_ARG (exp, 1);
12667 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12669 src = c_strlen (src, 1);
12670 if (! src || ! host_integerp (src, 1))
12672 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12673 exp, get_callee_fndecl (exp));
12676 else if (tree_int_cst_lt (src, size))
12679 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12682 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12683 exp, get_callee_fndecl (exp));
12686 /* Emit warning if a buffer overflow is detected at compile time
12687 in __sprintf_chk/__vsprintf_chk calls. */
12690 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12692 tree size, len, fmt;
12693 const char *fmt_str;
12694 int nargs = call_expr_nargs (exp);
12696 /* Verify the required arguments in the original call. */
12700 size = CALL_EXPR_ARG (exp, 2);
12701 fmt = CALL_EXPR_ARG (exp, 3);
12703 if (! host_integerp (size, 1) || integer_all_onesp (size))
12706 /* Check whether the format is a literal string constant. */
12707 fmt_str = c_getstr (fmt);
12708 if (fmt_str == NULL)
12711 if (!init_target_chars ())
12714 /* If the format doesn't contain % args or %%, we know its size. */
12715 if (strchr (fmt_str, target_percent) == 0)
12716 len = build_int_cstu (size_type_node, strlen (fmt_str));
12717 /* If the format is "%s" and first ... argument is a string literal,
12719 else if (fcode == BUILT_IN_SPRINTF_CHK
12720 && strcmp (fmt_str, target_percent_s) == 0)
12726 arg = CALL_EXPR_ARG (exp, 4);
12727 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12730 len = c_strlen (arg, 1);
12731 if (!len || ! host_integerp (len, 1))
12737 if (! tree_int_cst_lt (len, size))
12738 warning_at (tree_nonartificial_location (exp),
12739 0, "%Kcall to %D will always overflow destination buffer",
12740 exp, get_callee_fndecl (exp));
12743 /* Emit warning if a free is called with address of a variable. */
12746 maybe_emit_free_warning (tree exp)
12748 tree arg = CALL_EXPR_ARG (exp, 0);
12751 if (TREE_CODE (arg) != ADDR_EXPR)
12754 arg = get_base_address (TREE_OPERAND (arg, 0));
12755 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12758 if (SSA_VAR_P (arg))
12759 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12760 "%Kattempt to free a non-heap object %qD", exp, arg);
12762 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12763 "%Kattempt to free a non-heap object", exp);
12766 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12770 fold_builtin_object_size (tree ptr, tree ost)
12772 unsigned HOST_WIDE_INT bytes;
12773 int object_size_type;
12775 if (!validate_arg (ptr, POINTER_TYPE)
12776 || !validate_arg (ost, INTEGER_TYPE))
12781 if (TREE_CODE (ost) != INTEGER_CST
12782 || tree_int_cst_sgn (ost) < 0
12783 || compare_tree_int (ost, 3) > 0)
12786 object_size_type = tree_low_cst (ost, 0);
12788 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12789 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12790 and (size_t) 0 for types 2 and 3. */
12791 if (TREE_SIDE_EFFECTS (ptr))
12792 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12794 if (TREE_CODE (ptr) == ADDR_EXPR)
12796 bytes = compute_builtin_object_size (ptr, object_size_type);
12797 if (double_int_fits_to_tree_p (size_type_node,
12798 uhwi_to_double_int (bytes)))
12799 return build_int_cstu (size_type_node, bytes);
12801 else if (TREE_CODE (ptr) == SSA_NAME)
12803 /* If object size is not known yet, delay folding until
12804 later. Maybe subsequent passes will help determining
12806 bytes = compute_builtin_object_size (ptr, object_size_type);
12807 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12808 && double_int_fits_to_tree_p (size_type_node,
12809 uhwi_to_double_int (bytes)))
12810 return build_int_cstu (size_type_node, bytes);
12816 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12817 DEST, SRC, LEN, and SIZE are the arguments to the call.
12818 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12819 code of the builtin. If MAXLEN is not NULL, it is maximum length
12820 passed as third argument. */
12823 fold_builtin_memory_chk (location_t loc, tree fndecl,
12824 tree dest, tree src, tree len, tree size,
12825 tree maxlen, bool ignore,
12826 enum built_in_function fcode)
12830 if (!validate_arg (dest, POINTER_TYPE)
12831 || !validate_arg (src,
12832 (fcode == BUILT_IN_MEMSET_CHK
12833 ? INTEGER_TYPE : POINTER_TYPE))
12834 || !validate_arg (len, INTEGER_TYPE)
12835 || !validate_arg (size, INTEGER_TYPE))
12838 /* If SRC and DEST are the same (and not volatile), return DEST
12839 (resp. DEST+LEN for __mempcpy_chk). */
12840 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12842 if (fcode != BUILT_IN_MEMPCPY_CHK)
12843 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12847 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12848 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12852 if (! host_integerp (size, 1))
12855 if (! integer_all_onesp (size))
12857 if (! host_integerp (len, 1))
12859 /* If LEN is not constant, try MAXLEN too.
12860 For MAXLEN only allow optimizing into non-_ocs function
12861 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12862 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12864 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12866 /* (void) __mempcpy_chk () can be optimized into
12867 (void) __memcpy_chk (). */
12868 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12872 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12880 if (tree_int_cst_lt (size, maxlen))
12885 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12886 mem{cpy,pcpy,move,set} is available. */
12889 case BUILT_IN_MEMCPY_CHK:
12890 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12892 case BUILT_IN_MEMPCPY_CHK:
12893 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12895 case BUILT_IN_MEMMOVE_CHK:
12896 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12898 case BUILT_IN_MEMSET_CHK:
12899 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12908 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12911 /* Fold a call to the __st[rp]cpy_chk builtin.
12912 DEST, SRC, and SIZE are the arguments to the call.
12913 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12914 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12915 strings passed as second argument. */
12918 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12919 tree src, tree size,
12920 tree maxlen, bool ignore,
12921 enum built_in_function fcode)
12925 if (!validate_arg (dest, POINTER_TYPE)
12926 || !validate_arg (src, POINTER_TYPE)
12927 || !validate_arg (size, INTEGER_TYPE))
12930 /* If SRC and DEST are the same (and not volatile), return DEST. */
12931 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12932 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12934 if (! host_integerp (size, 1))
12937 if (! integer_all_onesp (size))
12939 len = c_strlen (src, 1);
12940 if (! len || ! host_integerp (len, 1))
12942 /* If LEN is not constant, try MAXLEN too.
12943 For MAXLEN only allow optimizing into non-_ocs function
12944 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12945 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12947 if (fcode == BUILT_IN_STPCPY_CHK)
12952 /* If return value of __stpcpy_chk is ignored,
12953 optimize into __strcpy_chk. */
12954 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12958 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12961 if (! len || TREE_SIDE_EFFECTS (len))
12964 /* If c_strlen returned something, but not a constant,
12965 transform __strcpy_chk into __memcpy_chk. */
12966 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12970 len = fold_convert_loc (loc, size_type_node, len);
12971 len = size_binop_loc (loc, PLUS_EXPR, len,
12972 build_int_cst (size_type_node, 1));
12973 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12974 build_call_expr_loc (loc, fn, 4,
12975 dest, src, len, size));
12981 if (! tree_int_cst_lt (maxlen, size))
12985 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12986 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12987 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12991 return build_call_expr_loc (loc, fn, 2, dest, src);
12994 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12995 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12996 length passed as third argument. IGNORE is true if return value can be
12997 ignored. FCODE is the BUILT_IN_* code of the builtin. */
13000 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
13001 tree len, tree size, tree maxlen, bool ignore,
13002 enum built_in_function fcode)
13006 if (!validate_arg (dest, POINTER_TYPE)
13007 || !validate_arg (src, POINTER_TYPE)
13008 || !validate_arg (len, INTEGER_TYPE)
13009 || !validate_arg (size, INTEGER_TYPE))
13012 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
13014 /* If return value of __stpncpy_chk is ignored,
13015 optimize into __strncpy_chk. */
13016 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
13018 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
13021 if (! host_integerp (size, 1))
13024 if (! integer_all_onesp (size))
13026 if (! host_integerp (len, 1))
13028 /* If LEN is not constant, try MAXLEN too.
13029 For MAXLEN only allow optimizing into non-_ocs function
13030 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13031 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13037 if (tree_int_cst_lt (size, maxlen))
13041 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
13042 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
13043 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
13047 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13050 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
13051 are the arguments to the call. */
13054 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
13055 tree src, tree size)
13060 if (!validate_arg (dest, POINTER_TYPE)
13061 || !validate_arg (src, POINTER_TYPE)
13062 || !validate_arg (size, INTEGER_TYPE))
13065 p = c_getstr (src);
13066 /* If the SRC parameter is "", return DEST. */
13067 if (p && *p == '\0')
13068 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13070 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
13073 /* If __builtin_strcat_chk is used, assume strcat is available. */
13074 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
13078 return build_call_expr_loc (loc, fn, 2, dest, src);
13081 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
13085 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13086 tree dest, tree src, tree len, tree size)
13091 if (!validate_arg (dest, POINTER_TYPE)
13092 || !validate_arg (src, POINTER_TYPE)
13093 || !validate_arg (size, INTEGER_TYPE)
13094 || !validate_arg (size, INTEGER_TYPE))
13097 p = c_getstr (src);
13098 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13099 if (p && *p == '\0')
13100 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13101 else if (integer_zerop (len))
13102 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13104 if (! host_integerp (size, 1))
13107 if (! integer_all_onesp (size))
13109 tree src_len = c_strlen (src, 1);
13111 && host_integerp (src_len, 1)
13112 && host_integerp (len, 1)
13113 && ! tree_int_cst_lt (len, src_len))
13115 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
13116 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13120 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13125 /* If __builtin_strncat_chk is used, assume strncat is available. */
13126 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13130 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13133 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13134 Return NULL_TREE if a normal call should be emitted rather than
13135 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13136 or BUILT_IN_VSPRINTF_CHK. */
13139 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13140 enum built_in_function fcode)
13142 tree dest, size, len, fn, fmt, flag;
13143 const char *fmt_str;
13145 /* Verify the required arguments in the original call. */
13149 if (!validate_arg (dest, POINTER_TYPE))
13152 if (!validate_arg (flag, INTEGER_TYPE))
13155 if (!validate_arg (size, INTEGER_TYPE))
13158 if (!validate_arg (fmt, POINTER_TYPE))
13161 if (! host_integerp (size, 1))
13166 if (!init_target_chars ())
13169 /* Check whether the format is a literal string constant. */
13170 fmt_str = c_getstr (fmt);
13171 if (fmt_str != NULL)
13173 /* If the format doesn't contain % args or %%, we know the size. */
13174 if (strchr (fmt_str, target_percent) == 0)
13176 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13177 len = build_int_cstu (size_type_node, strlen (fmt_str));
13179 /* If the format is "%s" and first ... argument is a string literal,
13180 we know the size too. */
13181 else if (fcode == BUILT_IN_SPRINTF_CHK
13182 && strcmp (fmt_str, target_percent_s) == 0)
13189 if (validate_arg (arg, POINTER_TYPE))
13191 len = c_strlen (arg, 1);
13192 if (! len || ! host_integerp (len, 1))
13199 if (! integer_all_onesp (size))
13201 if (! len || ! tree_int_cst_lt (len, size))
13205 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13206 or if format doesn't contain % chars or is "%s". */
13207 if (! integer_zerop (flag))
13209 if (fmt_str == NULL)
13211 if (strchr (fmt_str, target_percent) != NULL
13212 && strcmp (fmt_str, target_percent_s))
13216 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13217 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13218 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13222 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13225 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13226 a normal call should be emitted rather than expanding the function
13227 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13230 fold_builtin_sprintf_chk (location_t loc, tree exp,
13231 enum built_in_function fcode)
13233 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13234 CALL_EXPR_ARGP (exp), fcode);
13237 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
13238 NULL_TREE if a normal call should be emitted rather than expanding
13239 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13240 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13241 passed as second argument. */
13244 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13245 tree maxlen, enum built_in_function fcode)
13247 tree dest, size, len, fn, fmt, flag;
13248 const char *fmt_str;
13250 /* Verify the required arguments in the original call. */
13254 if (!validate_arg (dest, POINTER_TYPE))
13257 if (!validate_arg (len, INTEGER_TYPE))
13260 if (!validate_arg (flag, INTEGER_TYPE))
13263 if (!validate_arg (size, INTEGER_TYPE))
13266 if (!validate_arg (fmt, POINTER_TYPE))
13269 if (! host_integerp (size, 1))
13272 if (! integer_all_onesp (size))
13274 if (! host_integerp (len, 1))
13276 /* If LEN is not constant, try MAXLEN too.
13277 For MAXLEN only allow optimizing into non-_ocs function
13278 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13279 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13285 if (tree_int_cst_lt (size, maxlen))
13289 if (!init_target_chars ())
13292 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13293 or if format doesn't contain % chars or is "%s". */
13294 if (! integer_zerop (flag))
13296 fmt_str = c_getstr (fmt);
13297 if (fmt_str == NULL)
13299 if (strchr (fmt_str, target_percent) != NULL
13300 && strcmp (fmt_str, target_percent_s))
13304 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13306 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13307 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13311 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13314 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
13315 a normal call should be emitted rather than expanding the function
13316 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13317 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13318 passed as second argument. */
13321 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13322 enum built_in_function fcode)
13324 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13325 CALL_EXPR_ARGP (exp), maxlen, fcode);
13328 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13329 FMT and ARG are the arguments to the call; we don't fold cases with
13330 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13332 Return NULL_TREE if no simplification was possible, otherwise return the
13333 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13334 code of the function to be simplified. */
13337 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13338 tree arg, bool ignore,
13339 enum built_in_function fcode)
13341 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13342 const char *fmt_str = NULL;
13344 /* If the return value is used, don't do the transformation. */
13348 /* Verify the required arguments in the original call. */
13349 if (!validate_arg (fmt, POINTER_TYPE))
13352 /* Check whether the format is a literal string constant. */
13353 fmt_str = c_getstr (fmt);
13354 if (fmt_str == NULL)
13357 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13359 /* If we're using an unlocked function, assume the other
13360 unlocked functions exist explicitly. */
13361 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13362 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13366 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13367 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13370 if (!init_target_chars ())
13373 if (strcmp (fmt_str, target_percent_s) == 0
13374 || strchr (fmt_str, target_percent) == NULL)
13378 if (strcmp (fmt_str, target_percent_s) == 0)
13380 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13383 if (!arg || !validate_arg (arg, POINTER_TYPE))
13386 str = c_getstr (arg);
13392 /* The format specifier doesn't contain any '%' characters. */
13393 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13399 /* If the string was "", printf does nothing. */
13400 if (str[0] == '\0')
13401 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13403 /* If the string has length of 1, call putchar. */
13404 if (str[1] == '\0')
13406 /* Given printf("c"), (where c is any one character,)
13407 convert "c"[0] to an int and pass that to the replacement
13409 newarg = build_int_cst (integer_type_node, str[0]);
13411 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13415 /* If the string was "string\n", call puts("string"). */
13416 size_t len = strlen (str);
13417 if ((unsigned char)str[len - 1] == target_newline
13418 && (size_t) (int) len == len
13422 tree offset_node, string_cst;
13424 /* Create a NUL-terminated string that's one char shorter
13425 than the original, stripping off the trailing '\n'. */
13426 newarg = build_string_literal (len, str);
13427 string_cst = string_constant (newarg, &offset_node);
13428 gcc_checking_assert (string_cst
13429 && (TREE_STRING_LENGTH (string_cst)
13431 && integer_zerop (offset_node)
13433 TREE_STRING_POINTER (string_cst)[len - 1]
13434 == target_newline);
13435 /* build_string_literal creates a new STRING_CST,
13436 modify it in place to avoid double copying. */
13437 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13438 newstr[len - 1] = '\0';
13440 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13443 /* We'd like to arrange to call fputs(string,stdout) here,
13444 but we need stdout and don't have a way to get it yet. */
13449 /* The other optimizations can be done only on the non-va_list variants. */
13450 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13453 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13454 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13456 if (!arg || !validate_arg (arg, POINTER_TYPE))
13459 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13462 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13463 else if (strcmp (fmt_str, target_percent_c) == 0)
13465 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13468 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13474 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13477 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13478 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13479 more than 3 arguments, and ARG may be null in the 2-argument case.
13481 Return NULL_TREE if no simplification was possible, otherwise return the
13482 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13483 code of the function to be simplified. */
13486 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13487 tree fmt, tree arg, bool ignore,
13488 enum built_in_function fcode)
13490 tree fn_fputc, fn_fputs, call = NULL_TREE;
13491 const char *fmt_str = NULL;
13493 /* If the return value is used, don't do the transformation. */
13497 /* Verify the required arguments in the original call. */
13498 if (!validate_arg (fp, POINTER_TYPE))
13500 if (!validate_arg (fmt, POINTER_TYPE))
13503 /* Check whether the format is a literal string constant. */
13504 fmt_str = c_getstr (fmt);
13505 if (fmt_str == NULL)
13508 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13510 /* If we're using an unlocked function, assume the other
13511 unlocked functions exist explicitly. */
13512 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13513 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13517 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13518 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13521 if (!init_target_chars ())
13524 /* If the format doesn't contain % args or %%, use strcpy. */
13525 if (strchr (fmt_str, target_percent) == NULL)
13527 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13531 /* If the format specifier was "", fprintf does nothing. */
13532 if (fmt_str[0] == '\0')
13534 /* If FP has side-effects, just wait until gimplification is
13536 if (TREE_SIDE_EFFECTS (fp))
13539 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13542 /* When "string" doesn't contain %, replace all cases of
13543 fprintf (fp, string) with fputs (string, fp). The fputs
13544 builtin will take care of special cases like length == 1. */
13546 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13549 /* The other optimizations can be done only on the non-va_list variants. */
13550 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13553 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13554 else if (strcmp (fmt_str, target_percent_s) == 0)
13556 if (!arg || !validate_arg (arg, POINTER_TYPE))
13559 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13562 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13563 else if (strcmp (fmt_str, target_percent_c) == 0)
13565 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13568 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13573 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13576 /* Initialize format string characters in the target charset. */
13579 init_target_chars (void)
13584 target_newline = lang_hooks.to_target_charset ('\n');
13585 target_percent = lang_hooks.to_target_charset ('%');
13586 target_c = lang_hooks.to_target_charset ('c');
13587 target_s = lang_hooks.to_target_charset ('s');
13588 if (target_newline == 0 || target_percent == 0 || target_c == 0
13592 target_percent_c[0] = target_percent;
13593 target_percent_c[1] = target_c;
13594 target_percent_c[2] = '\0';
13596 target_percent_s[0] = target_percent;
13597 target_percent_s[1] = target_s;
13598 target_percent_s[2] = '\0';
13600 target_percent_s_newline[0] = target_percent;
13601 target_percent_s_newline[1] = target_s;
13602 target_percent_s_newline[2] = target_newline;
13603 target_percent_s_newline[3] = '\0';
13610 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13611 and no overflow/underflow occurred. INEXACT is true if M was not
13612 exactly calculated. TYPE is the tree type for the result. This
13613 function assumes that you cleared the MPFR flags and then
13614 calculated M to see if anything subsequently set a flag prior to
13615 entering this function. Return NULL_TREE if any checks fail. */
13618 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13620 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13621 overflow/underflow occurred. If -frounding-math, proceed iff the
13622 result of calling FUNC was exact. */
13623 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13624 && (!flag_rounding_math || !inexact))
13626 REAL_VALUE_TYPE rr;
13628 real_from_mpfr (&rr, m, type, GMP_RNDN);
13629 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13630 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13631 but the mpft_t is not, then we underflowed in the
13633 if (real_isfinite (&rr)
13634 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13636 REAL_VALUE_TYPE rmode;
13638 real_convert (&rmode, TYPE_MODE (type), &rr);
13639 /* Proceed iff the specified mode can hold the value. */
13640 if (real_identical (&rmode, &rr))
13641 return build_real (type, rmode);
13647 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13648 number and no overflow/underflow occurred. INEXACT is true if M
13649 was not exactly calculated. TYPE is the tree type for the result.
13650 This function assumes that you cleared the MPFR flags and then
13651 calculated M to see if anything subsequently set a flag prior to
13652 entering this function. Return NULL_TREE if any checks fail, if
13653 FORCE_CONVERT is true, then bypass the checks. */
13656 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13658 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13659 overflow/underflow occurred. If -frounding-math, proceed iff the
13660 result of calling FUNC was exact. */
13662 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13663 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13664 && (!flag_rounding_math || !inexact)))
13666 REAL_VALUE_TYPE re, im;
13668 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13669 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13670 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13671 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13672 but the mpft_t is not, then we underflowed in the
13675 || (real_isfinite (&re) && real_isfinite (&im)
13676 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13677 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13679 REAL_VALUE_TYPE re_mode, im_mode;
13681 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13682 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13683 /* Proceed iff the specified mode can hold the value. */
13685 || (real_identical (&re_mode, &re)
13686 && real_identical (&im_mode, &im)))
13687 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13688 build_real (TREE_TYPE (type), im_mode));
13694 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13695 FUNC on it and return the resulting value as a tree with type TYPE.
13696 If MIN and/or MAX are not NULL, then the supplied ARG must be
13697 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13698 acceptable values, otherwise they are not. The mpfr precision is
13699 set to the precision of TYPE. We assume that function FUNC returns
13700 zero if the result could be calculated exactly within the requested
13704 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13705 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13708 tree result = NULL_TREE;
13712 /* To proceed, MPFR must exactly represent the target floating point
13713 format, which only happens when the target base equals two. */
13714 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13715 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13717 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13719 if (real_isfinite (ra)
13720 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13721 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13723 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13724 const int prec = fmt->p;
13725 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13729 mpfr_init2 (m, prec);
13730 mpfr_from_real (m, ra, GMP_RNDN);
13731 mpfr_clear_flags ();
13732 inexact = func (m, m, rnd);
13733 result = do_mpfr_ckconv (m, type, inexact);
13741 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13742 FUNC on it and return the resulting value as a tree with type TYPE.
13743 The mpfr precision is set to the precision of TYPE. We assume that
13744 function FUNC returns zero if the result could be calculated
13745 exactly within the requested precision. */
13748 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13749 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13751 tree result = NULL_TREE;
13756 /* To proceed, MPFR must exactly represent the target floating point
13757 format, which only happens when the target base equals two. */
13758 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13759 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13760 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13762 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13763 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13765 if (real_isfinite (ra1) && real_isfinite (ra2))
13767 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13768 const int prec = fmt->p;
13769 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13773 mpfr_inits2 (prec, m1, m2, NULL);
13774 mpfr_from_real (m1, ra1, GMP_RNDN);
13775 mpfr_from_real (m2, ra2, GMP_RNDN);
13776 mpfr_clear_flags ();
13777 inexact = func (m1, m1, m2, rnd);
13778 result = do_mpfr_ckconv (m1, type, inexact);
13779 mpfr_clears (m1, m2, NULL);
13786 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13787 FUNC on it and return the resulting value as a tree with type TYPE.
13788 The mpfr precision is set to the precision of TYPE. We assume that
13789 function FUNC returns zero if the result could be calculated
13790 exactly within the requested precision. */
13793 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13794 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13796 tree result = NULL_TREE;
13802 /* To proceed, MPFR must exactly represent the target floating point
13803 format, which only happens when the target base equals two. */
13804 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13805 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13806 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13807 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13809 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13810 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13811 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13813 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13815 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13816 const int prec = fmt->p;
13817 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13821 mpfr_inits2 (prec, m1, m2, m3, NULL);
13822 mpfr_from_real (m1, ra1, GMP_RNDN);
13823 mpfr_from_real (m2, ra2, GMP_RNDN);
13824 mpfr_from_real (m3, ra3, GMP_RNDN);
13825 mpfr_clear_flags ();
13826 inexact = func (m1, m1, m2, m3, rnd);
13827 result = do_mpfr_ckconv (m1, type, inexact);
13828 mpfr_clears (m1, m2, m3, NULL);
13835 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13836 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13837 If ARG_SINP and ARG_COSP are NULL then the result is returned
13838 as a complex value.
13839 The type is taken from the type of ARG and is used for setting the
13840 precision of the calculation and results. */
13843 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13845 tree const type = TREE_TYPE (arg);
13846 tree result = NULL_TREE;
13850 /* To proceed, MPFR must exactly represent the target floating point
13851 format, which only happens when the target base equals two. */
13852 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13853 && TREE_CODE (arg) == REAL_CST
13854 && !TREE_OVERFLOW (arg))
13856 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13858 if (real_isfinite (ra))
13860 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13861 const int prec = fmt->p;
13862 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13863 tree result_s, result_c;
13867 mpfr_inits2 (prec, m, ms, mc, NULL);
13868 mpfr_from_real (m, ra, GMP_RNDN);
13869 mpfr_clear_flags ();
13870 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13871 result_s = do_mpfr_ckconv (ms, type, inexact);
13872 result_c = do_mpfr_ckconv (mc, type, inexact);
13873 mpfr_clears (m, ms, mc, NULL);
13874 if (result_s && result_c)
13876 /* If we are to return in a complex value do so. */
13877 if (!arg_sinp && !arg_cosp)
13878 return build_complex (build_complex_type (type),
13879 result_c, result_s);
13881 /* Dereference the sin/cos pointer arguments. */
13882 arg_sinp = build_fold_indirect_ref (arg_sinp);
13883 arg_cosp = build_fold_indirect_ref (arg_cosp);
13884 /* Proceed if valid pointer type were passed in. */
13885 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13886 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13888 /* Set the values. */
13889 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13891 TREE_SIDE_EFFECTS (result_s) = 1;
13892 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13894 TREE_SIDE_EFFECTS (result_c) = 1;
13895 /* Combine the assignments into a compound expr. */
13896 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13897 result_s, result_c));
13905 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13906 two-argument mpfr order N Bessel function FUNC on them and return
13907 the resulting value as a tree with type TYPE. The mpfr precision
13908 is set to the precision of TYPE. We assume that function FUNC
13909 returns zero if the result could be calculated exactly within the
13910 requested precision. */
13912 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13913 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13914 const REAL_VALUE_TYPE *min, bool inclusive)
13916 tree result = NULL_TREE;
13921 /* To proceed, MPFR must exactly represent the target floating point
13922 format, which only happens when the target base equals two. */
13923 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13924 && host_integerp (arg1, 0)
13925 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13927 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13928 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13931 && real_isfinite (ra)
13932 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13934 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13935 const int prec = fmt->p;
13936 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13940 mpfr_init2 (m, prec);
13941 mpfr_from_real (m, ra, GMP_RNDN);
13942 mpfr_clear_flags ();
13943 inexact = func (m, n, m, rnd);
13944 result = do_mpfr_ckconv (m, type, inexact);
13952 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13953 the pointer *(ARG_QUO) and return the result. The type is taken
13954 from the type of ARG0 and is used for setting the precision of the
13955 calculation and results. */
13958 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13960 tree const type = TREE_TYPE (arg0);
13961 tree result = NULL_TREE;
13966 /* To proceed, MPFR must exactly represent the target floating point
13967 format, which only happens when the target base equals two. */
13968 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13969 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13970 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13972 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13973 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13975 if (real_isfinite (ra0) && real_isfinite (ra1))
13977 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13978 const int prec = fmt->p;
13979 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13984 mpfr_inits2 (prec, m0, m1, NULL);
13985 mpfr_from_real (m0, ra0, GMP_RNDN);
13986 mpfr_from_real (m1, ra1, GMP_RNDN);
13987 mpfr_clear_flags ();
13988 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13989 /* Remquo is independent of the rounding mode, so pass
13990 inexact=0 to do_mpfr_ckconv(). */
13991 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13992 mpfr_clears (m0, m1, NULL);
13995 /* MPFR calculates quo in the host's long so it may
13996 return more bits in quo than the target int can hold
13997 if sizeof(host long) > sizeof(target int). This can
13998 happen even for native compilers in LP64 mode. In
13999 these cases, modulo the quo value with the largest
14000 number that the target int can hold while leaving one
14001 bit for the sign. */
14002 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
14003 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
14005 /* Dereference the quo pointer argument. */
14006 arg_quo = build_fold_indirect_ref (arg_quo);
14007 /* Proceed iff a valid pointer type was passed in. */
14008 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
14010 /* Set the value. */
14012 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
14013 build_int_cst (TREE_TYPE (arg_quo),
14015 TREE_SIDE_EFFECTS (result_quo) = 1;
14016 /* Combine the quo assignment with the rem. */
14017 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14018 result_quo, result_rem));
14026 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
14027 resulting value as a tree with type TYPE. The mpfr precision is
14028 set to the precision of TYPE. We assume that this mpfr function
14029 returns zero if the result could be calculated exactly within the
14030 requested precision. In addition, the integer pointer represented
14031 by ARG_SG will be dereferenced and set to the appropriate signgam
14035 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
14037 tree result = NULL_TREE;
14041 /* To proceed, MPFR must exactly represent the target floating point
14042 format, which only happens when the target base equals two. Also
14043 verify ARG is a constant and that ARG_SG is an int pointer. */
14044 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14045 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14046 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14047 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14049 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14051 /* In addition to NaN and Inf, the argument cannot be zero or a
14052 negative integer. */
14053 if (real_isfinite (ra)
14054 && ra->cl != rvc_zero
14055 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
14057 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14058 const int prec = fmt->p;
14059 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
14064 mpfr_init2 (m, prec);
14065 mpfr_from_real (m, ra, GMP_RNDN);
14066 mpfr_clear_flags ();
14067 inexact = mpfr_lgamma (m, &sg, m, rnd);
14068 result_lg = do_mpfr_ckconv (m, type, inexact);
14074 /* Dereference the arg_sg pointer argument. */
14075 arg_sg = build_fold_indirect_ref (arg_sg);
14076 /* Assign the signgam value into *arg_sg. */
14077 result_sg = fold_build2 (MODIFY_EXPR,
14078 TREE_TYPE (arg_sg), arg_sg,
14079 build_int_cst (TREE_TYPE (arg_sg), sg));
14080 TREE_SIDE_EFFECTS (result_sg) = 1;
14081 /* Combine the signgam assignment with the lgamma result. */
14082 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14083 result_sg, result_lg));
14091 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14092 function FUNC on it and return the resulting value as a tree with
14093 type TYPE. The mpfr precision is set to the precision of TYPE. We
14094 assume that function FUNC returns zero if the result could be
14095 calculated exactly within the requested precision. */
14098 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14100 tree result = NULL_TREE;
14104 /* To proceed, MPFR must exactly represent the target floating point
14105 format, which only happens when the target base equals two. */
14106 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14107 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14108 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14110 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14111 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14113 if (real_isfinite (re) && real_isfinite (im))
14115 const struct real_format *const fmt =
14116 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14117 const int prec = fmt->p;
14118 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14119 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14123 mpc_init2 (m, prec);
14124 mpfr_from_real (mpc_realref(m), re, rnd);
14125 mpfr_from_real (mpc_imagref(m), im, rnd);
14126 mpfr_clear_flags ();
14127 inexact = func (m, m, crnd);
14128 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14136 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14137 mpc function FUNC on it and return the resulting value as a tree
14138 with type TYPE. The mpfr precision is set to the precision of
14139 TYPE. We assume that function FUNC returns zero if the result
14140 could be calculated exactly within the requested precision. If
14141 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14142 in the arguments and/or results. */
14145 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14146 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14148 tree result = NULL_TREE;
14153 /* To proceed, MPFR must exactly represent the target floating point
14154 format, which only happens when the target base equals two. */
14155 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14156 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14157 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14158 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14159 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14161 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14162 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14163 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14164 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14167 || (real_isfinite (re0) && real_isfinite (im0)
14168 && real_isfinite (re1) && real_isfinite (im1)))
14170 const struct real_format *const fmt =
14171 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14172 const int prec = fmt->p;
14173 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14174 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14178 mpc_init2 (m0, prec);
14179 mpc_init2 (m1, prec);
14180 mpfr_from_real (mpc_realref(m0), re0, rnd);
14181 mpfr_from_real (mpc_imagref(m0), im0, rnd);
14182 mpfr_from_real (mpc_realref(m1), re1, rnd);
14183 mpfr_from_real (mpc_imagref(m1), im1, rnd);
14184 mpfr_clear_flags ();
14185 inexact = func (m0, m0, m1, crnd);
14186 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14195 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14196 a normal call should be emitted rather than expanding the function
14197 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14200 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14202 int nargs = gimple_call_num_args (stmt);
14204 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14206 ? gimple_call_arg_ptr (stmt, 0)
14207 : &error_mark_node), fcode);
14210 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
14211 a normal call should be emitted rather than expanding the function
14212 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
14213 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
14214 passed as second argument. */
14217 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14218 enum built_in_function fcode)
14220 int nargs = gimple_call_num_args (stmt);
14222 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14224 ? gimple_call_arg_ptr (stmt, 0)
14225 : &error_mark_node), maxlen, fcode);
14228 /* Builtins with folding operations that operate on "..." arguments
14229 need special handling; we need to store the arguments in a convenient
14230 data structure before attempting any folding. Fortunately there are
14231 only a few builtins that fall into this category. FNDECL is the
14232 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14233 result of the function call is ignored. */
14236 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14237 bool ignore ATTRIBUTE_UNUSED)
14239 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14240 tree ret = NULL_TREE;
14244 case BUILT_IN_SPRINTF_CHK:
14245 case BUILT_IN_VSPRINTF_CHK:
14246 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14249 case BUILT_IN_SNPRINTF_CHK:
14250 case BUILT_IN_VSNPRINTF_CHK:
14251 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14258 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14259 TREE_NO_WARNING (ret) = 1;
14265 /* A wrapper function for builtin folding that prevents warnings for
14266 "statement without effect" and the like, caused by removing the
14267 call node earlier than the warning is generated. */
14270 fold_call_stmt (gimple stmt, bool ignore)
14272 tree ret = NULL_TREE;
14273 tree fndecl = gimple_call_fndecl (stmt);
14274 location_t loc = gimple_location (stmt);
14276 && TREE_CODE (fndecl) == FUNCTION_DECL
14277 && DECL_BUILT_IN (fndecl)
14278 && !gimple_call_va_arg_pack_p (stmt))
14280 int nargs = gimple_call_num_args (stmt);
14281 tree *args = (nargs > 0
14282 ? gimple_call_arg_ptr (stmt, 0)
14283 : &error_mark_node);
14285 if (avoid_folding_inline_builtin (fndecl))
14287 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14289 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14293 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14294 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14296 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14299 /* Propagate location information from original call to
14300 expansion of builtin. Otherwise things like
14301 maybe_emit_chk_warning, that operate on the expansion
14302 of a builtin, will use the wrong location information. */
14303 if (gimple_has_location (stmt))
14305 tree realret = ret;
14306 if (TREE_CODE (ret) == NOP_EXPR)
14307 realret = TREE_OPERAND (ret, 0);
14308 if (CAN_HAVE_LOCATION_P (realret)
14309 && !EXPR_HAS_LOCATION (realret))
14310 SET_EXPR_LOCATION (realret, loc);
14320 /* Look up the function in builtin_decl that corresponds to DECL
14321 and set ASMSPEC as its user assembler name. DECL must be a
14322 function decl that declares a builtin. */
14325 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14328 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14329 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14332 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14333 set_user_assembler_name (builtin, asmspec);
14334 switch (DECL_FUNCTION_CODE (decl))
14336 case BUILT_IN_MEMCPY:
14337 init_block_move_fn (asmspec);
14338 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14340 case BUILT_IN_MEMSET:
14341 init_block_clear_fn (asmspec);
14342 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14344 case BUILT_IN_MEMMOVE:
14345 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14347 case BUILT_IN_MEMCMP:
14348 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14350 case BUILT_IN_ABORT:
14351 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14354 if (INT_TYPE_SIZE < BITS_PER_WORD)
14356 set_user_assembler_libfunc ("ffs", asmspec);
14357 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14358 MODE_INT, 0), "ffs");
14366 /* Return true if DECL is a builtin that expands to a constant or similarly
14369 is_simple_builtin (tree decl)
14371 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14372 switch (DECL_FUNCTION_CODE (decl))
14374 /* Builtins that expand to constants. */
14375 case BUILT_IN_CONSTANT_P:
14376 case BUILT_IN_EXPECT:
14377 case BUILT_IN_OBJECT_SIZE:
14378 case BUILT_IN_UNREACHABLE:
14379 /* Simple register moves or loads from stack. */
14380 case BUILT_IN_ASSUME_ALIGNED:
14381 case BUILT_IN_RETURN_ADDRESS:
14382 case BUILT_IN_EXTRACT_RETURN_ADDR:
14383 case BUILT_IN_FROB_RETURN_ADDR:
14384 case BUILT_IN_RETURN:
14385 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14386 case BUILT_IN_FRAME_ADDRESS:
14387 case BUILT_IN_VA_END:
14388 case BUILT_IN_STACK_SAVE:
14389 case BUILT_IN_STACK_RESTORE:
14390 /* Exception state returns or moves registers around. */
14391 case BUILT_IN_EH_FILTER:
14392 case BUILT_IN_EH_POINTER:
14393 case BUILT_IN_EH_COPY_VALUES:
14403 /* Return true if DECL is a builtin that is not expensive, i.e., they are
14404 most probably expanded inline into reasonably simple code. This is a
14405 superset of is_simple_builtin. */
14407 is_inexpensive_builtin (tree decl)
14411 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14413 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14414 switch (DECL_FUNCTION_CODE (decl))
14417 case BUILT_IN_ALLOCA:
14418 case BUILT_IN_ALLOCA_WITH_ALIGN:
14419 case BUILT_IN_BSWAP16:
14420 case BUILT_IN_BSWAP32:
14421 case BUILT_IN_BSWAP64:
14423 case BUILT_IN_CLZIMAX:
14424 case BUILT_IN_CLZL:
14425 case BUILT_IN_CLZLL:
14427 case BUILT_IN_CTZIMAX:
14428 case BUILT_IN_CTZL:
14429 case BUILT_IN_CTZLL:
14431 case BUILT_IN_FFSIMAX:
14432 case BUILT_IN_FFSL:
14433 case BUILT_IN_FFSLL:
14434 case BUILT_IN_IMAXABS:
14435 case BUILT_IN_FINITE:
14436 case BUILT_IN_FINITEF:
14437 case BUILT_IN_FINITEL:
14438 case BUILT_IN_FINITED32:
14439 case BUILT_IN_FINITED64:
14440 case BUILT_IN_FINITED128:
14441 case BUILT_IN_FPCLASSIFY:
14442 case BUILT_IN_ISFINITE:
14443 case BUILT_IN_ISINF_SIGN:
14444 case BUILT_IN_ISINF:
14445 case BUILT_IN_ISINFF:
14446 case BUILT_IN_ISINFL:
14447 case BUILT_IN_ISINFD32:
14448 case BUILT_IN_ISINFD64:
14449 case BUILT_IN_ISINFD128:
14450 case BUILT_IN_ISNAN:
14451 case BUILT_IN_ISNANF:
14452 case BUILT_IN_ISNANL:
14453 case BUILT_IN_ISNAND32:
14454 case BUILT_IN_ISNAND64:
14455 case BUILT_IN_ISNAND128:
14456 case BUILT_IN_ISNORMAL:
14457 case BUILT_IN_ISGREATER:
14458 case BUILT_IN_ISGREATEREQUAL:
14459 case BUILT_IN_ISLESS:
14460 case BUILT_IN_ISLESSEQUAL:
14461 case BUILT_IN_ISLESSGREATER:
14462 case BUILT_IN_ISUNORDERED:
14463 case BUILT_IN_VA_ARG_PACK:
14464 case BUILT_IN_VA_ARG_PACK_LEN:
14465 case BUILT_IN_VA_COPY:
14466 case BUILT_IN_TRAP:
14467 case BUILT_IN_SAVEREGS:
14468 case BUILT_IN_POPCOUNTL:
14469 case BUILT_IN_POPCOUNTLL:
14470 case BUILT_IN_POPCOUNTIMAX:
14471 case BUILT_IN_POPCOUNT:
14472 case BUILT_IN_PARITYL:
14473 case BUILT_IN_PARITYLL:
14474 case BUILT_IN_PARITYIMAX:
14475 case BUILT_IN_PARITY:
14476 case BUILT_IN_LABS:
14477 case BUILT_IN_LLABS:
14478 case BUILT_IN_PREFETCH:
14482 return is_simple_builtin (decl);