/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
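
/* Example: on a target where the stack grows downward but the argument
   area grows upward and push insns exist, pushing the last argument
   first leaves the first argument at the lowest address, which is the
   layout the callee expects.  (Illustrative; the actual choice is made
   by the macro above.)  */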
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
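
/* Worked example for the macros above (illustrative numbers): with
   MOVE_MAX_PIECES == 8, a word-aligned 16-byte copy costs
   move_by_pieces_ninsns == 2 (two 8-byte moves); if the target's
   MOVE_RATIO is greater than 2, MOVE_BY_PIECES_P is true and the copy
   is expanded inline rather than through a memcpy call.  */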
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
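
/* As a result, float_extend_from_mem[DFmode][SFmode] (mode names
   illustrative) ends up true exactly when the target's extend pattern
   accepts a memory input directly, so later code may extend straight
   from memory instead of loading into a register first.  */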
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
						 from_mode),
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
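
/* For instance (illustrative), widening a signed _Fract value to a wider
   mode of the same class cannot lose any value, so the unsaturating
   conversion is used; every other combination goes through the
   saturating variant (last argument 1 above) to be safe.  */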
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
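
  /* Worked example (illustrative): widening SImode to DImode on a
     32-bit target with no direct extend pattern: the SImode value lands
     in the low word of TO, and the remaining word is filled with zero
     when UNSIGNEDP, or with the all-ones result of the LT comparison
     above when the value is negative.  */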
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
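
  /* Worked example (illustrative, 32-bit SImode): extending QImode to
     SImode with no usable extend insn or intermediate mode: FROM is
     placed in the low byte of an SImode register, shifted left 24
     bits, then shifted right 24 bits; expand_shift emits an arithmetic
     right shift when UNSIGNEDP is zero (replicating the sign bit) and
     a logical one otherwise.  */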
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
	val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }
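
  /* Worked example (illustrative, 64-bit HOST_WIDE_INT): converting the
     SImode constant -1, interpreted as unsigned, to a mode of twice
     HOST_BITS_PER_WIDE_INT must yield 0xffffffff; double_int_zext
     clears every bit above SImode's 32, where gen_lowpart would have
     produced an all-ones value.  */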
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
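
/* Typical use (illustrative; X is a hypothetical QImode rtx):

     rtx wide = convert_modes (SImode, QImode, x, 1);

   returns an SImode rtx holding X zero-extended, either by referring
   to X in the wider mode or via a fresh pseudo and convert_move.  */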
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > max_pieces
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode narrower than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode, from_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode,
					     plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode,
					   plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }
  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }

  return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
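
/* Worked example (illustrative): l == 7 with ample alignment and
   MOVE_MAX_PIECES == 8: the loop tries DImode (7/8 == 0 insns, l stays
   7), then SImode (1 insn, l becomes 3), HImode (1 insn, l becomes 1)
   and QImode (1 insn), for a total of 3 insns.  */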
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
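
/* Typical use (illustrative; dst_mem, src_mem and nbytes are
   hypothetical):

     emit_block_move (dst_mem, src_mem, GEN_INT (nbytes),
		      BLOCK_OP_NORMAL);

   copies NBYTES bytes between two BLKmode MEMs, choosing among
   move_by_pieces, a movmem pattern, a memcpy libcall or an explicit
   loop as described above.  */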
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (&args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align
						 / BITS_PER_UNIT),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
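
/* Operand layout assumed above for a movmem pattern: 0 is the
   destination MEM, 1 the source MEM, 2 the byte count and 3 the shared
   alignment in bytes; six-operand patterns additionally receive the
   expected alignment and expected size hints.  */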
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
  enum machine_mode y_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
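
/* The loop emitted above is equivalent to this C sketch (illustrative):

     for (iter = 0; iter < size; iter++)
       ((char *) x)[iter] = ((char *) y)[iter];

   one byte per iteration, hence the ??? note about copying in larger
   hunks.  */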
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
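
/* Such a group PARALLEL has the shape (illustrative):

     (parallel [(expr_list (reg:DI 100) (const_int 0))
		(expr_list (reg:DI 101) (const_int 8))])

   pairing each register with its byte offset within the value; a null
   first register marks a value passed both on the stack and in
   registers.  */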
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;
  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}
      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, false, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, false, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }
  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }
  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }
  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode),
					    0);
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode),
					    0);
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}
      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = adj_bytelen;
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode copy_mode;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }
2115 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2116 into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2122 /* If the structure doesn't take up a whole number of words, see whether
2123 SRCREG is padded on the left or on the right. If it's on the left,
2124 set PADDING_CORRECTION to the number of bits to skip.
2126 In most ABIs, the structure will be returned at the least significant end of
2127 the register, which translates to right padding on little-endian
2128 targets and left padding on big-endian targets. The opposite
2129 holds if the structure is returned at the most significant
2130 end of the register. */
2131 if (bytes % UNITS_PER_WORD != 0
2132 && (targetm.calls.return_in_msb (type)
2134 : BYTES_BIG_ENDIAN))
2136 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
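  /* Worked example (illustrative): a 6-byte struct on a 32-bit
     big-endian target has bytes % UNITS_PER_WORD == 2, so the
     correction computed above is 32 - 2 * BITS_PER_UNIT == 16 bits
     of left padding to skip in the partial word.  */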
2138 /* Copy the structure BITSIZE bits at a time. If the target lives in
2139 memory, take care of not reading/writing past its end by selecting
2140 a copy mode suited to BITSIZE. This should always be possible given how it is computed.
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current time. */
2147 copy_mode = word_mode;
2150 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2151 if (mem_mode != BLKmode)
2152 copy_mode = mem_mode;
2155 for (bitpos = 0, xbitpos = padding_correction;
2156 bitpos < bytes * BITS_PER_UNIT;
2157 bitpos += bitsize, xbitpos += bitsize)
2159 /* We need a new source operand each time xbitpos is on a
2160 word boundary and when xbitpos == padding_correction
2161 (the first time through). */
2162 if (xbitpos % BITS_PER_WORD == 0
2163 || xbitpos == padding_correction)
2164 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2167 /* We need a new destination operand each time bitpos is on a word boundary. */
2169 if (bitpos % BITS_PER_WORD == 0)
2170 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2172 /* Use xbitpos for the source extraction (right justified) and
2173 bitpos for the destination store (left justified). */
2174 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2175 extract_bit_field (src, bitsize,
2176 xbitpos % BITS_PER_WORD, 1, false,
2177 NULL_RTX, copy_mode, copy_mode));
2183 /* Add a USE expression for REG to the (possibly empty) list pointed
2184 to by CALL_FUSAGE. REG must denote a hard register. */
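/* Illustrative sketch (hypothetical register numbers, not part of
   the sources): a caller building the usage list for a call whose
   arguments live in two hard registers might write

     rtx fusage = NULL_RTX;
     use_reg (&fusage, gen_rtx_REG (SImode, 4));
     use_reg (&fusage, gen_rtx_REG (SImode, 5));

   and later attach FUSAGE to the call via CALL_INSN_FUNCTION_USAGE.  */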
2187 use_reg (rtx *call_fusage, rtx reg)
2189 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2192 = gen_rtx_EXPR_LIST (VOIDmode,
2193 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2196 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2197 starting at REGNO. All of these registers must be hard registers. */
2200 use_regs (rtx *call_fusage, int regno, int nregs)
2204 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2206 for (i = 0; i < nregs; i++)
2207 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2210 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2211 PARALLEL REGS. This is for calls that pass values in multiple
2212 non-contiguous locations. The Irix 6 ABI has examples of this. */
2215 use_group_regs (rtx *call_fusage, rtx regs)
2219 for (i = 0; i < XVECLEN (regs, 0); i++)
2221 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2223 /* A NULL entry means the parameter goes both on the stack and in
2224 registers. This can also be a MEM for targets that pass values
2225 partially on the stack and partially in registers. */
2226 if (reg != 0 && REG_P (reg))
2227 use_reg (call_fusage, reg);
2231 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2232 assignment and the code of the expression on the RHS is CODE. Return NULL otherwise. */
2236 get_def_for_expr (tree name, enum tree_code code)
2240 if (TREE_CODE (name) != SSA_NAME)
2243 def_stmt = get_gimple_for_ssa_name (name);
2245 || gimple_assign_rhs_code (def_stmt) != code)
2252 /* Determine whether the LEN bytes generated by CONSTFUN can be
2253 stored to memory using several move instructions. CONSTFUNDATA is
2254 a pointer which will be passed as argument in every CONSTFUN call.
2255 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2256 a memset operation and false if it's a copy of a constant string.
2257 Return nonzero if a call to store_by_pieces should succeed. */
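/* Illustrative sketch (callback name is hypothetical): the string
   builtins use this pair by supplying a CONSTFUN that materializes
   the constant bytes at a given offset, typically via c_readstr:

     static rtx
     read_str_constfun (void *data, HOST_WIDE_INT offset,
                        enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

     if (can_store_by_pieces (len, read_str_constfun, (void *) str,
                              align, false))
       store_by_pieces (dest, len, read_str_constfun, (void *) str,
                        align, false, 0);  */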
2260 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2261 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2262 void *constfundata, unsigned int align, bool memsetp)
2264 unsigned HOST_WIDE_INT l;
2265 unsigned int max_size;
2266 HOST_WIDE_INT offset = 0;
2267 enum machine_mode mode;
2268 enum insn_code icode;
2270 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2271 rtx cst ATTRIBUTE_UNUSED;
2277 ? SET_BY_PIECES_P (len, align)
2278 : STORE_BY_PIECES_P (len, align)))
2281 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2283 /* We would first store what we can in the largest integer mode, then go to
2284 successively smaller modes. */
2287 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2291 max_size = STORE_MAX_PIECES + 1;
2292 while (max_size > 1)
2294 mode = widest_int_mode_for_size (max_size);
2296 if (mode == VOIDmode)
2299 icode = optab_handler (mov_optab, mode);
2300 if (icode != CODE_FOR_nothing
2301 && align >= GET_MODE_ALIGNMENT (mode))
2303 unsigned int size = GET_MODE_SIZE (mode);
2310 cst = (*constfun) (constfundata, offset, mode);
2311 if (!LEGITIMATE_CONSTANT_P (cst))
2321 max_size = GET_MODE_SIZE (mode);
2324 /* The code above should have handled everything. */
2331 /* Generate several move instructions to store LEN bytes generated by
2332 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2333 pointer which will be passed as argument in every CONSTFUN call.
2334 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2335 a memset operation and false if it's a copy of a constant string.
2336 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2337 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2341 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2342 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2343 void *constfundata, unsigned int align, bool memsetp, int endp)
2345 enum machine_mode to_addr_mode
2346 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2347 struct store_by_pieces_d data;
2351 gcc_assert (endp != 2);
2356 ? SET_BY_PIECES_P (len, align)
2357 : STORE_BY_PIECES_P (len, align));
2358 data.constfun = constfun;
2359 data.constfundata = constfundata;
2362 store_by_pieces_1 (&data, align);
2367 gcc_assert (!data.reverse);
2372 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2373 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2375 data.to_addr = copy_to_mode_reg (to_addr_mode,
2376 plus_constant (data.to_addr,
2379 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2386 to1 = adjust_address (data.to, QImode, data.offset);
2394 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2395 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2398 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2400 struct store_by_pieces_d data;
2405 data.constfun = clear_by_pieces_1;
2406 data.constfundata = NULL;
2409 store_by_pieces_1 (&data, align);
2412 /* Callback routine for clear_by_pieces.
2413 Return const0_rtx unconditionally. */
2416 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2417 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2418 enum machine_mode mode ATTRIBUTE_UNUSED)
2423 /* Subroutine of clear_by_pieces and store_by_pieces.
2424 Generate several move instructions to store LEN bytes of block TO. (A MEM
2425 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2428 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2429 unsigned int align ATTRIBUTE_UNUSED)
2431 enum machine_mode to_addr_mode
2432 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2433 rtx to_addr = XEXP (data->to, 0);
2434 unsigned int max_size = STORE_MAX_PIECES + 1;
2435 enum insn_code icode;
2438 data->to_addr = to_addr;
2440 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2441 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2443 data->explicit_inc_to = 0;
2445 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2447 data->offset = data->len;
2449 /* If storing requires more than two move insns,
2450 copy addresses to registers (to make displacements shorter)
2451 and use post-increment if available. */
2452 if (!data->autinc_to
2453 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2455 /* Determine the main mode we'll be using.
2456 MODE might not be used depending on the definitions of the
2457 USE_* macros below. */
2458 enum machine_mode mode ATTRIBUTE_UNUSED
2459 = widest_int_mode_for_size (max_size);
2461 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2463 data->to_addr = copy_to_mode_reg (to_addr_mode,
2464 plus_constant (to_addr, data->len));
2465 data->autinc_to = 1;
2466 data->explicit_inc_to = -1;
2469 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2470 && ! data->autinc_to)
2472 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2473 data->autinc_to = 1;
2474 data->explicit_inc_to = 1;
2477 if (!data->autinc_to && CONSTANT_P (to_addr))
2478 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2481 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2483 /* First store what we can in the largest integer mode, then go to
2484 successively smaller modes. */
2486 while (max_size > 1)
2488 enum machine_mode mode = widest_int_mode_for_size (max_size);
2490 if (mode == VOIDmode)
2493 icode = optab_handler (mov_optab, mode);
2494 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2495 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2497 max_size = GET_MODE_SIZE (mode);
2500 /* The code above should have handled everything. */
2501 gcc_assert (!data->len);
2504 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2505 with move instructions for mode MODE. GENFUN is the gen_... function
2506 to make a move insn for that mode. DATA has all the other info. */
2509 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2510 struct store_by_pieces_d *data)
2512 unsigned int size = GET_MODE_SIZE (mode);
2515 while (data->len >= size)
2518 data->offset -= size;
2520 if (data->autinc_to)
2521 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2524 to1 = adjust_address (data->to, mode, data->offset);
2526 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2527 emit_insn (gen_add2_insn (data->to_addr,
2528 GEN_INT (-(HOST_WIDE_INT) size)));
2530 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2531 emit_insn ((*genfun) (to1, cst));
2533 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2534 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2536 if (! data->reverse)
2537 data->offset += size;
2543 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2544 its length in bytes. */
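/* Illustrative sketch (not from the sources): zeroing a 32-byte
   BLKmode stack temporary.

     rtx mem = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);

   The helpers below try by-pieces stores, then a setmem pattern,
   then fall back to a memset libcall.  */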
2547 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2548 unsigned int expected_align, HOST_WIDE_INT expected_size)
2550 enum machine_mode mode = GET_MODE (object);
2553 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2555 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2556 just move a zero. Otherwise, do this a piece at a time. */
2558 && CONST_INT_P (size)
2559 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2561 rtx zero = CONST0_RTX (mode);
2564 emit_move_insn (object, zero);
2568 if (COMPLEX_MODE_P (mode))
2570 zero = CONST0_RTX (GET_MODE_INNER (mode));
2573 write_complex_part (object, zero, 0);
2574 write_complex_part (object, zero, 1);
2580 if (size == const0_rtx)
2583 align = MEM_ALIGN (object);
2585 if (CONST_INT_P (size)
2586 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2587 clear_by_pieces (object, INTVAL (size), align);
2588 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2589 expected_align, expected_size))
2591 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2592 return set_storage_via_libcall (object, size, const0_rtx,
2593 method == BLOCK_OP_TAILCALL);
2601 clear_storage (rtx object, rtx size, enum block_op_methods method)
2603 return clear_storage_hints (object, size, method, 0, -1);
2607 /* A subroutine of clear_storage. Expand a call to memset.
2608 Return the return value of memset, 0 otherwise. */
2611 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2613 tree call_expr, fn, object_tree, size_tree, val_tree;
2614 enum machine_mode size_mode;
2617 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2618 place those pseudos into a VAR_DECL and use them later. */
2620 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2622 size_mode = TYPE_MODE (sizetype);
2623 size = convert_to_mode (size_mode, size, 1);
2624 size = copy_to_mode_reg (size_mode, size);
2626 /* It is incorrect to use the libcall calling conventions to call
2627 memset in this context. This could be a user call to memset and
2628 the user may wish to examine the return value from memset. For
2629 targets where libcalls and normal calls have different conventions
2630 for returning pointers, we could end up generating incorrect code. */
2632 object_tree = make_tree (ptr_type_node, object);
2633 if (!CONST_INT_P (val))
2634 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2635 size_tree = make_tree (sizetype, size);
2636 val_tree = make_tree (integer_type_node, val);
2638 fn = clear_storage_libcall_fn (true);
2639 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2640 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2642 retval = expand_normal (call_expr);
2647 /* A subroutine of set_storage_via_libcall. Create the tree node
2648 for the function we use for block clears. The first time FOR_CALL
2649 is true, we call assemble_external. */
2651 tree block_clear_fn;
2654 init_block_clear_fn (const char *asmspec)
2656 if (!block_clear_fn)
2660 fn = get_identifier ("memset");
2661 args = build_function_type_list (ptr_type_node, ptr_type_node,
2662 integer_type_node, sizetype,
2665 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2666 DECL_EXTERNAL (fn) = 1;
2667 TREE_PUBLIC (fn) = 1;
2668 DECL_ARTIFICIAL (fn) = 1;
2669 TREE_NOTHROW (fn) = 1;
2670 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2671 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2673 block_clear_fn = fn;
2677 set_user_assembler_name (block_clear_fn, asmspec);
2681 clear_storage_libcall_fn (int for_call)
2683 static bool emitted_extern;
2685 if (!block_clear_fn)
2686 init_block_clear_fn (NULL);
2688 if (for_call && !emitted_extern)
2690 emitted_extern = true;
2691 make_decl_rtl (block_clear_fn);
2692 assemble_external (block_clear_fn);
2695 return block_clear_fn;
2698 /* Expand a setmem pattern; return true if successful. */
2701 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2702 unsigned int expected_align, HOST_WIDE_INT expected_size)
2704 /* Try the most limited insn first, because there's no point
2705 including more than one in the machine description unless
2706 the more limited one has some advantage. */
2708 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2709 enum machine_mode mode;
2711 if (expected_align < align)
2712 expected_align = align;
2714 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2715 mode = GET_MODE_WIDER_MODE (mode))
2717 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2718 insn_operand_predicate_fn pred;
2720 if (code != CODE_FOR_nothing
2721 /* We don't need MODE to be narrower than
2722 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2723 the mode mask, as it is returned by the macro, it will
2724 definitely be less than the actual mode mask. */
2725 && ((CONST_INT_P (size)
2726 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2727 <= (GET_MODE_MASK (mode) >> 1)))
2728 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2729 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2730 || (*pred) (object, BLKmode))
2731 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2732 || (*pred) (opalign, VOIDmode)))
2735 enum machine_mode char_mode;
2736 rtx last = get_last_insn ();
2739 opsize = convert_to_mode (mode, size, 1);
2740 pred = insn_data[(int) code].operand[1].predicate;
2741 if (pred != 0 && ! (*pred) (opsize, mode))
2742 opsize = copy_to_mode_reg (mode, opsize);
2745 char_mode = insn_data[(int) code].operand[2].mode;
2746 if (char_mode != VOIDmode)
2748 opchar = convert_to_mode (char_mode, opchar, 1);
2749 pred = insn_data[(int) code].operand[2].predicate;
2750 if (pred != 0 && ! (*pred) (opchar, char_mode))
2751 opchar = copy_to_mode_reg (char_mode, opchar);
2754 if (insn_data[(int) code].n_operands == 4)
2755 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2757 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2758 GEN_INT (expected_align
2760 GEN_INT (expected_size));
2767 delete_insns_since (last);
2775 /* Write to one of the components of the complex value CPLX. Write VAL to
2776 the real part if IMAG_P is false, and the imaginary part if it's true. */
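/* Illustrative sketch (not from the sources): assembling an SCmode
   value from two SFmode parts RE and IM.

     rtx c = gen_reg_rtx (SCmode);
     write_complex_part (c, re, false);
     write_complex_part (c, im, true);

   For a CONCAT this is just two ordinary moves; otherwise it goes
   through a subreg or store_bit_field as decided below.  */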
2779 write_complex_part (rtx cplx, rtx val, bool imag_p)
2781 enum machine_mode cmode;
2782 enum machine_mode imode;
2785 if (GET_CODE (cplx) == CONCAT)
2787 emit_move_insn (XEXP (cplx, imag_p), val);
2791 cmode = GET_MODE (cplx);
2792 imode = GET_MODE_INNER (cmode);
2793 ibitsize = GET_MODE_BITSIZE (imode);
2795 /* For MEMs simplify_gen_subreg may generate an invalid new address
2796 because, e.g., the original address is considered mode-dependent
2797 by the target, which restricts simplify_subreg from invoking
2798 adjust_address_nv. Instead of preparing fallback support for an
2799 invalid address, we call adjust_address_nv directly. */
2802 emit_move_insn (adjust_address_nv (cplx, imode,
2803 imag_p ? GET_MODE_SIZE (imode) : 0),
2808 /* If the sub-object is at least word sized, then we know that subregging
2809 will work. This special case is important, since store_bit_field
2810 wants to operate on integer modes, and there's rarely an OImode to
2811 correspond to TCmode. */
2812 if (ibitsize >= BITS_PER_WORD
2813 /* For hard regs we have exact predicates. Assume we can split
2814 the original object if it spans an even number of hard regs.
2815 This special case is important for SCmode on 64-bit platforms
2816 where the natural size of floating-point regs is 32-bit. */
2818 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2819 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2821 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2822 imag_p ? GET_MODE_SIZE (imode) : 0);
2825 emit_move_insn (part, val);
2829 /* simplify_gen_subreg may fail for sub-word MEMs. */
2830 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2833 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2836 /* Extract one of the components of the complex value CPLX. Extract the
2837 real part if IMAG_P is false, and the imaginary part if it's true. */
2840 read_complex_part (rtx cplx, bool imag_p)
2842 enum machine_mode cmode, imode;
2845 if (GET_CODE (cplx) == CONCAT)
2846 return XEXP (cplx, imag_p);
2848 cmode = GET_MODE (cplx);
2849 imode = GET_MODE_INNER (cmode);
2850 ibitsize = GET_MODE_BITSIZE (imode);
2852 /* Special case reads from complex constants that got spilled to memory. */
2853 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2855 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2856 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2858 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2859 if (CONSTANT_CLASS_P (part))
2860 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2864 /* For MEMs simplify_gen_subreg may generate an invalid new address
2865 because, e.g., the original address is considered mode-dependent
2866 by the target, which restricts simplify_subreg from invoking
2867 adjust_address_nv. Instead of preparing fallback support for an
2868 invalid address, we call adjust_address_nv directly. */
2870 return adjust_address_nv (cplx, imode,
2871 imag_p ? GET_MODE_SIZE (imode) : 0);
2873 /* If the sub-object is at least word sized, then we know that subregging
2874 will work. This special case is important, since extract_bit_field
2875 wants to operate on integer modes, and there's rarely an OImode to
2876 correspond to TCmode. */
2877 if (ibitsize >= BITS_PER_WORD
2878 /* For hard regs we have exact predicates. Assume we can split
2879 the original object if it spans an even number of hard regs.
2880 This special case is important for SCmode on 64-bit platforms
2881 where the natural size of floating-point regs is 32-bit. */
2883 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2884 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2886 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2887 imag_p ? GET_MODE_SIZE (imode) : 0);
2891 /* simplify_gen_subreg may fail for sub-word MEMs. */
2892 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2895 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2896 true, false, NULL_RTX, imode, imode);
2899 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2900 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2901 represented in NEW_MODE. If FORCE is true, this will never happen, as
2902 we'll force-create a SUBREG if needed. */
2905 emit_move_change_mode (enum machine_mode new_mode,
2906 enum machine_mode old_mode, rtx x, bool force)
2910 if (push_operand (x, GET_MODE (x)))
2912 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2913 MEM_COPY_ATTRIBUTES (ret, x);
2917 /* We don't have to worry about changing the address since the
2918 size in bytes is supposed to be the same. */
2919 if (reload_in_progress)
2921 /* Copy the MEM to change the mode and move any
2922 substitutions from the old MEM to the new one. */
2923 ret = adjust_address_nv (x, new_mode, 0);
2924 copy_replacements (x, ret);
2927 ret = adjust_address (x, new_mode, 0);
2931 /* Note that we do want simplify_subreg's behavior of validating
2932 that the new mode is ok for a hard register. If we were to use
2933 simplify_gen_subreg, we would create the subreg, but would
2934 probably run into the target not being able to implement it. */
2935 /* Except, of course, when FORCE is true, when this is exactly what
2936 we want. Which is needed for CCmodes on some targets. */
2938 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2940 ret = simplify_subreg (new_mode, x, old_mode, 0);
2946 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2947 an integer mode of the same size as MODE. Returns the instruction
2948 emitted, or NULL if such a move could not be generated. */
2951 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2953 enum machine_mode imode;
2954 enum insn_code code;
2956 /* There must exist a mode of the exact size we require. */
2957 imode = int_mode_for_mode (mode);
2958 if (imode == BLKmode)
2961 /* The target must support moves in this mode. */
2962 code = optab_handler (mov_optab, imode);
2963 if (code == CODE_FOR_nothing)
2966 x = emit_move_change_mode (imode, mode, x, force);
2969 y = emit_move_change_mode (imode, mode, y, force);
2972 return emit_insn (GEN_FCN (code) (x, y));
2975 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2976 Return an equivalent MEM that does not use an auto-increment. */
2979 emit_move_resolve_push (enum machine_mode mode, rtx x)
2981 enum rtx_code code = GET_CODE (XEXP (x, 0));
2982 HOST_WIDE_INT adjust;
2985 adjust = GET_MODE_SIZE (mode);
2986 #ifdef PUSH_ROUNDING
2987 adjust = PUSH_ROUNDING (adjust);
2989 if (code == PRE_DEC || code == POST_DEC)
2991 else if (code == PRE_MODIFY || code == POST_MODIFY)
2993 rtx expr = XEXP (XEXP (x, 0), 1);
2996 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2997 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
2998 val = INTVAL (XEXP (expr, 1));
2999 if (GET_CODE (expr) == MINUS)
3001 gcc_assert (adjust == val || adjust == -val);
3005 /* Do not use anti_adjust_stack, since we don't want to update
3006 stack_pointer_delta. */
3007 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3008 GEN_INT (adjust), stack_pointer_rtx,
3009 0, OPTAB_LIB_WIDEN);
3010 if (temp != stack_pointer_rtx)
3011 emit_move_insn (stack_pointer_rtx, temp);
3018 temp = stack_pointer_rtx;
3023 temp = plus_constant (stack_pointer_rtx, -adjust);
3029 return replace_equiv_address (x, temp);
3032 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3033 X is known to satisfy push_operand, and MODE is known to be complex.
3034 Returns the last instruction emitted. */
3037 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3039 enum machine_mode submode = GET_MODE_INNER (mode);
3042 #ifdef PUSH_ROUNDING
3043 unsigned int submodesize = GET_MODE_SIZE (submode);
3045 /* In case we output to the stack, but the size is smaller than the
3046 machine can push exactly, we need to use move instructions. */
3047 if (PUSH_ROUNDING (submodesize) != submodesize)
3049 x = emit_move_resolve_push (mode, x);
3050 return emit_move_insn (x, y);
3054 /* Note that the real part always precedes the imag part in memory
3055 regardless of the machine's endianness. */
3056 switch (GET_CODE (XEXP (x, 0)))
3070 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3071 read_complex_part (y, imag_first));
3072 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3073 read_complex_part (y, !imag_first));
3076 /* A subroutine of emit_move_complex. Perform the move from Y to X
3077 via two moves of the parts. Returns the last instruction emitted. */
3080 emit_move_complex_parts (rtx x, rtx y)
3082 /* Show the output dies here. This is necessary for SUBREGs
3083 of pseudos since we cannot track their lifetimes correctly;
3084 hard regs shouldn't appear here except as return values. */
3085 if (!reload_completed && !reload_in_progress
3086 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3089 write_complex_part (x, read_complex_part (y, false), false);
3090 write_complex_part (x, read_complex_part (y, true), true);
3092 return get_last_insn ();
3095 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3096 MODE is known to be complex. Returns the last instruction emitted. */
3099 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3103 /* Need to take special care for pushes, to maintain proper ordering
3104 of the data, and possibly extra padding. */
3105 if (push_operand (x, mode))
3106 return emit_move_complex_push (mode, x, y);
3108 /* See if we can coerce the target into moving both values at once. */
3110 /* Move floating point as parts. */
3111 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3112 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3114 /* Not possible if the values are inherently not adjacent. */
3115 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3117 /* Is possible if both are registers (or subregs of registers). */
3118 else if (register_operand (x, mode) && register_operand (y, mode))
3120 /* If one of the operands is a memory, and alignment constraints
3121 are friendly enough, we may be able to do combined memory operations.
3122 We do not attempt this if Y is a constant because that combination is
3123 usually better with the by-parts thing below. */
3124 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3125 && (!STRICT_ALIGNMENT
3126 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3135 /* For memory to memory moves, optimal behavior can be had with the
3136 existing block move logic. */
3137 if (MEM_P (x) && MEM_P (y))
3139 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3140 BLOCK_OP_NO_LIBCALL);
3141 return get_last_insn ();
3144 ret = emit_move_via_integer (mode, x, y, true);
3149 return emit_move_complex_parts (x, y);
3152 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3153 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3156 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3160 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3163 enum insn_code code = optab_handler (mov_optab, CCmode);
3164 if (code != CODE_FOR_nothing)
3166 x = emit_move_change_mode (CCmode, mode, x, true);
3167 y = emit_move_change_mode (CCmode, mode, y, true);
3168 return emit_insn (GEN_FCN (code) (x, y));
3172 /* Otherwise, find the MODE_INT mode of the same width. */
3173 ret = emit_move_via_integer (mode, x, y, false);
3174 gcc_assert (ret != NULL);
3178 /* Return true if word I of OP lies entirely in the
3179 undefined bits of a paradoxical subreg. */
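/* Example (illustrative): on a 32-bit little-endian target, word 1
   of (subreg:DI (reg:SI R) 0) lies entirely in the paradoxical
   padding, so emit_move_multi_word can simply skip copying it.  */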
3182 undefined_operand_subword_p (const_rtx op, int i)
3184 enum machine_mode innermode, innermostmode;
3186 if (GET_CODE (op) != SUBREG)
3188 innermode = GET_MODE (op);
3189 innermostmode = GET_MODE (SUBREG_REG (op));
3190 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3191 /* The SUBREG_BYTE represents offset, as if the value were stored in
3192 memory, except for a paradoxical subreg where we define
3193 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3195 if (SUBREG_BYTE (op) == 0
3196 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3198 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3199 if (WORDS_BIG_ENDIAN)
3200 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3201 if (BYTES_BIG_ENDIAN)
3202 offset += difference % UNITS_PER_WORD;
3204 if (offset >= GET_MODE_SIZE (innermostmode)
3205 || offset <= -GET_MODE_SIZE (word_mode))
3210 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3211 MODE is any multi-word or full-word mode that lacks a move_insn
3212 pattern. Note that you will get better code if you define such
3213 patterns, even if they must turn into multiple assembler instructions. */
3216 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3223 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3225 /* If X is a push on the stack, do the push now and replace
3226 X with a reference to the stack pointer. */
3227 if (push_operand (x, mode))
3228 x = emit_move_resolve_push (mode, x);
3230 /* If we are in reload, see if either operand is a MEM whose address
3231 is scheduled for replacement. */
3232 if (reload_in_progress && MEM_P (x)
3233 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3234 x = replace_equiv_address_nv (x, inner);
3235 if (reload_in_progress && MEM_P (y)
3236 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3237 y = replace_equiv_address_nv (y, inner);
3241 need_clobber = false;
3243 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3246 rtx xpart = operand_subword (x, i, 1, mode);
3249 /* Do not generate code for a move if it would come entirely
3250 from the undefined bits of a paradoxical subreg. */
3251 if (undefined_operand_subword_p (y, i))
3254 ypart = operand_subword (y, i, 1, mode);
3256 /* If we can't get a part of Y, put Y into memory if it is a
3257 constant. Otherwise, force it into a register. Then we must
3258 be able to get a part of Y. */
3259 if (ypart == 0 && CONSTANT_P (y))
3261 y = use_anchored_address (force_const_mem (mode, y));
3262 ypart = operand_subword (y, i, 1, mode);
3264 else if (ypart == 0)
3265 ypart = operand_subword_force (y, i, mode);
3267 gcc_assert (xpart && ypart);
3269 need_clobber |= (GET_CODE (xpart) == SUBREG);
3271 last_insn = emit_move_insn (xpart, ypart);
3277 /* Show the output dies here. This is necessary for SUBREGs
3278 of pseudos since we cannot track their lifetimes correctly;
3279 hard regs shouldn't appear here except as return values.
3280 We never want to emit such a clobber after reload. */
3282 && ! (reload_in_progress || reload_completed)
3283 && need_clobber != 0)
3291 /* Low level part of emit_move_insn.
3292 Called just like emit_move_insn, but assumes X and Y
3293 are basically valid. */
3296 emit_move_insn_1 (rtx x, rtx y)
3298 enum machine_mode mode = GET_MODE (x);
3299 enum insn_code code;
3301 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3303 code = optab_handler (mov_optab, mode);
3304 if (code != CODE_FOR_nothing)
3305 return emit_insn (GEN_FCN (code) (x, y));
3307 /* Expand complex moves by moving real part and imag part. */
3308 if (COMPLEX_MODE_P (mode))
3309 return emit_move_complex (mode, x, y);
3311 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3312 || ALL_FIXED_POINT_MODE_P (mode))
3314 rtx result = emit_move_via_integer (mode, x, y, true);
3316 /* If we can't find an integer mode, use multi words. */
3320 return emit_move_multi_word (mode, x, y);
3323 if (GET_MODE_CLASS (mode) == MODE_CC)
3324 return emit_move_ccmode (mode, x, y);
3326 /* Try using a move pattern for the corresponding integer mode. This is
3327 only safe when simplify_subreg can convert MODE constants into integer
3328 constants. At present, it can only do this reliably if the value
3329 fits within a HOST_WIDE_INT. */
3330 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3332 rtx ret = emit_move_via_integer (mode, x, y, false);
3337 return emit_move_multi_word (mode, x, y);
3340 /* Generate code to copy Y into X.
3341 Both Y and X must have the same mode, except that
3342 Y can be a constant with VOIDmode.
3343 This mode cannot be BLKmode; use emit_block_move for that.
3345 Return the last instruction emitted. */
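/* Illustrative sketch (not from the sources): the typical use is
   simply

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   Constants that fail LEGITIMATE_CONSTANT_P are forced into the
   constant pool first, as the code below shows.  */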
3348 emit_move_insn (rtx x, rtx y)
3350 enum machine_mode mode = GET_MODE (x);
3351 rtx y_cst = NULL_RTX;
3354 gcc_assert (mode != BLKmode
3355 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3360 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3361 && (last_insn = compress_float_constant (x, y)))
3366 if (!LEGITIMATE_CONSTANT_P (y))
3368 y = force_const_mem (mode, y);
3370 /* If the target's cannot_force_const_mem prevented the spill,
3371 assume that the target's move expanders will also take care
3372 of the non-legitimate constant. */
3376 y = use_anchored_address (y);
3380 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3383 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3385 && ! push_operand (x, GET_MODE (x))))
3386 x = validize_mem (x);
3389 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3390 MEM_ADDR_SPACE (y)))
3391 y = validize_mem (y);
3393 gcc_assert (mode != BLKmode);
3395 last_insn = emit_move_insn_1 (x, y);
3397 if (y_cst && REG_P (x)
3398 && (set = single_set (last_insn)) != NULL_RTX
3399 && SET_DEST (set) == x
3400 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3401 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3406 /* If Y is representable exactly in a narrower mode, and the target can
3407 perform the extension directly from constant or memory, then emit the
3408 move as an extension. */
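/* Example (illustrative): on a target with a DFmode-from-SFmode
   extension pattern, the DFmode constant 1.0 is exactly
   representable in SFmode, so the move can load the narrower
   constant-pool entry and extend it, saving pool space.  */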
3411 compress_float_constant (rtx x, rtx y)
3413 enum machine_mode dstmode = GET_MODE (x);
3414 enum machine_mode orig_srcmode = GET_MODE (y);
3415 enum machine_mode srcmode;
3417 int oldcost, newcost;
3418 bool speed = optimize_insn_for_speed_p ();
3420 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3422 if (LEGITIMATE_CONSTANT_P (y))
3423 oldcost = rtx_cost (y, SET, speed);
3425 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3427 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3428 srcmode != orig_srcmode;
3429 srcmode = GET_MODE_WIDER_MODE (srcmode))
3432 rtx trunc_y, last_insn;
3434 /* Skip if the target can't extend this way. */
3435 ic = can_extend_p (dstmode, srcmode, 0);
3436 if (ic == CODE_FOR_nothing)
3439 /* Skip if the narrowed value isn't exact. */
3440 if (! exact_real_truncate (srcmode, &r))
3443 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3445 if (LEGITIMATE_CONSTANT_P (trunc_y))
3447 /* Skip if the target needs extra instructions to perform the extension. */
3449 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3451 /* This is valid, but may not be cheaper than the original. */
3452 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3453 if (oldcost < newcost)
3456 else if (float_extend_from_mem[dstmode][srcmode])
3458 trunc_y = force_const_mem (srcmode, trunc_y);
3459 /* This is valid, but may not be cheaper than the original. */
3460 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3461 if (oldcost < newcost)
3463 trunc_y = validize_mem (trunc_y);
3468 /* For CSE's benefit, force the compressed constant pool entry
3469 into a new pseudo. This constant may be used in different modes,
3470 and if not, combine will put things back together for us. */
3471 trunc_y = force_reg (srcmode, trunc_y);
3472 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3473 last_insn = get_last_insn ();
3476 set_unique_reg_note (last_insn, REG_EQUAL, y);
3484 /* Pushing data onto the stack. */
3486 /* Push a block of length SIZE (perhaps variable)
3487 and return an rtx to address the beginning of the block.
3488 The value may be virtual_outgoing_args_rtx.
3490 EXTRA is the number of bytes of padding to push in addition to SIZE.
3491 BELOW nonzero means this padding comes at low addresses;
3492 otherwise, the padding comes at high addresses. */
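/* Illustrative sketch (not from the sources): reserving 16 bytes of
   stack and getting an address for the new block.

     rtx block_addr = push_block (GEN_INT (16), 0, 0);

   The result can then be used as the destination of block moves for
   an outgoing BLKmode argument.  */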
3495 push_block (rtx size, int extra, int below)
3499 size = convert_modes (Pmode, ptr_mode, size, 1);
3500 if (CONSTANT_P (size))
3501 anti_adjust_stack (plus_constant (size, extra));
3502 else if (REG_P (size) && extra == 0)
3503 anti_adjust_stack (size);
3506 temp = copy_to_mode_reg (Pmode, size);
3508 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3509 temp, 0, OPTAB_LIB_WIDEN);
3510 anti_adjust_stack (temp);
3513 #ifndef STACK_GROWS_DOWNWARD
3519 temp = virtual_outgoing_args_rtx;
3520 if (extra != 0 && below)
3521 temp = plus_constant (temp, extra);
3525 if (CONST_INT_P (size))
3526 temp = plus_constant (virtual_outgoing_args_rtx,
3527 -INTVAL (size) - (below ? 0 : extra));
3528 else if (extra != 0 && !below)
3529 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3530 negate_rtx (Pmode, plus_constant (size, extra)));
3532 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3533 negate_rtx (Pmode, size));
3536 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3539 #ifdef PUSH_ROUNDING
3541 /* Emit single push insn. */
3544 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3547 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3549 enum insn_code icode;
3550 insn_operand_predicate_fn pred;
3552 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3553 /* If there is a push pattern, use it. Otherwise try the old way of
3554 throwing a MEM representing the push operation to the move expander. */
3555 icode = optab_handler (push_optab, mode);
3556 if (icode != CODE_FOR_nothing)
3558 if (((pred = insn_data[(int) icode].operand[0].predicate)
3559 && !((*pred) (x, mode))))
3560 x = force_reg (mode, x);
3561 emit_insn (GEN_FCN (icode) (x));
3564 if (GET_MODE_SIZE (mode) == rounded_size)
3565 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3566 /* If we are to pad downward, adjust the stack pointer first and
3567 then store X into the stack location using an offset. This is
3568 because emit_move_insn does not know how to pad; it does not have access to type. */
3570 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3572 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3573 HOST_WIDE_INT offset;
3575 emit_move_insn (stack_pointer_rtx,
3576 expand_binop (Pmode,
3577 #ifdef STACK_GROWS_DOWNWARD
3583 GEN_INT (rounded_size),
3584 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3586 offset = (HOST_WIDE_INT) padding_size;
3587 #ifdef STACK_GROWS_DOWNWARD
3588 if (STACK_PUSH_CODE == POST_DEC)
3589 /* We have already decremented the stack pointer, so get the previous value. */
3591 offset += (HOST_WIDE_INT) rounded_size;
3593 if (STACK_PUSH_CODE == POST_INC)
3594 /* We have already incremented the stack pointer, so get the previous value. */
3596 offset -= (HOST_WIDE_INT) rounded_size;
3598 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3602 #ifdef STACK_GROWS_DOWNWARD
3603 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3604 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3605 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3607 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3608 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3609 GEN_INT (rounded_size));
3611 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3614 dest = gen_rtx_MEM (mode, dest_addr);
3618 set_mem_attributes (dest, type, 1);
3620 if (flag_optimize_sibling_calls)
3621 /* Function incoming arguments may overlap with sibling call
3622 outgoing arguments and we cannot allow reordering of reads
3623 from function arguments with stores to outgoing arguments
3624 of sibling calls. */
3625 set_mem_alias_set (dest, 0);
3627 emit_move_insn (dest, x);
3631 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3633 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3635 SIZE is an rtx for the size of data to be copied (in bytes),
3636 needed only if X is BLKmode.
3638 ALIGN (in bits) is maximum alignment we can assume.
3640 If PARTIAL and REG are both nonzero, then copy that many of the first
3641 bytes of X into registers starting with REG, and push the rest of X.
3642 The amount of space pushed is decreased by PARTIAL bytes.
3643 REG must be a hard register in this case.
3644 If REG is zero but PARTIAL is not, take all other actions for an
3645 argument partially in registers, but do not actually load any registers.
3648 EXTRA is the amount in bytes of extra space to leave next to this arg.
3649 This is ignored if an argument block has already been allocated.
3651 On a machine that lacks real push insns, ARGS_ADDR is the address of
3652 the bottom of the argument block for this call. We use indexing off there
3653 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3654 argument block has not been preallocated.
3656 ARGS_SO_FAR is the size of args previously pushed for this call.
3658 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3659 for arguments passed in registers. If nonzero, it will be the number
3660 of bytes required. */
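/* Worked example (illustrative): on a 32-bit target, pushing a
   12-byte BLKmode argument with PARTIAL == 8 and REG set copies the
   first two words into REG and REG+1 and pushes only the trailing
   4 bytes onto the stack.  */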
3663 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3664 unsigned int align, int partial, rtx reg, int extra,
3665 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3669 enum direction stack_direction
3670 #ifdef STACK_GROWS_DOWNWARD
3676 /* Decide where to pad the argument: `downward' for below,
3677 `upward' for above, or `none' for don't pad it.
3678 Default is below for small data on big-endian machines; else above. */
3679 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3681 /* Invert direction if stack is post-decrement. */
3683 if (STACK_PUSH_CODE == POST_DEC)
3684 if (where_pad != none)
3685 where_pad = (where_pad == downward ? upward : downward);
3690 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3692 /* Copy a block into the stack, entirely or partially. */
3699 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3700 used = partial - offset;
3702 if (mode != BLKmode)
3704 /* A value is to be stored in an insufficiently aligned
3705 stack slot; copy via a suitably aligned slot if necessary. */
3707 size = GEN_INT (GET_MODE_SIZE (mode));
3708 if (!MEM_P (xinner))
3710 temp = assign_temp (type, 0, 1, 1);
3711 emit_move_insn (temp, xinner);
3718 /* USED is now the # of bytes we need not copy to the stack
3719 because registers will take care of them. */
3722 xinner = adjust_address (xinner, BLKmode, used);
3724 /* If the partial register-part of the arg counts in its stack size,
3725 skip the part of stack space corresponding to the registers.
3726 Otherwise, start copying to the beginning of the stack space,
3727 by setting SKIP to 0. */
3728 skip = (reg_parm_stack_space == 0) ? 0 : used;
3730 #ifdef PUSH_ROUNDING
3731 /* Do it with several push insns if that doesn't take lots of insns
3732 and if there is no difficulty with push insns that skip bytes
3733 on the stack for alignment purposes. */
3736 && CONST_INT_P (size)
3738 && MEM_ALIGN (xinner) >= align
3739 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3740 /* Here we avoid the case of a structure whose weak alignment
3741 forces many pushes of a small amount of data,
3742 and such small pushes do rounding that causes trouble. */
3743 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3744 || align >= BIGGEST_ALIGNMENT
3745 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3746 == (align / BITS_PER_UNIT)))
3747 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3749 /* Push padding now if padding above and stack grows down,
3750 or if padding below and stack grows up.
3751 But if space already allocated, this has already been done. */
3752 if (extra && args_addr == 0
3753 && where_pad != none && where_pad != stack_direction)
3754 anti_adjust_stack (GEN_INT (extra));
3756 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3759 #endif /* PUSH_ROUNDING */
3763 /* Otherwise make space on the stack and copy the data
3764 to the address of that space. */
3766 /* Deduct words put into registers from the size we must copy. */
3769 if (CONST_INT_P (size))
3770 size = GEN_INT (INTVAL (size) - used);
3772 size = expand_binop (GET_MODE (size), sub_optab, size,
3773 GEN_INT (used), NULL_RTX, 0,
3777 /* Get the address of the stack space.
3778 In this case, we do not deal with EXTRA separately.
3779 A single stack adjust will do. */
3782 temp = push_block (size, extra, where_pad == downward);
3785 else if (CONST_INT_P (args_so_far))
3786 temp = memory_address (BLKmode,
3787 plus_constant (args_addr,
3788 skip + INTVAL (args_so_far)));
3790 temp = memory_address (BLKmode,
3791 plus_constant (gen_rtx_PLUS (Pmode,
3796 if (!ACCUMULATE_OUTGOING_ARGS)
3798 /* If the source is referenced relative to the stack pointer,
3799 copy it to another register to stabilize it. We do not need
3800 to do this if we know that we won't be changing sp. */
3802 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3803 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3804 temp = copy_to_reg (temp);
3807 target = gen_rtx_MEM (BLKmode, temp);
3809 /* We do *not* set_mem_attributes here, because incoming arguments
3810 may overlap with sibling call outgoing arguments and we cannot
3811 allow reordering of reads from function arguments with stores
3812 to outgoing arguments of sibling calls. We do, however, want
3813 to record the alignment of the stack slot. */
3814 /* ALIGN may well be better aligned than TYPE, e.g. due to
3815 PARM_BOUNDARY. Assume the caller isn't lying. */
3816 set_mem_align (target, align);
3818 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3821 else if (partial > 0)
3823 /* Scalar partly in registers. */
3825 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3828 /* # bytes of start of argument
3829 that we must make space for but need not store. */
3830 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3831 int args_offset = INTVAL (args_so_far);
3834 /* Push padding now if padding above and stack grows down,
3835 or if padding below and stack grows up.
3836 But if space already allocated, this has already been done. */
3837 if (extra && args_addr == 0
3838 && where_pad != none && where_pad != stack_direction)
3839 anti_adjust_stack (GEN_INT (extra));
3841 /* If we make space by pushing it, we might as well push
3842 the real data. Otherwise, we can leave OFFSET nonzero
3843 and leave the space uninitialized. */
3847 /* Now NOT_STACK gets the number of words that we don't need to
3848 allocate on the stack. Convert OFFSET to words too. */
3849 not_stack = (partial - offset) / UNITS_PER_WORD;
3850 offset /= UNITS_PER_WORD;
3852 /* If the partial register-part of the arg counts in its stack size,
3853 skip the part of stack space corresponding to the registers.
3854 Otherwise, start copying to the beginning of the stack space,
3855 by setting SKIP to 0. */
3856 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3858 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3859 x = validize_mem (force_const_mem (mode, x));
3861 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3862 SUBREGs of such registers are not allowed. */
3863 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3864 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3865 x = copy_to_reg (x);
3867 /* Loop over all the words allocated on the stack for this arg. */
3868 /* We can do it by words, because any scalar bigger than a word
3869 has a size a multiple of a word. */
3870 #ifndef PUSH_ARGS_REVERSED
3871 for (i = not_stack; i < size; i++)
3873 for (i = size - 1; i >= not_stack; i--)
3875 if (i >= not_stack + offset)
3876 emit_push_insn (operand_subword_force (x, i, mode),
3877 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3879 GEN_INT (args_offset + ((i - not_stack + skip)
3881 reg_parm_stack_space, alignment_pad);
3888 /* Push padding now if padding above and stack grows down,
3889 or if padding below and stack grows up.
3890 But if space already allocated, this has already been done. */
3891 if (extra && args_addr == 0
3892 && where_pad != none && where_pad != stack_direction)
3893 anti_adjust_stack (GEN_INT (extra));
3895 #ifdef PUSH_ROUNDING
3896 if (args_addr == 0 && PUSH_ARGS)
3897 emit_single_push_insn (mode, x, type);
3901 if (CONST_INT_P (args_so_far))
3903 = memory_address (mode,
3904 plus_constant (args_addr,
3905 INTVAL (args_so_far)));
3907 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3909 dest = gen_rtx_MEM (mode, addr);
3911 /* We do *not* set_mem_attributes here, because incoming arguments
3912 may overlap with sibling call outgoing arguments and we cannot
3913 allow reordering of reads from function arguments with stores
3914 to outgoing arguments of sibling calls. We do, however, want
3915 to record the alignment of the stack slot. */
3916 /* ALIGN may well be better aligned than TYPE, e.g. due to
3917 PARM_BOUNDARY. Assume the caller isn't lying. */
3918 set_mem_align (dest, align);
3920 emit_move_insn (dest, x);
3924 /* If part should go in registers, copy that part
3925 into the appropriate registers. Do this now, at the end,
3926 since mem-to-mem copies above may do function calls. */
3927 if (partial > 0 && reg != 0)
3929 /* Handle calls that pass values in multiple non-contiguous locations.
3930 The Irix 6 ABI has examples of this. */
3931 if (GET_CODE (reg) == PARALLEL)
3932 emit_group_load (reg, x, type, -1);
3935 gcc_assert (partial % UNITS_PER_WORD == 0);
3936 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3940 if (extra && args_addr == 0 && where_pad == stack_direction)
3941 anti_adjust_stack (GEN_INT (extra));
3943 if (alignment_pad && args_addr == 0)
3944 anti_adjust_stack (alignment_pad);
3947 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3951 get_subtarget (rtx x)
3955 /* Only registers can be subtargets. */
3957 /* Don't use hard regs to avoid extending their life. */
3958 || REGNO (x) < FIRST_PSEUDO_REGISTER
3962 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3963 FIELD is a bitfield. Returns true if the optimization was successful,
3964 and there's nothing else to do. */
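/* Example (illustrative): for

     struct { unsigned f : 1; } *p;
     ...
     p->f ^= 1;

   the generic path would extract, modify and re-insert the field;
   the code below instead emits a single xor of the containing word
   with a suitably shifted mask.  */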
3967 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3968 unsigned HOST_WIDE_INT bitpos,
3969 enum machine_mode mode1, rtx str_rtx,
3972 enum machine_mode str_mode = GET_MODE (str_rtx);
3973 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3978 if (mode1 != VOIDmode
3979 || bitsize >= BITS_PER_WORD
3980 || str_bitsize > BITS_PER_WORD
3981 || TREE_SIDE_EFFECTS (to)
3982 || TREE_THIS_VOLATILE (to))
3986 if (!BINARY_CLASS_P (src)
3987 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3990 op0 = TREE_OPERAND (src, 0);
3991 op1 = TREE_OPERAND (src, 1);
3994 if (!operand_equal_p (to, op0, 0))
3997 if (MEM_P (str_rtx))
3999 unsigned HOST_WIDE_INT offset1;
4001 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4002 str_mode = word_mode;
4003 str_mode = get_best_mode (bitsize, bitpos,
4004 MEM_ALIGN (str_rtx), str_mode, 0);
4005 if (str_mode == VOIDmode)
4007 str_bitsize = GET_MODE_BITSIZE (str_mode);
4010 bitpos %= str_bitsize;
4011 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4012 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4014 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4017 /* If the bit field covers the whole REG/MEM, store_field
4018 will likely generate better code. */
4019 if (bitsize >= str_bitsize)
4022 /* We can't handle fields split across multiple entities. */
4023 if (bitpos + bitsize > str_bitsize)
4026 if (BYTES_BIG_ENDIAN)
4027 bitpos = str_bitsize - bitpos - bitsize;
4029 switch (TREE_CODE (src))
4033 /* For now, just optimize the case of the topmost bitfield
4034 where we don't need to do any masking and also
4035 1 bit bitfields where xor can be used.
4036 We might win by one instruction for the other bitfields
4037 too if insv/extv instructions aren't used, so that
4038 can be added later. */
4039 if (bitpos + bitsize != str_bitsize
4040 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4043 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4044 value = convert_modes (str_mode,
4045 TYPE_MODE (TREE_TYPE (op1)), value,
4046 TYPE_UNSIGNED (TREE_TYPE (op1)));
4048 /* We may be accessing data outside the field, which means
4049 we can alias adjacent data. */
4050 if (MEM_P (str_rtx))
4052 str_rtx = shallow_copy_rtx (str_rtx);
4053 set_mem_alias_set (str_rtx, 0);
4054 set_mem_expr (str_rtx, 0);
4057 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4058 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4060 value = expand_and (str_mode, value, const1_rtx, NULL);
4063 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4064 build_int_cst (NULL_TREE, bitpos),
4066 result = expand_binop (str_mode, binop, str_rtx,
4067 value, str_rtx, 1, OPTAB_WIDEN);
4068 if (result != str_rtx)
4069 emit_move_insn (str_rtx, result);
4074 if (TREE_CODE (op1) != INTEGER_CST)
4076 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4077 value = convert_modes (GET_MODE (str_rtx),
4078 TYPE_MODE (TREE_TYPE (op1)), value,
4079 TYPE_UNSIGNED (TREE_TYPE (op1)));
4081 /* We may be accessing data outside the field, which means
4082 we can alias adjacent data. */
4083 if (MEM_P (str_rtx))
4085 str_rtx = shallow_copy_rtx (str_rtx);
4086 set_mem_alias_set (str_rtx, 0);
4087 set_mem_expr (str_rtx, 0);
4090 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4091 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4093 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4095 value = expand_and (GET_MODE (str_rtx), value, mask,
4098 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4099 build_int_cst (NULL_TREE, bitpos),
4101 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4102 value, str_rtx, 1, OPTAB_WIDEN);
4103 if (result != str_rtx)
4104 emit_move_insn (str_rtx, result);
4115 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4116 is true, try generating a nontemporal store. */
4119 expand_assignment (tree to, tree from, bool nontemporal)
4123 enum machine_mode mode;
4126 /* Don't crash if the lhs of the assignment was erroneous. */
4127 if (TREE_CODE (to) == ERROR_MARK)
4129 result = expand_normal (from);
4133 /* Optimize away no-op moves without side-effects. */
4134 if (operand_equal_p (to, from, 0))
4137 mode = TYPE_MODE (TREE_TYPE (to));
4138 if ((TREE_CODE (to) == MEM_REF
4139 || TREE_CODE (to) == TARGET_MEM_REF)
4141 && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
4142 get_object_alignment (to, BIGGEST_ALIGNMENT)))
4143 < (signed) GET_MODE_ALIGNMENT (mode))
4144 && ((icode = optab_handler (movmisalign_optab, mode))
4145 != CODE_FOR_nothing))
4147 enum machine_mode address_mode, op_mode1;
4148 rtx insn, reg, op0, mem;
4150 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4151 reg = force_not_mem (reg);
4153 if (TREE_CODE (to) == MEM_REF)
4156 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
4157 tree base = TREE_OPERAND (to, 0);
4158 address_mode = targetm.addr_space.address_mode (as);
4159 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4160 op0 = convert_memory_address_addr_space (address_mode, op0, as);
4161 if (!integer_zerop (TREE_OPERAND (to, 1)))
4164 = immed_double_int_const (mem_ref_offset (to), address_mode);
4165 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4167 op0 = memory_address_addr_space (mode, op0, as);
4168 mem = gen_rtx_MEM (mode, op0);
4169 set_mem_attributes (mem, to, 0);
4170 set_mem_addr_space (mem, as);
4172 else if (TREE_CODE (to) == TARGET_MEM_REF)
4174 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
4175 struct mem_address addr;
4177 get_address_description (to, &addr);
4178 op0 = addr_for_mem_ref (&addr, as, true);
4179 op0 = memory_address_addr_space (mode, op0, as);
4180 mem = gen_rtx_MEM (mode, op0);
4181 set_mem_attributes (mem, to, 0);
4182 set_mem_addr_space (mem, as);
4186 if (TREE_THIS_VOLATILE (to))
4187 MEM_VOLATILE_P (mem) = 1;
4189 op_mode1 = insn_data[icode].operand[1].mode;
4190 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4191 && op_mode1 != VOIDmode)
4192 reg = copy_to_mode_reg (op_mode1, reg);
4194 insn = GEN_FCN (icode) (mem, reg);
4195 /* The movmisalign<mode> pattern cannot fail; otherwise the assignment
4196 would be silently omitted. */
4197 gcc_assert (insn != NULL_RTX);
4202 /* Assignment of a structure component needs special treatment
4203 if the structure component's rtx is not simply a MEM.
4204 Assignment of an array element at a constant index, and assignment of
4205 an array element in an unaligned packed structure field, have the same problem. */
4207 if (handled_component_p (to)
4208 /* ??? We only need to handle MEM_REF here if the access is not
4209 a full access of the base object. */
4210 || (TREE_CODE (to) == MEM_REF
4211 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4212 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4214 enum machine_mode mode1;
4215 HOST_WIDE_INT bitsize, bitpos;
4222 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4223 &unsignedp, &volatilep, true);
4225 /* If we are going to use store_bit_field and extract_bit_field,
4226 make sure to_rtx will be safe for multiple use. */
4228 to_rtx = expand_normal (tem);
4230 /* If the bitfield is volatile, we want to access it in the
4231 field's mode, not the computed mode.
4232 If a MEM has VOIDmode (external with incomplete type),
4233 use BLKmode for it instead. */
4236 if (volatilep && flag_strict_volatile_bitfields > 0)
4237 to_rtx = adjust_address (to_rtx, mode1, 0);
4238 else if (GET_MODE (to_rtx) == VOIDmode)
4239 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4244 enum machine_mode address_mode;
4247 if (!MEM_P (to_rtx))
4249 /* We can get constant negative offsets into arrays with broken
4250 user code. Translate this to a trap instead of ICEing. */
4251 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4252 expand_builtin_trap ();
4253 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4256 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4258 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4259 if (GET_MODE (offset_rtx) != address_mode)
4260 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4262 /* A constant address in TO_RTX can have VOIDmode; we must not try
4263 to call force_reg in that case, so avoid it. */
4265 && GET_MODE (to_rtx) == BLKmode
4266 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4268 && (bitpos % bitsize) == 0
4269 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4270 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4272 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4276 to_rtx = offset_address (to_rtx, offset_rtx,
4277 highest_pow2_factor_for_target (to,
4281 /* No action is needed if the target is not a memory and the field
4282 lies completely outside that target. This can occur if the source
4283 code contains an out-of-bounds access to a small array. */
4285 && GET_MODE (to_rtx) != BLKmode
4286 && (unsigned HOST_WIDE_INT) bitpos
4287 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4289 expand_normal (from);
4292 /* Handle expand_expr of a complex value returning a CONCAT. */
4293 else if (GET_CODE (to_rtx) == CONCAT)
4295 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4297 gcc_assert (bitpos == 0);
4298 result = store_expr (from, to_rtx, false, nontemporal);
4302 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4303 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4311 /* If the field is at offset zero, we could have been given the
4312 DECL_RTX of the parent struct. Don't munge it. */
4313 to_rtx = shallow_copy_rtx (to_rtx);
4315 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4317 /* Deal with volatile and readonly fields. The former is only
4318 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4320 MEM_VOLATILE_P (to_rtx) = 1;
4321 if (component_uses_parent_alias_set (to))
4322 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4325 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4329 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4330 TREE_TYPE (tem), get_alias_set (to),
4335 preserve_temp_slots (result);
4341 /* If the rhs is a function call and its value is not an aggregate,
4342 call the function before we start to compute the lhs.
4343 This is needed for correct code for cases such as
4344 val = setjmp (buf) on machines where reference to val
4345 requires loading up part of an address in a separate insn.
4347 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4348 since it might be a promoted variable where the zero- or sign-extension
4349 needs to be done. Handling this in the normal way is safe because no
4350 computation is done before the call. The same is true for SSA names. */
4351 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4352 && COMPLETE_TYPE_P (TREE_TYPE (from))
4353 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4354 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4355 && REG_P (DECL_RTL (to)))
4356 || TREE_CODE (to) == SSA_NAME))
4361 value = expand_normal (from);
4363 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4365 /* Handle calls that return values in multiple non-contiguous locations.
4366 The Irix 6 ABI has examples of this. */
4367 if (GET_CODE (to_rtx) == PARALLEL)
4368 emit_group_load (to_rtx, value, TREE_TYPE (from),
4369 int_size_in_bytes (TREE_TYPE (from)));
4370 else if (GET_MODE (to_rtx) == BLKmode)
4371 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4374 if (POINTER_TYPE_P (TREE_TYPE (to)))
4375 value = convert_memory_address_addr_space
4376 (GET_MODE (to_rtx), value,
4377 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4379 emit_move_insn (to_rtx, value);
4381 preserve_temp_slots (to_rtx);
4387 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4388 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4391 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4393 /* Don't move directly into a return register. */
4394 if (TREE_CODE (to) == RESULT_DECL
4395 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4400 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4402 if (GET_CODE (to_rtx) == PARALLEL)
4403 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4404 int_size_in_bytes (TREE_TYPE (from)));
4406 emit_move_insn (to_rtx, temp);
4408 preserve_temp_slots (to_rtx);
4414 /* In case we are returning the contents of an object which overlaps
4415 the place the value is being stored, use a safe function when copying
4416 a value through a pointer into a structure value return block. */
4417 if (TREE_CODE (to) == RESULT_DECL
4418 && TREE_CODE (from) == INDIRECT_REF
4419 && ADDR_SPACE_GENERIC_P
4420 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4421 && refs_may_alias_p (to, from)
4422 && cfun->returns_struct
4423 && !cfun->returns_pcc_struct)
4428 size = expr_size (from);
4429 from_rtx = expand_normal (from);
4431 emit_library_call (memmove_libfunc, LCT_NORMAL,
4432 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4433 XEXP (from_rtx, 0), Pmode,
4434 convert_to_mode (TYPE_MODE (sizetype),
4435 size, TYPE_UNSIGNED (sizetype)),
4436 TYPE_MODE (sizetype));
4438 preserve_temp_slots (to_rtx);
4444 /* Compute FROM and store the value in the rtx we got. */
4447 result = store_expr (from, to_rtx, 0, nontemporal);
4448 preserve_temp_slots (result);
4454 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
4455 succeeded, false otherwise. */
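/* A typical use, condensed from the end of store_expr below: when the
   storent pattern is missing or its predicates reject the operands,
   the caller falls back to an ordinary move:

       if (nontemporal && emit_storent_insn (target, temp))
	 ;
       else
	 {
	   temp = force_operand (temp, target);
	   emit_move_insn (target, temp);
	 }

   (Sketch only, with the surrounding cases elided.)  */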
4458 emit_storent_insn (rtx to, rtx from)
4460 enum machine_mode mode = GET_MODE (to), imode;
4461 enum insn_code code = optab_handler (storent_optab, mode);
4464 if (code == CODE_FOR_nothing)
4467 imode = insn_data[code].operand[0].mode;
4468 if (!insn_data[code].operand[0].predicate (to, imode))
4471 imode = insn_data[code].operand[1].mode;
4472 if (!insn_data[code].operand[1].predicate (from, imode))
4474 from = copy_to_mode_reg (imode, from);
4475 if (!insn_data[code].operand[1].predicate (from, imode))
4479 pattern = GEN_FCN (code) (to, from);
4480 if (pattern == NULL_RTX)
4483 emit_insn (pattern);
4487 /* Generate code for computing expression EXP,
4488 and storing the value into TARGET.
4490 If the mode is BLKmode then we may return TARGET itself.
4491 It turns out that in BLKmode it doesn't cause a problem,
4492 because C has no operators that could combine two different
4493 assignments into the same BLKmode object with different values
4494 with no sequence point. Will other languages need this to be more thorough?
4497 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4498 stack, and block moves may need to be treated specially.
4500 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4503 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4506 rtx alt_rtl = NULL_RTX;
4507 location_t loc = EXPR_LOCATION (exp);
4509 if (VOID_TYPE_P (TREE_TYPE (exp)))
4511 /* C++ can generate ?: expressions with a throw expression in one
4512 branch and an rvalue in the other. Here, we resolve attempts to
4513 store the throw expression's nonexistent result. */
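/* For instance (C++, illustrative only):

       int i = p != 0 ? *p : throw "null pointer";

   the throw arm has void type, so there is no value to store for
   that branch.  */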
4514 gcc_assert (!call_param_p);
4515 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4518 if (TREE_CODE (exp) == COMPOUND_EXPR)
4520 /* Perform first part of compound expression, then assign from second part. */
4522 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4523 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4524 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4527 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4529 /* For conditional expression, get safe form of the target. Then
4530 test the condition, doing the appropriate assignment on either
4531 side. This avoids the creation of unnecessary temporaries.
4532 For non-BLKmode, it is more efficient not to do this. */
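/* E.g. (illustrative), with BIG, A and B all of some large struct type,

       big = flag ? a : b;

   emits a test of FLAG and stores A or B directly into BIG's memory,
   rather than materializing the selected value in a temporary first.  */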
4534 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4536 do_pending_stack_adjust ();
4538 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4539 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4541 emit_jump_insn (gen_jump (lab2));
4544 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4551 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4552 /* If this is a scalar in a register that is stored in a wider mode
4553 than the declared mode, compute the result into its declared mode
4554 and then convert to the wider mode. Our value is the computed expression. */
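/* E.g. (illustrative): on a target that promotes SImode locals into
   DImode registers, TARGET is (subreg:SI (reg:DI N)) with
   SUBREG_PROMOTED_VAR_P set; we compute the value in SImode and then
   extend it into the DImode register.  */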
4557 rtx inner_target = 0;
4559 /* We can do the conversion inside EXP, which will often result
4560 in some optimizations. Do the conversion in two steps: first
4561 change the signedness, if needed, then the extend. But don't
4562 do this if the type of EXP is a subtype of something else
4563 since then the conversion might involve more than just
4564 converting modes. */
4565 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4566 && TREE_TYPE (TREE_TYPE (exp)) == 0
4567 && GET_MODE_PRECISION (GET_MODE (target))
4568 == TYPE_PRECISION (TREE_TYPE (exp)))
4570 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4571 != SUBREG_PROMOTED_UNSIGNED_P (target))
4573 /* Some types, e.g. Fortran's logical*4, won't have a signed
4574 version, so use the mode instead. */
4576 = (signed_or_unsigned_type_for
4577 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
if (ntype == NULL)
4579 ntype = lang_hooks.types.type_for_mode
4580 (TYPE_MODE (TREE_TYPE (exp)),
4581 SUBREG_PROMOTED_UNSIGNED_P (target));
4583 exp = fold_convert_loc (loc, ntype, exp);
4586 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4587 (GET_MODE (SUBREG_REG (target)),
4588 SUBREG_PROMOTED_UNSIGNED_P (target)),
4591 inner_target = SUBREG_REG (target);
4594 temp = expand_expr (exp, inner_target, VOIDmode,
4595 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4597 /* If TEMP is a VOIDmode constant, use convert_modes to make
4598 sure that we properly convert it. */
4599 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4601 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4602 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4603 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4604 GET_MODE (target), temp,
4605 SUBREG_PROMOTED_UNSIGNED_P (target));
4608 convert_move (SUBREG_REG (target), temp,
4609 SUBREG_PROMOTED_UNSIGNED_P (target));
4613 else if ((TREE_CODE (exp) == STRING_CST
4614 || (TREE_CODE (exp) == MEM_REF
4615 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4616 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4618 && integer_zerop (TREE_OPERAND (exp, 1))))
4619 && !nontemporal && !call_param_p
4622 /* Optimize initialization of an array with a STRING_CST. */
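/* E.g. (illustrative): char buf[8] = "hi"; copies the string bytes
   with store_by_pieces and clears the remaining tail of BUF with
   clear_storage below.  */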
4623 HOST_WIDE_INT exp_len, str_copy_len;
4625 tree str = TREE_CODE (exp) == STRING_CST
4626 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4628 exp_len = int_expr_size (exp);
4632 if (TREE_STRING_LENGTH (str) <= 0)
4635 str_copy_len = strlen (TREE_STRING_POINTER (str));
4636 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4639 str_copy_len = TREE_STRING_LENGTH (str);
4640 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
4641 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
4643 str_copy_len += STORE_MAX_PIECES - 1;
4644 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4646 str_copy_len = MIN (str_copy_len, exp_len);
4647 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4648 CONST_CAST (char *, TREE_STRING_POINTER (str)),
4649 MEM_ALIGN (target), false))
4654 dest_mem = store_by_pieces (dest_mem,
4655 str_copy_len, builtin_strncpy_read_str,
4657 TREE_STRING_POINTER (str)),
4658 MEM_ALIGN (target), false,
4659 exp_len > str_copy_len ? 1 : 0);
4660 if (exp_len > str_copy_len)
4661 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4662 GEN_INT (exp_len - str_copy_len),
4671 /* If we want to use a nontemporal store, force the value into a register first. */
4673 tmp_target = nontemporal ? NULL_RTX : target;
4674 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4676 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4680 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4681 the same as that of TARGET, adjust the constant. This is needed, for
4682 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4684 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4685 && TREE_CODE (exp) != ERROR_MARK
4686 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4687 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4688 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4690 /* If value was not generated in the target, store it there.
4691 Convert the value to TARGET's type first if necessary and emit the
4692 pending incrementations that have been queued when expanding EXP.
4693 Note that we cannot emit the whole queue blindly because this will
4694 effectively disable the POST_INC optimization later.
4696 If TEMP and TARGET compare equal according to rtx_equal_p, but
4697 one or both of them are volatile memory refs, we have to distinguish two cases:
4699 - expand_expr has used TARGET. In this case, we must not generate
4700 another copy. This can be detected by TARGET being equal according to == .
4702 - expand_expr has not used TARGET - that means that the source just
4703 happens to have the same RTX form. Since temp will have been created
4704 by expand_expr, it will compare unequal according to == .
4705 We must generate a copy in this case, to reach the correct number
4706 of volatile memory references. */
4708 if ((! rtx_equal_p (temp, target)
4709 || (temp != target && (side_effects_p (temp)
4710 || side_effects_p (target))))
4711 && TREE_CODE (exp) != ERROR_MARK
4712 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4713 but TARGET is not a valid memory reference, TEMP will differ
4714 from TARGET although it is really the same location. */
4716 && rtx_equal_p (alt_rtl, target)
4717 && !side_effects_p (alt_rtl)
4718 && !side_effects_p (target))
4719 /* If there's nothing to copy, don't bother. Don't call
4720 expr_size unless necessary, because some front-ends' (C++)
4721 expr_size hook must not be given objects that are not
4722 supposed to be bit-copied or bit-initialized. */
4723 && expr_size (exp) != const0_rtx)
4725 if (GET_MODE (temp) != GET_MODE (target)
4726 && GET_MODE (temp) != VOIDmode)
4728 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4729 if (GET_MODE (target) == BLKmode
4730 && GET_MODE (temp) == BLKmode)
4731 emit_block_move (target, temp, expr_size (exp),
4733 ? BLOCK_OP_CALL_PARM
4734 : BLOCK_OP_NORMAL));
4735 else if (GET_MODE (target) == BLKmode)
4736 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
4737 0, GET_MODE (temp), temp);
4739 convert_move (target, temp, unsignedp);
4742 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4744 /* Handle copying a string constant into an array. The string
4745 constant may be shorter than the array. So copy just the string's
4746 actual length, and clear the rest. First get the size of the data
4747 type of the string, which is actually the size of the target. */
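/* E.g. (illustrative): for char a[10] = "abc"; the string data (4
   bytes, including the terminating NUL) is copied and the remaining
   6 bytes of A are cleared.  */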
4748 rtx size = expr_size (exp);
4750 if (CONST_INT_P (size)
4751 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4752 emit_block_move (target, temp, size,
4754 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4757 enum machine_mode pointer_mode
4758 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4759 enum machine_mode address_mode
4760 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4762 /* Compute the size of the data to copy from the string. */
4764 = size_binop_loc (loc, MIN_EXPR,
4765 make_tree (sizetype, size),
4766 size_int (TREE_STRING_LENGTH (exp)));
4768 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4770 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4773 /* Copy that much. */
4774 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4775 TYPE_UNSIGNED (sizetype));
4776 emit_block_move (target, temp, copy_size_rtx,
4778 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4780 /* Figure out how much is left in TARGET that we have to clear.
4781 Do all calculations in pointer_mode. */
4782 if (CONST_INT_P (copy_size_rtx))
4784 size = plus_constant (size, -INTVAL (copy_size_rtx));
4785 target = adjust_address (target, BLKmode,
4786 INTVAL (copy_size_rtx));
4790 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4791 copy_size_rtx, NULL_RTX, 0,
4794 if (GET_MODE (copy_size_rtx) != address_mode)
4795 copy_size_rtx = convert_to_mode (address_mode,
4797 TYPE_UNSIGNED (sizetype));
4799 target = offset_address (target, copy_size_rtx,
4800 highest_pow2_factor (copy_size));
4801 label = gen_label_rtx ();
4802 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4803 GET_MODE (size), 0, label);
4806 if (size != const0_rtx)
4807 clear_storage (target, size, BLOCK_OP_NORMAL);
4813 /* Handle calls that return values in multiple non-contiguous locations.
4814 The Irix 6 ABI has examples of this. */
4815 else if (GET_CODE (target) == PARALLEL)
4816 emit_group_load (target, temp, TREE_TYPE (exp),
4817 int_size_in_bytes (TREE_TYPE (exp)));
4818 else if (GET_MODE (temp) == BLKmode)
4819 emit_block_move (target, temp, expr_size (exp),
4821 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4822 else if (nontemporal
4823 && emit_storent_insn (target, temp))
4824 /* If we managed to emit a nontemporal store, there is nothing else to do. */
4829 temp = force_operand (temp, target);
4831 emit_move_insn (target, temp);
4838 /* Helper for categorize_ctor_elements. Identical interface. */
4841 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4842 HOST_WIDE_INT *p_elt_count,
4845 unsigned HOST_WIDE_INT idx;
4846 HOST_WIDE_INT nz_elts, elt_count;
4847 tree value, purpose;
4849 /* Whether CTOR is a valid constant initializer, in accordance with what
4850 initializer_constant_valid_p does. If inferred from the constructor
4851 elements, true until proven otherwise. */
4852 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4853 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4858 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4860 HOST_WIDE_INT mult = 1;
4862 if (TREE_CODE (purpose) == RANGE_EXPR)
4864 tree lo_index = TREE_OPERAND (purpose, 0);
4865 tree hi_index = TREE_OPERAND (purpose, 1);
4867 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4868 mult = (tree_low_cst (hi_index, 1)
4869 - tree_low_cst (lo_index, 1) + 1);
4872 switch (TREE_CODE (value))
4876 HOST_WIDE_INT nz = 0, ic = 0;
4879 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4881 nz_elts += mult * nz;
4882 elt_count += mult * ic;
4884 if (const_from_elts_p && const_p)
4885 const_p = const_elt_p;
4892 if (!initializer_zerop (value))
4898 nz_elts += mult * TREE_STRING_LENGTH (value);
4899 elt_count += mult * TREE_STRING_LENGTH (value);
4903 if (!initializer_zerop (TREE_REALPART (value)))
4905 if (!initializer_zerop (TREE_IMAGPART (value)))
4913 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4915 if (!initializer_zerop (TREE_VALUE (v)))
4924 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4927 nz_elts += mult * tc;
4928 elt_count += mult * tc;
4930 if (const_from_elts_p && const_p)
4931 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4939 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4940 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4943 bool clear_this = true;
4945 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4947 /* We don't expect more than one element of the union to be
4948 initialized. Not sure what we should do otherwise... */
4949 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4952 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4953 CONSTRUCTOR_ELTS (ctor),
4956 /* ??? We could look at each element of the union, and find the
4957 largest element. Which would avoid comparing the size of the
4958 initialized element against any tail padding in the union.
4959 Doesn't seem worth the effort... */
4960 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4961 TYPE_SIZE (init_sub_type)) == 1)
4963 /* And now we have to find out if the element itself is fully
4964 constructed. E.g. for union { struct { int a, b; } s; } u
4965 = { .s = { .a = 1 } }. */
4966 if (elt_count == count_type_elements (init_sub_type, false))
4971 *p_must_clear = clear_this;
4974 *p_nz_elts += nz_elts;
4975 *p_elt_count += elt_count;
4980 /* Examine CTOR to discover:
4981 * how many scalar fields are set to nonzero values,
4982 and place it in *P_NZ_ELTS;
4983 * how many scalar fields in total are in CTOR,
4984 and place it in *P_ELT_COUNT.
4985 * if a type is a union, and the initializer from the constructor
4986 is not the largest element in the union, then set *p_must_clear.
4988 Return whether or not CTOR is a valid static constant initializer, the same
4989 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4992 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4993 HOST_WIDE_INT *p_elt_count,
4998 *p_must_clear = false;
5001 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5004 /* Count the number of scalars in TYPE. Return -1 on overflow or
5005 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
5006 array member at the end of the structure. */
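/* E.g. (illustrative): int v[4] yields 4, and struct { int a, b; }
   yields 2.  A struct { int n; char tail[]; } yields 1 when
   ALLOW_FLEXARR is true, since the flexible tail has no fixed count,
   and -1 otherwise.  */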
5009 count_type_elements (const_tree type, bool allow_flexarr)
5011 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5012 switch (TREE_CODE (type))
5016 tree telts = array_type_nelts (type);
5017 if (telts && host_integerp (telts, 1))
5019 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5020 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5023 else if (max / n > m)
5031 HOST_WIDE_INT n = 0, t;
5034 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5035 if (TREE_CODE (f) == FIELD_DECL)
5037 t = count_type_elements (TREE_TYPE (f), false);
5040 /* Check for structures with flexible array member. */
5041 tree tf = TREE_TYPE (f);
5043 && DECL_CHAIN (f) == NULL
5044 && TREE_CODE (tf) == ARRAY_TYPE
5046 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5047 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5048 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5049 && int_size_in_bytes (type) >= 0)
5061 case QUAL_UNION_TYPE:
5068 return TYPE_VECTOR_SUBPARTS (type);
5072 case FIXED_POINT_TYPE:
5077 case REFERENCE_TYPE:
5092 /* Return 1 if EXP contains mostly (more than 3/4) zeros. */
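/* E.g. (illustrative): { 0, 0, 0, 0, 0, 0, 0, 9 } is mostly zeros
   (one nonzero element out of eight), while { 0, 5, 0, 7 } is not.  */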
5095 mostly_zeros_p (const_tree exp)
5097 if (TREE_CODE (exp) == CONSTRUCTOR)
5100 HOST_WIDE_INT nz_elts, count, elts;
5103 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5107 elts = count_type_elements (TREE_TYPE (exp), false);
5109 return nz_elts < elts / 4;
5112 return initializer_zerop (exp);
5115 /* Return 1 if EXP contains all zeros. */
5118 all_zeros_p (const_tree exp)
5120 if (TREE_CODE (exp) == CONSTRUCTOR)
5123 HOST_WIDE_INT nz_elts, count;
5126 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5127 return nz_elts == 0;
5130 return initializer_zerop (exp);
5133 /* Helper function for store_constructor.
5134 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5135 TYPE is the type of the CONSTRUCTOR, not the element type.
5136 CLEARED is as for store_constructor.
5137 ALIAS_SET is the alias set to use for any stores.
5139 This provides a recursive shortcut back to store_constructor when it isn't
5140 necessary to go through store_field. This is so that we can pass through
5141 the cleared field to let store_constructor know that we may not have to
5142 clear a substructure if the outer structure has already been cleared. */
5145 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5146 HOST_WIDE_INT bitpos, enum machine_mode mode,
5147 tree exp, tree type, int cleared,
5148 alias_set_type alias_set)
5150 if (TREE_CODE (exp) == CONSTRUCTOR
5151 /* We can only call store_constructor recursively if the size and
5152 bit position are on a byte boundary. */
5153 && bitpos % BITS_PER_UNIT == 0
5154 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5155 /* If we have a nonzero bitpos for a register target, then we just
5156 let store_field do the bitfield handling. This is unlikely to
5157 generate unnecessary clear instructions anyway. */
5158 && (bitpos == 0 || MEM_P (target)))
5162 = adjust_address (target,
5163 GET_MODE (target) == BLKmode
5165 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5166 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5169 /* Update the alias set, if required. */
5170 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5171 && MEM_ALIAS_SET (target) != 0)
5173 target = copy_rtx (target);
5174 set_mem_alias_set (target, alias_set);
5177 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5180 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5183 /* Store the value of constructor EXP into the rtx TARGET.
5184 TARGET is either a REG or a MEM; we know it cannot conflict, since
5185 safe_from_p has been called.
5186 CLEARED is true if TARGET is known to have been zero'd.
5187 SIZE is the number of bytes of TARGET we are allowed to modify: this
5188 may not be the same as the size of EXP if we are assigning to a field
5189 which has been packed to exclude padding bits. */
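/* E.g. (illustrative): for struct { int a, b, c; } x = { 1 }; the
   constructor has fewer elements than the type has fields, so X is
   cleared wholesale first and only the field A is stored explicitly.  */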
5192 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5194 tree type = TREE_TYPE (exp);
5195 #ifdef WORD_REGISTER_OPERATIONS
5196 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5199 switch (TREE_CODE (type))
5203 case QUAL_UNION_TYPE:
5205 unsigned HOST_WIDE_INT idx;
5208 /* If size is zero or the target is already cleared, do nothing. */
5209 if (size == 0 || cleared)
5211 /* We either clear the aggregate or indicate the value is dead. */
5212 else if ((TREE_CODE (type) == UNION_TYPE
5213 || TREE_CODE (type) == QUAL_UNION_TYPE)
5214 && ! CONSTRUCTOR_ELTS (exp))
5215 /* If the constructor is empty, clear the union. */
5217 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5221 /* If we are building a static constructor into a register,
5222 set the initial value as zero so we can fold the value into
5223 a constant. But if more than one register is involved,
5224 this probably loses. */
5225 else if (REG_P (target) && TREE_STATIC (exp)
5226 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5228 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5232 /* If the constructor has fewer fields than the structure or
5233 if we are initializing the structure to mostly zeros, clear
5234 the whole structure first. Don't do this if TARGET is a
5235 register whose mode size isn't equal to SIZE since
5236 clear_storage can't handle this case. */
5238 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5239 != fields_length (type))
5240 || mostly_zeros_p (exp))
5242 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5245 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5249 if (REG_P (target) && !cleared)
5250 emit_clobber (target);
5252 /* Store each element of the constructor into the
5253 corresponding field of TARGET. */
5254 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5256 enum machine_mode mode;
5257 HOST_WIDE_INT bitsize;
5258 HOST_WIDE_INT bitpos = 0;
5260 rtx to_rtx = target;
5262 /* Just ignore missing fields. We cleared the whole
5263 structure, above, if any fields are missing. */
5267 if (cleared && initializer_zerop (value))
5270 if (host_integerp (DECL_SIZE (field), 1))
5271 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5275 mode = DECL_MODE (field);
5276 if (DECL_BIT_FIELD (field))
5279 offset = DECL_FIELD_OFFSET (field);
5280 if (host_integerp (offset, 0)
5281 && host_integerp (bit_position (field), 0))
5283 bitpos = int_bit_position (field);
5287 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5291 enum machine_mode address_mode;
5295 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5296 make_tree (TREE_TYPE (exp),
5299 offset_rtx = expand_normal (offset);
5300 gcc_assert (MEM_P (to_rtx));
5303 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5304 if (GET_MODE (offset_rtx) != address_mode)
5305 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5307 to_rtx = offset_address (to_rtx, offset_rtx,
5308 highest_pow2_factor (offset));
5311 #ifdef WORD_REGISTER_OPERATIONS
5312 /* If this initializes a field that is smaller than a
5313 word, at the start of a word, try to widen it to a full
5314 word. This special case allows us to output C++ member
5315 function initializations in a form that the optimizers can understand. */
5318 && bitsize < BITS_PER_WORD
5319 && bitpos % BITS_PER_WORD == 0
5320 && GET_MODE_CLASS (mode) == MODE_INT
5321 && TREE_CODE (value) == INTEGER_CST
5323 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5325 tree type = TREE_TYPE (value);
5327 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5329 type = lang_hooks.types.type_for_size
5330 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5331 value = fold_convert (type, value);
5334 if (BYTES_BIG_ENDIAN)
5336 = fold_build2 (LSHIFT_EXPR, type, value,
5337 build_int_cst (type,
5338 BITS_PER_WORD - bitsize));
5339 bitsize = BITS_PER_WORD;
5344 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5345 && DECL_NONADDRESSABLE_P (field))
5347 to_rtx = copy_rtx (to_rtx);
5348 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5351 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5352 value, type, cleared,
5353 get_alias_set (TREE_TYPE (field)));
5360 unsigned HOST_WIDE_INT i;
5363 tree elttype = TREE_TYPE (type);
5365 HOST_WIDE_INT minelt = 0;
5366 HOST_WIDE_INT maxelt = 0;
5368 domain = TYPE_DOMAIN (type);
5369 const_bounds_p = (TYPE_MIN_VALUE (domain)
5370 && TYPE_MAX_VALUE (domain)
5371 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5372 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5374 /* If we have constant bounds for the range of the type, get them. */
5377 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5378 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5381 /* If the constructor has fewer elements than the array, clear
5382 the whole array first. Similarly if this is a static
5383 constructor of a non-BLKmode object. */
5386 else if (REG_P (target) && TREE_STATIC (exp))
5390 unsigned HOST_WIDE_INT idx;
5392 HOST_WIDE_INT count = 0, zero_count = 0;
5393 need_to_clear = ! const_bounds_p;
5395 /* This loop is a more accurate version of the loop in
5396 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5397 is also needed to check for missing elements. */
5398 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5400 HOST_WIDE_INT this_node_count;
5405 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5407 tree lo_index = TREE_OPERAND (index, 0);
5408 tree hi_index = TREE_OPERAND (index, 1);
5410 if (! host_integerp (lo_index, 1)
5411 || ! host_integerp (hi_index, 1))
5417 this_node_count = (tree_low_cst (hi_index, 1)
5418 - tree_low_cst (lo_index, 1) + 1);
5421 this_node_count = 1;
5423 count += this_node_count;
5424 if (mostly_zeros_p (value))
5425 zero_count += this_node_count;
5428 /* Clear the entire array first if there are any missing
5429 elements, or if the incidence of zero elements is >= 75%. */
if (! need_to_clear
5432 && (count < maxelt - minelt + 1
5433 || 4 * zero_count >= 3 * count))
need_to_clear = 1;
5437 if (need_to_clear && size > 0)
5440 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5442 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5446 if (!cleared && REG_P (target))
5447 /* Inform later passes that the old value is dead. */
5448 emit_clobber (target);
5450 /* Store each element of the constructor into the
5451 corresponding element of TARGET, determined by counting the
5453 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5455 enum machine_mode mode;
5456 HOST_WIDE_INT bitsize;
5457 HOST_WIDE_INT bitpos;
5458 rtx xtarget = target;
5460 if (cleared && initializer_zerop (value))
5463 mode = TYPE_MODE (elttype);
5464 if (mode == BLKmode)
5465 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5466 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5469 bitsize = GET_MODE_BITSIZE (mode);
5471 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5473 tree lo_index = TREE_OPERAND (index, 0);
5474 tree hi_index = TREE_OPERAND (index, 1);
5475 rtx index_r, pos_rtx;
5476 HOST_WIDE_INT lo, hi, count;
5479 /* If the range is constant and "small", unroll the loop. */
5481 && host_integerp (lo_index, 0)
5482 && host_integerp (hi_index, 0)
5483 && (lo = tree_low_cst (lo_index, 0),
5484 hi = tree_low_cst (hi_index, 0),
5485 count = hi - lo + 1,
5488 || (host_integerp (TYPE_SIZE (elttype), 1)
5489 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5492 lo -= minelt; hi -= minelt;
5493 for (; lo <= hi; lo++)
5495 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5498 && !MEM_KEEP_ALIAS_SET_P (target)
5499 && TREE_CODE (type) == ARRAY_TYPE
5500 && TYPE_NONALIASED_COMPONENT (type))
5502 target = copy_rtx (target);
5503 MEM_KEEP_ALIAS_SET_P (target) = 1;
5506 store_constructor_field
5507 (target, bitsize, bitpos, mode, value, type, cleared,
5508 get_alias_set (elttype));
5513 rtx loop_start = gen_label_rtx ();
5514 rtx loop_end = gen_label_rtx ();
5517 expand_normal (hi_index);
5519 index = build_decl (EXPR_LOCATION (exp),
5520 VAR_DECL, NULL_TREE, domain);
5521 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5522 SET_DECL_RTL (index, index_r);
5523 store_expr (lo_index, index_r, 0, false);
5525 /* Build the head of the loop. */
5526 do_pending_stack_adjust ();
5527 emit_label (loop_start);
5529 /* Assign value to element index. */
5531 fold_convert (ssizetype,
5532 fold_build2 (MINUS_EXPR,
5535 TYPE_MIN_VALUE (domain)));
5538 size_binop (MULT_EXPR, position,
5539 fold_convert (ssizetype,
5540 TYPE_SIZE_UNIT (elttype)));
5542 pos_rtx = expand_normal (position);
5543 xtarget = offset_address (target, pos_rtx,
5544 highest_pow2_factor (position));
5545 xtarget = adjust_address (xtarget, mode, 0);
5546 if (TREE_CODE (value) == CONSTRUCTOR)
5547 store_constructor (value, xtarget, cleared,
5548 bitsize / BITS_PER_UNIT);
5550 store_expr (value, xtarget, 0, false);
5552 /* Generate a conditional jump to exit the loop. */
5553 exit_cond = build2 (LT_EXPR, integer_type_node,
5555 jumpif (exit_cond, loop_end, -1);
5557 /* Update the loop counter, and jump to the head of the loop. */
5559 expand_assignment (index,
5560 build2 (PLUS_EXPR, TREE_TYPE (index),
5561 index, integer_one_node),
5564 emit_jump (loop_start);
5566 /* Build the end of the loop. */
5567 emit_label (loop_end);
5570 else if ((index != 0 && ! host_integerp (index, 0))
5571 || ! host_integerp (TYPE_SIZE (elttype), 1))
5576 index = ssize_int (1);
5579 index = fold_convert (ssizetype,
5580 fold_build2 (MINUS_EXPR,
5583 TYPE_MIN_VALUE (domain)));
5586 size_binop (MULT_EXPR, index,
5587 fold_convert (ssizetype,
5588 TYPE_SIZE_UNIT (elttype)));
5589 xtarget = offset_address (target,
5590 expand_normal (position),
5591 highest_pow2_factor (position));
5592 xtarget = adjust_address (xtarget, mode, 0);
5593 store_expr (value, xtarget, 0, false);
5598 bitpos = ((tree_low_cst (index, 0) - minelt)
5599 * tree_low_cst (TYPE_SIZE (elttype), 1));
5601 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5603 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5604 && TREE_CODE (type) == ARRAY_TYPE
5605 && TYPE_NONALIASED_COMPONENT (type))
5607 target = copy_rtx (target);
5608 MEM_KEEP_ALIAS_SET_P (target) = 1;
5610 store_constructor_field (target, bitsize, bitpos, mode, value,
5611 type, cleared, get_alias_set (elttype));
5619 unsigned HOST_WIDE_INT idx;
5620 constructor_elt *ce;
5624 tree elttype = TREE_TYPE (type);
5625 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5626 enum machine_mode eltmode = TYPE_MODE (elttype);
5627 HOST_WIDE_INT bitsize;
5628 HOST_WIDE_INT bitpos;
5629 rtvec vector = NULL;
5631 alias_set_type alias;
5633 gcc_assert (eltmode != BLKmode);
5635 n_elts = TYPE_VECTOR_SUBPARTS (type);
5636 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5638 enum machine_mode mode = GET_MODE (target);
5640 icode = (int) optab_handler (vec_init_optab, mode);
5641 if (icode != CODE_FOR_nothing)
5645 vector = rtvec_alloc (n_elts);
5646 for (i = 0; i < n_elts; i++)
5647 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5651 /* If the constructor has fewer elements than the vector,
5652 clear the whole vector first. Similarly if this is a static
5653 constructor of a non-BLKmode object. */
5656 else if (REG_P (target) && TREE_STATIC (exp))
5660 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5663 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5665 int n_elts_here = tree_low_cst
5666 (int_const_binop (TRUNC_DIV_EXPR,
5667 TYPE_SIZE (TREE_TYPE (value)),
5668 TYPE_SIZE (elttype), 0), 1);
5670 count += n_elts_here;
5671 if (mostly_zeros_p (value))
5672 zero_count += n_elts_here;
5675 /* Clear the entire vector first if there are any missing elements,
5676 or if the incidence of zero elements is >= 75%. */
5677 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5680 if (need_to_clear && size > 0 && !vector)
5683 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5685 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5689 /* Inform later passes that the old value is dead. */
5690 if (!cleared && !vector && REG_P (target))
5691 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5694 alias = MEM_ALIAS_SET (target);
5696 alias = get_alias_set (elttype);
5698 /* Store each element of the constructor into the corresponding
5699 element of TARGET, determined by counting the elements. */
5700 for (idx = 0, i = 0;
5701 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5702 idx++, i += bitsize / elt_size)
5704 HOST_WIDE_INT eltpos;
5705 tree value = ce->value;
5707 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5708 if (cleared && initializer_zerop (value))
5712 eltpos = tree_low_cst (ce->index, 1);
5718 /* Vector CONSTRUCTORs should only be built from smaller
5719 vectors in the case of BLKmode vectors. */
5720 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5721 RTVEC_ELT (vector, eltpos)
5722 = expand_normal (value);
5726 enum machine_mode value_mode =
5727 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5728 ? TYPE_MODE (TREE_TYPE (value))
5730 bitpos = eltpos * elt_size;
5731 store_constructor_field (target, bitsize, bitpos,
5732 value_mode, value, type,
5738 emit_insn (GEN_FCN (icode)
5740 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5749 /* Store the value of EXP (an expression tree)
5750 into a subfield of TARGET which has mode MODE and occupies
5751 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5752 If MODE is VOIDmode, it means that we are storing into a bit-field.
5754 Always return const0_rtx unless we have something particular to return.
5757 TYPE is the type of the underlying object,
5759 ALIAS_SET is the alias set for the destination. This value will
5760 (in general) be different from that for TARGET, since TARGET is a
5761 reference to the containing structure.
5763 If NONTEMPORAL is true, try generating a nontemporal store. */
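/* E.g. (illustrative): storing into the second char member of
   struct { char x; char y; } held in a word-sized register arrives
   here with BITSIZE 8 and BITPOS 8 (byte order permitting) and is
   handled by the bit-field path below.  */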
5766 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5767 enum machine_mode mode, tree exp, tree type,
5768 alias_set_type alias_set, bool nontemporal)
5770 if (TREE_CODE (exp) == ERROR_MARK)
return const0_rtx;
5773 /* If we have nothing to store, do nothing unless the expression has
side-effects. */
if (bitsize == 0)
5776 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5778 /* If we are storing into an unaligned field of an aligned union that is
5779 in a register, we may have the mode of TARGET being an integer mode but
5780 MODE == BLKmode. In that case, get an aligned object whose size and
5781 alignment are the same as TARGET and store TARGET into it (we can avoid
5782 the store if the field being stored is the entire width of TARGET). Then
5783 call ourselves recursively to store the field into a BLKmode version of
5784 that object. Finally, load from the object into TARGET. This is not
5785 very efficient in general, but should only be slightly more expensive
5786 than the otherwise-required unaligned accesses. Perhaps this can be
5787 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5788 twice, once with emit_move_insn and once via store_field. */
5791 && (REG_P (target) || GET_CODE (target) == SUBREG))
5793 rtx object = assign_temp (type, 0, 1, 1);
5794 rtx blk_object = adjust_address (object, BLKmode, 0);
5796 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5797 emit_move_insn (object, target);
5799 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5802 emit_move_insn (target, object);
5804 /* We want to return the BLKmode version of the data. */
5808 if (GET_CODE (target) == CONCAT)
5810 /* We're storing into a struct containing a single __complex. */
5812 gcc_assert (!bitpos);
5813 return store_expr (exp, target, 0, nontemporal);
5816 /* If the structure is in a register or if the component
5817 is a bit field, we cannot use addressing to access it.
5818 Use bit-field techniques or SUBREG to store in it. */
5820 if (mode == VOIDmode
5821 || (mode != BLKmode && ! direct_store[(int) mode]
5822 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5823 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5825 || GET_CODE (target) == SUBREG
5826 /* If the field isn't aligned enough to store as an ordinary memref,
5827 store it as a bit field. */
5829 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5830 || bitpos % GET_MODE_ALIGNMENT (mode))
5831 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5832 || (bitpos % BITS_PER_UNIT != 0)))
5833 /* If the RHS and field are a constant size and the size of the
5834 RHS isn't the same size as the bitfield, we must use bitfield
operations. */
|| (bitsize >= 0
5837 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5838 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5839 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5840 decl we must use bitfield operations. */
5842 && TREE_CODE (exp) == MEM_REF
5843 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5844 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5845 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
5846 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5851 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5852 implies a mask operation. If the precision is the same size as
5853 the field we're storing into, that mask is redundant. This is
5854 particularly common with bit field assignments generated by the C++ front end. */
5856 nop_def = get_def_for_expr (exp, NOP_EXPR);
5859 tree type = TREE_TYPE (exp);
5860 if (INTEGRAL_TYPE_P (type)
5861 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5862 && bitsize == TYPE_PRECISION (type))
5864 tree op = gimple_assign_rhs1 (nop_def);
5865 type = TREE_TYPE (op);
5866 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5871 temp = expand_normal (exp);
5873 /* If BITSIZE is narrower than the size of the type of EXP
5874 we will be narrowing TEMP. Normally, what's wanted are the
5875 low-order bits. However, if EXP's type is a record and this is a
5876 big-endian machine, we want the upper BITSIZE bits. */
5877 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5878 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5879 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5880 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5881 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5885 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5887 if (mode != VOIDmode && mode != BLKmode
5888 && mode != TYPE_MODE (TREE_TYPE (exp)))
5889 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5891 /* If the modes of TEMP and TARGET are both BLKmode, both
5892 must be in memory and BITPOS must be aligned on a byte
5893 boundary. If so, we simply do a block copy. Likewise
5894 for a BLKmode-like TARGET. */
5895 if (GET_MODE (temp) == BLKmode
5896 && (GET_MODE (target) == BLKmode
5898 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5899 && (bitpos % BITS_PER_UNIT) == 0
5900 && (bitsize % BITS_PER_UNIT) == 0)))
5902 gcc_assert (MEM_P (target) && MEM_P (temp)
5903 && (bitpos % BITS_PER_UNIT) == 0);
5905 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5906 emit_block_move (target, temp,
5907 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5914 /* Store the value in the bitfield. */
5915 store_bit_field (target, bitsize, bitpos, mode, temp);
5921 /* Now build a reference to just the desired component. */
5922 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5924 if (to_rtx == target)
5925 to_rtx = copy_rtx (to_rtx);
5927 if (!MEM_SCALAR_P (to_rtx))
5928 MEM_IN_STRUCT_P (to_rtx) = 1;
5929 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5930 set_mem_alias_set (to_rtx, alias_set);
5932 return store_expr (exp, to_rtx, 0, nontemporal);
5936 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5937 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5938 codes and find the ultimate containing object, which we return.
5940 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5941 bit position, and *PUNSIGNEDP to the signedness of the field.
5942 If the position of the field is variable, we store a tree
5943 giving the variable offset (in units) in *POFFSET.
5944 This offset is in addition to the bit position.
5945 If the position is not variable, we store 0 in *POFFSET.
5947 If any of the extraction expressions is volatile,
5948 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5950 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5951 Otherwise, it is a mode that can be used to access the field.
5953 If the field describes a variable-sized object, *PMODE is set to
5954 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5955 this case, but the address of the object can be found.
5957 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5958 look through nodes that serve as markers of a greater alignment than
5959 the one that can be deduced from the expression. These nodes make it
5960 possible for front-ends to prevent temporaries from being created by
5961 the middle-end on alignment considerations. For that purpose, the
5962 normal operating mode at high-level is to always pass FALSE so that
5963 the ultimate containing object is really returned; moreover, the
5964 associated predicate handled_component_p will always return TRUE
5965 on these nodes, thus indicating that they are essentially handled
5966 by get_inner_reference. TRUE should only be passed when the caller
5967 is scanning the expression in order to build another representation
5968 and specifically knows how to handle these nodes; as such, this is
5969 the normal operating mode in the RTL expanders. */
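/* E.g. (illustrative, assuming 32-bit int and no padding): for s.y in

       struct { int x; short y; } s;

   this returns the decl S with *PBITSIZE == 16, *PBITPOS == 32,
   *POFFSET == 0 and *PUNSIGNEDP == 0.  */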
5972 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5973 HOST_WIDE_INT *pbitpos, tree *poffset,
5974 enum machine_mode *pmode, int *punsignedp,
5975 int *pvolatilep, bool keep_aligning)
5978 enum machine_mode mode = VOIDmode;
5979 bool blkmode_bitfield = false;
5980 tree offset = size_zero_node;
5981 double_int bit_offset = double_int_zero;
5983 /* First get the mode, signedness, and size. We do this from just the
5984 outermost expression. */
5986 if (TREE_CODE (exp) == COMPONENT_REF)
5988 tree field = TREE_OPERAND (exp, 1);
5989 size_tree = DECL_SIZE (field);
5990 if (!DECL_BIT_FIELD (field))
5991 mode = DECL_MODE (field);
5992 else if (DECL_MODE (field) == BLKmode)
5993 blkmode_bitfield = true;
5994 else if (TREE_THIS_VOLATILE (exp)
5995 && flag_strict_volatile_bitfields > 0)
5996 /* Volatile bitfields should be accessed in the mode of the
5997 field's type, not the mode computed based on the bit
5999 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6001 *punsignedp = DECL_UNSIGNED (field);
6003 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6005 size_tree = TREE_OPERAND (exp, 1);
6006 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6007 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6009 /* For vector types, with the correct size of access, use the mode of the element type. */
6011 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6012 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6013 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6014 mode = TYPE_MODE (TREE_TYPE (exp));
6018 mode = TYPE_MODE (TREE_TYPE (exp));
6019 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6021 if (mode == BLKmode)
6022 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6024 *pbitsize = GET_MODE_BITSIZE (mode);
6029 if (! host_integerp (size_tree, 1))
6030 mode = BLKmode, *pbitsize = -1;
6032 *pbitsize = tree_low_cst (size_tree, 1);
6035 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6036 and find the ultimate containing object. */
6039 switch (TREE_CODE (exp))
6043 = double_int_add (bit_offset,
6044 tree_to_double_int (TREE_OPERAND (exp, 2)));
6049 tree field = TREE_OPERAND (exp, 1);
6050 tree this_offset = component_ref_field_offset (exp);
6052 /* If this field hasn't been filled in yet, don't go past it.
6053 This should only happen when folding expressions made during
6054 type construction. */
6055 if (this_offset == 0)
6058 offset = size_binop (PLUS_EXPR, offset, this_offset);
6059 bit_offset = double_int_add (bit_offset,
6061 (DECL_FIELD_BIT_OFFSET (field)));
6063 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6068 case ARRAY_RANGE_REF:
6070 tree index = TREE_OPERAND (exp, 1);
6071 tree low_bound = array_ref_low_bound (exp);
6072 tree unit_size = array_ref_element_size (exp);
6074 /* We assume all arrays have sizes that are a multiple of a byte.
6075 First subtract the lower bound, if any, in the type of the
6076 index, then convert to sizetype and multiply by the size of
6077 the array element. */
6078 if (! integer_zerop (low_bound))
6079 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6082 offset = size_binop (PLUS_EXPR, offset,
6083 size_binop (MULT_EXPR,
6084 fold_convert (sizetype, index),
6093 bit_offset = double_int_add (bit_offset,
6094 uhwi_to_double_int (*pbitsize));
6097 case VIEW_CONVERT_EXPR:
6098 if (keep_aligning && STRICT_ALIGNMENT
6099 && (TYPE_ALIGN (TREE_TYPE (exp))
6100 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6101 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6102 < BIGGEST_ALIGNMENT)
6103 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6104 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6109 /* Hand back the decl for MEM[&decl, off]. */
6110 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6112 tree off = TREE_OPERAND (exp, 1);
6113 if (!integer_zerop (off))
6115 double_int boff, coff = mem_ref_offset (exp);
6116 boff = double_int_lshift (coff,
6118 ? 3 : exact_log2 (BITS_PER_UNIT),
6119 HOST_BITS_PER_DOUBLE_INT, true);
6120 bit_offset = double_int_add (bit_offset, boff);
6122 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6130 /* If any reference in the chain is volatile, the effect is volatile. */
6131 if (TREE_THIS_VOLATILE (exp))
6134 exp = TREE_OPERAND (exp, 0);
6138 /* If OFFSET is constant, see if we can return the whole thing as a
6139 constant bit position. Make sure to handle overflow during this conversion. */
6141 if (host_integerp (offset, 0))
6143 double_int tem = double_int_lshift (tree_to_double_int (offset),
6145 ? 3 : exact_log2 (BITS_PER_UNIT),
6146 HOST_BITS_PER_DOUBLE_INT, true);
6147 tem = double_int_add (tem, bit_offset);
6148 if (double_int_fits_in_shwi_p (tem))
6150 *pbitpos = double_int_to_shwi (tem);
6151 *poffset = offset = NULL_TREE;
6155 /* Otherwise, split it up. */
6158 *pbitpos = double_int_to_shwi (bit_offset);
6162 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6163 if (mode == VOIDmode
6165 && (*pbitpos % BITS_PER_UNIT) == 0
6166 && (*pbitsize % BITS_PER_UNIT) == 0)
6174 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6175 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6176 EXP are marked as PACKED. */
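/* E.g. (illustrative, GNU C): given

       struct __attribute__ ((packed)) P { char c; int i; } *p;

   the reference p->i names a field of a packed struct, so this
   returns true.  */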
6179 contains_packed_reference (const_tree exp)
6181 bool packed_p = false;
6185 switch (TREE_CODE (exp))
6189 tree field = TREE_OPERAND (exp, 1);
6190 packed_p = DECL_PACKED (field)
6191 || TYPE_PACKED (TREE_TYPE (field))
6192 || TYPE_PACKED (TREE_TYPE (exp));
6200 case ARRAY_RANGE_REF:
6203 case VIEW_CONVERT_EXPR:
6209 exp = TREE_OPERAND (exp, 0);
6215 /* Return a tree of sizetype representing the size, in bytes, of the element
6216 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6219 array_ref_element_size (tree exp)
6221 tree aligned_size = TREE_OPERAND (exp, 3);
6222 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6223 location_t loc = EXPR_LOCATION (exp);
6225 /* If a size was specified in the ARRAY_REF, it's the size measured
6226 in alignment units of the element type. So multiply by that value. */
if (aligned_size)
6229 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6230 sizetype from another type of the same width and signedness. */
6231 if (TREE_TYPE (aligned_size) != sizetype)
6232 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6233 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6234 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6237 /* Otherwise, take the size from that of the element type. Substitute
6238 any PLACEHOLDER_EXPR that we have. */
6240 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6243 /* Return a tree representing the lower bound of the array mentioned in
6244 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6247 array_ref_low_bound (tree exp)
6249 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6251 /* If a lower bound is specified in EXP, use it. */
6252 if (TREE_OPERAND (exp, 2))
6253 return TREE_OPERAND (exp, 2);
6255 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6256 substituting for a PLACEHOLDER_EXPR as needed. */
6257 if (domain_type && TYPE_MIN_VALUE (domain_type))
6258 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6260 /* Otherwise, return a zero of the appropriate type. */
6261 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6264 /* Return a tree representing the upper bound of the array mentioned in
6265 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6268 array_ref_up_bound (tree exp)
6270 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6272 /* If there is a domain type and it has an upper bound, use it, substituting
6273 for a PLACEHOLDER_EXPR as needed. */
6274 if (domain_type && TYPE_MAX_VALUE (domain_type))
6275 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6277 /* Otherwise fail. */
6281 /* Return a tree representing the offset, in bytes, of the field referenced
6282 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6285 component_ref_field_offset (tree exp)
6287 tree aligned_offset = TREE_OPERAND (exp, 2);
6288 tree field = TREE_OPERAND (exp, 1);
6289 location_t loc = EXPR_LOCATION (exp);
6291 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
6296 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6297 sizetype from another type of the same width and signedness. */
6298 if (TREE_TYPE (aligned_offset) != sizetype)
6299 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6300 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6301 size_int (DECL_OFFSET_ALIGN (field)
6305 /* Otherwise, take the offset from that of the field. Substitute
6306 any PLACEHOLDER_EXPR that we have. */
6308 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
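/* Editorial note with an example: for struct s { char c; int i; } x;
   the position of x.i is split between DECL_FIELD_OFFSET (a byte
   count, possibly scaled through operand 2 as above) and
   DECL_FIELD_BIT_OFFSET (the remaining bits).  This function returns
   only the former; callers such as get_inner_reference add in the
   bit part themselves.  */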
6311 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6313 static unsigned HOST_WIDE_INT
6314 target_align (const_tree target)
6316 /* We might have a chain of nested references with intermediate misaligning
bitfield components, so we need to recurse to find out.
6319 unsigned HOST_WIDE_INT this_align, outer_align;
6321 switch (TREE_CODE (target))
6327 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6328 outer_align = target_align (TREE_OPERAND (target, 0));
6329 return MIN (this_align, outer_align);
6332 case ARRAY_RANGE_REF:
6333 this_align = TYPE_ALIGN (TREE_TYPE (target));
6334 outer_align = target_align (TREE_OPERAND (target, 0));
6335 return MIN (this_align, outer_align);
6338 case NON_LVALUE_EXPR:
6339 case VIEW_CONVERT_EXPR:
6340 this_align = TYPE_ALIGN (TREE_TYPE (target));
6341 outer_align = target_align (TREE_OPERAND (target, 0));
6342 return MAX (this_align, outer_align);
6345 return TYPE_ALIGN (TREE_TYPE (target));
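/* Editorial example: for a store target such as x.f[i], the ARRAY_REF
   contributes TYPE_ALIGN of the element type, the COMPONENT_REF
   contributes DECL_ALIGN of F, and MIN caps the result at each step,
   so a 32-bit-aligned field inside a 128-bit-aligned object is
   reported as only 32-bit aligned.  */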
6350 /* Given an rtx VALUE that may contain additions and multiplications, return
6351 an equivalent value that just refers to a register, memory, or constant.
6352 This is done by generating instructions to perform the arithmetic and
6353 returning a pseudo-register containing the value.
6355 The returned value may be a REG, SUBREG, MEM or constant. */
6358 force_operand (rtx value, rtx target)
6361 /* Use subtarget as the target for operand 0 of a binary operation. */
6362 rtx subtarget = get_subtarget (target);
6363 enum rtx_code code = GET_CODE (value);
/* Check for a subreg applied to an expression produced by the loop optimizer. */
6367 && !REG_P (SUBREG_REG (value))
6368 && !MEM_P (SUBREG_REG (value)))
6371 = simplify_gen_subreg (GET_MODE (value),
6372 force_reg (GET_MODE (SUBREG_REG (value)),
6373 force_operand (SUBREG_REG (value),
6375 GET_MODE (SUBREG_REG (value)),
6376 SUBREG_BYTE (value));
6377 code = GET_CODE (value);
6380 /* Check for a PIC address load. */
6381 if ((code == PLUS || code == MINUS)
6382 && XEXP (value, 0) == pic_offset_table_rtx
6383 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6384 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6385 || GET_CODE (XEXP (value, 1)) == CONST))
6388 subtarget = gen_reg_rtx (GET_MODE (value));
6389 emit_move_insn (subtarget, value);
6393 if (ARITHMETIC_P (value))
6395 op2 = XEXP (value, 1);
6396 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6398 if (code == MINUS && CONST_INT_P (op2))
6401 op2 = negate_rtx (GET_MODE (value), op2);
6404 /* Check for an addition with OP2 a constant integer and our first
6405 operand a PLUS of a virtual register and something else. In that
6406 case, we want to emit the sum of the virtual register and the
6407 constant first and then add the other value. This allows virtual
6408 register instantiation to simply modify the constant rather than
6409 creating another one around this addition. */
6410 if (code == PLUS && CONST_INT_P (op2)
6411 && GET_CODE (XEXP (value, 0)) == PLUS
6412 && REG_P (XEXP (XEXP (value, 0), 0))
6413 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6414 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6416 rtx temp = expand_simple_binop (GET_MODE (value), code,
6417 XEXP (XEXP (value, 0), 0), op2,
6418 subtarget, 0, OPTAB_LIB_WIDEN);
6419 return expand_simple_binop (GET_MODE (value), code, temp,
6420 force_operand (XEXP (XEXP (value,
6422 target, 0, OPTAB_LIB_WIDEN);
6425 op1 = force_operand (XEXP (value, 0), subtarget);
6426 op2 = force_operand (op2, NULL_RTX);
6430 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6432 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6433 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6434 target, 1, OPTAB_LIB_WIDEN);
6436 return expand_divmod (0,
6437 FLOAT_MODE_P (GET_MODE (value))
6438 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6439 GET_MODE (value), op1, op2, target, 0);
6441 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6444 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6447 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6450 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6451 target, 0, OPTAB_LIB_WIDEN);
6453 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6454 target, 1, OPTAB_LIB_WIDEN);
6457 if (UNARY_P (value))
6460 target = gen_reg_rtx (GET_MODE (value));
6461 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6468 case FLOAT_TRUNCATE:
6469 convert_move (target, op1, code == ZERO_EXTEND);
6474 expand_fix (target, op1, code == UNSIGNED_FIX);
6478 case UNSIGNED_FLOAT:
6479 expand_float (target, op1, code == UNSIGNED_FLOAT);
6483 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6487 #ifdef INSN_SCHEDULING
/* On machines that have insn scheduling, we want all memory references to be
6489 explicit, so we need to deal with such paradoxical SUBREGs. */
6490 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6491 && (GET_MODE_SIZE (GET_MODE (value))
6492 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6494 = simplify_gen_subreg (GET_MODE (value),
6495 force_reg (GET_MODE (SUBREG_REG (value)),
6496 force_operand (SUBREG_REG (value),
6498 GET_MODE (SUBREG_REG (value)),
6499 SUBREG_BYTE (value));
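/* Editorial example: passed (plus:SI (reg:SI 60) (const_int 4)),
   force_operand emits the addition via expand_simple_binop and
   returns the pseudo register holding the sum, so a caller that is
   building a memory address never sees a bare arithmetic rtx.  */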
6505 /* Subroutine of expand_expr: return nonzero iff there is no way that
6506 EXP can reference X, which is being modified. TOP_P is nonzero if this
6507 call is going to be used to determine whether we need a temporary
6508 for EXP, as opposed to a recursive call to this function.
6510 It is always safe for this routine to return zero since it merely
6511 searches for optimization opportunities. */
6514 safe_from_p (const_rtx x, tree exp, int top_p)
6520 /* If EXP has varying size, we MUST use a target since we currently
6521 have no way of allocating temporaries of variable size
6522 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6523 So we assume here that something at a higher level has prevented a
6524 clash. This is somewhat bogus, but the best we can do. Only
6525 do this when X is BLKmode and when we are at the top level. */
6526 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6527 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6528 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6529 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6530 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6532 && GET_MODE (x) == BLKmode)
6533 /* If X is in the outgoing argument area, it is always safe. */
6535 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6536 || (GET_CODE (XEXP (x, 0)) == PLUS
6537 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
/* If this is a subreg of a hard register, declare it unsafe; otherwise,
6541 find the underlying pseudo. */
6542 if (GET_CODE (x) == SUBREG)
6545 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6549 /* Now look at our tree code and possibly recurse. */
6550 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6552 case tcc_declaration:
6553 exp_rtl = DECL_RTL_IF_SET (exp);
6559 case tcc_exceptional:
6560 if (TREE_CODE (exp) == TREE_LIST)
6564 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6566 exp = TREE_CHAIN (exp);
6569 if (TREE_CODE (exp) != TREE_LIST)
6570 return safe_from_p (x, exp, 0);
6573 else if (TREE_CODE (exp) == CONSTRUCTOR)
6575 constructor_elt *ce;
6576 unsigned HOST_WIDE_INT idx;
6578 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
6579 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6580 || !safe_from_p (x, ce->value, 0))
6584 else if (TREE_CODE (exp) == ERROR_MARK)
6585 return 1; /* An already-visited SAVE_EXPR? */
/* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
6592 return (TREE_CODE (exp) != DECL_EXPR
6593 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6594 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6595 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6598 case tcc_comparison:
6599 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6604 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6606 case tcc_expression:
6609 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6610 the expression. If it is set, we conflict iff we are that rtx or
6611 both are in memory. Otherwise, we check all operands of the
6612 expression recursively. */
6614 switch (TREE_CODE (exp))
6617 /* If the operand is static or we are static, we can't conflict.
6618 Likewise if we don't conflict with the operand at all. */
6619 if (staticp (TREE_OPERAND (exp, 0))
6620 || TREE_STATIC (exp)
6621 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6624 /* Otherwise, the only way this can conflict is if we are taking
the address of a DECL whose address is part of X, which is
very rare. */
6627 exp = TREE_OPERAND (exp, 0);
6630 if (!DECL_RTL_SET_P (exp)
6631 || !MEM_P (DECL_RTL (exp)))
6634 exp_rtl = XEXP (DECL_RTL (exp), 0);
6640 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6641 get_alias_set (exp)))
/* Assume that the call will clobber all hard registers and all of memory. */
6648 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6653 case WITH_CLEANUP_EXPR:
6654 case CLEANUP_POINT_EXPR:
6655 /* Lowered by gimplify.c. */
6659 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6665 /* If we have an rtx, we do not need to scan our operands. */
6669 nops = TREE_OPERAND_LENGTH (exp);
6670 for (i = 0; i < nops; i++)
6671 if (TREE_OPERAND (exp, i) != 0
6672 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6678 /* Should never get a type here. */
/* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
6686 if (GET_CODE (exp_rtl) == SUBREG)
6688 exp_rtl = SUBREG_REG (exp_rtl);
6690 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6694 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6695 are memory and they conflict. */
6696 return ! (rtx_equal_p (x, exp_rtl)
6697 || (MEM_P (x) && MEM_P (exp_rtl)
6698 && true_dependence (exp_rtl, VOIDmode, x,
6699 rtx_addr_varies_p)));
6702 /* If we reach here, it is safe. */
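/* Editorial example: when expanding B = B + 1 with X being B's own
   MEM, safe_from_p (X, <B + 1>, 0) reaches the VAR_DECL, finds
   exp_rtl equal to X, and returns 0, so the expander computes the
   sum into a temporary first.  As the comment at the top says,
   returning 0 is always the conservative answer.  */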
6707 /* Return the highest power of two that EXP is known to be a multiple of.
6708 This is used in updating alignment of MEMs in array references. */
6710 unsigned HOST_WIDE_INT
6711 highest_pow2_factor (const_tree exp)
6713 unsigned HOST_WIDE_INT c0, c1;
6715 switch (TREE_CODE (exp))
6718 /* We can find the lowest bit that's a one. If the low
6719 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6720 We need to handle this case since we can find it in a COND_EXPR,
6721 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
6724 if (TREE_OVERFLOW (exp))
6725 return BIGGEST_ALIGNMENT;
/* Note: tree_low_cst is intentionally not used here, since
6729 we don't care about the upper bits. */
6730 c0 = TREE_INT_CST_LOW (exp);
6732 return c0 ? c0 : BIGGEST_ALIGNMENT;
6736 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6737 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6738 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6739 return MIN (c0, c1);
6742 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6743 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6746 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6748 if (integer_pow2p (TREE_OPERAND (exp, 1))
6749 && host_integerp (TREE_OPERAND (exp, 1), 1))
6751 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6752 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6753 return MAX (1, c0 / c1);
6758 /* The highest power of two of a bit-and expression is the maximum of
6759 that of its operands. We typically get here for a complex LHS and
6760 a constant negative power of two on the RHS to force an explicit
6761 alignment, so don't bother looking at the LHS. */
6762 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6766 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6769 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6772 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6773 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6774 return MIN (c0, c1);
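/* Editorial example: for the index expression I * 12 + 8 with I not
   constant, the MULT_EXPR case yields 1 * 4 = 4 (the default case
   returns 1 for I, and the lowest set bit of 12 is 4), the
   INTEGER_CST 8 yields 8, and the PLUS_EXPR case returns
   MIN (4, 8) = 4: every value of the expression is a multiple
   of 4.  */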
6783 /* Similar, except that the alignment requirements of TARGET are
6784 taken into account. Assume it is at least as aligned as its
6785 type, unless it is a COMPONENT_REF in which case the layout of
6786 the structure gives the alignment. */
6788 static unsigned HOST_WIDE_INT
6789 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6791 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6792 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6794 return MAX (factor, talign);
6797 /* Subroutine of expand_expr. Expand the two operands of a binary
6798 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6799 The value may be stored in TARGET if TARGET is nonzero. The
6800 MODIFIER argument is as documented by expand_expr. */
6803 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6804 enum expand_modifier modifier)
6806 if (! safe_from_p (target, exp1, 1))
6808 if (operand_equal_p (exp0, exp1, 0))
6810 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6811 *op1 = copy_rtx (*op0);
6815 /* If we need to preserve evaluation order, copy exp0 into its own
6816 temporary variable so that it can't be clobbered by exp1. */
6817 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6818 exp0 = save_expr (exp0);
6819 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6820 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
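/* Editorial example: for X + X, operand_equal_p detects the identical
   operands, so the subexpression is expanded only once and *OP1 is
   simply copy_rtx (*OP0) instead of the result of a second,
   redundant expansion.  */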
6825 /* Return a MEM that contains constant EXP. DEFER is as for
6826 output_constant_def and MODIFIER is as for expand_expr. */
6829 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6833 mem = output_constant_def (exp, defer);
6834 if (modifier != EXPAND_INITIALIZER)
6835 mem = use_anchored_address (mem);
6839 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6840 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6843 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6844 enum expand_modifier modifier, addr_space_t as)
6846 rtx result, subtarget;
6848 HOST_WIDE_INT bitsize, bitpos;
6849 int volatilep, unsignedp;
6850 enum machine_mode mode1;
6852 /* If we are taking the address of a constant and are at the top level,
we have to use output_constant_def since we can't call force_const_mem yet. */
6855 /* ??? This should be considered a front-end bug. We should not be
6856 generating ADDR_EXPR of something that isn't an LVALUE. The only
6857 exception here is STRING_CST. */
6858 if (CONSTANT_CLASS_P (exp))
6859 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6861 /* Everything must be something allowed by is_gimple_addressable. */
6862 switch (TREE_CODE (exp))
6865 /* This case will happen via recursion for &a->b. */
6866 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6870 tree tem = TREE_OPERAND (exp, 0);
6871 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6872 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6874 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6875 return expand_expr (tem, target, tmode, modifier);
6879 /* Expand the initializer like constants above. */
6880 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6883 /* The real part of the complex number is always first, therefore
6884 the address is the same as the address of the parent object. */
6887 inner = TREE_OPERAND (exp, 0);
6891 /* The imaginary part of the complex number is always second.
The expression is therefore always offset by the size of the scalar type. */
6895 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6896 inner = TREE_OPERAND (exp, 0);
6900 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6901 expand_expr, as that can have various side effects; LABEL_DECLs for
6902 example, may not have their DECL_RTL set yet. Expand the rtl of
6903 CONSTRUCTORs too, which should yield a memory reference for the
6904 constructor's contents. Assume language specific tree nodes can
6905 be expanded in some interesting way. */
6906 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6908 || TREE_CODE (exp) == CONSTRUCTOR
6909 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6911 result = expand_expr (exp, target, tmode,
6912 modifier == EXPAND_INITIALIZER
6913 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6915 /* If the DECL isn't in memory, then the DECL wasn't properly
6916 marked TREE_ADDRESSABLE, which will be either a front-end
6917 or a tree optimizer bug. */
6918 gcc_assert (MEM_P (result));
6919 result = XEXP (result, 0);
6921 /* ??? Is this needed anymore? */
if (DECL_P (exp) && TREE_USED (exp))
6924 assemble_external (exp);
6925 TREE_USED (exp) = 1;
6928 if (modifier != EXPAND_INITIALIZER
6929 && modifier != EXPAND_CONST_ADDRESS)
6930 result = force_operand (result, target);
6934 /* Pass FALSE as the last argument to get_inner_reference although
6935 we are expanding to RTL. The rationale is that we know how to
6936 handle "aligning nodes" here: we can just bypass them because
6937 they won't change the final object whose address will be returned
6938 (they actually exist only for that purpose). */
6939 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6940 &mode1, &unsignedp, &volatilep, false);
6944 /* We must have made progress. */
6945 gcc_assert (inner != exp);
6947 subtarget = offset || bitpos ? NULL_RTX : target;
6948 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6949 inner alignment, force the inner to be sufficiently aligned. */
6950 if (CONSTANT_CLASS_P (inner)
6951 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6953 inner = copy_node (inner);
6954 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6955 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6956 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6958 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6964 if (modifier != EXPAND_NORMAL)
6965 result = force_operand (result, NULL);
6966 tmp = expand_expr (offset, NULL_RTX, tmode,
6967 modifier == EXPAND_INITIALIZER
6968 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6970 result = convert_memory_address_addr_space (tmode, result, as);
6971 tmp = convert_memory_address_addr_space (tmode, tmp, as);
6973 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6974 result = gen_rtx_PLUS (tmode, result, tmp);
6977 subtarget = bitpos ? NULL_RTX : target;
6978 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6979 1, OPTAB_LIB_WIDEN);
6985 /* Someone beforehand should have rejected taking the address
6986 of such an object. */
6987 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6989 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6990 if (modifier < EXPAND_SUM)
6991 result = force_operand (result, target);
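/* Editorial example: for &__imag__ C with C of type _Complex double,
   the IMAGPART_EXPR case above sets bitpos to the bitsize of double;
   the recursive call returns the address of C, and the plus_constant
   just above adds bitpos / BITS_PER_UNIT, i.e. 8 bytes on a target
   with 64-bit double.  */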
6997 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6998 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7001 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7002 enum expand_modifier modifier)
7004 addr_space_t as = ADDR_SPACE_GENERIC;
7005 enum machine_mode address_mode = Pmode;
7006 enum machine_mode pointer_mode = ptr_mode;
7007 enum machine_mode rmode;
7010 /* Target mode of VOIDmode says "whatever's natural". */
7011 if (tmode == VOIDmode)
7012 tmode = TYPE_MODE (TREE_TYPE (exp));
7014 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7016 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7017 address_mode = targetm.addr_space.address_mode (as);
7018 pointer_mode = targetm.addr_space.pointer_mode (as);
7021 /* We can get called with some Weird Things if the user does silliness
7022 like "(short) &a". In that case, convert_memory_address won't do
7023 the right thing, so ignore the given target mode. */
7024 if (tmode != address_mode && tmode != pointer_mode)
7025 tmode = address_mode;
7027 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7028 tmode, modifier, as);
/* Despite expand_expr's claims about ignoring TMODE when not
strictly convenient, things break if we don't honor it. Note
7032 that combined with the above, we only do this for pointer modes. */
7033 rmode = GET_MODE (result);
7034 if (rmode == VOIDmode)
7037 result = convert_memory_address_addr_space (tmode, result, as);
7042 /* Generate code for computing CONSTRUCTOR EXP.
7043 An rtx for the computed value is returned. If AVOID_TEMP_MEM
is TRUE, instead of creating a temporary variable in memory,
7045 NULL is returned and the caller needs to handle it differently. */
7048 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7049 bool avoid_temp_mem)
7051 tree type = TREE_TYPE (exp);
7052 enum machine_mode mode = TYPE_MODE (type);
7054 /* Try to avoid creating a temporary at all. This is possible
7055 if all of the initializer is zero.
FIXME: try to handle all [0..255] initializers we can handle with memset. */
7058 if (TREE_STATIC (exp)
7059 && !TREE_ADDRESSABLE (exp)
7060 && target != 0 && mode == BLKmode
7061 && all_zeros_p (exp))
7063 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7067 /* All elts simple constants => refer to a constant in memory. But
7068 if this is a non-BLKmode mode, let it store a field at a time
7069 since that should make a CONST_INT or CONST_DOUBLE when we
7070 fold. Likewise, if we have a target we can use, it is best to
7071 store directly into the target unless the type is large enough
7072 that memcpy will be used. If we are making an initializer and
7073 all operands are constant, put it in memory as well.
7075 FIXME: Avoid trying to fill vector constructors piece-meal.
7076 Output them with output_constant_def below unless we're sure
7077 they're zeros. This should go away when vector initializers
7078 are treated like VECTOR_CST instead of arrays. */
7079 if ((TREE_STATIC (exp)
7080 && ((mode == BLKmode
7081 && ! (target != 0 && safe_from_p (target, exp, 1)))
7082 || TREE_ADDRESSABLE (exp)
7083 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7084 && (! MOVE_BY_PIECES_P
7085 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7087 && ! mostly_zeros_p (exp))))
7088 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7089 && TREE_CONSTANT (exp)))
7096 constructor = expand_expr_constant (exp, 1, modifier);
7098 if (modifier != EXPAND_CONST_ADDRESS
7099 && modifier != EXPAND_INITIALIZER
7100 && modifier != EXPAND_SUM)
7101 constructor = validize_mem (constructor);
7106 /* Handle calls that pass values in multiple non-contiguous
7107 locations. The Irix 6 ABI has examples of this. */
7108 if (target == 0 || ! safe_from_p (target, exp, 1)
7109 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7115 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7116 | (TREE_READONLY (exp)
7117 * TYPE_QUAL_CONST))),
7118 0, TREE_ADDRESSABLE (exp), 1);
7121 store_constructor (exp, target, 0, int_expr_size (exp));
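/* Editorial example: for a local aggregate initialized to all zeros,
   such as struct s x = { 0 }, the early exit above collapses the
   whole store into a single clear_storage call, while a constructor
   of nonzero constants is instead placed in the constant pool by
   expand_expr_constant and copied from there.  */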
7126 /* expand_expr: generate code for computing expression EXP.
7127 An rtx for the computed value is returned. The value is never null.
7128 In the case of a void EXP, const0_rtx is returned.
7130 The value may be stored in TARGET if TARGET is nonzero.
7131 TARGET is just a suggestion; callers must assume that
7132 the rtx returned may not be the same as TARGET.
7134 If TARGET is CONST0_RTX, it means that the value will be ignored.
7136 If TMODE is not VOIDmode, it suggests generating the
7137 result in mode TMODE. But this is done only when convenient.
7138 Otherwise, TMODE is ignored and the value generated in its natural mode.
7139 TMODE is just a suggestion; callers must assume that
7140 the rtx returned may not have mode TMODE.
7142 Note that TARGET may have neither TMODE nor MODE. In that case, it
7143 probably will not be used.
7145 If MODIFIER is EXPAND_SUM then when EXP is an addition
7146 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7147 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7148 products as above, or REG or MEM, or constant.
7149 Ordinarily in such cases we would output mul or add instructions
7150 and then return a pseudo reg containing the sum.
7152 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7153 it also marks a label as absolutely required (it can't be dead).
7154 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7155 This is used for outputting expressions used in initializers.
7157 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7158 with a constant address even if that address is not normally legitimate.
7159 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7161 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7162 a call parameter. Such targets require special care as we haven't yet
7163 marked TARGET so that it's safe from being trashed by libcalls. We
7164 don't want to use TARGET for anything but the final result;
intermediate values must go elsewhere. Additionally, calls to
7166 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7168 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7169 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7170 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7171 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7175 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7176 enum expand_modifier modifier, rtx *alt_rtl)
7180 /* Handle ERROR_MARK before anybody tries to access its type. */
7181 if (TREE_CODE (exp) == ERROR_MARK
7182 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7184 ret = CONST0_RTX (tmode);
7185 return ret ? ret : const0_rtx;
7188 /* If this is an expression of some kind and it has an associated line
7189 number, then emit the line number before expanding the expression.
7191 We need to save and restore the file and line information so that
7192 errors discovered during expansion are emitted with the right
information. It would be better if the diagnostic routines
used the file/line information embedded in the tree nodes rather
than globals. */
7196 if (cfun && EXPR_HAS_LOCATION (exp))
7198 location_t saved_location = input_location;
7199 location_t saved_curr_loc = get_curr_insn_source_location ();
7200 tree saved_block = get_curr_insn_block ();
7201 input_location = EXPR_LOCATION (exp);
7202 set_curr_insn_source_location (input_location);
7204 /* Record where the insns produced belong. */
7205 set_curr_insn_block (TREE_BLOCK (exp));
7207 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7209 input_location = saved_location;
7210 set_curr_insn_block (saved_block);
7211 set_curr_insn_source_location (saved_curr_loc);
7215 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7222 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7223 enum expand_modifier modifier)
7225 rtx op0, op1, op2, temp;
7228 enum machine_mode mode;
7229 enum tree_code code = ops->code;
7231 rtx subtarget, original_target;
7233 bool reduce_bit_field;
7234 location_t loc = ops->location;
7235 tree treeop0, treeop1, treeop2;
7236 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7237 ? reduce_to_bit_field_precision ((expr), \
7243 mode = TYPE_MODE (type);
7244 unsignedp = TYPE_UNSIGNED (type);
7250 /* We should be called only on simple (binary or unary) expressions,
7251 exactly those that are valid in gimple expressions that aren't
7252 GIMPLE_SINGLE_RHS (or invalid). */
7253 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7254 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7255 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7257 ignore = (target == const0_rtx
7258 || ((CONVERT_EXPR_CODE_P (code)
7259 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7260 && TREE_CODE (type) == VOID_TYPE));
7262 /* We should be called only if we need the result. */
7263 gcc_assert (!ignore);
7265 /* An operation in what may be a bit-field type needs the
7266 result to be reduced to the precision of the bit-field type,
7267 which is narrower than that of the type's mode. */
7268 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7269 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
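/* Editorial example: a C bit-field such as int x : 3 is computed in
   SImode, whose precision exceeds TYPE_PRECISION (type) == 3, so
   reduce_bit_field is set and the REDUCE_BIT_FIELD macro truncates
   each result back to 3 bits (masking if unsigned, sign-extending if
   signed) before the excess bits can leak into wider contexts.  */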
7271 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7274 /* Use subtarget as the target for operand 0 of a binary operation. */
7275 subtarget = get_subtarget (target);
7276 original_target = target;
7280 case NON_LVALUE_EXPR:
7283 if (treeop0 == error_mark_node)
7286 if (TREE_CODE (type) == UNION_TYPE)
7288 tree valtype = TREE_TYPE (treeop0);
7290 /* If both input and output are BLKmode, this conversion isn't doing
anything except possibly changing memory attributes. */
7292 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7294 rtx result = expand_expr (treeop0, target, tmode,
7297 result = copy_rtx (result);
7298 set_mem_attributes (result, type, 0);
7304 if (TYPE_MODE (type) != BLKmode)
7305 target = gen_reg_rtx (TYPE_MODE (type));
7307 target = assign_temp (type, 0, 1, 1);
7311 /* Store data into beginning of memory target. */
7312 store_expr (treeop0,
7313 adjust_address (target, TYPE_MODE (valtype), 0),
7314 modifier == EXPAND_STACK_PARM,
7319 gcc_assert (REG_P (target));
7321 /* Store this field into a union of the proper type. */
7322 store_field (target,
7323 MIN ((int_size_in_bytes (TREE_TYPE
7326 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7327 0, TYPE_MODE (valtype), treeop0,
7331 /* Return the entire union. */
7335 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7337 op0 = expand_expr (treeop0, target, VOIDmode,
7340 /* If the signedness of the conversion differs and OP0 is
7341 a promoted SUBREG, clear that indication since we now
7342 have to do the proper extension. */
7343 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7344 && GET_CODE (op0) == SUBREG)
7345 SUBREG_PROMOTED_VAR_P (op0) = 0;
7347 return REDUCE_BIT_FIELD (op0);
7350 op0 = expand_expr (treeop0, NULL_RTX, mode,
7351 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7352 if (GET_MODE (op0) == mode)
7355 /* If OP0 is a constant, just convert it into the proper mode. */
7356 else if (CONSTANT_P (op0))
7358 tree inner_type = TREE_TYPE (treeop0);
7359 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7361 if (modifier == EXPAND_INITIALIZER)
7362 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7363 subreg_lowpart_offset (mode,
op0 = convert_modes (mode, inner_mode, op0,
7367 TYPE_UNSIGNED (inner_type));
7370 else if (modifier == EXPAND_INITIALIZER)
7371 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7373 else if (target == 0)
7374 op0 = convert_to_mode (mode, op0,
7375 TYPE_UNSIGNED (TREE_TYPE
7379 convert_move (target, op0,
7380 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7384 return REDUCE_BIT_FIELD (op0);
7386 case ADDR_SPACE_CONVERT_EXPR:
7388 tree treeop0_type = TREE_TYPE (treeop0);
7390 addr_space_t as_from;
7392 gcc_assert (POINTER_TYPE_P (type));
7393 gcc_assert (POINTER_TYPE_P (treeop0_type));
7395 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7396 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7398 /* Conversions between pointers to the same address space should
7399 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7400 gcc_assert (as_to != as_from);
7402 /* Ask target code to handle conversion between pointers
7403 to overlapping address spaces. */
7404 if (targetm.addr_space.subset_p (as_to, as_from)
7405 || targetm.addr_space.subset_p (as_from, as_to))
7407 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7408 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7413 /* For disjoint address spaces, converting anything but
7414 a null pointer invokes undefined behaviour. We simply
7415 always return a null pointer here. */
7416 return CONST0_RTX (mode);
7419 case POINTER_PLUS_EXPR:
/* Even though the sizetype mode and the pointer's mode can be different,
7421 expand is able to handle this correctly and get the correct result out
7422 of the PLUS_EXPR code. */
7423 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7424 if sizetype precision is smaller than pointer precision. */
7425 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7426 treeop1 = fold_convert_loc (loc, type,
7427 fold_convert_loc (loc, ssizetype,
7430 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7431 something else, make sure we add the register to the constant and
7432 then to the other thing. This case can occur during strength
7433 reduction and doing it this way will produce better code if the
7434 frame pointer or argument pointer is eliminated.
7436 fold-const.c will ensure that the constant is always in the inner
7437 PLUS_EXPR, so the only case we need to do anything about is if
7438 sp, ap, or fp is our second argument, in which case we must swap
7439 the innermost first argument and our second argument. */
7441 if (TREE_CODE (treeop0) == PLUS_EXPR
7442 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7443 && TREE_CODE (treeop1) == VAR_DECL
7444 && (DECL_RTL (treeop1) == frame_pointer_rtx
7445 || DECL_RTL (treeop1) == stack_pointer_rtx
7446 || DECL_RTL (treeop1) == arg_pointer_rtx))
7450 treeop1 = TREE_OPERAND (treeop0, 0);
7451 TREE_OPERAND (treeop0, 0) = t;
7454 /* If the result is to be ptr_mode and we are adding an integer to
7455 something, we might be forming a constant. So try to use
7456 plus_constant. If it produces a sum and we can't accept it,
7457 use force_operand. This allows P = &ARR[const] to generate
7458 efficient code on machines where a SYMBOL_REF is not a valid
7461 If this is an EXPAND_SUM call, always return the sum. */
7462 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7463 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7465 if (modifier == EXPAND_STACK_PARM)
7467 if (TREE_CODE (treeop0) == INTEGER_CST
7468 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7469 && TREE_CONSTANT (treeop1))
7473 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7475 /* Use immed_double_const to ensure that the constant is
7476 truncated according to the mode of OP1, then sign extended
7477 to a HOST_WIDE_INT. Using the constant directly can result
7478 in non-canonical RTL in a 64x32 cross compile. */
7480 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7482 TYPE_MODE (TREE_TYPE (treeop1)));
7483 op1 = plus_constant (op1, INTVAL (constant_part));
7484 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7485 op1 = force_operand (op1, target);
7486 return REDUCE_BIT_FIELD (op1);
7489 else if (TREE_CODE (treeop1) == INTEGER_CST
7490 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7491 && TREE_CONSTANT (treeop0))
7495 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7496 (modifier == EXPAND_INITIALIZER
7497 ? EXPAND_INITIALIZER : EXPAND_SUM));
7498 if (! CONSTANT_P (op0))
7500 op1 = expand_expr (treeop1, NULL_RTX,
7501 VOIDmode, modifier);
7502 /* Return a PLUS if modifier says it's OK. */
7503 if (modifier == EXPAND_SUM
7504 || modifier == EXPAND_INITIALIZER)
7505 return simplify_gen_binary (PLUS, mode, op0, op1);
7508 /* Use immed_double_const to ensure that the constant is
7509 truncated according to the mode of OP1, then sign extended
7510 to a HOST_WIDE_INT. Using the constant directly can result
7511 in non-canonical RTL in a 64x32 cross compile. */
7513 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7515 TYPE_MODE (TREE_TYPE (treeop0)));
7516 op0 = plus_constant (op0, INTVAL (constant_part));
7517 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7518 op0 = force_operand (op0, target);
7519 return REDUCE_BIT_FIELD (op0);
7523 /* Use TER to expand pointer addition of a negated value
7524 as pointer subtraction. */
7525 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
7526 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
7527 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
7528 && TREE_CODE (treeop1) == SSA_NAME
7529 && TYPE_MODE (TREE_TYPE (treeop0))
7530 == TYPE_MODE (TREE_TYPE (treeop1)))
7532 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
7535 treeop1 = gimple_assign_rhs1 (def);
7541 /* No sense saving up arithmetic to be done
7542 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or zero-extend. */
7545 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7546 || mode != ptr_mode)
7548 expand_operands (treeop0, treeop1,
7549 subtarget, &op0, &op1, EXPAND_NORMAL);
7550 if (op0 == const0_rtx)
7552 if (op1 == const0_rtx)
7557 expand_operands (treeop0, treeop1,
7558 subtarget, &op0, &op1, modifier);
7559 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7563 /* For initializers, we are allowed to return a MINUS of two
symbolic constants. Here we handle all cases when both operands are constant. */
7566 /* Handle difference of two symbolic constants,
7567 for the sake of an initializer. */
7568 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7569 && really_constant_p (treeop0)
7570 && really_constant_p (treeop1))
7572 expand_operands (treeop0, treeop1,
7573 NULL_RTX, &op0, &op1, modifier);
7575 /* If the last operand is a CONST_INT, use plus_constant of
7576 the negated constant. Else make the MINUS. */
7577 if (CONST_INT_P (op1))
7578 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7580 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7583 /* No sense saving up arithmetic to be done
7584 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or zero-extend. */
7587 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7588 || mode != ptr_mode)
7591 expand_operands (treeop0, treeop1,
7592 subtarget, &op0, &op1, modifier);
7594 /* Convert A - const to A + (-const). */
7595 if (CONST_INT_P (op1))
7597 op1 = negate_rtx (mode, op1);
7598 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7603 case WIDEN_MULT_PLUS_EXPR:
7604 case WIDEN_MULT_MINUS_EXPR:
7605 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7606 op2 = expand_normal (treeop2);
7607 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7611 case WIDEN_MULT_EXPR:
7612 /* If first operand is constant, swap them.
7613 Thus the following special case checks need only
7614 check the second operand. */
7615 if (TREE_CODE (treeop0) == INTEGER_CST)
7622 /* First, check if we have a multiplication of one signed and one
7623 unsigned operand. */
7624 if (TREE_CODE (treeop1) != INTEGER_CST
7625 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7626 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7628 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7629 this_optab = usmul_widen_optab;
7630 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7632 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7634 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7635 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7638 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
7644 /* Check for a multiplication with matching signedness. */
7645 else if ((TREE_CODE (treeop1) == INTEGER_CST
7646 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7647 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7648 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7650 tree op0type = TREE_TYPE (treeop0);
7651 enum machine_mode innermode = TYPE_MODE (op0type);
7652 bool zextend_p = TYPE_UNSIGNED (op0type);
7653 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7654 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7656 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7657 && TREE_CODE (treeop0) != INTEGER_CST)
7659 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7661 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7663 temp = expand_widening_mult (mode, op0, op1, target,
7664 unsignedp, this_optab);
7665 return REDUCE_BIT_FIELD (temp);
7667 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7668 && innermode == word_mode)
7671 op0 = expand_normal (treeop0);
7672 if (TREE_CODE (treeop1) == INTEGER_CST)
7673 op1 = convert_modes (innermode, mode,
7674 expand_normal (treeop1), unsignedp);
7676 op1 = expand_normal (treeop1);
7677 temp = expand_binop (mode, other_optab, op0, op1, target,
7678 unsignedp, OPTAB_LIB_WIDEN);
7679 hipart = gen_highpart (innermode, temp);
7680 htem = expand_mult_highpart_adjust (innermode, hipart,
7684 emit_move_insn (hipart, htem);
7685 return REDUCE_BIT_FIELD (temp);
7689 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7690 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7691 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7692 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
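/* Editorial example: a WIDEN_MULT_EXPR taking two HImode operands to
   an SImode product maps onto the target's mulhisi3 / umulhisi3
   patterns through the widening optabs; when no such pattern exists,
   the fallback just above converts both operands to the wider type
   and emits an ordinary SImode multiply.  */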
7696 optab opt = fma_optab;
7699 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
7701 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
7703 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
7706 gcc_assert (fn != NULL_TREE);
7707 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
7708 return expand_builtin (call_expr, target, subtarget, mode, false);
7711 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
7712 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
7717 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
7720 op0 = expand_normal (gimple_assign_rhs1 (def0));
7721 op2 = expand_normal (gimple_assign_rhs1 (def2));
7724 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
7727 op0 = expand_normal (gimple_assign_rhs1 (def0));
7730 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
7733 op2 = expand_normal (gimple_assign_rhs1 (def2));
7737 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
7739 op2 = expand_normal (treeop2);
7740 op1 = expand_normal (treeop1);
7742 return expand_ternary_op (TYPE_MODE (type), opt,
7743 op0, op1, op2, target, 0);
7747 /* If this is a fixed-point operation, then we cannot use the code
7748 below because "expand_mult" doesn't support sat/no-sat fixed-point
7750 if (ALL_FIXED_POINT_MODE_P (mode))
7753 /* If first operand is constant, swap them.
7754 Thus the following special case checks need only
7755 check the second operand. */
7756 if (TREE_CODE (treeop0) == INTEGER_CST)
7763 /* Attempt to return something suitable for generating an
7764 indexed address, for machines that support that. */
7766 if (modifier == EXPAND_SUM && mode == ptr_mode
7767 && host_integerp (treeop1, 0))
7769 tree exp1 = treeop1;
7771 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7775 op0 = force_operand (op0, NULL_RTX);
7777 op0 = copy_to_mode_reg (mode, op0);
7779 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7780 gen_int_mode (tree_low_cst (exp1, 0),
7781 TYPE_MODE (TREE_TYPE (exp1)))));
7784 if (modifier == EXPAND_STACK_PARM)
7787 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7788 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7790 case TRUNC_DIV_EXPR:
7791 case FLOOR_DIV_EXPR:
7793 case ROUND_DIV_EXPR:
7794 case EXACT_DIV_EXPR:
7795 /* If this is a fixed-point operation, then we cannot use the code
7796 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7798 if (ALL_FIXED_POINT_MODE_P (mode))
7801 if (modifier == EXPAND_STACK_PARM)
7803 /* Possible optimization: compute the dividend with EXPAND_SUM
then, if the divisor is constant, optimize the case
7805 where some terms of the dividend have coeffs divisible by it. */
7806 expand_operands (treeop0, treeop1,
7807 subtarget, &op0, &op1, EXPAND_NORMAL);
7808 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7813 case TRUNC_MOD_EXPR:
7814 case FLOOR_MOD_EXPR:
7816 case ROUND_MOD_EXPR:
7817 if (modifier == EXPAND_STACK_PARM)
7819 expand_operands (treeop0, treeop1,
7820 subtarget, &op0, &op1, EXPAND_NORMAL);
7821 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7823 case FIXED_CONVERT_EXPR:
7824 op0 = expand_normal (treeop0);
7825 if (target == 0 || modifier == EXPAND_STACK_PARM)
7826 target = gen_reg_rtx (mode);
7828 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7829 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7830 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7831 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7833 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7836 case FIX_TRUNC_EXPR:
7837 op0 = expand_normal (treeop0);
7838 if (target == 0 || modifier == EXPAND_STACK_PARM)
7839 target = gen_reg_rtx (mode);
7840 expand_fix (target, op0, unsignedp);
7844 op0 = expand_normal (treeop0);
7845 if (target == 0 || modifier == EXPAND_STACK_PARM)
7846 target = gen_reg_rtx (mode);
7847 /* expand_float can't figure out what to do if FROM has VOIDmode.
7848 So give it the correct mode. With -O, cse will optimize this. */
7849 if (GET_MODE (op0) == VOIDmode)
7850 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7852 expand_float (target, op0,
7853 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7857 op0 = expand_expr (treeop0, subtarget,
7858 VOIDmode, EXPAND_NORMAL);
7859 if (modifier == EXPAND_STACK_PARM)
7861 temp = expand_unop (mode,
7862 optab_for_tree_code (NEGATE_EXPR, type,
7866 return REDUCE_BIT_FIELD (temp);
7869 op0 = expand_expr (treeop0, subtarget,
7870 VOIDmode, EXPAND_NORMAL);
7871 if (modifier == EXPAND_STACK_PARM)
7874 /* ABS_EXPR is not valid for complex arguments. */
7875 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7876 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7878 /* Unsigned abs is simply the operand. Testing here means we don't
7879 risk generating incorrect code below. */
7880 if (TYPE_UNSIGNED (type))
7883 return expand_abs (mode, op0, target, unsignedp,
7884 safe_from_p (target, treeop0, 1));
7888 target = original_target;
7890 || modifier == EXPAND_STACK_PARM
7891 || (MEM_P (target) && MEM_VOLATILE_P (target))
7892 || GET_MODE (target) != mode
7894 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7895 target = gen_reg_rtx (mode);
7896 expand_operands (treeop0, treeop1,
7897 target, &op0, &op1, EXPAND_NORMAL);
7899 /* First try to do it with a special MIN or MAX instruction.
If that does not win, use a conditional jump to select the proper value. */
7902 this_optab = optab_for_tree_code (code, type, optab_default);
7903 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
/* At this point, a MEM target is no longer useful; we will get better code without it. */
7911 if (! REG_P (target))
7912 target = gen_reg_rtx (mode);
7914 /* If op1 was placed in target, swap op0 and op1. */
7915 if (target != op0 && target == op1)
7922 /* We generate better code and avoid problems with op1 mentioning
7923 target by forcing op1 into a pseudo if it isn't a constant. */
7924 if (! CONSTANT_P (op1))
7925 op1 = force_reg (mode, op1);
7928 enum rtx_code comparison_code;
7931 if (code == MAX_EXPR)
7932 comparison_code = unsignedp ? GEU : GE;
7934 comparison_code = unsignedp ? LEU : LE;
7936 /* Canonicalize to comparisons against 0. */
7937 if (op1 == const1_rtx)
7939 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7940 or (a != 0 ? a : 1) for unsigned.
7941 For MIN we are safe converting (a <= 1 ? a : 1)
7942 into (a <= 0 ? a : 1) */
7943 cmpop1 = const0_rtx;
7944 if (code == MAX_EXPR)
7945 comparison_code = unsignedp ? NE : GT;
7947 if (op1 == constm1_rtx && !unsignedp)
7949 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7950 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7951 cmpop1 = const0_rtx;
7952 if (code == MIN_EXPR)
7953 comparison_code = LT;
7955 #ifdef HAVE_conditional_move
7956 /* Use a conditional move if possible. */
7957 if (can_conditionally_move_p (mode))
7961 /* ??? Same problem as in expmed.c: emit_conditional_move
7962 forces a stack adjustment via compare_from_rtx, and we
7963 lose the stack adjustment if the sequence we are about
7964 to create is discarded. */
7965 do_pending_stack_adjust ();
7969 /* Try to emit the conditional move. */
7970 insn = emit_conditional_move (target, comparison_code,
/* If we could do the conditional move, emit the sequence, and return. */
7979 rtx seq = get_insns ();
/* Otherwise discard the sequence and fall back to code with branches. */
7991 emit_move_insn (target, op0);
7993 temp = gen_label_rtx ();
7994 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
7995 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
7998 emit_move_insn (target, op1);
8003 op0 = expand_expr (treeop0, subtarget,
8004 VOIDmode, EXPAND_NORMAL);
8005 if (modifier == EXPAND_STACK_PARM)
8007 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8011 /* ??? Can optimize bitwise operations with one arg constant.
8012 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8013 and (a bitwise1 b) bitwise2 b (etc)
but that is probably not worthwhile. */
8016 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8017 boolean values when we want in all cases to compute both of them. In
8018 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8019 as actual zero-or-1 values and then bitwise anding. In cases where
8020 there cannot be any side effects, better code would be made by
8021 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8022 how to recognize those cases. */
8024 case TRUTH_AND_EXPR:
8025 code = BIT_AND_EXPR;
8030 code = BIT_IOR_EXPR;
8034 case TRUTH_XOR_EXPR:
8035 code = BIT_XOR_EXPR;
8041 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8042 || (GET_MODE_PRECISION (TYPE_MODE (type))
8043 == TYPE_PRECISION (type)));
8048 /* If this is a fixed-point operation, then we cannot use the code
8049 below because "expand_shift" doesn't support sat/no-sat fixed-point
8051 if (ALL_FIXED_POINT_MODE_P (mode))
8054 if (! safe_from_p (subtarget, treeop1, 1))
8056 if (modifier == EXPAND_STACK_PARM)
8058 op0 = expand_expr (treeop0, subtarget,
8059 VOIDmode, EXPAND_NORMAL);
8060 temp = expand_shift (code, mode, op0, treeop1, target,
8062 if (code == LSHIFT_EXPR)
8063 temp = REDUCE_BIT_FIELD (temp);
8066 /* Could determine the answer when only additive constants differ. Also,
8067 the addition of one can be handled by changing the condition. */
8074 case UNORDERED_EXPR:
8082 temp = do_store_flag (ops,
8083 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8084 tmode != VOIDmode ? tmode : mode);
8088 /* Use a compare and a jump for BLKmode comparisons, or for function
type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8092 || modifier == EXPAND_STACK_PARM
8093 || ! safe_from_p (target, treeop0, 1)
8094 || ! safe_from_p (target, treeop1, 1)
8095 /* Make sure we don't have a hard reg (such as function's return
8096 value) live across basic blocks, if not optimizing. */
8097 || (!optimize && REG_P (target)
8098 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8099 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8101 emit_move_insn (target, const0_rtx);
8103 op1 = gen_label_rtx ();
8104 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8106 emit_move_insn (target, const1_rtx);
8111 case TRUTH_NOT_EXPR:
8112 if (modifier == EXPAND_STACK_PARM)
8114 op0 = expand_expr (treeop0, target,
8115 VOIDmode, EXPAND_NORMAL);
8116 /* The parser is careful to generate TRUTH_NOT_EXPR
8117 only with operands that are always zero or one. */
8118 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8119 target, 1, OPTAB_LIB_WIDEN);
/* Get the rtx values of the operands. */
8125 op0 = expand_normal (treeop0);
8126 op1 = expand_normal (treeop1);
8129 target = gen_reg_rtx (TYPE_MODE (type));
/* Move the real (op0) and imaginary (op1) parts to their locations. */
8132 write_complex_part (target, op0, false);
8133 write_complex_part (target, op1, true);
8137 case WIDEN_SUM_EXPR:
8139 tree oprnd0 = treeop0;
8140 tree oprnd1 = treeop1;
8142 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8143 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8148 case REDUC_MAX_EXPR:
8149 case REDUC_MIN_EXPR:
8150 case REDUC_PLUS_EXPR:
8152 op0 = expand_normal (treeop0);
8153 this_optab = optab_for_tree_code (code, type, optab_default);
8154 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8159 case VEC_EXTRACT_EVEN_EXPR:
8160 case VEC_EXTRACT_ODD_EXPR:
8162 expand_operands (treeop0, treeop1,
8163 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8164 this_optab = optab_for_tree_code (code, type, optab_default);
8165 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8171 case VEC_INTERLEAVE_HIGH_EXPR:
8172 case VEC_INTERLEAVE_LOW_EXPR:
8174 expand_operands (treeop0, treeop1,
8175 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8176 this_optab = optab_for_tree_code (code, type, optab_default);
8177 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8183 case VEC_LSHIFT_EXPR:
8184 case VEC_RSHIFT_EXPR:
8186 target = expand_vec_shift_expr (ops, target);
8190 case VEC_UNPACK_HI_EXPR:
8191 case VEC_UNPACK_LO_EXPR:
8193 op0 = expand_normal (treeop0);
8194 this_optab = optab_for_tree_code (code, type, optab_default);
8195 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8201 case VEC_UNPACK_FLOAT_HI_EXPR:
8202 case VEC_UNPACK_FLOAT_LO_EXPR:
8204 op0 = expand_normal (treeop0);
/* The signedness is determined from the input operand. */
8206 this_optab = optab_for_tree_code (code,
8207 TREE_TYPE (treeop0),
8209 temp = expand_widen_pattern_expr
8210 (ops, op0, NULL_RTX, NULL_RTX,
8211 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8217 case VEC_WIDEN_MULT_HI_EXPR:
8218 case VEC_WIDEN_MULT_LO_EXPR:
8220 tree oprnd0 = treeop0;
8221 tree oprnd1 = treeop1;
8223 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8224 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8226 gcc_assert (target);
8230 case VEC_PACK_TRUNC_EXPR:
8231 case VEC_PACK_SAT_EXPR:
8232 case VEC_PACK_FIX_TRUNC_EXPR:
8233 mode = TYPE_MODE (TREE_TYPE (treeop0));
8240 /* Here to do an ordinary binary operator. */
8242 expand_operands (treeop0, treeop1,
8243 subtarget, &op0, &op1, EXPAND_NORMAL);
8245 this_optab = optab_for_tree_code (code, type, optab_default);
8247 if (modifier == EXPAND_STACK_PARM)
8249 temp = expand_binop (mode, this_optab, op0, op1, target,
8250 unsignedp, OPTAB_LIB_WIDEN);
8252 return REDUCE_BIT_FIELD (temp);
8254 #undef REDUCE_BIT_FIELD
8257 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8258 enum expand_modifier modifier, rtx *alt_rtl)
8260 rtx op0, op1, temp, decl_rtl;
8263 enum machine_mode mode;
8264 enum tree_code code = TREE_CODE (exp);
8266 rtx subtarget, original_target;
8269 bool reduce_bit_field;
8270 location_t loc = EXPR_LOCATION (exp);
8271 struct separate_ops ops;
8272 tree treeop0, treeop1, treeop2;
8273 tree ssa_name = NULL_TREE;
8276 type = TREE_TYPE (exp);
8277 mode = TYPE_MODE (type);
8278 unsignedp = TYPE_UNSIGNED (type);
8280 treeop0 = treeop1 = treeop2 = NULL_TREE;
8281 if (!VL_EXP_CLASS_P (exp))
8282 switch (TREE_CODE_LENGTH (code))
8285 case 3: treeop2 = TREE_OPERAND (exp, 2);
8286 case 2: treeop1 = TREE_OPERAND (exp, 1);
8287 case 1: treeop0 = TREE_OPERAND (exp, 0);
8297 ignore = (target == const0_rtx
8298 || ((CONVERT_EXPR_CODE_P (code)
8299 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8300 && TREE_CODE (type) == VOID_TYPE));
8302 /* An operation in what may be a bit-field type needs the
8303 result to be reduced to the precision of the bit-field type,
8304 which is narrower than that of the type's mode. */
8305 reduce_bit_field = (!ignore
8306 && TREE_CODE (type) == INTEGER_TYPE
8307 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
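/* For example, arithmetic on a 3-bit bit-field type is carried out in
its SImode container, whose 32-bit precision exceeds TYPE_PRECISION
== 3; the excess bits must afterwards be masked off or sign-extended
(see reduce_to_bit_field_precision below). */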
8309 /* If we are going to ignore this result, we need only do something
8310 if there is a side-effect somewhere in the expression. If there
8311 is, short-circuit the most common cases here. Note that we must
8312 not call expand_expr with anything but const0_rtx in case this
8313 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
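/* E.g. for an expression statement "(void) (a[i] = b);" we arrive here
with target == const0_rtx: only the embedded assignment needs
expanding, not a usable result value. */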
8317 if (! TREE_SIDE_EFFECTS (exp))
8318 return const0_rtx;
8320 /* Ensure we reference a volatile object even if value is ignored, but
8321 don't do this if all we are doing is taking its address. */
8322 if (TREE_THIS_VOLATILE (exp)
8323 && TREE_CODE (exp) != FUNCTION_DECL
8324 && mode != VOIDmode && mode != BLKmode
8325 && modifier != EXPAND_CONST_ADDRESS)
8327 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8329 temp = copy_to_reg (temp);
8333 if (TREE_CODE_CLASS (code) == tcc_unary
8334 || code == COMPONENT_REF || code == INDIRECT_REF)
8335 return expand_expr (treeop0, const0_rtx, VOIDmode,
8336 modifier);
8338 else if (TREE_CODE_CLASS (code) == tcc_binary
8339 || TREE_CODE_CLASS (code) == tcc_comparison
8340 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8342 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8343 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8346 else if (code == BIT_FIELD_REF)
8348 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8349 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8350 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8357 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8358 target = 0;
8360 /* Use subtarget as the target for operand 0 of a binary operation. */
8361 subtarget = get_subtarget (target);
8362 original_target = target;
8368 tree function = decl_function_context (exp);
8370 temp = label_rtx (exp);
8371 temp = gen_rtx_LABEL_REF (Pmode, temp);
8373 if (function != current_function_decl
8374 && function != 0)
8375 LABEL_REF_NONLOCAL_P (temp) = 1;
8377 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8382 /* ??? ivopts calls expander, without any preparation from
8383 out-of-ssa. So fake instructions as if this was an access to the
8384 base variable. This unnecessarily allocates a pseudo, see how we can
8385 reuse it, if partition base vars have it set already. */
8386 if (!currently_expanding_to_rtl)
8387 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8388 NULL);
8390 g = get_gimple_for_ssa_name (exp);
8391 /* For EXPAND_INITIALIZER try harder to get something simpler. */
8392 if (!g
8393 && modifier == EXPAND_INITIALIZER
8394 && !SSA_NAME_IS_DEFAULT_DEF (exp)
8395 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
8396 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
8397 g = SSA_NAME_DEF_STMT (exp);
8398 if (g)
8399 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8400 modifier, NULL);
8403 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8404 exp = SSA_NAME_VAR (ssa_name);
8405 goto expand_decl_rtl;
8409 /* If a static var's type was incomplete when the decl was written,
8410 but the type is complete now, lay out the decl now. */
8411 if (DECL_SIZE (exp) == 0
8412 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8413 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8414 layout_decl (exp, 0);
8416 /* ... fall through ... */
8420 decl_rtl = DECL_RTL (exp);
8422 gcc_assert (decl_rtl);
8423 decl_rtl = copy_rtx (decl_rtl);
8424 /* Record writes to register variables. */
8425 if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
8426 && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
8428 int i = REGNO (decl_rtl);
8429 int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
8432 SET_HARD_REG_BIT (crtl->asm_clobbers, i);
8438 /* Ensure the variable is marked as used even if it doesn't go through
8439 a parser. If it hasn't been used yet, write out an external
8440 definition. */
8441 if (! TREE_USED (exp))
8443 assemble_external (exp);
8444 TREE_USED (exp) = 1;
8447 /* Show we haven't gotten RTL for this yet. */
8450 /* Variables inherited from containing functions should have
8451 been lowered by this point. */
8452 context = decl_function_context (exp);
8453 gcc_assert (!context
8454 || context == current_function_decl
8455 || TREE_STATIC (exp)
8456 || DECL_EXTERNAL (exp)
8457 /* ??? C++ creates functions that are not TREE_STATIC. */
8458 || TREE_CODE (exp) == FUNCTION_DECL);
8460 /* This is the case of an array whose size is to be determined
8461 from its initializer, while the initializer is still being parsed.
8462 See expand_decl. */
8464 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8465 temp = validize_mem (decl_rtl);
8467 /* If DECL_RTL is memory, we are in the normal case and the
8468 address is not valid, get the address into a register. */
8470 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8472 if (alt_rtl)
8473 *alt_rtl = decl_rtl;
8474 decl_rtl = use_anchored_address (decl_rtl);
8475 if (modifier != EXPAND_CONST_ADDRESS
8476 && modifier != EXPAND_SUM
8477 && !memory_address_addr_space_p (DECL_MODE (exp),
8478 XEXP (decl_rtl, 0),
8479 MEM_ADDR_SPACE (decl_rtl)))
8480 temp = replace_equiv_address (decl_rtl,
8481 copy_rtx (XEXP (decl_rtl, 0)));
8484 /* If we got something, return it. But first, set the alignment
8485 if the address is a register. */
8488 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8489 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8494 /* If the mode of DECL_RTL does not match that of the decl, it
8495 must be a promoted value. We return a SUBREG of the wanted mode,
8496 but mark it so that we know that it was already extended. */
8497 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8499 enum machine_mode pmode;
8501 /* Get the signedness to be used for this variable. Ensure we get
8502 the same mode we got when the variable was declared. */
8503 if (code == SSA_NAME
8504 && (g = SSA_NAME_DEF_STMT (ssa_name))
8505 && gimple_code (g) == GIMPLE_CALL)
8506 pmode = promote_function_mode (type, mode, &unsignedp,
8507 TREE_TYPE
8508 (TREE_TYPE (gimple_call_fn (g))),
8509 2);
8510 else
8511 pmode = promote_decl_mode (exp, &unsignedp);
8512 gcc_assert (GET_MODE (decl_rtl) == pmode);
8514 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8515 SUBREG_PROMOTED_VAR_P (temp) = 1;
8516 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
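/* E.g. on a 64-bit target that promotes 32-bit variables to DImode
registers, DECL_RTL is a DImode REG even though DECL_MODE is SImode;
the lowpart SUBREG built above, with SUBREG_PROMOTED_VAR_P set, tells
later code the value is already extended so redundant conversions can
be skipped. */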
8523 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8524 TREE_INT_CST_HIGH (exp), mode);
8530 tree tmp = NULL_TREE;
8531 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8532 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8533 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8534 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8535 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8536 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8537 return const_vector_from_tree (exp);
8538 if (GET_MODE_CLASS (mode) == MODE_INT)
8540 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8542 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8544 if (!tmp)
8545 tmp = build_constructor_from_list (type,
8546 TREE_VECTOR_CST_ELTS (exp));
8547 return expand_expr (tmp, ignore ? const0_rtx : target,
8548 tmode, modifier);
8552 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8555 /* If optimized, generate immediate CONST_DOUBLE
8556 which will be turned into memory by reload if necessary.
8558 We used to force a register so that loop.c could see it. But
8559 this does not allow gen_* patterns to perform optimizations with
8560 the constants. It also produces two insns in cases like "x = 1.0;".
8561 On most machines, floating-point constants are not permitted in
8562 many insns, so we'd end up copying it to a register in any case.
8564 Now, we do the copying in expand_binop, if appropriate. */
8565 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8566 TYPE_MODE (TREE_TYPE (exp)));
8569 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8570 TYPE_MODE (TREE_TYPE (exp)));
8573 /* Handle evaluating a complex constant in a CONCAT target. */
8574 if (original_target && GET_CODE (original_target) == CONCAT)
8576 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8579 rtarg = XEXP (original_target, 0);
8580 itarg = XEXP (original_target, 1);
8582 /* Move the real and imaginary parts separately. */
8583 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8584 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8587 emit_move_insn (rtarg, op0);
8589 emit_move_insn (itarg, op1);
8591 return original_target;
8594 /* ... fall through ... */
8597 temp = expand_expr_constant (exp, 1, modifier);
8599 /* temp contains a constant address.
8600 On RISC machines where a constant address isn't valid,
8601 make some insns to get that address into a register. */
8602 if (modifier != EXPAND_CONST_ADDRESS
8603 && modifier != EXPAND_INITIALIZER
8604 && modifier != EXPAND_SUM
8605 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8606 MEM_ADDR_SPACE (temp)))
8607 return replace_equiv_address (temp,
8608 copy_rtx (XEXP (temp, 0)));
8613 tree val = treeop0;
8614 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8616 if (!SAVE_EXPR_RESOLVED_P (exp))
8618 /* We can indeed still hit this case, typically via builtin
8619 expanders calling save_expr immediately before expanding
8620 something. Assume this means that we only have to deal
8621 with non-BLKmode values. */
8622 gcc_assert (GET_MODE (ret) != BLKmode);
8624 val = build_decl (EXPR_LOCATION (exp),
8625 VAR_DECL, NULL, TREE_TYPE (exp));
8626 DECL_ARTIFICIAL (val) = 1;
8627 DECL_IGNORED_P (val) = 1;
8629 TREE_OPERAND (exp, 0) = treeop0;
8630 SAVE_EXPR_RESOLVED_P (exp) = 1;
8632 if (!CONSTANT_P (ret))
8633 ret = copy_to_reg (ret);
8634 SET_DECL_RTL (val, ret);
8642 /* If we don't need the result, just ensure we evaluate any
8643 subexpressions. */
8646 unsigned HOST_WIDE_INT idx;
8649 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8650 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8655 return expand_constructor (exp, target, modifier, false);
8657 case TARGET_MEM_REF:
8659 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8660 struct mem_address addr;
8663 get_address_description (exp, &addr);
8664 op0 = addr_for_mem_ref (&addr, as, true);
8665 op0 = memory_address_addr_space (mode, op0, as);
8666 temp = gen_rtx_MEM (mode, op0);
8667 set_mem_attributes (temp, exp, 0);
8668 set_mem_addr_space (temp, as);
8669 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8670 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8671 if (mode != BLKmode
8672 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8673 /* If the target does not have special handling for unaligned
8674 loads of this mode, then it can use regular moves for them. */
8675 && ((icode = optab_handler (movmisalign_optab, mode))
8676 != CODE_FOR_nothing))
8680 /* We've already validated the memory, and we're creating a
8681 new pseudo destination. The predicates really can't fail. */
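/* The movmisalign optab is the target's pattern for unaligned loads
and stores; on x86, for instance, it typically expands to
movups/movdqu for vector modes. */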
8682 reg = gen_reg_rtx (mode);
8684 /* Nor can the insn generator. */
8685 insn = GEN_FCN (icode) (reg, temp);
8686 gcc_assert (insn != NULL_RTX);
8696 addr_space_t as
8697 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8698 enum machine_mode address_mode;
8699 tree base = TREE_OPERAND (exp, 0);
8702 /* Handle expansion of non-aliased memory with non-BLKmode. That
8703 might end up in a register. */
8704 if (TREE_CODE (base) == ADDR_EXPR)
8706 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8708 base = TREE_OPERAND (base, 0);
8712 base = get_addr_base_and_unit_offset (base, &off);
8716 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8717 decl we must use bitfield operations. */
8718 if (DECL_P (base)
8719 && !TREE_ADDRESSABLE (base)
8720 && DECL_MODE (base) != BLKmode
8721 && DECL_RTL_SET_P (base)
8722 && !MEM_P (DECL_RTL (base)))
8726 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8727 && (GET_MODE_BITSIZE (DECL_MODE (base))
8728 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8729 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8730 TREE_TYPE (exp), base),
8731 target, tmode, modifier);
8732 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8733 bftype = TREE_TYPE (base);
8734 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8735 bftype = TREE_TYPE (exp);
8736 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8737 base,
8738 TYPE_SIZE (TREE_TYPE (exp)),
8739 bit_offset),
8740 target, tmode, modifier);
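/* E.g. reading a 4-byte field at byte offset 2 of an 8-byte
non-addressable struct living in a register becomes a BIT_FIELD_REF
of 32 bits at bit position 16, extracted with shifts and masks
instead of through memory. */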
8743 address_mode = targetm.addr_space.address_mode (as);
8744 base = TREE_OPERAND (exp, 0);
8745 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8747 tree mask = gimple_assign_rhs2 (def_stmt);
8748 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8749 gimple_assign_rhs1 (def_stmt), mask);
8750 TREE_OPERAND (exp, 0) = base;
8752 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8753 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8754 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
8755 op0 = memory_address_addr_space (address_mode, op0, as);
8756 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8758 rtx off
8759 = immed_double_int_const (mem_ref_offset (exp), address_mode);
8760 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8762 op0 = memory_address_addr_space (mode, op0, as);
8763 temp = gen_rtx_MEM (mode, op0);
8764 set_mem_attributes (temp, exp, 0);
8765 set_mem_addr_space (temp, as);
8766 if (TREE_THIS_VOLATILE (exp))
8767 MEM_VOLATILE_P (temp) = 1;
8768 if (mode != BLKmode
8769 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8770 /* If the target does not have special handling for unaligned
8771 loads of this mode, then it can use regular moves for them. */
8772 && ((icode = optab_handler (movmisalign_optab, mode))
8773 != CODE_FOR_nothing))
8777 /* We've already validated the memory, and we're creating a
8778 new pseudo destination. The predicates really can't fail. */
8779 reg = gen_reg_rtx (mode);
8781 /* Nor can the insn generator. */
8782 insn = GEN_FCN (icode) (reg, temp);
8793 tree array = treeop0;
8794 tree index = treeop1;
8796 /* Fold an expression like: "foo"[2].
8797 This is not done in fold so it won't happen inside &.
8798 Don't fold if this is for wide characters since it's too
8799 difficult to do correctly and this is a very rare case. */
8801 if (modifier != EXPAND_CONST_ADDRESS
8802 && modifier != EXPAND_INITIALIZER
8803 && modifier != EXPAND_MEMORY)
8805 tree t = fold_read_from_constant_string (exp);
8807 if (t)
8808 return expand_expr (t, target, tmode, modifier);
8811 /* If this is a constant index into a constant array,
8812 just get the value from the array. Handle both the cases when
8813 we have an explicit constructor and when our operand is a variable
8814 that was declared const. */
8816 if (modifier != EXPAND_CONST_ADDRESS
8817 && modifier != EXPAND_INITIALIZER
8818 && modifier != EXPAND_MEMORY
8819 && TREE_CODE (array) == CONSTRUCTOR
8820 && ! TREE_SIDE_EFFECTS (array)
8821 && TREE_CODE (index) == INTEGER_CST)
8823 unsigned HOST_WIDE_INT ix;
8826 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8827 field, value)
8828 if (tree_int_cst_equal (field, index))
8830 if (!TREE_SIDE_EFFECTS (value))
8831 return expand_expr (fold (value), target, tmode, modifier);
8836 else if (optimize >= 1
8837 && modifier != EXPAND_CONST_ADDRESS
8838 && modifier != EXPAND_INITIALIZER
8839 && modifier != EXPAND_MEMORY
8840 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8841 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8842 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8843 && const_value_known_p (array))
8845 if (TREE_CODE (index) == INTEGER_CST)
8847 tree init = DECL_INITIAL (array);
8849 if (TREE_CODE (init) == CONSTRUCTOR)
8851 unsigned HOST_WIDE_INT ix;
8854 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8855 field, value)
8856 if (tree_int_cst_equal (field, index))
8858 if (TREE_SIDE_EFFECTS (value))
8859 break;
8861 if (TREE_CODE (value) == CONSTRUCTOR)
8863 /* If VALUE is a CONSTRUCTOR, this
8864 optimization is only useful if
8865 this doesn't store the CONSTRUCTOR
8866 into memory. If it does, it is more
8867 efficient to just load the data from
8868 the array directly. */
8869 rtx ret = expand_constructor (value, target,
8870 modifier, true);
8871 if (ret == NULL_RTX)
8872 break;
8875 return expand_expr (fold (value), target, tmode,
8876 modifier);
8879 else if (TREE_CODE (init) == STRING_CST)
8881 tree index1 = index;
8882 tree low_bound = array_ref_low_bound (exp);
8883 index1 = fold_convert_loc (loc, sizetype,
8884 index1);
8886 /* Optimize the special case of a zero lower bound.
8888 We convert low_bound to sizetype to avoid some problems
8889 with constant folding. (E.g. suppose the lower bound is 1,
8890 and its mode is QI. Without the conversion, (ARRAY
8891 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8892 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8894 if (! integer_zerop (low_bound))
8895 index1 = size_diffop_loc (loc, index1,
8896 fold_convert_loc (loc, sizetype,
8897 low_bound));
8899 if (0 > compare_tree_int (index1,
8900 TREE_STRING_LENGTH (init)))
8902 tree type = TREE_TYPE (TREE_TYPE (init));
8903 enum machine_mode mode = TYPE_MODE (type);
8905 if (GET_MODE_CLASS (mode) == MODE_INT
8906 && GET_MODE_SIZE (mode) == 1)
8907 return gen_int_mode (TREE_STRING_POINTER (init)
8908 [TREE_INT_CST_LOW (index1)],
8909 mode);
8915 goto normal_inner_ref;
8918 /* If the operand is a CONSTRUCTOR, we can just extract the
8919 appropriate field if it is present. */
8920 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8922 unsigned HOST_WIDE_INT idx;
8925 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8926 idx, field, value)
8927 if (field == treeop1
8928 /* We can normally use the value of the field in the
8929 CONSTRUCTOR. However, if this is a bitfield in
8930 an integral mode that we can fit in a HOST_WIDE_INT,
8931 we must mask only the number of bits in the bitfield,
8932 since this is done implicitly by the constructor. If
8933 the bitfield does not meet either of those conditions,
8934 we can't do this optimization. */
8935 && (! DECL_BIT_FIELD (field)
8936 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8937 && (GET_MODE_BITSIZE (DECL_MODE (field))
8938 <= HOST_BITS_PER_WIDE_INT))))
8940 if (DECL_BIT_FIELD (field)
8941 && modifier == EXPAND_STACK_PARM)
8942 target = 0;
8943 op0 = expand_expr (value, target, tmode, modifier);
8944 if (DECL_BIT_FIELD (field))
8946 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8947 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8949 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8951 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8952 op0 = expand_and (imode, op0, op1, target);
8954 else
8956 tree count
8957 = build_int_cst (NULL_TREE,
8958 GET_MODE_BITSIZE (imode) - bitsize);
8960 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8961 target, 0);
8962 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8963 target, 0);
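/* I.e. an unsigned field is masked with (1 << bitsize) - 1 (7 for a
3-bit field), while a signed field is sign-extended by the shift pair
above: left then arithmetic right by GET_MODE_BITSIZE (imode)
- bitsize (29 for a 3-bit field in SImode). */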
8970 goto normal_inner_ref;
8973 case ARRAY_RANGE_REF:
8974 normal_inner_ref:
8976 enum machine_mode mode1, mode2;
8977 HOST_WIDE_INT bitsize, bitpos;
8978 tree offset;
8979 int volatilep = 0, must_force_mem;
8980 bool packedp = false;
8981 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8982 &mode1, &unsignedp, &volatilep, true);
8983 rtx orig_op0, memloc;
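/* E.g. for the COMPONENT_REF s.f, with f a 5-bit field at bit offset 3
of s, get_inner_reference returns TEM == s, BITSIZE == 5, BITPOS == 3
and a null OFFSET; the containing object is expanded first and the
field is then carved out of it below. */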
8985 /* If we got back the original object, something is wrong. Perhaps
8986 we are evaluating an expression too early. In any event, don't
8987 infinitely recurse. */
8988 gcc_assert (tem != exp);
8990 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
8991 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
8992 && DECL_PACKED (TREE_OPERAND (exp, 1))))
8993 packedp = true;
8995 /* If TEM's type is a union of variable size, pass TARGET to the inner
8996 computation, since it will need a temporary and TARGET is known
8997 to be safe to use for it. This occurs in unchecked conversion in Ada. */
8998 orig_op0 = op0
8999 = expand_expr (tem,
9000 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9001 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9002 != INTEGER_CST)
9003 && modifier != EXPAND_STACK_PARM
9004 ? target : NULL_RTX),
9006 (modifier == EXPAND_INITIALIZER
9007 || modifier == EXPAND_CONST_ADDRESS
9008 || modifier == EXPAND_STACK_PARM)
9009 ? modifier : EXPAND_NORMAL);
9012 /* If the bitfield is volatile, we want to access it in the
9013 field's mode, not the computed mode.
9014 If a MEM has VOIDmode (external with incomplete type),
9015 use BLKmode for it instead. */
9018 if (volatilep && flag_strict_volatile_bitfields > 0)
9019 op0 = adjust_address (op0, mode1, 0);
9020 else if (GET_MODE (op0) == VOIDmode)
9021 op0 = adjust_address (op0, BLKmode, 0);
9024 mode2
9025 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9027 /* If we have either an offset, a BLKmode result, or a reference
9028 outside the underlying object, we must force it to memory.
9029 Such a case can occur in Ada if we have unchecked conversion
9030 of an expression from a scalar type to an aggregate type or
9031 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9032 passed a partially uninitialized object or a view-conversion
9033 to a larger size. */
9034 must_force_mem = (offset
9036 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9038 /* Handle CONCAT first. */
9039 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9041 if (bitpos == 0
9042 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9043 return op0;
9044 if (bitpos == 0
9045 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9046 && bitsize)
9048 op0 = XEXP (op0, 0);
9049 mode2 = GET_MODE (op0);
9051 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9052 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9056 op0 = XEXP (op0, 1);
9058 mode2 = GET_MODE (op0);
9060 else
9061 /* Otherwise force into memory. */
9062 must_force_mem = 1;
9065 /* If this is a constant, put it in a register if it is a legitimate
9066 constant and we don't need a memory reference. */
9067 if (CONSTANT_P (op0)
9068 && mode2 != BLKmode
9069 && LEGITIMATE_CONSTANT_P (op0)
9070 && !must_force_mem)
9071 op0 = force_reg (mode2, op0);
9073 /* Otherwise, if this is a constant, try to force it to the constant
9074 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9075 is a legitimate constant. */
9076 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9077 op0 = validize_mem (memloc);
9079 /* Otherwise, if this is a constant or the object is not in memory
9080 and need be, put it there. */
9081 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9083 tree nt = build_qualified_type (TREE_TYPE (tem),
9084 (TYPE_QUALS (TREE_TYPE (tem))
9085 | TYPE_QUAL_CONST));
9086 memloc = assign_temp (nt, 1, 1, 1);
9087 emit_move_insn (memloc, op0);
9088 op0 = memloc;
9093 enum machine_mode address_mode;
9094 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9095 EXPAND_SUM);
9097 gcc_assert (MEM_P (op0));
9099 address_mode
9100 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9101 if (GET_MODE (offset_rtx) != address_mode)
9102 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9104 if (GET_MODE (op0) == BLKmode
9105 /* A constant address in OP0 can have VOIDmode, we must
9106 not try to call force_reg in that case. */
9107 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9109 && (bitpos % bitsize) == 0
9110 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9111 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9113 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9114 bitpos = 0;
9117 op0 = offset_address (op0, offset_rtx,
9118 highest_pow2_factor (offset));
9121 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9122 record its alignment as BIGGEST_ALIGNMENT. */
9123 if (MEM_P (op0) && bitpos == 0 && offset != 0
9124 && is_aligning_offset (offset, tem))
9125 set_mem_align (op0, BIGGEST_ALIGNMENT);
9127 /* Don't forget about volatility even if this is a bitfield. */
9128 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9130 if (op0 == orig_op0)
9131 op0 = copy_rtx (op0);
9133 MEM_VOLATILE_P (op0) = 1;
9136 /* In cases where an aligned union has an unaligned object
9137 as a field, we might be extracting a BLKmode value from
9138 an integer-mode (e.g., SImode) object. Handle this case
9139 by doing the extract into an object as wide as the field
9140 (which we know to be the width of a basic mode), then
9141 storing into memory, and changing the mode to BLKmode. */
9142 if (mode1 == VOIDmode
9143 || REG_P (op0) || GET_CODE (op0) == SUBREG
9144 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9145 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9146 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9147 && modifier != EXPAND_CONST_ADDRESS
9148 && modifier != EXPAND_INITIALIZER)
9149 /* If the field is volatile, we always want an aligned
9150 access. */
9151 || (volatilep && flag_strict_volatile_bitfields > 0)
9152 /* If the field isn't aligned enough to fetch as a memref,
9153 fetch it as a bit field. */
9154 || (mode1 != BLKmode
9155 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9156 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9158 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9159 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9160 && ((modifier == EXPAND_CONST_ADDRESS
9161 || modifier == EXPAND_INITIALIZER)
9163 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9164 || (bitpos % BITS_PER_UNIT != 0)))
9165 /* If the type and the field are a constant size and the
9166 size of the type isn't the same size as the bitfield,
9167 we must use bitfield operations. */
9168 || (bitsize >= 0
9169 && TYPE_SIZE (TREE_TYPE (exp))
9170 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9171 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9172 bitsize)))
9174 enum machine_mode ext_mode = mode;
9176 if (ext_mode == BLKmode
9177 && ! (target != 0 && MEM_P (op0)
9179 && bitpos % BITS_PER_UNIT == 0))
9180 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9182 if (ext_mode == BLKmode)
9184 if (target == 0)
9185 target = assign_temp (type, 0, 1, 1);
9190 /* In this case, BITPOS must start at a byte boundary and
9191 TARGET, if specified, must be a MEM. */
9192 gcc_assert (MEM_P (op0)
9193 && (!target || MEM_P (target))
9194 && !(bitpos % BITS_PER_UNIT));
9196 emit_block_move (target,
9197 adjust_address (op0, VOIDmode,
9198 bitpos / BITS_PER_UNIT),
9199 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9200 / BITS_PER_UNIT),
9201 (modifier == EXPAND_STACK_PARM
9202 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9207 op0 = validize_mem (op0);
9209 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9210 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9212 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9213 (modifier == EXPAND_STACK_PARM
9214 ? NULL_RTX : target),
9215 ext_mode, ext_mode);
9217 /* If the result is a record type and BITSIZE is narrower than
9218 the mode of OP0, an integral mode, and this is a big endian
9219 machine, we must put the field into the high-order bits. */
9220 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9221 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9222 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9223 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9224 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9225 - bitsize),
9226 op0, 1);
9228 /* If the result type is BLKmode, store the data into a temporary
9229 of the appropriate type, but with the mode corresponding to the
9230 mode for the data we have (op0's mode). It's tempting to make
9231 this a constant type, since we know it's only being stored once,
9232 but that can cause problems if we are taking the address of this
9233 COMPONENT_REF because the MEM of any reference via that address
9234 will have flags corresponding to the type, which will not
9235 necessarily be constant. */
9236 if (mode == BLKmode)
9238 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9241 /* If the reference doesn't use the alias set of its type,
9242 we cannot create the temporary using that type. */
9243 if (component_uses_parent_alias_set (exp))
9245 new_rtx = assign_stack_local (ext_mode, size, 0);
9246 set_mem_alias_set (new_rtx, get_alias_set (exp));
9248 else
9249 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9251 emit_move_insn (new_rtx, op0);
9252 op0 = copy_rtx (new_rtx);
9253 PUT_MODE (op0, BLKmode);
9254 set_mem_attributes (op0, exp, 1);
9260 /* If the result is BLKmode, use that to access the object
9261 now as well. */
9262 if (mode == BLKmode)
9263 mode1 = BLKmode;
9265 /* Get a reference to just this component. */
9266 if (modifier == EXPAND_CONST_ADDRESS
9267 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9268 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9270 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9272 if (op0 == orig_op0)
9273 op0 = copy_rtx (op0);
9275 set_mem_attributes (op0, exp, 0);
9276 if (REG_P (XEXP (op0, 0)))
9277 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9279 MEM_VOLATILE_P (op0) |= volatilep;
9280 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9281 || modifier == EXPAND_CONST_ADDRESS
9282 || modifier == EXPAND_INITIALIZER)
9283 return op0;
9284 else if (target == 0)
9285 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9287 convert_move (target, op0, unsignedp);
9288 return target;
9292 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9295 /* All valid uses of __builtin_va_arg_pack () are removed during
9296 inlining. */
9297 if (CALL_EXPR_VA_ARG_PACK (exp))
9298 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9300 tree fndecl = get_callee_fndecl (exp), attr;
9302 if (fndecl
9303 && (attr = lookup_attribute ("error",
9304 DECL_ATTRIBUTES (fndecl))) != NULL)
9305 error ("%Kcall to %qs declared with attribute error: %s",
9306 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9307 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9308 if (fndecl
9309 && (attr = lookup_attribute ("warning",
9310 DECL_ATTRIBUTES (fndecl))) != NULL)
9311 warning_at (tree_nonartificial_location (exp),
9312 0, "%Kcall to %qs declared with attribute warning: %s",
9313 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9314 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9316 /* Check for a built-in function. */
9317 if (fndecl && DECL_BUILT_IN (fndecl))
9319 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9320 return expand_builtin (exp, target, subtarget, tmode, ignore);
9323 return expand_call (exp, target, ignore);
9325 case VIEW_CONVERT_EXPR:
9326 op0 = NULL_RTX;
9328 /* If we are converting to BLKmode, try to avoid an intermediate
9329 temporary by fetching an inner memory reference. */
9330 if (mode == BLKmode
9331 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9332 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9333 && handled_component_p (treeop0))
9335 enum machine_mode mode1;
9336 HOST_WIDE_INT bitsize, bitpos;
9340 tree tem
9341 = get_inner_reference (treeop0, &bitsize, &bitpos,
9342 &offset, &mode1, &unsignedp, &volatilep,
9346 /* ??? We should work harder and deal with non-zero offsets. */
9347 if (!offset
9348 && (bitpos % BITS_PER_UNIT) == 0
9349 && bitsize >= 0
9350 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9352 /* See the normal_inner_ref case for the rationale. */
9353 orig_op0
9354 = expand_expr (tem,
9355 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9356 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9357 != INTEGER_CST)
9358 && modifier != EXPAND_STACK_PARM
9359 ? target : NULL_RTX),
9361 (modifier == EXPAND_INITIALIZER
9362 || modifier == EXPAND_CONST_ADDRESS
9363 || modifier == EXPAND_STACK_PARM)
9364 ? modifier : EXPAND_NORMAL);
9366 if (MEM_P (orig_op0))
9370 /* Get a reference to just this component. */
9371 if (modifier == EXPAND_CONST_ADDRESS
9372 || modifier == EXPAND_SUM
9373 || modifier == EXPAND_INITIALIZER)
9374 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9376 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9378 if (op0 == orig_op0)
9379 op0 = copy_rtx (op0);
9381 set_mem_attributes (op0, treeop0, 0);
9382 if (REG_P (XEXP (op0, 0)))
9383 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9385 MEM_VOLATILE_P (op0) |= volatilep;
9390 if (!op0)
9391 op0 = expand_expr (treeop0,
9392 NULL_RTX, VOIDmode, modifier);
9394 /* If the input and output modes are both the same, we are done. */
9395 if (mode == GET_MODE (op0))
9396 ;
9397 /* If neither mode is BLKmode, and both modes are the same size
9398 then we can use gen_lowpart. */
9399 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9400 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9401 && !COMPLEX_MODE_P (GET_MODE (op0)))
9403 if (GET_CODE (op0) == SUBREG)
9404 op0 = force_reg (GET_MODE (op0), op0);
9405 temp = gen_lowpart_common (mode, op0);
9406 if (temp)
9407 op0 = temp;
9408 else
9410 if (!REG_P (op0) && !MEM_P (op0))
9411 op0 = force_reg (GET_MODE (op0), op0);
9412 op0 = gen_lowpart (mode, op0);
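/* E.g. a VIEW_CONVERT_EXPR between float and a 32-bit integer type
performs no conversion at all: SFmode and SImode have the same size,
so gen_lowpart merely relabels the same bits. */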
9415 /* If both types are integral, convert from one mode to the other. */
9416 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9417 op0 = convert_modes (mode, GET_MODE (op0), op0,
9418 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9419 /* As a last resort, spill op0 to memory, and reload it in a
9421 else if (!MEM_P (op0))
9423 /* If the operand is not a MEM, force it into memory. Since we
9424 are going to be changing the mode of the MEM, don't call
9425 force_const_mem for constants because we don't allow pool
9426 constants to change mode. */
9427 tree inner_type = TREE_TYPE (treeop0);
9429 gcc_assert (!TREE_ADDRESSABLE (exp));
9431 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9432 target
9433 = assign_stack_temp_for_type
9434 (TYPE_MODE (inner_type),
9435 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9437 emit_move_insn (target, op0);
9438 op0 = target;
9441 /* At this point, OP0 is in the correct mode. If the output type is
9442 such that the operand is known to be aligned, indicate that it is.
9443 Otherwise, we need only be concerned about alignment for non-BLKmode
9444 results. */
9445 if (MEM_P (op0))
9447 op0 = copy_rtx (op0);
9449 if (TYPE_ALIGN_OK (type))
9450 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9451 else if (STRICT_ALIGNMENT
9453 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9455 tree inner_type = TREE_TYPE (treeop0);
9456 HOST_WIDE_INT temp_size
9457 = MAX (int_size_in_bytes (inner_type),
9458 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9459 rtx new_rtx
9460 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9461 rtx new_with_op0_mode
9462 = adjust_address (new_rtx, GET_MODE (op0), 0);
9464 gcc_assert (!TREE_ADDRESSABLE (exp));
9466 if (GET_MODE (op0) == BLKmode)
9467 emit_block_move (new_with_op0_mode, op0,
9468 GEN_INT (GET_MODE_SIZE (mode)),
9469 (modifier == EXPAND_STACK_PARM
9470 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9471 else
9472 emit_move_insn (new_with_op0_mode, op0);
9474 op0 = new_rtx;
9477 op0 = adjust_address (op0, mode, 0);
9482 /* Use a compare and a jump for BLKmode comparisons, or for function
9483 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9485 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9486 are occasionally created by folding during expansion. */
9487 case TRUTH_ANDIF_EXPR:
9488 case TRUTH_ORIF_EXPR:
9491 || modifier == EXPAND_STACK_PARM
9492 || ! safe_from_p (target, treeop0, 1)
9493 || ! safe_from_p (target, treeop1, 1)
9494 /* Make sure we don't have a hard reg (such as function's return
9495 value) live across basic blocks, if not optimizing. */
9496 || (!optimize && REG_P (target)
9497 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9498 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9501 emit_move_insn (target, const0_rtx);
9503 op1 = gen_label_rtx ();
9504 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9507 emit_move_insn (target, const1_rtx);
9510 return ignore ? const0_rtx : target;
9512 case STATEMENT_LIST:
9514 tree_stmt_iterator iter;
9516 gcc_assert (ignore);
9518 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9519 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9524 /* A COND_EXPR with its type being VOID_TYPE represents a
9525 conditional jump and is handled in
9526 expand_gimple_cond_expr. */
9527 gcc_assert (!VOID_TYPE_P (type));
9529 /* Note that COND_EXPRs whose type is a structure or union
9530 are required to be constructed to contain assignments of
9531 a temporary variable, so that we can evaluate them here
9532 for side effect only. If type is void, we must do likewise. */
9534 gcc_assert (!TREE_ADDRESSABLE (type)
9536 && TREE_TYPE (treeop1) != void_type_node
9537 && TREE_TYPE (treeop2) != void_type_node);
9539 /* If we are not to produce a result, we have no target. Otherwise,
9540 if a target was specified, use it; it will not be used as an
9541 intermediate target unless it is safe. If no target, use a
9542 temporary. */
9544 if (modifier != EXPAND_STACK_PARM
9546 && safe_from_p (original_target, treeop0, 1)
9547 && GET_MODE (original_target) == mode
9548 #ifdef HAVE_conditional_move
9549 && (! can_conditionally_move_p (mode)
9550 || REG_P (original_target))
9551 #endif
9552 && !MEM_P (original_target))
9553 temp = original_target;
9554 else
9555 temp = assign_temp (type, 0, 0, 1);
9557 do_pending_stack_adjust ();
9559 op0 = gen_label_rtx ();
9560 op1 = gen_label_rtx ();
9561 jumpifnot (treeop0, op0, -1);
9562 store_expr (treeop1, temp,
9563 modifier == EXPAND_STACK_PARM,
9564 false);
9566 emit_jump_insn (gen_jump (op1));
9569 store_expr (treeop2, temp,
9570 modifier == EXPAND_STACK_PARM,
9571 false);
9578 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9583 tree lhs = treeop0;
9584 tree rhs = treeop1;
9585 gcc_assert (ignore);
9587 /* Check for |= or &= of a bitfield of size one into another bitfield
9588 of size 1. In this case, (unless we need the result of the
9589 assignment) we can do this more efficiently with a
9590 test followed by an assignment, if necessary.
9592 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9593 things change so we do, this code should be enhanced to
9595 if (TREE_CODE (lhs) == COMPONENT_REF
9596 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9597 || TREE_CODE (rhs) == BIT_AND_EXPR)
9598 && TREE_OPERAND (rhs, 0) == lhs
9599 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9600 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9601 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9603 rtx label = gen_label_rtx ();
9604 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9605 do_jump (TREE_OPERAND (rhs, 1),
9607 value ? 0 : label, -1);
9608 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9609 MOVE_NONTEMPORAL (exp));
9610 do_pending_stack_adjust ();
9615 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9620 return expand_expr_addr_expr (exp, target, tmode, modifier);
9623 op0 = expand_normal (treeop0);
9624 return read_complex_part (op0, false);
9627 op0 = expand_normal (treeop0);
9628 return read_complex_part (op0, true);
9635 /* Expanded in cfgexpand.c. */
9638 case TRY_CATCH_EXPR:
9640 case EH_FILTER_EXPR:
9641 case TRY_FINALLY_EXPR:
9642 /* Lowered by tree-eh.c. */
9645 case WITH_CLEANUP_EXPR:
9646 case CLEANUP_POINT_EXPR:
9648 case CASE_LABEL_EXPR:
9654 case PREINCREMENT_EXPR:
9655 case PREDECREMENT_EXPR:
9656 case POSTINCREMENT_EXPR:
9657 case POSTDECREMENT_EXPR:
9660 /* Lowered by gimplify.c. */
9664 /* Function descriptors are not valid except as
9665 initialization constants, and should not be expanded. */
9668 case WITH_SIZE_EXPR:
9669 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9670 have pulled out the size to use in whatever context it needed. */
9671 return expand_expr_real (treeop0, original_target, tmode,
9672 modifier, alt_rtl);
9674 case REALIGN_LOAD_EXPR:
9676 tree oprnd0 = treeop0;
9677 tree oprnd1 = treeop1;
9678 tree oprnd2 = treeop2;
9681 this_optab = optab_for_tree_code (code, type, optab_default);
9682 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9683 op2 = expand_normal (oprnd2);
9684 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9685 target, unsignedp);
9692 tree oprnd0 = treeop0;
9693 tree oprnd1 = treeop1;
9694 tree oprnd2 = treeop2;
9697 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9698 op2 = expand_normal (oprnd2);
9699 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9700 target, unsignedp);
9704 case COMPOUND_LITERAL_EXPR:
9706 /* Initialize the anonymous variable declared in the compound
9707 literal, then return the variable. */
9708 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9710 /* Create RTL for this variable. */
9711 if (!DECL_RTL_SET_P (decl))
9713 if (DECL_HARD_REGISTER (decl))
9714 /* The user specified an assembler name for this variable.
9715 Set that up now. */
9716 rest_of_decl_compilation (decl, 0, 0);
9721 return expand_expr_real (decl, original_target, tmode,
9722 modifier, alt_rtl);
9726 return expand_expr_real_2 (&ops, target, tmode, modifier);
9730 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9731 signedness of TYPE), possibly returning the result in TARGET. */
9732 static rtx
9733 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9735 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9736 if (target && GET_MODE (target) != GET_MODE (exp))
9737 target = 0;
9738 /* For constant values, reduce using build_int_cst_type. */
9739 if (CONST_INT_P (exp))
9741 HOST_WIDE_INT value = INTVAL (exp);
9742 tree t = build_int_cst_type (type, value);
9743 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9745 else if (TYPE_UNSIGNED (type))
9747 rtx mask = immed_double_int_const (double_int_mask (prec),
9748 GET_MODE (exp));
9749 return expand_and (GET_MODE (exp), exp, mask, target);
9753 tree count = build_int_cst (NULL_TREE,
9754 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9755 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9756 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
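/* Worked example, reducing to a 3-bit type held in SImode: unsigned
values are masked with (1 << 3) - 1 == 7; signed values are shifted
left by 29 and then arithmetically right by 29, which replicates bit
2 through the 29 high-order bits. */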
9760 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9761 when applied to the address of EXP produces an address known to be
9762 aligned more than BIGGEST_ALIGNMENT. */
9764 static int
9765 is_aligning_offset (const_tree offset, const_tree exp)
9767 /* Strip off any conversions. */
9768 while (CONVERT_EXPR_P (offset))
9769 offset = TREE_OPERAND (offset, 0);
9771 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9772 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
9773 if (TREE_CODE (offset) != BIT_AND_EXPR
9774 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9775 || compare_tree_int (TREE_OPERAND (offset, 1),
9776 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9777 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9780 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9781 It must be NEGATE_EXPR. Then strip any more conversions. */
9782 offset = TREE_OPERAND (offset, 0);
9783 while (CONVERT_EXPR_P (offset))
9784 offset = TREE_OPERAND (offset, 0);
9786 if (TREE_CODE (offset) != NEGATE_EXPR)
9789 offset = TREE_OPERAND (offset, 0);
9790 while (CONVERT_EXPR_P (offset))
9791 offset = TREE_OPERAND (offset, 0);
9793 /* This must now be the address of EXP. */
9794 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
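/* In other words, OFFSET must have the shape (-(T) &EXP) & C, where
C + 1 is a power of 2: exactly the number of bytes that must be added
to &EXP to round it up to the next C + 1 boundary. */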
9797 /* Return the tree node if an ARG corresponds to a string constant or zero
9798 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9799 in bytes within the string that ARG is accessing. The type of the
9800 offset will be `sizetype'. */
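/* E.g. for ARG == "abc" + 1, a POINTER_PLUS_EXPR of the string's
ADDR_EXPR, the result is the STRING_CST "abc" and *PTR_OFFSET is set
to (sizetype) 1. */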
9802 tree
9803 string_constant (tree arg, tree *ptr_offset)
9805 tree array, offset, lower_bound;
9808 if (TREE_CODE (arg) == ADDR_EXPR)
9810 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9812 *ptr_offset = size_zero_node;
9813 return TREE_OPERAND (arg, 0);
9815 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9817 array = TREE_OPERAND (arg, 0);
9818 offset = size_zero_node;
9820 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9822 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9823 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9824 if (TREE_CODE (array) != STRING_CST
9825 && TREE_CODE (array) != VAR_DECL)
9828 /* Check if the array has a nonzero lower bound. */
9829 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9830 if (!integer_zerop (lower_bound))
9832 /* If the offset and lower bound aren't both constants, return 0. */
9833 if (TREE_CODE (lower_bound) != INTEGER_CST)
9834 return 0;
9835 if (TREE_CODE (offset) != INTEGER_CST)
9836 return 0;
9837 /* Adjust offset by the lower bound. */
9838 offset = size_diffop (fold_convert (sizetype, offset),
9839 fold_convert (sizetype, lower_bound));
9845 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9847 tree arg0 = TREE_OPERAND (arg, 0);
9848 tree arg1 = TREE_OPERAND (arg, 1);
9853 if (TREE_CODE (arg0) == ADDR_EXPR
9854 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9855 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9857 array = TREE_OPERAND (arg0, 0);
9858 offset = arg1;
9860 else if (TREE_CODE (arg1) == ADDR_EXPR
9861 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9862 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9864 array = TREE_OPERAND (arg1, 0);
9865 offset = arg0;
9873 if (TREE_CODE (array) == STRING_CST)
9875 *ptr_offset = fold_convert (sizetype, offset);
9876 return array;
9878 else if (TREE_CODE (array) == VAR_DECL
9879 || TREE_CODE (array) == CONST_DECL)
9883 /* Variables initialized to string literals can be handled too. */
9884 if (!const_value_known_p (array)
9885 || !DECL_INITIAL (array)
9886 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9889 /* Avoid const char foo[4] = "abcde"; */
9890 if (DECL_SIZE_UNIT (array) == NULL_TREE
9891 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9892 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9893 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9896 /* If the variable is bigger than the string literal, OFFSET must be constant
9897 and within the bounds of the string literal. */
9898 offset = fold_convert (sizetype, offset);
9899 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9900 && (! host_integerp (offset, 1)
9901 || compare_tree_int (offset, length) >= 0))
9902 return 0;
9904 *ptr_offset = offset;
9905 return DECL_INITIAL (array);
9911 /* Generate code to calculate the comparison described by OPS (an
9912 exploded expression) using a store-flag instruction, and return
9913 an rtx for the result.
9915 If TARGET is nonzero, store the result there if convenient.
9917 Return zero if there is no suitable set-flag instruction
9918 available on this machine.
9920 Once expand_expr has been called on the arguments of the comparison,
9921 we are committed to doing the store flag, since it is not safe to
9922 re-evaluate the expression. We emit the store-flag insn by calling
9923 emit_store_flag, but only expand the arguments if we have a reason
9924 to believe that emit_store_flag will be successful. If we think that
9925 it will, but it isn't, we have to simulate the store-flag with a
9926 set/jump/set sequence. */
9928 static rtx
9929 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9932 tree arg0, arg1, type;
9934 enum machine_mode operand_mode;
9937 rtx subtarget = target;
9938 location_t loc = ops->location;
9940 arg0 = ops->op0;
9941 arg1 = ops->op1;
9943 /* Don't crash if the comparison was erroneous. */
9944 if (arg0 == error_mark_node || arg1 == error_mark_node)
9945 return const0_rtx;
9947 type = TREE_TYPE (arg0);
9948 operand_mode = TYPE_MODE (type);
9949 unsignedp = TYPE_UNSIGNED (type);
9951 /* We won't bother with BLKmode store-flag operations because it would mean
9952 passing a lot of information to emit_store_flag. */
9953 if (operand_mode == BLKmode)
9954 return 0;
9956 /* We won't bother with store-flag operations involving function pointers
9957 when function pointers must be canonicalized before comparisons. */
9958 #ifdef HAVE_canonicalize_funcptr_for_compare
9959 if (HAVE_canonicalize_funcptr_for_compare
9960 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9961 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9963 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9964 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9965 == FUNCTION_TYPE))))
9966 return 0;
9967 #endif
9972 /* Get the rtx comparison code to use. We know that EXP is a comparison
9973 operation of some type. Some comparisons against 1 and -1 can be
9974 converted to comparisons with zero. Do so here so that the tests
9975 below will be aware that we have a comparison with zero. These
9976 tests will not catch constants in the first operand, but constants
9977 are rarely passed as the first operand. */
9988 if (integer_onep (arg1))
9989 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9990 else
9991 code = unsignedp ? LTU : LT;
9994 if (! unsignedp && integer_all_onesp (arg1))
9995 arg1 = integer_zero_node, code = LT;
9996 else
9997 code = unsignedp ? LEU : LE;
10000 if (! unsignedp && integer_all_onesp (arg1))
10001 arg1 = integer_zero_node, code = GE;
10002 else
10003 code = unsignedp ? GTU : GT;
10006 if (integer_onep (arg1))
10007 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10008 else
10009 code = unsignedp ? GEU : GE;
10012 case UNORDERED_EXPR:
10013 code = UNORDERED;
10014 break;
10037 default:
10038 gcc_unreachable ();
10041 /* Put a constant second. */
10042 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10043 || TREE_CODE (arg0) == FIXED_CST)
10045 tem = arg0; arg0 = arg1; arg1 = tem;
10046 code = swap_condition (code);
10049 /* If this is an equality or inequality test of a single bit, we can
10050 do this by shifting the bit being tested to the low-order bit and
10051 masking the result with the constant 1. If the condition was EQ,
10052 we xor it with 1. This does not require an scc insn and is faster
10053 than an scc insn even if we have it.
10055 The code to make this transformation was moved into fold_single_bit_test,
10056 so we just call into the folder and expand its result. */
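/* E.g. (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0 becomes
((x >> 3) & 1) ^ 1. */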
10058 if ((code == NE || code == EQ)
10059 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10060 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10062 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10063 return expand_expr (fold_single_bit_test (loc,
10064 code == NE ? NE_EXPR : EQ_EXPR,
10065 arg0, arg1, type),
10066 target, VOIDmode, EXPAND_NORMAL);
10069 if (! get_subtarget (target)
10070 || GET_MODE (subtarget) != operand_mode)
10071 subtarget = 0;
10073 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10075 if (target == 0)
10076 target = gen_reg_rtx (mode);
10078 /* Try a cstore if possible. */
10079 return emit_store_flag_force (target, code, op0, op1,
10080 operand_mode, unsignedp, 1);
10084 /* Stubs in case we haven't got a casesi insn. */
10085 #ifndef HAVE_casesi
10086 # define HAVE_casesi 0
10087 # define gen_casesi(a, b, c, d, e) (0)
10088 # define CODE_FOR_casesi CODE_FOR_nothing
10091 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10092 0 otherwise (i.e. if there is no casesi instruction). */
10093 int
10094 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10095 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10096 rtx fallback_label ATTRIBUTE_UNUSED)
10098 enum machine_mode index_mode = SImode;
10099 int index_bits = GET_MODE_BITSIZE (index_mode);
10100 rtx op1, op2, index;
10101 enum machine_mode op_mode;
10106 /* Convert the index to SImode. */
10107 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10109 enum machine_mode omode = TYPE_MODE (index_type);
10110 rtx rangertx = expand_normal (range);
10112 /* We must handle the endpoints in the original mode. */
10113 index_expr = build2 (MINUS_EXPR, index_type,
10114 index_expr, minval);
10115 minval = integer_zero_node;
10116 index = expand_normal (index_expr);
10118 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10119 omode, 1, default_label);
10120 /* Now we can safely truncate. */
10121 index = convert_to_mode (index_mode, index, 0);
10125 if (TYPE_MODE (index_type) != index_mode)
10127 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10128 index_expr = fold_convert (index_type, index_expr);
10131 index = expand_normal (index_expr);
10134 do_pending_stack_adjust ();
10136 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10137 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10138 (index, op_mode))
10139 index = copy_to_mode_reg (op_mode, index);
10141 op1 = expand_normal (minval);
10143 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10144 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10145 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10146 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10147 (op1, op_mode))
10148 op1 = copy_to_mode_reg (op_mode, op1);
10150 op2 = expand_normal (range);
10152 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10153 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10154 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10155 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10156 (op2, op_mode))
10157 op2 = copy_to_mode_reg (op_mode, op2);
10159 emit_jump_insn (gen_casesi (index, op1, op2,
10160 table_label, !default_label
10161 ? fallback_label : default_label));
10162 return 1;
10165 /* Attempt to generate a tablejump instruction; same concept. */
10166 #ifndef HAVE_tablejump
10167 #define HAVE_tablejump 0
10168 #define gen_tablejump(x, y) (0)
10171 /* Subroutine of the next function.
10173 INDEX is the value being switched on, with the lowest value
10174 in the table already subtracted.
10175 MODE is its expected mode (needed if INDEX is constant).
10176 RANGE is the length of the jump table.
10177 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10179 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10180 index value is out of range. */
10182 static void
10183 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10188 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10189 cfun->cfg->max_jumptable_ents = INTVAL (range);
10191 /* Do an unsigned comparison (in the proper mode) between the index
10192 expression and the value which represents the length of the range.
10193 Since we just finished subtracting the lower bound of the range
10194 from the index expression, this comparison allows us to simultaneously
10195 check that the original index expression value is both greater than
10196 or equal to the minimum value of the range and less than or equal to
10197 the maximum value of the range. */
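/* E.g. for case values 5..10 the caller has already computed INDEX - 5
and passes RANGE == 5; an original value below 5 wraps around to a
huge unsigned number, so the single GTU test also rejects values
below the minimum. */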
10200 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10201 default_label);
10203 /* If index is in range, it must fit in Pmode.
10204 Convert to Pmode so we can index with it. */
10206 index = convert_to_mode (Pmode, index, 1);
10208 /* Don't let a MEM slip through, because then INDEX that comes
10209 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10210 and break_out_memory_refs will go to work on it and mess it up. */
10211 #ifdef PIC_CASE_VECTOR_ADDRESS
10212 if (flag_pic && !REG_P (index))
10213 index = copy_to_mode_reg (Pmode, index);
10214 #endif
10216 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10217 GET_MODE_SIZE, because this indicates how large insns are. The other
10218 uses should all be Pmode, because they are addresses. This code
10219 could fail if addresses and insns are not the same size. */
10220 index = gen_rtx_PLUS (Pmode,
10221 gen_rtx_MULT (Pmode, index,
10222 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10223 gen_rtx_LABEL_REF (Pmode, table_label));
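/* I.e. the address of entry I is TABLE_LABEL + I * GET_MODE_SIZE
(CASE_VECTOR_MODE): the MULT scales the index by the entry size and
the PLUS adds the table base. */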
10224 #ifdef PIC_CASE_VECTOR_ADDRESS
10225 if (flag_pic)
10226 index = PIC_CASE_VECTOR_ADDRESS (index);
10227 else
10228 #endif
10229 index = memory_address (CASE_VECTOR_MODE, index);
10230 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10231 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10232 convert_move (temp, vector, 0);
10234 emit_jump_insn (gen_tablejump (temp, table_label));
10236 /* If we are generating PIC code or if the table is PC-relative, the
10237 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10238 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10239 emit_barrier ();
10242 int
10243 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10244 rtx table_label, rtx default_label)
10248 if (! HAVE_tablejump)
10249 return 0;
10251 index_expr = fold_build2 (MINUS_EXPR, index_type,
10252 fold_convert (index_type, index_expr),
10253 fold_convert (index_type, minval));
10254 index = expand_normal (index_expr);
10255 do_pending_stack_adjust ();
10257 do_tablejump (index, TYPE_MODE (index_type),
10258 convert_modes (TYPE_MODE (index_type),
10259 TYPE_MODE (TREE_TYPE (range)),
10260 expand_normal (range),
10261 TYPE_UNSIGNED (TREE_TYPE (range))),
10262 table_label, default_label);
10263 return 1;
10266 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10267 static rtx
10268 const_vector_from_tree (tree exp)
10273 enum machine_mode inner, mode;
10275 mode = TYPE_MODE (TREE_TYPE (exp));
10277 if (initializer_zerop (exp))
10278 return CONST0_RTX (mode);
10280 units = GET_MODE_NUNITS (mode);
10281 inner = GET_MODE_INNER (mode);
10283 v = rtvec_alloc (units);
10285 link = TREE_VECTOR_CST_ELTS (exp);
10286 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10288 elt = TREE_VALUE (link);
10290 if (TREE_CODE (elt) == REAL_CST)
10291 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10293 else if (TREE_CODE (elt) == FIXED_CST)
10294 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10297 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10301 /* Initialize remaining elements to 0. */
10302 for (; i < units; ++i)
10303 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10305 return gen_rtx_CONST_VECTOR (mode, v);
10308 /* Build a decl for a personality function given a language prefix. */
10310 static tree
10311 build_personality_function (const char *lang)
10313 const char *unwind_and_version;
10317 switch (targetm.except_unwind_info (&global_options))
10322 unwind_and_version = "_sj0";
10326 unwind_and_version = "_v0";
10328 default:
10329 gcc_unreachable ();
10332 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
10334 type = build_function_type_list (integer_type_node, integer_type_node,
10335 long_long_unsigned_type_node,
10336 ptr_type_node, ptr_type_node, NULL_TREE);
10337 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10338 get_identifier (name), type);
10339 DECL_ARTIFICIAL (decl) = 1;
10340 DECL_EXTERNAL (decl) = 1;
10341 TREE_PUBLIC (decl) = 1;
10343 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10344 are the flags assigned by targetm.encode_section_info. */
10345 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10350 /* Extracts the personality function of DECL and returns the corresponding
10353 rtx
10354 get_personality_function (tree decl)
10356 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10357 enum eh_personality_kind pk;
10359 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10360 if (pk == eh_personality_none)
10361 return NULL;
10363 if (!personality
10364 && pk == eh_personality_any)
10365 personality = lang_hooks.eh_personality ();
10367 if (pk == eh_personality_lang)
10368 gcc_assert (personality != NULL_TREE);
10370 return XEXP (DECL_RTL (personality), 0);
10373 #include "gt-expr.h"