/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "common/common-target.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
					HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
			enum machine_mode, tree, alias_set_type, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
							      const_tree);
static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
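
/* Illustrative note (not part of the original source): all four
   heuristics above compare an instruction-count estimate against a
   target-tunable ratio.  For example, with word-aligned operands on a
   hypothetical 64-bit target whose widest usable move is DImode,

     move_by_pieces_ninsns (11, 64, MOVE_MAX_PIECES + 1) == 3

   (one DImode, one HImode and one QImode move), so
   MOVE_BY_PIECES_P (11, 64) holds whenever the target's MOVE_RATIO
   exceeds 3.  The numbers are hypothetical; real thresholds depend on
   the target's MOVE_MAX_PIECES and MOVE_RATIO definitions.  */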
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0
		   || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_PRECISION (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (icode, to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target so force it into an
	 isolated register when maybe so.  Likewise for any MEM input, since
	 the conversion sequence might require several references to it and
	 we must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  int shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = (GET_MODE_PRECISION (to_mode)
			  - GET_MODE_PRECISION (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
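
/* Illustrative usage sketch (not part of the original source):
   widening a SImode pseudo into DImode with zero-extension; the
   variable names are hypothetical.

     rtx si_reg = gen_reg_rtx (SImode);
     rtx di_val = convert_to_mode (DImode, si_reg, 1);

   Because UNSIGNEDP is 1, the conversion has ZERO_EXTEND semantics;
   passing 0 would sign-extend instead.  As documented above, the
   result may be a part of X referred to in place rather than a fresh
   register, so callers must not assume a new pseudo is returned.  */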
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x,
	       int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = double_int::from_uhwi (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
	val = val.zext (GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }
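
  /* Worked example (illustrative, not from the original source): on a
     host with a 32-bit HOST_WIDE_INT, converting the CONST_INT -1 to
     an unsigned 64-bit mode with OLDMODE a 32-bit mode must yield
     0x00000000ffffffff.  A plain gen_lowpart would sign-extend and
     produce 0xffffffffffffffff, i.e. all-ones in the high-order word,
     which is exactly the failure the code above guards against.  */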
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (CONST_DOUBLE_AS_INT_P (x)
	      || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION_MODES_P (mode,
							    GET_MODE (x))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= GET_MODE_MASK (oldmode);
	  if (! unsignedp
	      && val_signbit_known_set_p (oldmode, val))
	    val |= ~GET_MODE_MASK (oldmode);

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > max_pieces
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode narrower than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
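
/* Illustrative note (not part of the original source): because the
   loop in widest_int_mode_for_size keeps the last mode strictly
   narrower than SIZE, callers pass one past the limit they want.
   With the usual QI/HI/SI/DI integer modes,

     widest_int_mode_for_size (MOVE_MAX_PIECES + 1)

   yields DImode when MOVE_MAX_PIECES is 8, and on a host whose
   HOST_WIDE_INT is 64 bits STORE_MAX_PIECES evaluates to
   MIN (MOVE_MAX_PIECES, 16).  The value 8 is hypothetical; the real
   limit is whatever the target defines MOVE_MAX_PIECES to be.  */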
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode;
  enum machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode,
					     plus_constant (from_addr_mode,
							    from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode,
					   plus_constant (to_addr_mode,
							  to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (to_addr_mode,
								data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
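
/* Illustrative usage sketch (not part of the original source): this is
   how emit_block_move_hints below invokes the by-pieces path for a
   small constant-size copy; X and Y are BLKmode MEMs and the trailing
   0 asks for the destination block itself rather than an end pointer:

     if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   Passing endp == 1 would instead return the first byte past the copy
   (mempcpy-style), and endp == 2 the last byte written (stpcpy-style).  */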
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
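
/* Worked example (illustrative, not from the original source): for
   l == 11 on a target whose widest usable move is DImode and whose
   operands are sufficiently aligned, the loop counts 11/8 = 1 DImode
   move (3 bytes left), then 1 HImode move (1 byte left), then 1 QImode
   move, so the function returns 3.  This is the count that the
   MOVE_BY_PIECES_P heuristic compares against MOVE_RATIO.  */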
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
	 tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
	mark_addressable (y_expr);
      if (x_expr)
	mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
					    method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
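
/* Illustrative usage sketch (not part of the original source): a
   caller copying a 32-byte aggregate between two BLKmode MEMs, with
   DST and SRC being hypothetical names, would simply write

     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   and let the dispatch above pick between move_by_pieces, a movmem
   pattern, a memcpy libcall, or the fallback byte-copy loop.  */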
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align,
			    HOST_WIDE_INT expected_size)
{
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
	{
	  struct expand_operand ops[6];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6);

	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops == 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    {
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode = get_address_mode (x);
  enum machine_mode y_addr_mode = get_address_mode (y);
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label, REG_BR_PROB_BASE * 90 / 100);
}
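
/* Illustrative sketch (not part of the original source): the loop
   emitted above corresponds to this C shape, with ITER starting at
   zero and a bottom-of-loop compare so the body is skipped entirely
   for a zero SIZE:

     unsigned HOST_WIDE_INT iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter++;
   cmp:
     if (iter < size)
       goto top;

   The 90% of REG_BR_PROB_BASE marks the backward branch as likely
   taken, the usual static guess for a copy loop.  */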
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
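
/* Illustrative note (not part of the original source): such a group
   PARALLEL describes a value split across registers.  For example, an
   ABI returning a 16-byte struct in two DImode registers might use

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   value; gen_group_rtx clones the shape but with fresh pseudos.  The
   register numbers shown are hypothetical.  */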
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, false, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, false, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, false,
				     NULL_RTX, mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode));
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode));
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      shift, tmps[i], 0);
	    }
	  bytelen = adj_bytelen;
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 0, 0, mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  enum machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
2097 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2099 This is used on targets that return BLKmode values in registers. */
2102 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2104 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2105 rtx src = NULL, dst = NULL;
2106 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2107 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2108 enum machine_mode mode = GET_MODE (srcreg);
2109 enum machine_mode tmode = GET_MODE (target);
2110 enum machine_mode copy_mode;
2112 /* BLKmode registers created in the back-end shouldn't have survived. */
2113 gcc_assert (mode != BLKmode);
2115 /* If the structure doesn't take up a whole number of words, see whether
2116 SRCREG is padded on the left or on the right. If it's on the left,
2117 set PADDING_CORRECTION to the number of bits to skip.
2119 In most ABIs, the structure will be returned at the least significant end of
2120 the register, which translates to right padding on little-endian
2121 targets and left padding on big-endian targets. The opposite
2122 holds if the structure is returned at the most significant
2123 end of the register. */
2124 if (bytes % UNITS_PER_WORD != 0
2125 && (targetm.calls.return_in_msb (type)
2127 : BYTES_BIG_ENDIAN))
2129 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2131 /* We can use a single move if we have an exact mode for the size. */
2132 else if (MEM_P (target)
2133 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2134 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2135 && bytes == GET_MODE_SIZE (mode))
2137 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2141 /* And if we additionally have the same mode for a register. */
2142 else if (REG_P (target)
2143 && GET_MODE (target) == mode
2144 && bytes == GET_MODE_SIZE (mode))
2146 emit_move_insn (target, srcreg);
2150 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2151 into a new pseudo which is a full word. */
2152 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2154 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2158 /* Copy the structure BITSIZE bits at a time. If the target lives in
2159 memory, take care of not reading/writing past its end by selecting
2160 a copy mode suited to BITSIZE. This should always be possible given
2163 If the target lives in a register, make sure not to select a copy mode
2164 larger than the mode of the register.
2166 We could probably emit more efficient code for machines which do not use
2167 strict alignment, but it doesn't seem worth the effort at the current
2170 copy_mode = word_mode;
2173 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2174 if (mem_mode != BLKmode)
2175 copy_mode = mem_mode;
2177 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2180 for (bitpos = 0, xbitpos = padding_correction;
2181 bitpos < bytes * BITS_PER_UNIT;
2182 bitpos += bitsize, xbitpos += bitsize)
2184 /* We need a new source operand each time xbitpos is on a
2185 word boundary and when xbitpos == padding_correction
2186 (the first time through). */
2187 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2188 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2190 /* We need a new destination operand each time bitpos is on a word boundary. */
2192 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2194 else if (bitpos % BITS_PER_WORD == 0)
2195 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2197 /* Use xbitpos for the source extraction (right justified) and
2198 bitpos for the destination store (left justified). */
2199 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2200 extract_bit_field (src, bitsize,
2201 xbitpos % BITS_PER_WORD, 1, false,
2202 NULL_RTX, copy_mode, copy_mode));
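/* A usage sketch, assuming a call has just returned an aggregate of type
   RETTYPE in hard register HARDREG (both names hypothetical): spill the
   value into a stack temporary so it can be addressed as memory.  */
#if 0
  rtx slot = assign_stack_temp (BLKmode, int_size_in_bytes (rettype));
  copy_blkmode_from_reg (slot, hardreg, rettype);
#endif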
2206 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2207 register if it contains any data, otherwise return null.
2209 This is used on targets that return BLKmode values in registers. */
2212 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2215 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2216 unsigned int bitsize;
2217 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2218 enum machine_mode dst_mode;
2220 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2222 x = expand_normal (src);
2224 bytes = int_size_in_bytes (TREE_TYPE (src));
2228 /* If the structure doesn't take up a whole number of words, see
2229 whether the register value should be padded on the left or on
2230 the right. Set PADDING_CORRECTION to the number of padding
2231 bits needed on the left side.
2233 In most ABIs, the structure will be returned at the least significant end of
2234 the register, which translates to right padding on little-endian
2235 targets and left padding on big-endian targets. The opposite
2236 holds if the structure is returned at the most significant
2237 end of the register. */
2238 if (bytes % UNITS_PER_WORD != 0
2239 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2241 : BYTES_BIG_ENDIAN))
2242 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2245 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2246 dst_words = XALLOCAVEC (rtx, n_regs);
2247 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2249 /* Copy the structure BITSIZE bits at a time. */
2250 for (bitpos = 0, xbitpos = padding_correction;
2251 bitpos < bytes * BITS_PER_UNIT;
2252 bitpos += bitsize, xbitpos += bitsize)
2254 /* We need a new destination pseudo each time xbitpos is
2255 on a word boundary and when xbitpos == padding_correction
2256 (the first time through). */
2257 if (xbitpos % BITS_PER_WORD == 0
2258 || xbitpos == padding_correction)
2260 /* Generate an appropriate register. */
2261 dst_word = gen_reg_rtx (word_mode);
2262 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2264 /* Clear the destination before we move anything into it. */
2265 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2268 /* We need a new source operand each time bitpos is on a word boundary. */
2270 if (bitpos % BITS_PER_WORD == 0)
2271 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2273 /* Use bitpos for the source extraction (left justified) and
2274 xbitpos for the destination store (right justified). */
2275 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2277 extract_bit_field (src_word, bitsize,
2278 bitpos % BITS_PER_WORD, 1, false,
2279 NULL_RTX, word_mode, word_mode));
2282 if (mode == BLKmode)
2284 /* Find the smallest integer mode large enough to hold the
2285 entire structure. */
2286 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2288 mode = GET_MODE_WIDER_MODE (mode))
2289 /* Have we found a large enough mode? */
2290 if (GET_MODE_SIZE (mode) >= bytes)
2293 /* A suitable mode should have been found. */
2294 gcc_assert (mode != VOIDmode);
2297 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2298 dst_mode = word_mode;
2301 dst = gen_reg_rtx (dst_mode);
2303 for (i = 0; i < n_regs; i++)
2304 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2306 if (mode != dst_mode)
2307 dst = gen_lowpart (mode, dst);
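/* A usage sketch (RETVAL_TREE is a hypothetical BLKmode expression and
   RETURN_REG a hypothetical hard return register): build the register
   image of a BLKmode return value, or detect that it is empty.  */
#if 0
  rtx val = copy_blkmode_to_reg (word_mode, retval_tree);
  if (val != NULL_RTX)
    emit_move_insn (return_reg, val);
#endif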
2312 /* Add a USE expression for REG to the (possibly empty) list pointed
2313 to by CALL_FUSAGE. REG must denote a hard register. */
2316 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2318 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2321 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2324 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2325 starting at REGNO. All of these registers must be hard registers. */
2328 use_regs (rtx *call_fusage, int regno, int nregs)
2332 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2334 for (i = 0; i < nregs; i++)
2335 use_reg (call_fusage, regno_reg_rtx[regno + i]);
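/* A minimal sketch: record a call's use of two consecutive hard
   registers starting at register number 0 (the number is illustrative);
   this expands to one use_reg call per register.  */
#if 0
  rtx call_fusage = NULL_RTX;
  use_regs (&call_fusage, 0, 2);
#endif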
2338 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2339 PARALLEL REGS. This is for calls that pass values in multiple
2340 non-contiguous locations. The Irix 6 ABI has examples of this. */
2343 use_group_regs (rtx *call_fusage, rtx regs)
2347 for (i = 0; i < XVECLEN (regs, 0); i++)
2349 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2351 /* A NULL entry means the parameter goes both on the stack and in
2352 registers. This can also be a MEM for targets that pass values
2353 partially on the stack and partially in registers. */
2354 if (reg != 0 && REG_P (reg))
2355 use_reg (call_fusage, reg);
2359 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2360 assignment and the code of the expression on the RHS is CODE. Return NULL otherwise. */
2364 get_def_for_expr (tree name, enum tree_code code)
2368 if (TREE_CODE (name) != SSA_NAME)
2371 def_stmt = get_gimple_for_ssa_name (name);
2373 || gimple_assign_rhs_code (def_stmt) != code)
2379 #ifdef HAVE_conditional_move
2380 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2381 assignment and the class of the expression on the RHS is CLASS. Return NULL otherwise. */
2385 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2389 if (TREE_CODE (name) != SSA_NAME)
2392 def_stmt = get_gimple_for_ssa_name (name);
2394 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2402 /* Determine whether the LEN bytes generated by CONSTFUN can be
2403 stored to memory using several move instructions. CONSTFUNDATA is
2404 a pointer which will be passed as argument in every CONSTFUN call.
2405 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2406 a memset operation and false if it's a copy of a constant string.
2407 Return nonzero if a call to store_by_pieces should succeed. */
2410 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2411 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2412 void *constfundata, unsigned int align, bool memsetp)
2414 unsigned HOST_WIDE_INT l;
2415 unsigned int max_size;
2416 HOST_WIDE_INT offset = 0;
2417 enum machine_mode mode;
2418 enum insn_code icode;
2420 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2421 rtx cst ATTRIBUTE_UNUSED;
2427 ? SET_BY_PIECES_P (len, align)
2428 : STORE_BY_PIECES_P (len, align)))
2431 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2433 /* We would first store what we can in the largest integer mode, then go to
2434 successively smaller modes. */
2437 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2441 max_size = STORE_MAX_PIECES + 1;
2442 while (max_size > 1 && l > 0)
2444 mode = widest_int_mode_for_size (max_size);
2446 if (mode == VOIDmode)
2449 icode = optab_handler (mov_optab, mode);
2450 if (icode != CODE_FOR_nothing
2451 && align >= GET_MODE_ALIGNMENT (mode))
2453 unsigned int size = GET_MODE_SIZE (mode);
2460 cst = (*constfun) (constfundata, offset, mode);
2461 if (!targetm.legitimate_constant_p (mode, cst))
2471 max_size = GET_MODE_SIZE (mode);
2474 /* The code above should have handled everything. */
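/* A sketch of a CONSTFUN callback for can_store_by_pieces and
   store_by_pieces, assuming CONSTFUNDATA points at the constant source
   string (compare the read_str helpers in builtins.c);
   EXAMPLE_READ_STR is a hypothetical name.  */
#if 0
static rtx
example_read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  /* c_readstr packs GET_MODE_SIZE (mode) bytes of the string starting
     at OFFSET into a constant rtx of mode MODE.  */
  return c_readstr ((const char *) data + offset, mode);
}
#endif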
2481 /* Generate several move instructions to store LEN bytes generated by
2482 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2483 pointer which will be passed as argument in every CONSTFUN call.
2484 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2485 a memset operation and false if it's a copy of a constant string.
2486 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2487 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2491 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2492 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2493 void *constfundata, unsigned int align, bool memsetp, int endp)
2495 enum machine_mode to_addr_mode = get_address_mode (to);
2496 struct store_by_pieces_d data;
2500 gcc_assert (endp != 2);
2505 ? SET_BY_PIECES_P (len, align)
2506 : STORE_BY_PIECES_P (len, align));
2507 data.constfun = constfun;
2508 data.constfundata = constfundata;
2511 store_by_pieces_1 (&data, align);
2516 gcc_assert (!data.reverse);
2521 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2522 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2524 data.to_addr = copy_to_mode_reg (to_addr_mode,
2525 plus_constant (to_addr_mode,
2529 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2536 to1 = adjust_address (data.to, QImode, data.offset);
2544 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2545 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2548 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2550 struct store_by_pieces_d data;
2555 data.constfun = clear_by_pieces_1;
2556 data.constfundata = NULL;
2559 store_by_pieces_1 (&data, align);
2562 /* Callback routine for clear_by_pieces.
2563 Return const0_rtx unconditionally. */
2566 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2567 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2568 enum machine_mode mode ATTRIBUTE_UNUSED)
2573 /* Subroutine of clear_by_pieces and store_by_pieces.
2574 Generate several move instructions to store LEN bytes of block TO. (A MEM
2575 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2578 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2579 unsigned int align ATTRIBUTE_UNUSED)
2581 enum machine_mode to_addr_mode = get_address_mode (data->to);
2582 rtx to_addr = XEXP (data->to, 0);
2583 unsigned int max_size = STORE_MAX_PIECES + 1;
2584 enum insn_code icode;
2587 data->to_addr = to_addr;
2589 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2590 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2592 data->explicit_inc_to = 0;
2594 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2596 data->offset = data->len;
2598 /* If storing requires more than two move insns,
2599 copy addresses to registers (to make displacements shorter)
2600 and use post-increment if available. */
2601 if (!data->autinc_to
2602 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2604 /* Determine the main mode we'll be using.
2605 MODE might not be used depending on the definitions of the
2606 USE_* macros below. */
2607 enum machine_mode mode ATTRIBUTE_UNUSED
2608 = widest_int_mode_for_size (max_size);
2610 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2612 data->to_addr = copy_to_mode_reg (to_addr_mode,
2613 plus_constant (to_addr_mode,
2616 data->autinc_to = 1;
2617 data->explicit_inc_to = -1;
2620 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2621 && ! data->autinc_to)
2623 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2624 data->autinc_to = 1;
2625 data->explicit_inc_to = 1;
2628 if (!data->autinc_to && CONSTANT_P (to_addr))
2629 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2632 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2634 /* First store what we can in the largest integer mode, then go to
2635 successively smaller modes. */
2637 while (max_size > 1 && data->len > 0)
2639 enum machine_mode mode = widest_int_mode_for_size (max_size);
2641 if (mode == VOIDmode)
2644 icode = optab_handler (mov_optab, mode);
2645 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2646 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2648 max_size = GET_MODE_SIZE (mode);
2651 /* The code above should have handled everything. */
2652 gcc_assert (!data->len);
2655 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2656 with move instructions for mode MODE. GENFUN is the gen_... function
2657 to make a move insn for that mode. DATA has all the other info. */
2660 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2661 struct store_by_pieces_d *data)
2663 unsigned int size = GET_MODE_SIZE (mode);
2666 while (data->len >= size)
2669 data->offset -= size;
2671 if (data->autinc_to)
2672 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2675 to1 = adjust_address (data->to, mode, data->offset);
2677 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2678 emit_insn (gen_add2_insn (data->to_addr,
2679 GEN_INT (-(HOST_WIDE_INT) size)));
2681 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2682 emit_insn ((*genfun) (to1, cst));
2684 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2685 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2687 if (! data->reverse)
2688 data->offset += size;
2694 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2695 its length in bytes. */
2698 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2699 unsigned int expected_align, HOST_WIDE_INT expected_size)
2701 enum machine_mode mode = GET_MODE (object);
2704 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2706 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2707 just move a zero. Otherwise, do this a piece at a time. */
2709 && CONST_INT_P (size)
2710 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2712 rtx zero = CONST0_RTX (mode);
2715 emit_move_insn (object, zero);
2719 if (COMPLEX_MODE_P (mode))
2721 zero = CONST0_RTX (GET_MODE_INNER (mode));
2724 write_complex_part (object, zero, 0);
2725 write_complex_part (object, zero, 1);
2731 if (size == const0_rtx)
2734 align = MEM_ALIGN (object);
2736 if (CONST_INT_P (size)
2737 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2738 clear_by_pieces (object, INTVAL (size), align);
2739 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2740 expected_align, expected_size))
2742 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2743 return set_storage_via_libcall (object, size, const0_rtx,
2744 method == BLOCK_OP_TAILCALL);
2752 clear_storage (rtx object, rtx size, enum block_op_methods method)
2754 return clear_storage_hints (object, size, method, 0, -1);
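/* A minimal usage sketch: zero a 32-byte BLKmode stack temporary (the
   size is arbitrary for illustration).  */
#if 0
  rtx buf = assign_stack_temp (BLKmode, 32);
  clear_storage (buf, GEN_INT (32), BLOCK_OP_NORMAL);
#endif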
2758 /* A subroutine of clear_storage. Expand a call to memset.
2759 Return the return value of memset, 0 otherwise. */
2762 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2764 tree call_expr, fn, object_tree, size_tree, val_tree;
2765 enum machine_mode size_mode;
2768 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2769 place those pseudos into a VAR_DECL and use them later. */
2771 object = copy_addr_to_reg (XEXP (object, 0));
2773 size_mode = TYPE_MODE (sizetype);
2774 size = convert_to_mode (size_mode, size, 1);
2775 size = copy_to_mode_reg (size_mode, size);
2777 /* It is incorrect to use the libcall calling conventions to call
2778 memset in this context. This could be a user call to memset and
2779 the user may wish to examine the return value from memset. For
2780 targets where libcalls and normal calls have different conventions
2781 for returning pointers, we could end up generating incorrect code. */
2783 object_tree = make_tree (ptr_type_node, object);
2784 if (!CONST_INT_P (val))
2785 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2786 size_tree = make_tree (sizetype, size);
2787 val_tree = make_tree (integer_type_node, val);
2789 fn = clear_storage_libcall_fn (true);
2790 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2791 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2793 retval = expand_normal (call_expr);
2798 /* A subroutine of set_storage_via_libcall. Create the tree node
2799 for the function we use for block clears. */
2801 tree block_clear_fn;
2804 init_block_clear_fn (const char *asmspec)
2806 if (!block_clear_fn)
2810 fn = get_identifier ("memset");
2811 args = build_function_type_list (ptr_type_node, ptr_type_node,
2812 integer_type_node, sizetype,
2815 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2816 DECL_EXTERNAL (fn) = 1;
2817 TREE_PUBLIC (fn) = 1;
2818 DECL_ARTIFICIAL (fn) = 1;
2819 TREE_NOTHROW (fn) = 1;
2820 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2821 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2823 block_clear_fn = fn;
2827 set_user_assembler_name (block_clear_fn, asmspec);
2831 clear_storage_libcall_fn (int for_call)
2833 static bool emitted_extern;
2835 if (!block_clear_fn)
2836 init_block_clear_fn (NULL);
2838 if (for_call && !emitted_extern)
2840 emitted_extern = true;
2841 make_decl_rtl (block_clear_fn);
2844 return block_clear_fn;
2847 /* Expand a setmem pattern; return true if successful. */
2850 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2851 unsigned int expected_align, HOST_WIDE_INT expected_size)
2853 /* Try the most limited insn first, because there's no point
2854 including more than one in the machine description unless
2855 the more limited one has some advantage. */
2857 enum machine_mode mode;
2859 if (expected_align < align)
2860 expected_align = align;
2862 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2863 mode = GET_MODE_WIDER_MODE (mode))
2865 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2867 if (code != CODE_FOR_nothing
2868 /* We don't need MODE to be narrower than
2869 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2870 the mode mask, as it is returned by the macro, it will
2871 definitely be less than the actual mode mask. */
2872 && ((CONST_INT_P (size)
2873 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2874 <= (GET_MODE_MASK (mode) >> 1)))
2875 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2877 struct expand_operand ops[6];
2880 nops = insn_data[(int) code].n_generator_args;
2881 gcc_assert (nops == 4 || nops == 6);
2883 create_fixed_operand (&ops[0], object);
2884 /* The check above guarantees that this size conversion is valid. */
2885 create_convert_operand_to (&ops[1], size, mode, true);
2886 create_convert_operand_from (&ops[2], val, byte_mode, true);
2887 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2890 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2891 create_integer_operand (&ops[5], expected_size);
2893 if (maybe_expand_insn (code, nops, ops))
2902 /* Write to one of the components of the complex value CPLX. Write VAL to
2903 the real part if IMAG_P is false, and the imaginary part if it's true. */
2906 write_complex_part (rtx cplx, rtx val, bool imag_p)
2908 enum machine_mode cmode;
2909 enum machine_mode imode;
2912 if (GET_CODE (cplx) == CONCAT)
2914 emit_move_insn (XEXP (cplx, imag_p), val);
2918 cmode = GET_MODE (cplx);
2919 imode = GET_MODE_INNER (cmode);
2920 ibitsize = GET_MODE_BITSIZE (imode);
2922 /* For MEMs simplify_gen_subreg may generate an invalid new address
2923 because, e.g., the original address is considered mode-dependent
2924 by the target, which restricts simplify_subreg from invoking
2925 adjust_address_nv. Instead of preparing fallback support for an
2926 invalid address, we call adjust_address_nv directly. */
2929 emit_move_insn (adjust_address_nv (cplx, imode,
2930 imag_p ? GET_MODE_SIZE (imode) : 0),
2935 /* If the sub-object is at least word sized, then we know that subregging
2936 will work. This special case is important, since store_bit_field
2937 wants to operate on integer modes, and there's rarely an OImode to
2938 correspond to TCmode. */
2939 if (ibitsize >= BITS_PER_WORD
2940 /* For hard regs we have exact predicates. Assume we can split
2941 the original object if it spans an even number of hard regs.
2942 This special case is important for SCmode on 64-bit platforms
2943 where the natural size of floating-point regs is 32-bit. */
2945 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2946 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2948 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2949 imag_p ? GET_MODE_SIZE (imode) : 0);
2952 emit_move_insn (part, val);
2956 /* simplify_gen_subreg may fail for sub-word MEMs. */
2957 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2960 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2963 /* Extract one of the components of the complex value CPLX. Extract the
2964 real part if IMAG_P is false, and the imaginary part if it's true. */
2967 read_complex_part (rtx cplx, bool imag_p)
2969 enum machine_mode cmode, imode;
2972 if (GET_CODE (cplx) == CONCAT)
2973 return XEXP (cplx, imag_p);
2975 cmode = GET_MODE (cplx);
2976 imode = GET_MODE_INNER (cmode);
2977 ibitsize = GET_MODE_BITSIZE (imode);
2979 /* Special case reads from complex constants that got spilled to memory. */
2980 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2982 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2983 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2985 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2986 if (CONSTANT_CLASS_P (part))
2987 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2991 /* For MEMs simplify_gen_subreg may generate an invalid new address
2992 because, e.g., the original address is considered mode-dependent
2993 by the target, which restricts simplify_subreg from invoking
2994 adjust_address_nv. Instead of preparing fallback support for an
2995 invalid address, we call adjust_address_nv directly. */
2997 return adjust_address_nv (cplx, imode,
2998 imag_p ? GET_MODE_SIZE (imode) : 0);
3000 /* If the sub-object is at least word sized, then we know that subregging
3001 will work. This special case is important, since extract_bit_field
3002 wants to operate on integer modes, and there's rarely an OImode to
3003 correspond to TCmode. */
3004 if (ibitsize >= BITS_PER_WORD
3005 /* For hard regs we have exact predicates. Assume we can split
3006 the original object if it spans an even number of hard regs.
3007 This special case is important for SCmode on 64-bit platforms
3008 where the natural size of floating-point regs is 32-bit. */
3010 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3011 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3013 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3014 imag_p ? GET_MODE_SIZE (imode) : 0);
3018 /* simplify_gen_subreg may fail for sub-word MEMs. */
3019 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3022 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3023 true, false, NULL_RTX, imode, imode);
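/* A sketch using the pair of helpers above: exchange the two parts of a
   complex value CPLX; the scratch pseudo keeps the real part from being
   clobbered before it is read back.  */
#if 0
  enum machine_mode imode = GET_MODE_INNER (GET_MODE (cplx));
  rtx re = force_reg (imode, read_complex_part (cplx, false));
  write_complex_part (cplx, read_complex_part (cplx, true), false);
  write_complex_part (cplx, re, true);
#endif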
3026 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3027 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3028 represented in NEW_MODE. If FORCE is true, this will never happen, as
3029 we'll force-create a SUBREG if needed. */
3032 emit_move_change_mode (enum machine_mode new_mode,
3033 enum machine_mode old_mode, rtx x, bool force)
3037 if (push_operand (x, GET_MODE (x)))
3039 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3040 MEM_COPY_ATTRIBUTES (ret, x);
3044 /* We don't have to worry about changing the address since the
3045 size in bytes is supposed to be the same. */
3046 if (reload_in_progress)
3048 /* Copy the MEM to change the mode and move any
3049 substitutions from the old MEM to the new one. */
3050 ret = adjust_address_nv (x, new_mode, 0);
3051 copy_replacements (x, ret);
3054 ret = adjust_address (x, new_mode, 0);
3058 /* Note that we do want simplify_subreg's behavior of validating
3059 that the new mode is ok for a hard register. If we were to use
3060 simplify_gen_subreg, we would create the subreg, but would
3061 probably run into the target not being able to implement it. */
3062 /* Except, of course, when FORCE is true, when this is exactly what
3063 we want. Which is needed for CCmodes on some targets. */
3065 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3067 ret = simplify_subreg (new_mode, x, old_mode, 0);
3073 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3074 an integer mode of the same size as MODE. Returns the instruction
3075 emitted, or NULL if such a move could not be generated. */
3078 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3080 enum machine_mode imode;
3081 enum insn_code code;
3083 /* There must exist a mode of the exact size we require. */
3084 imode = int_mode_for_mode (mode);
3085 if (imode == BLKmode)
3088 /* The target must support moves in this mode. */
3089 code = optab_handler (mov_optab, imode);
3090 if (code == CODE_FOR_nothing)
3093 x = emit_move_change_mode (imode, mode, x, force);
3096 y = emit_move_change_mode (imode, mode, y, force);
3099 return emit_insn (GEN_FCN (code) (x, y));
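/* For instance, on a typical 32-bit target a DFmode value can be moved
   here as DImode: int_mode_for_mode (DFmode) yields DImode, and the
   DImode move pattern then copies the bits unchanged.  */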
3102 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3103 Return an equivalent MEM that does not use an auto-increment. */
3106 emit_move_resolve_push (enum machine_mode mode, rtx x)
3108 enum rtx_code code = GET_CODE (XEXP (x, 0));
3109 HOST_WIDE_INT adjust;
3112 adjust = GET_MODE_SIZE (mode);
3113 #ifdef PUSH_ROUNDING
3114 adjust = PUSH_ROUNDING (adjust);
3116 if (code == PRE_DEC || code == POST_DEC)
3118 else if (code == PRE_MODIFY || code == POST_MODIFY)
3120 rtx expr = XEXP (XEXP (x, 0), 1);
3123 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3124 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3125 val = INTVAL (XEXP (expr, 1));
3126 if (GET_CODE (expr) == MINUS)
3128 gcc_assert (adjust == val || adjust == -val);
3132 /* Do not use anti_adjust_stack, since we don't want to update
3133 stack_pointer_delta. */
3134 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3135 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3136 0, OPTAB_LIB_WIDEN);
3137 if (temp != stack_pointer_rtx)
3138 emit_move_insn (stack_pointer_rtx, temp);
3145 temp = stack_pointer_rtx;
3150 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3156 return replace_equiv_address (x, temp);
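/* For instance, on a typical STACK_GROWS_DOWNWARD target a PRE_DEC push
   of an SImode value is resolved by emitting an explicit
   sp = sp - PUSH_ROUNDING (4) adjustment and returning
   (mem:SI (reg sp)), which the caller can use as a plain store
   destination.  */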
3159 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3160 X is known to satisfy push_operand, and MODE is known to be complex.
3161 Returns the last instruction emitted. */
3164 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3166 enum machine_mode submode = GET_MODE_INNER (mode);
3169 #ifdef PUSH_ROUNDING
3170 unsigned int submodesize = GET_MODE_SIZE (submode);
3172 /* If we are pushing to the stack but the size is not something the
3173 machine can push exactly, we must use individual move instructions. */
3174 if (PUSH_ROUNDING (submodesize) != submodesize)
3176 x = emit_move_resolve_push (mode, x);
3177 return emit_move_insn (x, y);
3181 /* Note that the real part always precedes the imag part in memory
3182 regardless of the machine's endianness. */
3183 switch (GET_CODE (XEXP (x, 0)))
3197 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3198 read_complex_part (y, imag_first));
3199 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3200 read_complex_part (y, !imag_first));
3203 /* A subroutine of emit_move_complex. Perform the move from Y to X
3204 via two moves of the parts. Returns the last instruction emitted. */
3207 emit_move_complex_parts (rtx x, rtx y)
3209 /* Show the output dies here. This is necessary for SUBREGs
3210 of pseudos since we cannot track their lifetimes correctly;
3211 hard regs shouldn't appear here except as return values. */
3212 if (!reload_completed && !reload_in_progress
3213 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3216 write_complex_part (x, read_complex_part (y, false), false);
3217 write_complex_part (x, read_complex_part (y, true), true);
3219 return get_last_insn ();
3222 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3223 MODE is known to be complex. Returns the last instruction emitted. */
3226 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3230 /* Need to take special care for pushes, to maintain proper ordering
3231 of the data, and possibly extra padding. */
3232 if (push_operand (x, mode))
3233 return emit_move_complex_push (mode, x, y);
3235 /* See if we can coerce the target into moving both values at once, except
3236 for floating point where we favor moving as parts if this is easy. */
3237 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3238 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3240 && HARD_REGISTER_P (x)
3241 && hard_regno_nregs[REGNO(x)][mode] == 1)
3243 && HARD_REGISTER_P (y)
3244 && hard_regno_nregs[REGNO(y)][mode] == 1))
3246 /* Not possible if the values are inherently not adjacent. */
3247 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3249 /* It is possible if both are registers (or subregs of registers). */
3250 else if (register_operand (x, mode) && register_operand (y, mode))
3252 /* If one of the operands is a memory, and alignment constraints
3253 are friendly enough, we may be able to do combined memory operations.
3254 We do not attempt this if Y is a constant because that combination is
3255 usually better with the by-parts thing below. */
3256 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3257 && (!STRICT_ALIGNMENT
3258 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3267 /* For memory to memory moves, optimal behavior can be had with the
3268 existing block move logic. */
3269 if (MEM_P (x) && MEM_P (y))
3271 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3272 BLOCK_OP_NO_LIBCALL);
3273 return get_last_insn ();
3276 ret = emit_move_via_integer (mode, x, y, true);
3281 return emit_move_complex_parts (x, y);
3284 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3285 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3288 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3292 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3295 enum insn_code code = optab_handler (mov_optab, CCmode);
3296 if (code != CODE_FOR_nothing)
3298 x = emit_move_change_mode (CCmode, mode, x, true);
3299 y = emit_move_change_mode (CCmode, mode, y, true);
3300 return emit_insn (GEN_FCN (code) (x, y));
3304 /* Otherwise, find the MODE_INT mode of the same width. */
3305 ret = emit_move_via_integer (mode, x, y, false);
3306 gcc_assert (ret != NULL);
3310 /* Return true if word I of OP lies entirely in the
3311 undefined bits of a paradoxical subreg. */
3314 undefined_operand_subword_p (const_rtx op, int i)
3316 enum machine_mode innermode, innermostmode;
3318 if (GET_CODE (op) != SUBREG)
3320 innermode = GET_MODE (op);
3321 innermostmode = GET_MODE (SUBREG_REG (op));
3322 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3323 /* The SUBREG_BYTE represents offset, as if the value were stored in
3324 memory, except for a paradoxical subreg where we define
3325 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3327 if (SUBREG_BYTE (op) == 0
3328 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3330 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3331 if (WORDS_BIG_ENDIAN)
3332 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3333 if (BYTES_BIG_ENDIAN)
3334 offset += difference % UNITS_PER_WORD;
3336 if (offset >= GET_MODE_SIZE (innermostmode)
3337 || offset <= -GET_MODE_SIZE (word_mode))
3342 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3343 MODE is any multi-word or full-word mode that lacks a move_insn
3344 pattern. Note that you will get better code if you define such
3345 patterns, even if they must turn into multiple assembler instructions. */
3348 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3355 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3357 /* If X is a push on the stack, do the push now and replace
3358 X with a reference to the stack pointer. */
3359 if (push_operand (x, mode))
3360 x = emit_move_resolve_push (mode, x);
3362 /* If we are in reload, see if either operand is a MEM whose address
3363 is scheduled for replacement. */
3364 if (reload_in_progress && MEM_P (x)
3365 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3366 x = replace_equiv_address_nv (x, inner);
3367 if (reload_in_progress && MEM_P (y)
3368 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3369 y = replace_equiv_address_nv (y, inner);
3373 need_clobber = false;
3375 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3378 rtx xpart = operand_subword (x, i, 1, mode);
3381 /* Do not generate code for a move if it would come entirely
3382 from the undefined bits of a paradoxical subreg. */
3383 if (undefined_operand_subword_p (y, i))
3386 ypart = operand_subword (y, i, 1, mode);
3388 /* If we can't get a part of Y, put Y into memory if it is a
3389 constant. Otherwise, force it into a register. Then we must
3390 be able to get a part of Y. */
3391 if (ypart == 0 && CONSTANT_P (y))
3393 y = use_anchored_address (force_const_mem (mode, y));
3394 ypart = operand_subword (y, i, 1, mode);
3396 else if (ypart == 0)
3397 ypart = operand_subword_force (y, i, mode);
3399 gcc_assert (xpart && ypart);
3401 need_clobber |= (GET_CODE (xpart) == SUBREG);
3403 last_insn = emit_move_insn (xpart, ypart);
3409 /* Show the output dies here. This is necessary for SUBREGs
3410 of pseudos since we cannot track their lifetimes correctly;
3411 hard regs shouldn't appear here except as return values.
3412 We never want to emit such a clobber after reload. */
3414 && ! (reload_in_progress || reload_completed)
3415 && need_clobber != 0)
3423 /* Low level part of emit_move_insn.
3424 Called just like emit_move_insn, but assumes X and Y
3425 are basically valid. */
3428 emit_move_insn_1 (rtx x, rtx y)
3430 enum machine_mode mode = GET_MODE (x);
3431 enum insn_code code;
3433 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3435 code = optab_handler (mov_optab, mode);
3436 if (code != CODE_FOR_nothing)
3437 return emit_insn (GEN_FCN (code) (x, y));
3439 /* Expand complex moves by moving real part and imag part. */
3440 if (COMPLEX_MODE_P (mode))
3441 return emit_move_complex (mode, x, y);
3443 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3444 || ALL_FIXED_POINT_MODE_P (mode))
3446 rtx result = emit_move_via_integer (mode, x, y, true);
3448 /* If we can't find an integer mode, use multi words. */
3452 return emit_move_multi_word (mode, x, y);
3455 if (GET_MODE_CLASS (mode) == MODE_CC)
3456 return emit_move_ccmode (mode, x, y);
3458 /* Try using a move pattern for the corresponding integer mode. This is
3459 only safe when simplify_subreg can convert MODE constants into integer
3460 constants. At present, it can only do this reliably if the value
3461 fits within a HOST_WIDE_INT. */
3462 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3464 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3468 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3473 return emit_move_multi_word (mode, x, y);
3476 /* Generate code to copy Y into X.
3477 Both Y and X must have the same mode, except that
3478 Y can be a constant with VOIDmode.
3479 This mode cannot be BLKmode; use emit_block_move for that.
3481 Return the last instruction emitted. */
3484 emit_move_insn (rtx x, rtx y)
3486 enum machine_mode mode = GET_MODE (x);
3487 rtx y_cst = NULL_RTX;
3490 gcc_assert (mode != BLKmode
3491 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3496 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3497 && (last_insn = compress_float_constant (x, y)))
3502 if (!targetm.legitimate_constant_p (mode, y))
3504 y = force_const_mem (mode, y);
3506 /* If the target's cannot_force_const_mem prevented the spill,
3507 assume that the target's move expanders will also take care
3508 of the non-legitimate constant. */
3512 y = use_anchored_address (y);
3516 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3519 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3521 && ! push_operand (x, GET_MODE (x))))
3522 x = validize_mem (x);
3525 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3526 MEM_ADDR_SPACE (y)))
3527 y = validize_mem (y);
3529 gcc_assert (mode != BLKmode);
3531 last_insn = emit_move_insn_1 (x, y);
3533 if (y_cst && REG_P (x)
3534 && (set = single_set (last_insn)) != NULL_RTX
3535 && SET_DEST (set) == x
3536 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3537 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
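/* A minimal usage sketch: copy a floating-point constant into a fresh
   pseudo; emit_move_insn itself forces the constant into memory if the
   target cannot use it as an immediate.  */
#if 0
  rtx reg = gen_reg_rtx (DFmode);
  emit_move_insn (reg, CONST1_RTX (DFmode));
#endif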
3542 /* If Y is representable exactly in a narrower mode, and the target can
3543 perform the extension directly from constant or memory, then emit the
3544 move as an extension. */
3547 compress_float_constant (rtx x, rtx y)
3549 enum machine_mode dstmode = GET_MODE (x);
3550 enum machine_mode orig_srcmode = GET_MODE (y);
3551 enum machine_mode srcmode;
3553 int oldcost, newcost;
3554 bool speed = optimize_insn_for_speed_p ();
3556 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3558 if (targetm.legitimate_constant_p (dstmode, y))
3559 oldcost = set_src_cost (y, speed);
3561 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3563 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3564 srcmode != orig_srcmode;
3565 srcmode = GET_MODE_WIDER_MODE (srcmode))
3568 rtx trunc_y, last_insn;
3570 /* Skip if the target can't extend this way. */
3571 ic = can_extend_p (dstmode, srcmode, 0);
3572 if (ic == CODE_FOR_nothing)
3575 /* Skip if the narrowed value isn't exact. */
3576 if (! exact_real_truncate (srcmode, &r))
3579 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3581 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3583 /* Skip if the target needs extra instructions to perform the extension. */
3585 if (!insn_operand_matches (ic, 1, trunc_y))
3587 /* This is valid, but may not be cheaper than the original. */
3588 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3590 if (oldcost < newcost)
3593 else if (float_extend_from_mem[dstmode][srcmode])
3595 trunc_y = force_const_mem (srcmode, trunc_y);
3596 /* This is valid, but may not be cheaper than the original. */
3597 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3599 if (oldcost < newcost)
3601 trunc_y = validize_mem (trunc_y);
3606 /* For CSE's benefit, force the compressed constant pool entry
3607 into a new pseudo. This constant may be used in different modes,
3608 and if not, combine will put things back together for us. */
3609 trunc_y = force_reg (srcmode, trunc_y);
3610 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3611 last_insn = get_last_insn ();
3614 set_unique_reg_note (last_insn, REG_EQUAL, y);
3622 /* Pushing data onto the stack. */
3624 /* Push a block of length SIZE (perhaps variable)
3625 and return an rtx to address the beginning of the block.
3626 The value may be virtual_outgoing_args_rtx.
3628 EXTRA is the number of bytes of padding to push in addition to SIZE.
3629 BELOW nonzero means this padding comes at low addresses;
3630 otherwise, the padding comes at high addresses. */
3633 push_block (rtx size, int extra, int below)
3637 size = convert_modes (Pmode, ptr_mode, size, 1);
3638 if (CONSTANT_P (size))
3639 anti_adjust_stack (plus_constant (Pmode, size, extra));
3640 else if (REG_P (size) && extra == 0)
3641 anti_adjust_stack (size);
3644 temp = copy_to_mode_reg (Pmode, size);
3646 temp = expand_binop (Pmode, add_optab, temp,
3647 gen_int_mode (extra, Pmode),
3648 temp, 0, OPTAB_LIB_WIDEN);
3649 anti_adjust_stack (temp);
3652 #ifndef STACK_GROWS_DOWNWARD
3658 temp = virtual_outgoing_args_rtx;
3659 if (extra != 0 && below)
3660 temp = plus_constant (Pmode, temp, extra);
3664 if (CONST_INT_P (size))
3665 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3666 -INTVAL (size) - (below ? 0 : extra));
3667 else if (extra != 0 && !below)
3668 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3669 negate_rtx (Pmode, plus_constant (Pmode, size,
3672 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3673 negate_rtx (Pmode, size));
3676 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
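/* A minimal usage sketch: reserve 16 bytes of argument space with no
   extra padding and obtain the address of its first byte (the size is
   illustrative).  */
#if 0
  rtx block = push_block (GEN_INT (16), 0, 0);
#endif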
3679 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3682 mem_autoinc_base (rtx mem)
3686 rtx addr = XEXP (mem, 0);
3687 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3688 return XEXP (addr, 0);
3693 /* A utility routine used here, in reload, and in try_split. The insns
3694 after PREV up to and including LAST are known to adjust the stack,
3695 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3696 placing notes as appropriate. PREV may be NULL, indicating the
3697 entire insn sequence prior to LAST should be scanned.
3699 The set of allowed stack pointer modifications is small:
3700 (1) One or more auto-inc style memory references (aka pushes),
3701 (2) One or more addition/subtraction with the SP as destination,
3702 (3) A single move insn with the SP as destination,
3703 (4) A call_pop insn,
3704 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3706 Insns in the sequence that do not modify the SP are ignored,
3707 except for noreturn calls.
3709 The return value is the amount of adjustment that can be trivially
3710 verified, via immediate operand or auto-inc. If the adjustment
3711 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3714 find_args_size_adjust (rtx insn)
3719 pat = PATTERN (insn);
3722 /* Look for a call_pop pattern. */
3725 /* We have to allow non-call_pop patterns for the case
3726 of emit_single_push_insn of a TLS address. */
3727 if (GET_CODE (pat) != PARALLEL)
3730 /* All call_pop have a stack pointer adjust in the parallel.
3731 The call itself is always first, and the stack adjust is
3732 usually last, so search from the end. */
3733 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3735 set = XVECEXP (pat, 0, i);
3736 if (GET_CODE (set) != SET)
3738 dest = SET_DEST (set);
3739 if (dest == stack_pointer_rtx)
3742 /* We'd better have found the stack pointer adjust. */
3745 /* Fall through to process the extracted SET and DEST
3746 as if it was a standalone insn. */
3748 else if (GET_CODE (pat) == SET)
3750 else if ((set = single_set (insn)) != NULL)
3752 else if (GET_CODE (pat) == PARALLEL)
3754 /* ??? Some older ports use a parallel with a stack adjust
3755 and a store for a PUSH_ROUNDING pattern, rather than a
3756 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3757 /* ??? See h8300 and m68k, pushqi1. */
3758 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3760 set = XVECEXP (pat, 0, i);
3761 if (GET_CODE (set) != SET)
3763 dest = SET_DEST (set);
3764 if (dest == stack_pointer_rtx)
3767 /* We do not expect an auto-inc of the sp in the parallel. */
3768 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3769 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3770 != stack_pointer_rtx);
3778 dest = SET_DEST (set);
3780 /* Look for direct modifications of the stack pointer. */
3781 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3783 /* Look for a trivial adjustment, otherwise assume nothing. */
3784 /* Note that the SPU restore_stack_block pattern refers to
3785 the stack pointer in V4SImode. Consider that non-trivial. */
3786 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3787 && GET_CODE (SET_SRC (set)) == PLUS
3788 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3789 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3790 return INTVAL (XEXP (SET_SRC (set), 1));
3791 /* ??? Reload can generate no-op moves, which will be cleaned
3792 up later. Recognize it and continue searching. */
3793 else if (rtx_equal_p (dest, SET_SRC (set)))
3796 return HOST_WIDE_INT_MIN;
3802 /* Otherwise only think about autoinc patterns. */
3803 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3806 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3807 != stack_pointer_rtx);
3809 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3810 mem = SET_SRC (set);
3814 addr = XEXP (mem, 0);
3815 switch (GET_CODE (addr))
3819 return GET_MODE_SIZE (GET_MODE (mem));
3822 return -GET_MODE_SIZE (GET_MODE (mem));
3825 addr = XEXP (addr, 1);
3826 gcc_assert (GET_CODE (addr) == PLUS);
3827 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3828 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3829 return INTVAL (XEXP (addr, 1));
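/* For instance, a word push such as (set (mem:SI (pre_dec (reg sp))) ...)
   yields -GET_MODE_SIZE (SImode), i.e. -4 on a typical 32-bit target,
   while (set (reg sp) (plus (reg sp) (const_int -16))) yields -16.  */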
3837 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3839 int args_size = end_args_size;
3840 bool saw_unknown = false;
3843 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3845 HOST_WIDE_INT this_delta;
3847 if (!NONDEBUG_INSN_P (insn))
3850 this_delta = find_args_size_adjust (insn);
3851 if (this_delta == 0)
3854 || ACCUMULATE_OUTGOING_ARGS
3855 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3859 gcc_assert (!saw_unknown);
3860 if (this_delta == HOST_WIDE_INT_MIN)
3863 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3864 #ifdef STACK_GROWS_DOWNWARD
3865 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3867 args_size -= this_delta;
3870 return saw_unknown ? INT_MIN : args_size;
3873 #ifdef PUSH_ROUNDING
3874 /* Emit a single push insn. */
3877 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3880 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3882 enum insn_code icode;
3884 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3885 /* If there is a push pattern, use it. Otherwise try the old way of
3886 throwing a MEM representing the push operation at the move expander. */
3887 icode = optab_handler (push_optab, mode);
3888 if (icode != CODE_FOR_nothing)
3890 struct expand_operand ops[1];
3892 create_input_operand (&ops[0], x, mode);
3893 if (maybe_expand_insn (icode, 1, ops))
3896 if (GET_MODE_SIZE (mode) == rounded_size)
3897 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3898 /* If we are to pad downward, adjust the stack pointer first and
3899 then store X into the stack location using an offset. This is
3900 because emit_move_insn does not know how to pad; it does not have
3902 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3904 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3905 HOST_WIDE_INT offset;
3907 emit_move_insn (stack_pointer_rtx,
3908 expand_binop (Pmode,
3909 #ifdef STACK_GROWS_DOWNWARD
3915 gen_int_mode (rounded_size, Pmode),
3916 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3918 offset = (HOST_WIDE_INT) padding_size;
3919 #ifdef STACK_GROWS_DOWNWARD
3920 if (STACK_PUSH_CODE == POST_DEC)
3921 /* We have already decremented the stack pointer, so get the
3923 offset += (HOST_WIDE_INT) rounded_size;
3925 if (STACK_PUSH_CODE == POST_INC)
3926 /* We have already incremented the stack pointer, so get the
3928 offset -= (HOST_WIDE_INT) rounded_size;
3930 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3931 gen_int_mode (offset, Pmode));
3935 #ifdef STACK_GROWS_DOWNWARD
3936 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3937 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3938 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
3941 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3942 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3943 gen_int_mode (rounded_size, Pmode));
3945 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3948 dest = gen_rtx_MEM (mode, dest_addr);
3952 set_mem_attributes (dest, type, 1);
3954 if (flag_optimize_sibling_calls)
3955 /* Function incoming arguments may overlap with sibling call
3956 outgoing arguments and we cannot allow reordering of reads
3957 from function arguments with stores to outgoing arguments
3958 of sibling calls. */
3959 set_mem_alias_set (dest, 0);
3961 emit_move_insn (dest, x);
3964 /* Emit and annotate a single push insn. */
3967 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3969 int delta, old_delta = stack_pointer_delta;
3970 rtx prev = get_last_insn ();
3973 emit_single_push_insn_1 (mode, x, type);
3975 last = get_last_insn ();
3977 /* Notice the common case where we emitted exactly one insn. */
3978 if (PREV_INSN (last) == prev)
3980 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3984 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3985 gcc_assert (delta == INT_MIN || delta == old_delta);
3989 /* Generate code to push X onto the stack, assuming it has mode MODE and
3991 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3993 SIZE is an rtx for the size of data to be copied (in bytes),
3994 needed only if X is BLKmode.
3996 ALIGN (in bits) is maximum alignment we can assume.
3998 If PARTIAL and REG are both nonzero, then copy that many of the first
3999 bytes of X into registers starting with REG, and push the rest of X.
4000 The amount of space pushed is decreased by PARTIAL bytes.
4001 REG must be a hard register in this case.
4002 If REG is zero but PARTIAL is not, take all other actions for an
4003 argument partially in registers, but do not actually load any
4006 EXTRA is the amount in bytes of extra space to leave next to this arg.
4007 This is ignored if an argument block has already been allocated.
4009 On a machine that lacks real push insns, ARGS_ADDR is the address of
4010 the bottom of the argument block for this call. We use indexing off there
4011 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4012 argument block has not been preallocated.
4014 ARGS_SO_FAR is the size of args previously pushed for this call.
4016 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4017 for arguments passed in registers. If nonzero, it will be the number
4018 of bytes required. */
4021 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4022 unsigned int align, int partial, rtx reg, int extra,
4023 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4027 enum direction stack_direction
4028 #ifdef STACK_GROWS_DOWNWARD
4034 /* Decide where to pad the argument: `downward' for below,
4035 `upward' for above, or `none' for don't pad it.
4036 Default is below for small data on big-endian machines; else above. */
4037 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4039 /* Invert direction if stack is post-decrement.
4041 if (STACK_PUSH_CODE == POST_DEC)
4042 if (where_pad != none)
4043 where_pad = (where_pad == downward ? upward : downward);
4048 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4050 /* Copy a block into the stack, entirely or partially. */
4057 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4058 used = partial - offset;
4060 if (mode != BLKmode)
4062 /* A value is to be stored in an insufficiently aligned
4063 stack slot; copy via a suitably aligned slot if necessary. */
4065 size = GEN_INT (GET_MODE_SIZE (mode));
4066 if (!MEM_P (xinner))
4068 temp = assign_temp (type, 1, 1);
4069 emit_move_insn (temp, xinner);
4076 /* USED is now the # of bytes we need not copy to the stack
4077 because registers will take care of them. */
4080 xinner = adjust_address (xinner, BLKmode, used);
4082 /* If the partial register-part of the arg counts in its stack size,
4083 skip the part of stack space corresponding to the registers.
4084 Otherwise, start copying to the beginning of the stack space,
4085 by setting SKIP to 0. */
4086 skip = (reg_parm_stack_space == 0) ? 0 : used;
4088 #ifdef PUSH_ROUNDING
4089 /* Do it with several push insns if that doesn't take lots of insns
4090 and if there is no difficulty with push insns that skip bytes
4091 on the stack for alignment purposes. */
4094 && CONST_INT_P (size)
4096 && MEM_ALIGN (xinner) >= align
4097 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4098 /* Here we avoid the case of a structure whose weak alignment
4099 forces many pushes of a small amount of data,
4100 and such small pushes do rounding that causes trouble. */
4101 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4102 || align >= BIGGEST_ALIGNMENT
4103 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4104 == (align / BITS_PER_UNIT)))
4105 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4107 /* Push padding now if padding above and stack grows down,
4108 or if padding below and stack grows up.
4109 But if space is already allocated, this has already been done. */
4110 if (extra && args_addr == 0
4111 && where_pad != none && where_pad != stack_direction)
4112 anti_adjust_stack (GEN_INT (extra));
4114 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4117 #endif /* PUSH_ROUNDING */
4121 /* Otherwise make space on the stack and copy the data
4122 to the address of that space. */
4124 /* Deduct words put into registers from the size we must copy. */
4127 if (CONST_INT_P (size))
4128 size = GEN_INT (INTVAL (size) - used);
4130 size = expand_binop (GET_MODE (size), sub_optab, size,
4131 gen_int_mode (used, GET_MODE (size)),
4132 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4135 /* Get the address of the stack space.
4136 In this case, we do not deal with EXTRA separately.
4137 A single stack adjust will do. */
4140 temp = push_block (size, extra, where_pad == downward);
4143 else if (CONST_INT_P (args_so_far))
4144 temp = memory_address (BLKmode,
4145 plus_constant (Pmode, args_addr,
4146 skip + INTVAL (args_so_far)));
4148 temp = memory_address (BLKmode,
4149 plus_constant (Pmode,
4150 gen_rtx_PLUS (Pmode,
4155 if (!ACCUMULATE_OUTGOING_ARGS)
4157 /* If the source is referenced relative to the stack pointer,
4158 copy it to another register to stabilize it. We do not need
4159 to do this if we know that we won't be changing sp. */
4161 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4162 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4163 temp = copy_to_reg (temp);
4166 target = gen_rtx_MEM (BLKmode, temp);
4168 /* We do *not* set_mem_attributes here, because incoming arguments
4169 may overlap with sibling call outgoing arguments and we cannot
4170 allow reordering of reads from function arguments with stores
4171 to outgoing arguments of sibling calls. We do, however, want
4172 to record the alignment of the stack slot. */
4173 /* ALIGN may well be better aligned than TYPE, e.g. due to
4174 PARM_BOUNDARY. Assume the caller isn't lying. */
4175 set_mem_align (target, align);
4177 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4180 else if (partial > 0)
4182 /* Scalar partly in registers. */
4184 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4187 /* # bytes of start of argument
4188 that we must make space for but need not store. */
4189 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4190 int args_offset = INTVAL (args_so_far);
4193 /* Push padding now if padding above and stack grows down,
4194 or if padding below and stack grows up.
4195 But if space was already allocated, this has already been done. */
4196 if (extra && args_addr == 0
4197 && where_pad != none && where_pad != stack_direction)
4198 anti_adjust_stack (GEN_INT (extra));
4200 /* If we make space by pushing it, we might as well push
4201 the real data. Otherwise, we can leave OFFSET nonzero
4202 and leave the space uninitialized. */
4206 /* Now NOT_STACK gets the number of words that we don't need to
4207 allocate on the stack. Convert OFFSET to words too. */
4208 not_stack = (partial - offset) / UNITS_PER_WORD;
4209 offset /= UNITS_PER_WORD;
4211 /* If the partial register-part of the arg counts in its stack size,
4212 skip the part of stack space corresponding to the registers.
4213 Otherwise, start copying to the beginning of the stack space,
4214 by setting SKIP to 0. */
4215 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4217 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4218 x = validize_mem (force_const_mem (mode, x));
4220 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4221 SUBREGs of such registers are not allowed. */
4222 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4223 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4224 x = copy_to_reg (x);
4226 /* Loop over all the words allocated on the stack for this arg. */
4227 /* We can do it by words, because any scalar bigger than a word
4228 has a size a multiple of a word. */
4229 #ifndef PUSH_ARGS_REVERSED
4230 for (i = not_stack; i < size; i++)
4232 for (i = size - 1; i >= not_stack; i--)
4234 if (i >= not_stack + offset)
4235 emit_push_insn (operand_subword_force (x, i, mode),
4236 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4238 GEN_INT (args_offset + ((i - not_stack + skip)
4240 reg_parm_stack_space, alignment_pad);
4247 /* Push padding now if padding above and stack grows down,
4248 or if padding below and stack grows up.
4249 But if space was already allocated, this has already been done. */
4250 if (extra && args_addr == 0
4251 && where_pad != none && where_pad != stack_direction)
4252 anti_adjust_stack (GEN_INT (extra));
4254 #ifdef PUSH_ROUNDING
4255 if (args_addr == 0 && PUSH_ARGS)
4256 emit_single_push_insn (mode, x, type);
4260 if (CONST_INT_P (args_so_far))
4262 = memory_address (mode,
4263 plus_constant (Pmode, args_addr,
4264 INTVAL (args_so_far)));
4266 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4268 dest = gen_rtx_MEM (mode, addr);
4270 /* We do *not* set_mem_attributes here, because incoming arguments
4271 may overlap with sibling call outgoing arguments and we cannot
4272 allow reordering of reads from function arguments with stores
4273 to outgoing arguments of sibling calls. We do, however, want
4274 to record the alignment of the stack slot. */
4275 /* ALIGN may well be stricter than TYPE's alignment, e.g. due to
4276 PARM_BOUNDARY. Assume the caller isn't lying. */
4277 set_mem_align (dest, align);
4279 emit_move_insn (dest, x);
4283 /* If part should go in registers, copy that part
4284 into the appropriate registers. Do this now, at the end,
4285 since mem-to-mem copies above may do function calls. */
4286 if (partial > 0 && reg != 0)
4288 /* Handle calls that pass values in multiple non-contiguous locations.
4289 The Irix 6 ABI has examples of this. */
4290 if (GET_CODE (reg) == PARALLEL)
4291 emit_group_load (reg, x, type, -1);
4294 gcc_assert (partial % UNITS_PER_WORD == 0);
4295 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4299 if (extra && args_addr == 0 && where_pad == stack_direction)
4300 anti_adjust_stack (GEN_INT (extra));
4302 if (alignment_pad && args_addr == 0)
4303 anti_adjust_stack (alignment_pad);
4306 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
4310 get_subtarget (rtx x)
4314 /* Only registers can be subtargets. */
4316 /* Don't use hard regs to avoid extending their life. */
4317 || REGNO (x) < FIRST_PSEUDO_REGISTER
4321 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4322 FIELD is a bitfield. Returns true if the optimization was successful,
4323 and there's nothing else to do. */
4326 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4327 unsigned HOST_WIDE_INT bitpos,
4328 unsigned HOST_WIDE_INT bitregion_start,
4329 unsigned HOST_WIDE_INT bitregion_end,
4330 enum machine_mode mode1, rtx str_rtx,
4333 enum machine_mode str_mode = GET_MODE (str_rtx);
4334 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4339 enum tree_code code;
4341 if (mode1 != VOIDmode
4342 || bitsize >= BITS_PER_WORD
4343 || str_bitsize > BITS_PER_WORD
4344 || TREE_SIDE_EFFECTS (to)
4345 || TREE_THIS_VOLATILE (to))
4349 if (TREE_CODE (src) != SSA_NAME)
4351 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4354 srcstmt = get_gimple_for_ssa_name (src);
4356 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4359 code = gimple_assign_rhs_code (srcstmt);
4361 op0 = gimple_assign_rhs1 (srcstmt);
4363 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4364 to find its initialization. Hopefully the initialization will
4365 be from a bitfield load. */
4366 if (TREE_CODE (op0) == SSA_NAME)
4368 gimple op0stmt = get_gimple_for_ssa_name (op0);
4370 /* We want to eventually have OP0 be the same as TO, which
4371 should be a bitfield. */
4373 || !is_gimple_assign (op0stmt)
4374 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4376 op0 = gimple_assign_rhs1 (op0stmt);
4379 op1 = gimple_assign_rhs2 (srcstmt);
4381 if (!operand_equal_p (to, op0, 0))
4384 if (MEM_P (str_rtx))
4386 unsigned HOST_WIDE_INT offset1;
4388 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4389 str_mode = word_mode;
4390 str_mode = get_best_mode (bitsize, bitpos,
4391 bitregion_start, bitregion_end,
4392 MEM_ALIGN (str_rtx), str_mode, 0);
4393 if (str_mode == VOIDmode)
4395 str_bitsize = GET_MODE_BITSIZE (str_mode);
4398 bitpos %= str_bitsize;
4399 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4400 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4402 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4405 /* If the bit field covers the whole REG/MEM, store_field
4406 will likely generate better code. */
4407 if (bitsize >= str_bitsize)
4410 /* We can't handle fields split across multiple entities. */
4411 if (bitpos + bitsize > str_bitsize)
4414 if (BYTES_BIG_ENDIAN)
4415 bitpos = str_bitsize - bitpos - bitsize;
4421 /* For now, just optimize the case of the topmost bitfield,
4422 where we don't need to do any masking, and also
4423 1-bit bitfields, where xor can be used.
4424 We might win by one instruction for the other bitfields
4425 too, if insv/extv instructions aren't used; that
4426 can be added later. */
4427 if (bitpos + bitsize != str_bitsize
4428 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4431 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4432 value = convert_modes (str_mode,
4433 TYPE_MODE (TREE_TYPE (op1)), value,
4434 TYPE_UNSIGNED (TREE_TYPE (op1)));
4436 /* We may be accessing data outside the field, which means
4437 we can alias adjacent data. */
4438 if (MEM_P (str_rtx))
4440 str_rtx = shallow_copy_rtx (str_rtx);
4441 set_mem_alias_set (str_rtx, 0);
4442 set_mem_expr (str_rtx, 0);
4445 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4446 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4448 value = expand_and (str_mode, value, const1_rtx, NULL);
4451 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4452 result = expand_binop (str_mode, binop, str_rtx,
4453 value, str_rtx, 1, OPTAB_WIDEN);
4454 if (result != str_rtx)
4455 emit_move_insn (str_rtx, result);
4460 if (TREE_CODE (op1) != INTEGER_CST)
4462 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4463 value = convert_modes (str_mode,
4464 TYPE_MODE (TREE_TYPE (op1)), value,
4465 TYPE_UNSIGNED (TREE_TYPE (op1)));
4467 /* We may be accessing data outside the field, which means
4468 we can alias adjacent data. */
4469 if (MEM_P (str_rtx))
4471 str_rtx = shallow_copy_rtx (str_rtx);
4472 set_mem_alias_set (str_rtx, 0);
4473 set_mem_expr (str_rtx, 0);
4476 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4477 if (bitpos + bitsize != str_bitsize)
4479 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4481 value = expand_and (str_mode, value, mask, NULL_RTX);
4483 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4484 result = expand_binop (str_mode, binop, str_rtx,
4485 value, str_rtx, 1, OPTAB_WIDEN);
4486 if (result != str_rtx)
4487 emit_move_insn (str_rtx, result);
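/* Illustrative source that reaches the xor path above (assuming the
   target can apply the logical op directly to the word containing the
   bitfield):

     struct flags { unsigned int dirty : 1; unsigned int rest : 31; };
     void toggle (struct flags *p) { p->dirty ^= 1; }

   The store becomes a single xor of a constant mask into the word
   containing the bitfield, instead of an extract/modify/insert
   sequence. */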
4497 /* In the C++ memory model, consecutive bit fields in a structure are
4498 considered one memory location.
4500 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4501 returns the bit range of consecutive bits in which this COMPONENT_REF
4502 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4503 and *OFFSET may be adjusted in the process.
4505 If the access does not need to be restricted, 0 is returned in both
4506 *BITSTART and *BITEND. */
4509 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4510 unsigned HOST_WIDE_INT *bitend,
4512 HOST_WIDE_INT *bitpos,
4515 HOST_WIDE_INT bitoffset;
4518 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4520 field = TREE_OPERAND (exp, 1);
4521 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4522 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4523 need to limit the range we can access. */
4526 *bitstart = *bitend = 0;
4530 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4531 part of a larger bit field, then the representative does not serve any
4532 useful purpose. This can occur in Ada. */
4533 if (handled_component_p (TREE_OPERAND (exp, 0)))
4535 enum machine_mode rmode;
4536 HOST_WIDE_INT rbitsize, rbitpos;
4540 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4541 &roffset, &rmode, &unsignedp, &volatilep, false);
4542 if ((rbitpos % BITS_PER_UNIT) != 0)
4544 *bitstart = *bitend = 0;
4549 /* Compute the adjustment to bitpos from the offset of the field
4550 relative to the representative. DECL_FIELD_OFFSET of field and
4551 repr are the same by construction if they are not constants,
4552 see finish_bitfield_layout. */
4553 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4554 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4555 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4556 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4559 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4560 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4562 /* If the adjustment is larger than bitpos, we would have a negative bit
4563 position for the lower bound and this may wreak havoc later. This can
4564 occur only if we have a non-null offset, so adjust offset and bitpos
4565 to make the lower bound non-negative. */
4566 if (bitoffset > *bitpos)
4568 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4570 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4571 gcc_assert (*offset != NULL_TREE);
4575 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4579 *bitstart = *bitpos - bitoffset;
4581 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
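/* For instance (illustrative only), given

     struct s { char a; int b : 7; int c : 9; char d; };

   the adjacent bitfields B and C form a single memory location under
   the C++11 memory model: a store to C may read-modify-write the bits
   of B and C together, but must not touch A or D, since a concurrent
   store to those bytes could otherwise be lost. get_bit_range above
   computes the bit extent of that B/C region. */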
4584 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4585 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4586 DECL_RTL was not set yet, return NORTL. */
4589 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4591 if (TREE_CODE (addr) != ADDR_EXPR)
4594 tree base = TREE_OPERAND (addr, 0);
4597 || TREE_ADDRESSABLE (base)
4598 || DECL_MODE (base) == BLKmode)
4601 if (!DECL_RTL_SET_P (base))
4604 return (!MEM_P (DECL_RTL (base)));
4607 /* Returns true if the MEM_REF REF refers to an object that does not
4608 reside in memory and has non-BLKmode. */
4611 mem_ref_refers_to_non_mem_p (tree ref)
4613 tree base = TREE_OPERAND (ref, 0);
4614 return addr_expr_of_non_mem_decl_p_1 (base, false);
4617 /* Return TRUE iff OP is an ADDR_EXPR of a DECL that's not
4618 addressable. This is very much like mem_ref_refers_to_non_mem_p,
4619 but instead of the MEM_REF, it takes its base, and it doesn't
4620 assume a DECL is in memory just because its RTL is not set yet. */
4623 addr_expr_of_non_mem_decl_p (tree op)
4625 return addr_expr_of_non_mem_decl_p_1 (op, true);
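/* Such references arise, e.g., when the optimizers fold away all other
   address-takings of a local so that it can live in a register, while a
   MEM_REF form of an access survives (illustrative):

     int x;
     *(int *) &x = 1;

   If X is not addressable and its RTL is a pseudo register, the store
   must be expanded as a register assignment rather than a memory
   store. */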
4628 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4629 is true, try generating a nontemporal store. */
4632 expand_assignment (tree to, tree from, bool nontemporal)
4636 enum machine_mode mode;
4638 enum insn_code icode;
4640 /* Don't crash if the lhs of the assignment was erroneous. */
4641 if (TREE_CODE (to) == ERROR_MARK)
4643 expand_normal (from);
4647 /* Optimize away no-op moves without side-effects. */
4648 if (operand_equal_p (to, from, 0))
4651 /* Handle misaligned stores. */
4652 mode = TYPE_MODE (TREE_TYPE (to));
4653 if ((TREE_CODE (to) == MEM_REF
4654 || TREE_CODE (to) == TARGET_MEM_REF)
4656 && !mem_ref_refers_to_non_mem_p (to)
4657 && ((align = get_object_alignment (to))
4658 < GET_MODE_ALIGNMENT (mode))
4659 && (((icode = optab_handler (movmisalign_optab, mode))
4660 != CODE_FOR_nothing)
4661 || SLOW_UNALIGNED_ACCESS (mode, align)))
4665 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4666 reg = force_not_mem (reg);
4667 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4669 if (icode != CODE_FOR_nothing)
4671 struct expand_operand ops[2];
4673 create_fixed_operand (&ops[0], mem);
4674 create_input_operand (&ops[1], reg, mode);
4675 /* The movmisalign<mode> pattern cannot fail, else the assignment
4676 would silently be omitted. */
4677 expand_insn (icode, 2, ops);
4680 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4681 0, 0, 0, mode, reg);
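/* An illustrative case that reaches the misaligned-store handling
   above, assuming the target provides a movmisalign<mode> pattern:

     typedef int unaligned_int __attribute__ ((aligned (1)));
     void f (unaligned_int *p) { *p = 42; }

   The store has alignment 1, below GET_MODE_ALIGNMENT (SImode), so it
   is expanded through the movmisalign pattern if available, and through
   store_bit_field otherwise. */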
4685 /* Assignment of a structure component needs special treatment
4686 if the structure component's rtx is not simply a MEM.
4687 Assignment of an array element at a constant index, and assignment of
4688 an array element in an unaligned packed structure field, has the same
4689 problem. Same for (partially) storing into a non-memory object. */
4690 if (handled_component_p (to)
4691 || (TREE_CODE (to) == MEM_REF
4692 && mem_ref_refers_to_non_mem_p (to))
4693 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4695 enum machine_mode mode1;
4696 HOST_WIDE_INT bitsize, bitpos;
4697 unsigned HOST_WIDE_INT bitregion_start = 0;
4698 unsigned HOST_WIDE_INT bitregion_end = 0;
4707 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4708 &unsignedp, &volatilep, true);
4710 if (TREE_CODE (to) == COMPONENT_REF
4711 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4712 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4714 /* If we are going to use store_bit_field and extract_bit_field,
4715 make sure to_rtx will be safe for multiple use. */
4716 mode = TYPE_MODE (TREE_TYPE (tem));
4717 if (TREE_CODE (tem) == MEM_REF
4719 && ((align = get_object_alignment (tem))
4720 < GET_MODE_ALIGNMENT (mode))
4721 && ((icode = optab_handler (movmisalign_optab, mode))
4722 != CODE_FOR_nothing))
4724 struct expand_operand ops[2];
4727 to_rtx = gen_reg_rtx (mode);
4728 mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4730 /* If the misaligned store doesn't overwrite all bits, perform a
4731 read-modify-write cycle on MEM. */
4732 if (bitsize != GET_MODE_BITSIZE (mode))
4734 create_input_operand (&ops[0], to_rtx, mode);
4735 create_fixed_operand (&ops[1], mem);
4736 /* The movmisalign<mode> pattern cannot fail, else the assignment
4737 would silently be omitted. */
4738 expand_insn (icode, 2, ops);
4740 mem = copy_rtx (mem);
4746 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4749 /* If the bitfield is volatile, we want to access it in the
4750 field's mode, not the computed mode.
4751 If a MEM has VOIDmode (external with incomplete type),
4752 use BLKmode for it instead. */
4755 if (volatilep && flag_strict_volatile_bitfields > 0)
4756 to_rtx = adjust_address (to_rtx, mode1, 0);
4757 else if (GET_MODE (to_rtx) == VOIDmode)
4758 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4763 enum machine_mode address_mode;
4766 if (!MEM_P (to_rtx))
4768 /* We can get constant negative offsets into arrays with broken
4769 user code. Translate this to a trap instead of ICEing. */
4770 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4771 expand_builtin_trap ();
4772 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4775 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4776 address_mode = get_address_mode (to_rtx);
4777 if (GET_MODE (offset_rtx) != address_mode)
4778 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4780 /* A constant address in TO_RTX can have VOIDmode; we must not
4781 try to call force_reg in that case, so avoid it. */
4783 && GET_MODE (to_rtx) == BLKmode
4784 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4786 && (bitpos % bitsize) == 0
4787 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4788 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4790 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4794 to_rtx = offset_address (to_rtx, offset_rtx,
4795 highest_pow2_factor_for_target (to,
4799 /* No action is needed if the target is not a memory and the field
4800 lies completely outside that target. This can occur if the source
4801 code contains an out-of-bounds access to a small array. */
4803 && GET_MODE (to_rtx) != BLKmode
4804 && (unsigned HOST_WIDE_INT) bitpos
4805 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4807 expand_normal (from);
4810 /* Handle expand_expr of a complex value returning a CONCAT. */
4811 else if (GET_CODE (to_rtx) == CONCAT)
4813 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4814 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4816 && bitsize == mode_bitsize)
4817 result = store_expr (from, to_rtx, false, nontemporal);
4818 else if (bitsize == mode_bitsize / 2
4819 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4820 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4822 else if (bitpos + bitsize <= mode_bitsize / 2)
4823 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4824 bitregion_start, bitregion_end,
4826 get_alias_set (to), nontemporal);
4827 else if (bitpos >= mode_bitsize / 2)
4828 result = store_field (XEXP (to_rtx, 1), bitsize,
4829 bitpos - mode_bitsize / 2,
4830 bitregion_start, bitregion_end,
4832 get_alias_set (to), nontemporal);
4833 else if (bitpos == 0 && bitsize == mode_bitsize)
4836 result = expand_normal (from);
4837 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4838 TYPE_MODE (TREE_TYPE (from)), 0);
4839 emit_move_insn (XEXP (to_rtx, 0),
4840 read_complex_part (from_rtx, false));
4841 emit_move_insn (XEXP (to_rtx, 1),
4842 read_complex_part (from_rtx, true));
4846 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4847 GET_MODE_SIZE (GET_MODE (to_rtx)));
4848 write_complex_part (temp, XEXP (to_rtx, 0), false);
4849 write_complex_part (temp, XEXP (to_rtx, 1), true);
4850 result = store_field (temp, bitsize, bitpos,
4851 bitregion_start, bitregion_end,
4853 get_alias_set (to), nontemporal);
4854 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4855 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
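/* The CONCAT cases above arise for stores into complex values
   (illustrative):

     _Complex double z;
     __real__ z = 1.0;

   A store to exactly one half goes directly into XEXP (to_rtx, 0) or
   XEXP (to_rtx, 1); a store that straddles both halves goes through the
   stack temporary built just above. */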
4862 /* If the field is at offset zero, we could have been given the
4863 DECL_RTX of the parent struct. Don't munge it. */
4864 to_rtx = shallow_copy_rtx (to_rtx);
4866 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4868 /* Deal with volatile and readonly fields. The former is only
4869 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4871 MEM_VOLATILE_P (to_rtx) = 1;
4874 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4875 bitregion_start, bitregion_end,
4880 result = store_field (to_rtx, bitsize, bitpos,
4881 bitregion_start, bitregion_end,
4883 get_alias_set (to), nontemporal);
4888 struct expand_operand ops[2];
4890 create_fixed_operand (&ops[0], mem);
4891 create_input_operand (&ops[1], to_rtx, mode);
4892 /* The movmisalign<mode> pattern cannot fail, else the assignment
4893 would silently be omitted. */
4894 expand_insn (icode, 2, ops);
4898 preserve_temp_slots (result);
4903 /* If the rhs is a function call and its value is not an aggregate,
4904 call the function before we start to compute the lhs.
4905 This is needed for correct code for cases such as
4906 val = setjmp (buf) on machines where reference to val
4907 requires loading up part of an address in a separate insn.
4909 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4910 since it might be a promoted variable where the zero- or sign- extension
4911 needs to be done. Handling this in the normal way is safe because no
4912 computation is done before the call. The same is true for SSA names. */
4913 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4914 && COMPLETE_TYPE_P (TREE_TYPE (from))
4915 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4916 && ! (((TREE_CODE (to) == VAR_DECL
4917 || TREE_CODE (to) == PARM_DECL
4918 || TREE_CODE (to) == RESULT_DECL)
4919 && REG_P (DECL_RTL (to)))
4920 || TREE_CODE (to) == SSA_NAME))
4925 value = expand_normal (from);
4927 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4929 /* Handle calls that return values in multiple non-contiguous locations.
4930 The Irix 6 ABI has examples of this. */
4931 if (GET_CODE (to_rtx) == PARALLEL)
4933 if (GET_CODE (value) == PARALLEL)
4934 emit_group_move (to_rtx, value);
4936 emit_group_load (to_rtx, value, TREE_TYPE (from),
4937 int_size_in_bytes (TREE_TYPE (from)));
4939 else if (GET_CODE (value) == PARALLEL)
4940 emit_group_store (to_rtx, value, TREE_TYPE (from),
4941 int_size_in_bytes (TREE_TYPE (from)));
4942 else if (GET_MODE (to_rtx) == BLKmode)
4944 /* Handle calls that return BLKmode values in registers. */
4946 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4948 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4952 if (POINTER_TYPE_P (TREE_TYPE (to)))
4953 value = convert_memory_address_addr_space
4954 (GET_MODE (to_rtx), value,
4955 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4957 emit_move_insn (to_rtx, value);
4959 preserve_temp_slots (to_rtx);
4964 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4965 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4967 /* Don't move directly into a return register. */
4968 if (TREE_CODE (to) == RESULT_DECL
4969 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4975 /* If the source is itself a return value, it still is in a pseudo at
4976 this point so we can move it back to the return register directly. */
4978 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
4979 && TREE_CODE (from) != CALL_EXPR)
4980 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4982 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4984 /* Handle calls that return values in multiple non-contiguous locations.
4985 The Irix 6 ABI has examples of this. */
4986 if (GET_CODE (to_rtx) == PARALLEL)
4988 if (GET_CODE (temp) == PARALLEL)
4989 emit_group_move (to_rtx, temp);
4991 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4992 int_size_in_bytes (TREE_TYPE (from)));
4995 emit_move_insn (to_rtx, temp);
4997 preserve_temp_slots (to_rtx);
5002 /* If we are returning the contents of an object that overlaps the
5003 place where the value is being stored, use a safe function (memmove)
5004 when copying a value through a pointer into a structure value return block. */
5005 if (TREE_CODE (to) == RESULT_DECL
5006 && TREE_CODE (from) == INDIRECT_REF
5007 && ADDR_SPACE_GENERIC_P
5008 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5009 && refs_may_alias_p (to, from)
5010 && cfun->returns_struct
5011 && !cfun->returns_pcc_struct)
5016 size = expr_size (from);
5017 from_rtx = expand_normal (from);
5019 emit_library_call (memmove_libfunc, LCT_NORMAL,
5020 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5021 XEXP (from_rtx, 0), Pmode,
5022 convert_to_mode (TYPE_MODE (sizetype),
5023 size, TYPE_UNSIGNED (sizetype)),
5024 TYPE_MODE (sizetype));
5026 preserve_temp_slots (to_rtx);
5031 /* Compute FROM and store the value in the rtx we got. */
5034 result = store_expr (from, to_rtx, 0, nontemporal);
5035 preserve_temp_slots (result);
5040 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
5041 succeeded, false otherwise. */
5044 emit_storent_insn (rtx to, rtx from)
5046 struct expand_operand ops[2];
5047 enum machine_mode mode = GET_MODE (to);
5048 enum insn_code code = optab_handler (storent_optab, mode);
5050 if (code == CODE_FOR_nothing)
5053 create_fixed_operand (&ops[0], to);
5054 create_input_operand (&ops[1], from, mode);
5055 return maybe_expand_insn (code, 2, ops);
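/* Nontemporal (streaming) stores are requested for assignments the
   optimizers mark as such, typically stores whose data will not be
   re-read soon (illustrative):

     void zero (double *a, long n)
     {
       long i;
       for (i = 0; i < n; i++)
         a[i] = 0.0;
     }

   On targets whose storent<mode> pattern expands to a cache-bypassing
   store (e.g. x86 movnti and friends) this avoids polluting the cache;
   without such a pattern the caller falls back to an ordinary store. */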
5058 /* Generate code for computing expression EXP,
5059 and storing the value into TARGET.
5061 If the mode is BLKmode then we may return TARGET itself.
5062 It turns out that in BLKmode it doesn't cause a problem,
5063 because C has no operators that could combine two different
5064 assignments into the same BLKmode object with different values
5065 with no sequence point. Will other languages need this to be more thorough?
5068 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5069 stack, and block moves may need to be treated specially.
5071 If NONTEMPORAL is true, try using a nontemporal store instruction. */
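/* Illustrative example for the STRING_CST path below:

     char buf[16] = "hi";

   expands as a store_by_pieces of the string bytes followed by a
   clear_storage of the remainder of BUF, rather than materializing a
   16-byte constant and block-copying it. */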
5074 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5077 rtx alt_rtl = NULL_RTX;
5078 location_t loc = curr_insn_location ();
5080 if (VOID_TYPE_P (TREE_TYPE (exp)))
5082 /* C++ can generate ?: expressions with a throw expression in one
5083 branch and an rvalue in the other. Here, we resolve attempts to
5084 store the throw expression's nonexistent result. */
5085 gcc_assert (!call_param_p);
5086 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5089 if (TREE_CODE (exp) == COMPOUND_EXPR)
5091 /* Perform the first part of the compound expression, then assign from the second part. */
5093 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5094 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5095 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5098 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5100 /* For a conditional expression, get a safe form of the target. Then
5101 test the condition, doing the appropriate assignment on either
5102 side. This avoids the creation of unnecessary temporaries.
5103 For non-BLKmode, it is more efficient not to do this. */
5105 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5107 do_pending_stack_adjust ();
5109 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5110 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5112 emit_jump_insn (gen_jump (lab2));
5115 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5122 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5123 /* If this is a scalar in a register that is stored in a wider mode
5124 than the declared mode, compute the result into its declared mode
5125 and then convert to the wider mode. Our value is the computed expression. */
5128 rtx inner_target = 0;
5130 /* We can do the conversion inside EXP, which will often result
5131 in some optimizations. Do the conversion in two steps: first
5132 change the signedness, if needed, then the extend. But don't
5133 do this if the type of EXP is a subtype of something else
5134 since then the conversion might involve more than just
5135 converting modes. */
5136 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5137 && TREE_TYPE (TREE_TYPE (exp)) == 0
5138 && GET_MODE_PRECISION (GET_MODE (target))
5139 == TYPE_PRECISION (TREE_TYPE (exp)))
5141 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5142 != SUBREG_PROMOTED_UNSIGNED_P (target))
5144 /* Some types, e.g. Fortran's logical*4, won't have a signed
5145 version, so use the mode instead. */
5147 = (signed_or_unsigned_type_for
5148 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5150 ntype = lang_hooks.types.type_for_mode
5151 (TYPE_MODE (TREE_TYPE (exp)),
5152 SUBREG_PROMOTED_UNSIGNED_P (target));
5154 exp = fold_convert_loc (loc, ntype, exp);
5157 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5158 (GET_MODE (SUBREG_REG (target)),
5159 SUBREG_PROMOTED_UNSIGNED_P (target)),
5162 inner_target = SUBREG_REG (target);
5165 temp = expand_expr (exp, inner_target, VOIDmode,
5166 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5168 /* If TEMP is a VOIDmode constant, use convert_modes to make
5169 sure that we properly convert it. */
5170 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5172 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5173 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5174 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5175 GET_MODE (target), temp,
5176 SUBREG_PROMOTED_UNSIGNED_P (target));
5179 convert_move (SUBREG_REG (target), temp,
5180 SUBREG_PROMOTED_UNSIGNED_P (target));
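/* Promoted SUBREGs occur on targets that keep narrow values extended
   in wider registers, e.g. SImode ints held sign-extended in DImode
   registers on MIPS64 (illustrative):

     int f (int x) { return x + 1; }

   The sum is computed in the declared SImode and then extended to the
   register's DImode, which is what the two-step conversion above
   arranges. */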
5184 else if ((TREE_CODE (exp) == STRING_CST
5185 || (TREE_CODE (exp) == MEM_REF
5186 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5187 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5189 && integer_zerop (TREE_OPERAND (exp, 1))))
5190 && !nontemporal && !call_param_p
5193 /* Optimize initialization of an array with a STRING_CST. */
5194 HOST_WIDE_INT exp_len, str_copy_len;
5196 tree str = TREE_CODE (exp) == STRING_CST
5197 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5199 exp_len = int_expr_size (exp);
5203 if (TREE_STRING_LENGTH (str) <= 0)
5206 str_copy_len = strlen (TREE_STRING_POINTER (str));
5207 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5210 str_copy_len = TREE_STRING_LENGTH (str);
5211 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5212 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5214 str_copy_len += STORE_MAX_PIECES - 1;
5215 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5217 str_copy_len = MIN (str_copy_len, exp_len);
5218 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5219 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5220 MEM_ALIGN (target), false))
5225 dest_mem = store_by_pieces (dest_mem,
5226 str_copy_len, builtin_strncpy_read_str,
5228 TREE_STRING_POINTER (str)),
5229 MEM_ALIGN (target), false,
5230 exp_len > str_copy_len ? 1 : 0);
5231 if (exp_len > str_copy_len)
5232 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5233 GEN_INT (exp_len - str_copy_len),
5242 /* If we want to use a nontemporal store, force the value into a register first. */
5244 tmp_target = nontemporal ? NULL_RTX : target;
5245 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5247 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5251 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5252 the same as that of TARGET, adjust the constant. This is needed, for
5253 example, in case it is a CONST_DOUBLE and we want only a word-sized
5255 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5256 && TREE_CODE (exp) != ERROR_MARK
5257 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5258 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5259 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5261 /* If value was not generated in the target, store it there.
5262 Convert the value to TARGET's type first if necessary and emit the
5263 pending incrementations that have been queued when expanding EXP.
5264 Note that we cannot emit the whole queue blindly because this will
5265 effectively disable the POST_INC optimization later.
5267 If TEMP and TARGET compare equal according to rtx_equal_p, but
5268 one or both of them are volatile memory refs, we have to distinguish
5270 - expand_expr has used TARGET. In this case, we must not generate
5271 another copy. This can be detected by TARGET being equal according to ==.
5273 - expand_expr has not used TARGET - that means that the source just
5274 happens to have the same RTX form. Since temp will have been created
5275 by expand_expr, it will compare unequal according to == .
5276 We must generate a copy in this case, to reach the correct number
5277 of volatile memory references. */
5279 if ((! rtx_equal_p (temp, target)
5280 || (temp != target && (side_effects_p (temp)
5281 || side_effects_p (target))))
5282 && TREE_CODE (exp) != ERROR_MARK
5283 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5284 but TARGET is not valid memory reference, TEMP will differ
5285 from TARGET although it is really the same location. */
5287 && rtx_equal_p (alt_rtl, target)
5288 && !side_effects_p (alt_rtl)
5289 && !side_effects_p (target))
5290 /* If there's nothing to copy, don't bother. Don't call
5291 expr_size unless necessary, because some front ends' (C++)
5292 expr_size hook must not be given objects that are not
5293 supposed to be bit-copied or bit-initialized. */
5294 && expr_size (exp) != const0_rtx)
5296 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5298 if (GET_MODE (target) == BLKmode)
5300 /* Handle calls that return BLKmode values in registers. */
5301 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5302 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5304 store_bit_field (target,
5305 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5306 0, 0, 0, GET_MODE (temp), temp);
5309 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5312 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5314 /* Handle copying a string constant into an array. The string
5315 constant may be shorter than the array. So copy just the string's
5316 actual length, and clear the rest. First get the size of the data
5317 type of the string, which is actually the size of the target. */
5318 rtx size = expr_size (exp);
5320 if (CONST_INT_P (size)
5321 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5322 emit_block_move (target, temp, size,
5324 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5327 enum machine_mode pointer_mode
5328 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5329 enum machine_mode address_mode = get_address_mode (target);
5331 /* Compute the size of the data to copy from the string. */
5333 = size_binop_loc (loc, MIN_EXPR,
5334 make_tree (sizetype, size),
5335 size_int (TREE_STRING_LENGTH (exp)));
5337 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5339 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5342 /* Copy that much. */
5343 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5344 TYPE_UNSIGNED (sizetype));
5345 emit_block_move (target, temp, copy_size_rtx,
5347 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5349 /* Figure out how much is left in TARGET that we have to clear.
5350 Do all calculations in pointer_mode. */
5351 if (CONST_INT_P (copy_size_rtx))
5353 size = plus_constant (address_mode, size,
5354 -INTVAL (copy_size_rtx));
5355 target = adjust_address (target, BLKmode,
5356 INTVAL (copy_size_rtx));
5360 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5361 copy_size_rtx, NULL_RTX, 0,
5364 if (GET_MODE (copy_size_rtx) != address_mode)
5365 copy_size_rtx = convert_to_mode (address_mode,
5367 TYPE_UNSIGNED (sizetype));
5369 target = offset_address (target, copy_size_rtx,
5370 highest_pow2_factor (copy_size));
5371 label = gen_label_rtx ();
5372 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5373 GET_MODE (size), 0, label);
5376 if (size != const0_rtx)
5377 clear_storage (target, size, BLOCK_OP_NORMAL);
5383 /* Handle calls that return values in multiple non-contiguous locations.
5384 The Irix 6 ABI has examples of this. */
5385 else if (GET_CODE (target) == PARALLEL)
5387 if (GET_CODE (temp) == PARALLEL)
5388 emit_group_move (target, temp);
5390 emit_group_load (target, temp, TREE_TYPE (exp),
5391 int_size_in_bytes (TREE_TYPE (exp)));
5393 else if (GET_CODE (temp) == PARALLEL)
5394 emit_group_store (target, temp, TREE_TYPE (exp),
5395 int_size_in_bytes (TREE_TYPE (exp)));
5396 else if (GET_MODE (temp) == BLKmode)
5397 emit_block_move (target, temp, expr_size (exp),
5399 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5400 /* If we emit a nontemporal store, there is nothing else to do. */
5401 else if (nontemporal && emit_storent_insn (target, temp))
5405 temp = force_operand (temp, target);
5407 emit_move_insn (target, temp);
5414 /* Return true if field F of structure TYPE is a flexible array. */
5417 flexible_array_member_p (const_tree f, const_tree type)
5422 return (DECL_CHAIN (f) == NULL
5423 && TREE_CODE (tf) == ARRAY_TYPE
5425 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5426 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5427 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5428 && int_size_in_bytes (type) >= 0);
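/* E.g. (illustrative):

     struct vec { int len; double data[]; };

   DATA is a flexible array member: it is the last field, its domain has
   a zero lower bound and no upper bound, and the enclosing struct still
   has a known size. Such trailing arrays take no initializer, so they
   are excluded when counting a constructor's elements. */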
5431 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5432 must have in order for it to completely initialize a value of type TYPE.
5433 Return -1 if the number isn't known.
5435 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
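/* For example (illustrative):

     struct point { int x, y; };
     struct point pts[2];

   count_type_elements returns 2 for the type of PTS when FOR_CTOR_P
   (a complete constructor needs two top-level elements) and 4
   otherwise (four scalars in total). */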
5437 static HOST_WIDE_INT
5438 count_type_elements (const_tree type, bool for_ctor_p)
5440 switch (TREE_CODE (type))
5446 nelts = array_type_nelts (type);
5447 if (nelts && host_integerp (nelts, 1))
5449 unsigned HOST_WIDE_INT n;
5451 n = tree_low_cst (nelts, 1) + 1;
5452 if (n == 0 || for_ctor_p)
5455 return n * count_type_elements (TREE_TYPE (type), false);
5457 return for_ctor_p ? -1 : 1;
5462 unsigned HOST_WIDE_INT n;
5466 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5467 if (TREE_CODE (f) == FIELD_DECL)
5470 n += count_type_elements (TREE_TYPE (f), false);
5471 else if (!flexible_array_member_p (f, type))
5472 /* Don't count flexible arrays, which are not supposed
5473 to be initialized. */
5481 case QUAL_UNION_TYPE:
5486 gcc_assert (!for_ctor_p);
5487 /* Estimate the number of scalars in each field and pick the
5488 maximum. Other estimates would do instead; the idea is simply
5489 to make sure that the estimate is not sensitive to the ordering of the fields. */
5492 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5493 if (TREE_CODE (f) == FIELD_DECL)
5495 m = count_type_elements (TREE_TYPE (f), false);
5496 /* If the field doesn't span the whole union, add an extra
5497 scalar for the rest. */
5498 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5499 TYPE_SIZE (type)) != 1)
5511 return TYPE_VECTOR_SUBPARTS (type);
5515 case FIXED_POINT_TYPE:
5520 case REFERENCE_TYPE:
5536 /* Helper for categorize_ctor_elements. Identical interface. */
5539 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5540 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5542 unsigned HOST_WIDE_INT idx;
5543 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5544 tree value, purpose, elt_type;
5546 /* Whether CTOR is a valid constant initializer, in accordance with what
5547 initializer_constant_valid_p does. If inferred from the constructor
5548 elements, true until proven otherwise. */
5549 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5550 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5555 elt_type = NULL_TREE;
5557 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5559 HOST_WIDE_INT mult = 1;
5561 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5563 tree lo_index = TREE_OPERAND (purpose, 0);
5564 tree hi_index = TREE_OPERAND (purpose, 1);
5566 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5567 mult = (tree_low_cst (hi_index, 1)
5568 - tree_low_cst (lo_index, 1) + 1);
5571 elt_type = TREE_TYPE (value);
5573 switch (TREE_CODE (value))
5577 HOST_WIDE_INT nz = 0, ic = 0;
5579 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5582 nz_elts += mult * nz;
5583 init_elts += mult * ic;
5585 if (const_from_elts_p && const_p)
5586 const_p = const_elt_p;
5593 if (!initializer_zerop (value))
5599 nz_elts += mult * TREE_STRING_LENGTH (value);
5600 init_elts += mult * TREE_STRING_LENGTH (value);
5604 if (!initializer_zerop (TREE_REALPART (value)))
5606 if (!initializer_zerop (TREE_IMAGPART (value)))
5614 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5616 tree v = VECTOR_CST_ELT (value, i);
5617 if (!initializer_zerop (v))
5626 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5627 nz_elts += mult * tc;
5628 init_elts += mult * tc;
5630 if (const_from_elts_p && const_p)
5631 const_p = initializer_constant_valid_p (value, elt_type)
5638 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5639 num_fields, elt_type))
5640 *p_complete = false;
5642 *p_nz_elts += nz_elts;
5643 *p_init_elts += init_elts;
5648 /* Examine CTOR to discover:
5649 * how many scalar fields are set to nonzero values,
5650 and place it in *P_NZ_ELTS;
5651 * how many scalar fields in total are in CTOR,
5652 and place it in *P_INIT_ELTS.
5653 * whether the constructor is complete -- in the sense that every
5654 meaningful byte is explicitly given a value --
5655 and place it in *P_COMPLETE.
5657 Return whether or not CTOR is a valid static constant initializer, the same
5658 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5661 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5662 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5668 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
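/* E.g. (illustrative):

     struct s { int a, b, c, d; } s = { 1, 0, 2 };

   yields *P_NZ_ELTS == 2 (two nonzero scalars), *P_INIT_ELTS == 3, and
   *P_COMPLETE == false, since field D receives no explicit
   initializer. */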
5671 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5672 of which had type LAST_TYPE. Each element was itself a complete
5673 initializer, in the sense that every meaningful byte was explicitly
5674 given a value. Return true if the same is true for the constructor as a whole. */
5678 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5679 const_tree last_type)
5681 if (TREE_CODE (type) == UNION_TYPE
5682 || TREE_CODE (type) == QUAL_UNION_TYPE)
5687 gcc_assert (num_elts == 1 && last_type);
5689 /* ??? We could look at each element of the union, and find the
5690 largest element. Which would avoid comparing the size of the
5691 initialized element against any tail padding in the union.
5692 Doesn't seem worth the effort... */
5693 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5696 return count_type_elements (type, true) == num_elts;
5699 /* Return 1 if EXP contains mostly (3/4) zeros. */
5702 mostly_zeros_p (const_tree exp)
5704 if (TREE_CODE (exp) == CONSTRUCTOR)
5706 HOST_WIDE_INT nz_elts, init_elts;
5709 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5710 return !complete_p || nz_elts < init_elts / 4;
5713 return initializer_zerop (exp);
5716 /* Return 1 if EXP contains all zeros. */
5719 all_zeros_p (const_tree exp)
5721 if (TREE_CODE (exp) == CONSTRUCTOR)
5723 HOST_WIDE_INT nz_elts, init_elts;
5726 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5727 return nz_elts == 0;
5730 return initializer_zerop (exp);
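/* E.g. (illustrative):

     int a[8] = { [5] = 1 };

   Only one of the eight elements is explicitly initialized, so the
   constructor is incomplete and mostly_zeros_p returns true;
   store_constructor will clear the whole array first and then store
   just the single nonzero element. */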
5733 /* Helper function for store_constructor.
5734 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5735 CLEARED is as for store_constructor.
5736 ALIAS_SET is the alias set to use for any stores.
5738 This provides a recursive shortcut back to store_constructor when it isn't
5739 necessary to go through store_field. This is so that we can pass through
5740 the cleared field to let store_constructor know that we may not have to
5741 clear a substructure if the outer structure has already been cleared. */
5744 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5745 HOST_WIDE_INT bitpos, enum machine_mode mode,
5746 tree exp, int cleared, alias_set_type alias_set)
5748 if (TREE_CODE (exp) == CONSTRUCTOR
5749 /* We can only call store_constructor recursively if the size and
5750 bit position are on a byte boundary. */
5751 && bitpos % BITS_PER_UNIT == 0
5752 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5753 /* If we have a nonzero bitpos for a register target, then we just
5754 let store_field do the bitfield handling. This is unlikely to
5755 generate unnecessary clear instructions anyway. */
5756 && (bitpos == 0 || MEM_P (target)))
5760 = adjust_address (target,
5761 GET_MODE (target) == BLKmode
5763 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5764 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5767 /* Update the alias set, if required. */
5768 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5769 && MEM_ALIAS_SET (target) != 0)
5771 target = copy_rtx (target);
5772 set_mem_alias_set (target, alias_set);
5775 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5778 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5781 /* Store the value of constructor EXP into the rtx TARGET.
5782 TARGET is either a REG or a MEM; we know it cannot conflict, since
5783 safe_from_p has been called.
5784 CLEARED is true if TARGET is known to have been zero'd.
5785 SIZE is the number of bytes of TARGET we are allowed to modify: this
5786 may not be the same as the size of EXP if we are assigning to a field
5787 which has been packed to exclude padding bits. */
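/* Illustrative input for the RANGE_EXPR handling in the ARRAY_TYPE
   case below:

     int a[100] = { [10 ... 89] = 7 };

   Small constant ranges are unrolled into individual stores; larger
   ones, like this one, are expanded into the runtime loop built around
   LOOP_START/LOOP_END further down. */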
5790 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5792 tree type = TREE_TYPE (exp);
5793 #ifdef WORD_REGISTER_OPERATIONS
5794 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5797 switch (TREE_CODE (type))
5801 case QUAL_UNION_TYPE:
5803 unsigned HOST_WIDE_INT idx;
5806 /* If size is zero or the target is already cleared, do nothing. */
5807 if (size == 0 || cleared)
5809 /* We either clear the aggregate or indicate the value is dead. */
5810 else if ((TREE_CODE (type) == UNION_TYPE
5811 || TREE_CODE (type) == QUAL_UNION_TYPE)
5812 && ! CONSTRUCTOR_ELTS (exp))
5813 /* If the constructor is empty, clear the union. */
5815 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5819 /* If we are building a static constructor into a register,
5820 set the initial value as zero so we can fold the value into
5821 a constant. But if more than one register is involved,
5822 this probably loses. */
5823 else if (REG_P (target) && TREE_STATIC (exp)
5824 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5826 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5830 /* If the constructor has fewer fields than the structure or
5831 if we are initializing the structure to mostly zeros, clear
5832 the whole structure first. Don't do this if TARGET is a
5833 register whose mode size isn't equal to SIZE since
5834 clear_storage can't handle this case. */
5836 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5837 != fields_length (type))
5838 || mostly_zeros_p (exp))
5840 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5843 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5847 if (REG_P (target) && !cleared)
5848 emit_clobber (target);
5850 /* Store each element of the constructor into the
5851 corresponding field of TARGET. */
5852 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5854 enum machine_mode mode;
5855 HOST_WIDE_INT bitsize;
5856 HOST_WIDE_INT bitpos = 0;
5858 rtx to_rtx = target;
5860 /* Just ignore missing fields. We cleared the whole
5861 structure, above, if any fields are missing. */
5865 if (cleared && initializer_zerop (value))
5868 if (host_integerp (DECL_SIZE (field), 1))
5869 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5873 mode = DECL_MODE (field);
5874 if (DECL_BIT_FIELD (field))
5877 offset = DECL_FIELD_OFFSET (field);
5878 if (host_integerp (offset, 0)
5879 && host_integerp (bit_position (field), 0))
5881 bitpos = int_bit_position (field);
5885 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5889 enum machine_mode address_mode;
5893 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5894 make_tree (TREE_TYPE (exp),
5897 offset_rtx = expand_normal (offset);
5898 gcc_assert (MEM_P (to_rtx));
5900 address_mode = get_address_mode (to_rtx);
5901 if (GET_MODE (offset_rtx) != address_mode)
5902 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5904 to_rtx = offset_address (to_rtx, offset_rtx,
5905 highest_pow2_factor (offset));
5908 #ifdef WORD_REGISTER_OPERATIONS
5909 /* If this initializes a field that is smaller than a
5910 word, at the start of a word, try to widen it to a full
5911 word. This special case allows us to output C++ member
5912 function initializations in a form that the optimizers can understand. */
5915 && bitsize < BITS_PER_WORD
5916 && bitpos % BITS_PER_WORD == 0
5917 && GET_MODE_CLASS (mode) == MODE_INT
5918 && TREE_CODE (value) == INTEGER_CST
5920 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5922 tree type = TREE_TYPE (value);
5924 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5926 type = lang_hooks.types.type_for_mode
5927 (word_mode, TYPE_UNSIGNED (type));
5928 value = fold_convert (type, value);
5931 if (BYTES_BIG_ENDIAN)
5933 = fold_build2 (LSHIFT_EXPR, type, value,
5934 build_int_cst (type,
5935 BITS_PER_WORD - bitsize));
5936 bitsize = BITS_PER_WORD;
5941 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5942 && DECL_NONADDRESSABLE_P (field))
5944 to_rtx = copy_rtx (to_rtx);
5945 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5948 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5950 get_alias_set (TREE_TYPE (field)));
5957 unsigned HOST_WIDE_INT i;
5960 tree elttype = TREE_TYPE (type);
5962 HOST_WIDE_INT minelt = 0;
5963 HOST_WIDE_INT maxelt = 0;
5965 domain = TYPE_DOMAIN (type);
5966 const_bounds_p = (TYPE_MIN_VALUE (domain)
5967 && TYPE_MAX_VALUE (domain)
5968 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5969 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5971 /* If we have constant bounds for the range of the type, get them. */
5974 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5975 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5978 /* If the constructor has fewer elements than the array, clear
5979 the whole array first. Similarly if this is a static
5980 constructor of a non-BLKmode object. */
5983 else if (REG_P (target) && TREE_STATIC (exp))
5987 unsigned HOST_WIDE_INT idx;
5989 HOST_WIDE_INT count = 0, zero_count = 0;
5990 need_to_clear = ! const_bounds_p;
5992 /* This loop is a more accurate version of the loop in
5993 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5994 is also needed to check for missing elements. */
5995 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5997 HOST_WIDE_INT this_node_count;
6002 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6004 tree lo_index = TREE_OPERAND (index, 0);
6005 tree hi_index = TREE_OPERAND (index, 1);
6007 if (! host_integerp (lo_index, 1)
6008 || ! host_integerp (hi_index, 1))
6014 this_node_count = (tree_low_cst (hi_index, 1)
6015 - tree_low_cst (lo_index, 1) + 1);
6018 this_node_count = 1;
6020 count += this_node_count;
6021 if (mostly_zeros_p (value))
6022 zero_count += this_node_count;
6025 /* Clear the entire array first if there are any missing
6026 elements, or if the incidence of zero elements is >= 75%. */
6029 && (count < maxelt - minelt + 1
6030 || 4 * zero_count >= 3 * count))
6034 if (need_to_clear && size > 0)
6037 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6039 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6043 if (!cleared && REG_P (target))
6044 /* Inform later passes that the old value is dead. */
6045 emit_clobber (target);
6047 /* Store each element of the constructor into the
6048 corresponding element of TARGET, determined by counting the elements. */
6050 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6052 enum machine_mode mode;
6053 HOST_WIDE_INT bitsize;
6054 HOST_WIDE_INT bitpos;
6055 rtx xtarget = target;
6057 if (cleared && initializer_zerop (value))
6060 mode = TYPE_MODE (elttype);
6061 if (mode == BLKmode)
6062 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6063 ? tree_low_cst (TYPE_SIZE (elttype), 1)
6066 bitsize = GET_MODE_BITSIZE (mode);
6068 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6070 tree lo_index = TREE_OPERAND (index, 0);
6071 tree hi_index = TREE_OPERAND (index, 1);
6072 rtx index_r, pos_rtx;
6073 HOST_WIDE_INT lo, hi, count;
6076 /* If the range is constant and "small", unroll the loop. */
6078 && host_integerp (lo_index, 0)
6079 && host_integerp (hi_index, 0)
6080 && (lo = tree_low_cst (lo_index, 0),
6081 hi = tree_low_cst (hi_index, 0),
6082 count = hi - lo + 1,
6085 || (host_integerp (TYPE_SIZE (elttype), 1)
6086 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6089 lo -= minelt; hi -= minelt;
6090 for (; lo <= hi; lo++)
6092 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6095 && !MEM_KEEP_ALIAS_SET_P (target)
6096 && TREE_CODE (type) == ARRAY_TYPE
6097 && TYPE_NONALIASED_COMPONENT (type))
6099 target = copy_rtx (target);
6100 MEM_KEEP_ALIAS_SET_P (target) = 1;
6103 store_constructor_field
6104 (target, bitsize, bitpos, mode, value, cleared,
6105 get_alias_set (elttype));
6110 rtx loop_start = gen_label_rtx ();
6111 rtx loop_end = gen_label_rtx ();
6114 expand_normal (hi_index);
6116 index = build_decl (EXPR_LOCATION (exp),
6117 VAR_DECL, NULL_TREE, domain);
6118 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6119 SET_DECL_RTL (index, index_r);
6120 store_expr (lo_index, index_r, 0, false);
6122 /* Build the head of the loop. */
6123 do_pending_stack_adjust ();
6124 emit_label (loop_start);
6126 /* Assign value to element index. */
6128 fold_convert (ssizetype,
6129 fold_build2 (MINUS_EXPR,
6132 TYPE_MIN_VALUE (domain)));
6135 size_binop (MULT_EXPR, position,
6136 fold_convert (ssizetype,
6137 TYPE_SIZE_UNIT (elttype)));
6139 pos_rtx = expand_normal (position);
6140 xtarget = offset_address (target, pos_rtx,
6141 highest_pow2_factor (position));
6142 xtarget = adjust_address (xtarget, mode, 0);
6143 if (TREE_CODE (value) == CONSTRUCTOR)
6144 store_constructor (value, xtarget, cleared,
6145 bitsize / BITS_PER_UNIT);
6147 store_expr (value, xtarget, 0, false);
6149 /* Generate a conditional jump to exit the loop. */
6150 exit_cond = build2 (LT_EXPR, integer_type_node,
6152 jumpif (exit_cond, loop_end, -1);
6154 /* Update the loop counter, and jump to the head of the loop. */
6156 expand_assignment (index,
6157 build2 (PLUS_EXPR, TREE_TYPE (index),
6158 index, integer_one_node),
6161 emit_jump (loop_start);
6163 /* Build the end of the loop. */
6164 emit_label (loop_end);
6167 else if ((index != 0 && ! host_integerp (index, 0))
6168 || ! host_integerp (TYPE_SIZE (elttype), 1))
6173 index = ssize_int (1);
6176 index = fold_convert (ssizetype,
6177 fold_build2 (MINUS_EXPR,
6180 TYPE_MIN_VALUE (domain)));
6183 size_binop (MULT_EXPR, index,
6184 fold_convert (ssizetype,
6185 TYPE_SIZE_UNIT (elttype)));
6186 xtarget = offset_address (target,
6187 expand_normal (position),
6188 highest_pow2_factor (position));
6189 xtarget = adjust_address (xtarget, mode, 0);
6190 store_expr (value, xtarget, 0, false);
6195 bitpos = ((tree_low_cst (index, 0) - minelt)
6196 * tree_low_cst (TYPE_SIZE (elttype), 1));
6198 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6200 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6201 && TREE_CODE (type) == ARRAY_TYPE
6202 && TYPE_NONALIASED_COMPONENT (type))
6204 target = copy_rtx (target);
6205 MEM_KEEP_ALIAS_SET_P (target) = 1;
6207 store_constructor_field (target, bitsize, bitpos, mode, value,
6208 cleared, get_alias_set (elttype));
6216 unsigned HOST_WIDE_INT idx;
6217 constructor_elt *ce;
6220 int icode = CODE_FOR_nothing;
6221 tree elttype = TREE_TYPE (type);
6222 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6223 enum machine_mode eltmode = TYPE_MODE (elttype);
6224 HOST_WIDE_INT bitsize;
6225 HOST_WIDE_INT bitpos;
6226 rtvec vector = NULL;
6228 alias_set_type alias;
6230 gcc_assert (eltmode != BLKmode);
6232 n_elts = TYPE_VECTOR_SUBPARTS (type);
6233 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6235 enum machine_mode mode = GET_MODE (target);
6237 icode = (int) optab_handler (vec_init_optab, mode);
6238 if (icode != CODE_FOR_nothing)
6242 vector = rtvec_alloc (n_elts);
6243 for (i = 0; i < n_elts; i++)
6244 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6248 /* If the constructor has fewer elements than the vector,
6249 clear the whole vector first. Similarly if this is a static
6250 constructor of a non-BLKmode object. */
6253 else if (REG_P (target) && TREE_STATIC (exp))
6257 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6260 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6262 int n_elts_here = tree_low_cst
6263 (int_const_binop (TRUNC_DIV_EXPR,
6264 TYPE_SIZE (TREE_TYPE (value)),
6265 TYPE_SIZE (elttype)), 1);
6267 count += n_elts_here;
6268 if (mostly_zeros_p (value))
6269 zero_count += n_elts_here;
6272 /* Clear the entire vector first if there are any missing elements,
6273 or if the incidence of zero elements is >= 75%. */
6274 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6277 if (need_to_clear && size > 0 && !vector)
6280 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6282 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6286 /* Inform later passes that the old value is dead. */
6287 if (!cleared && !vector && REG_P (target))
6288 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6291 alias = MEM_ALIAS_SET (target);
6293 alias = get_alias_set (elttype);
6295 /* Store each element of the constructor into the corresponding
6296 element of TARGET, determined by counting the elements. */
6297 for (idx = 0, i = 0;
6298 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6299 idx++, i += bitsize / elt_size)
6301 HOST_WIDE_INT eltpos;
6302 tree value = ce->value;
6304 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6305 if (cleared && initializer_zerop (value))
6309 eltpos = tree_low_cst (ce->index, 1);
6315 /* Vector CONSTRUCTORs should only be built from smaller
6316 vectors in the case of BLKmode vectors. */
6317 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6318 RTVEC_ELT (vector, eltpos)
6319 = expand_normal (value);
6323 enum machine_mode value_mode =
6324 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6325 ? TYPE_MODE (TREE_TYPE (value))
6327 bitpos = eltpos * elt_size;
6328 store_constructor_field (target, bitsize, bitpos, value_mode,
6329 value, cleared, alias);
6334 emit_insn (GEN_FCN (icode)
6336 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6345 /* Store the value of EXP (an expression tree)
6346 into a subfield of TARGET which has mode MODE and occupies
6347 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6348 If MODE is VOIDmode, it means that we are storing into a bit-field.
6350 BITREGION_START is bitpos of the first bitfield in this region.
6351 BITREGION_END is the bitpos of the ending bitfield in this region.
6352 These two fields are 0 if the C++ memory model does not apply,
6353 or we are not interested in keeping track of bitfield regions.
6355 Always return const0_rtx unless we have something particular to return.
6358 ALIAS_SET is the alias set for the destination. This value will
6359 (in general) be different from that for TARGET, since TARGET is a
6360 reference to the containing structure.
6362 If NONTEMPORAL is true, try generating a nontemporal store. */
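/* Illustrative sketch, not part of this file: for a bit-field assignment
   such as

     struct { unsigned f : 9; } s;
     s.f = v;

   the expander ends up making a call shaped like

     store_field (to_rtx, 9, 0, 0, 0, VOIDmode, rhs, alias, false);

   where TO_RTX, RHS and ALIAS are hypothetical names for the containing
   object's rtx, the RHS tree and its alias set; VOIDmode selects the
   bit-field path below.  */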
6365 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6366 unsigned HOST_WIDE_INT bitregion_start,
6367 unsigned HOST_WIDE_INT bitregion_end,
6368 enum machine_mode mode, tree exp,
6369 alias_set_type alias_set, bool nontemporal)
6371 if (TREE_CODE (exp) == ERROR_MARK)
6374 /* If we have nothing to store, do nothing unless the expression has side-effects. */
6377 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6379 if (GET_CODE (target) == CONCAT)
6381 /* We're storing into a struct containing a single __complex. */
6383 gcc_assert (!bitpos);
6384 return store_expr (exp, target, 0, nontemporal);
6387 /* If the structure is in a register or if the component
6388 is a bit field, we cannot use addressing to access it.
6389 Use bit-field techniques or SUBREG to store in it. */
6391 if (mode == VOIDmode
6392 || (mode != BLKmode && ! direct_store[(int) mode]
6393 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6394 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6396 || GET_CODE (target) == SUBREG
6397 /* If the field isn't aligned enough to store as an ordinary memref,
6398 store it as a bit field. */
6400 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6401 || bitpos % GET_MODE_ALIGNMENT (mode))
6402 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6403 || (bitpos % BITS_PER_UNIT != 0)))
6404 || (bitsize >= 0 && mode != BLKmode
6405 && GET_MODE_BITSIZE (mode) > bitsize)
6406 /* If the RHS and field are a constant size and the size of the
6407 RHS isn't the same size as the bitfield, we must use bitfield operations. */
6410 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6411 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6412 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6413 decl we must use bitfield operations. */
6415 && TREE_CODE (exp) == MEM_REF
6416 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6417 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6418 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6419 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6424 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6425 implies a mask operation. If the precision is the same size as
6426 the field we're storing into, that mask is redundant. This is
6427 particularly common with bit field assignments generated by the C++ front end. */
6429 nop_def = get_def_for_expr (exp, NOP_EXPR);
6432 tree type = TREE_TYPE (exp);
6433 if (INTEGRAL_TYPE_P (type)
6434 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6435 && bitsize == TYPE_PRECISION (type))
6437 tree op = gimple_assign_rhs1 (nop_def);
6438 type = TREE_TYPE (op);
6439 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6444 temp = expand_normal (exp);
6446 /* If BITSIZE is narrower than the size of the type of EXP
6447 we will be narrowing TEMP. Normally, what's wanted are the
6448 low-order bits. However, if EXP's type is a record and this is
6449 a big-endian machine, we want the upper BITSIZE bits. */
6450 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6451 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6452 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6453 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6454 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6457 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6458 if (mode != VOIDmode && mode != BLKmode
6459 && mode != TYPE_MODE (TREE_TYPE (exp)))
6460 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6462 /* If the modes of TEMP and TARGET are both BLKmode, both
6463 must be in memory and BITPOS must be aligned on a byte
6464 boundary. If so, we simply do a block copy. Likewise
6465 for a BLKmode-like TARGET. */
6466 if (GET_MODE (temp) == BLKmode
6467 && (GET_MODE (target) == BLKmode
6469 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6470 && (bitpos % BITS_PER_UNIT) == 0
6471 && (bitsize % BITS_PER_UNIT) == 0)))
6473 gcc_assert (MEM_P (target) && MEM_P (temp)
6474 && (bitpos % BITS_PER_UNIT) == 0);
6476 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6477 emit_block_move (target, temp,
6478 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6485 /* Handle calls that return values in multiple non-contiguous locations.
6486 The Irix 6 ABI has examples of this. */
6487 if (GET_CODE (temp) == PARALLEL)
6489 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6491 if (mode == BLKmode)
6492 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6493 temp_target = gen_reg_rtx (mode);
6494 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6497 else if (mode == BLKmode)
6499 /* Handle calls that return BLKmode values in registers. */
6500 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6502 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6503 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6508 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6510 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6511 temp_target = gen_reg_rtx (mode);
6513 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6514 false, temp_target, mode, mode);
6519 /* Store the value in the bitfield. */
6520 store_bit_field (target, bitsize, bitpos,
6521 bitregion_start, bitregion_end,
6528 /* Now build a reference to just the desired component. */
6529 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6531 if (to_rtx == target)
6532 to_rtx = copy_rtx (to_rtx);
6534 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6535 set_mem_alias_set (to_rtx, alias_set);
6537 return store_expr (exp, to_rtx, 0, nontemporal);
6541 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6542 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6543 codes and find the ultimate containing object, which we return.
6545 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6546 bit position, and *PUNSIGNEDP to the signedness of the field.
6547 If the position of the field is variable, we store a tree
6548 giving the variable offset (in units) in *POFFSET.
6549 This offset is in addition to the bit position.
6550 If the position is not variable, we store 0 in *POFFSET.
6552 If any of the extraction expressions is volatile,
6553 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6555 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6556 Otherwise, it is a mode that can be used to access the field.
6558 If the field describes a variable-sized object, *PMODE is set to
6559 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6560 this case, but the address of the object can be found.
6562 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6563 look through nodes that serve as markers of a greater alignment than
6564 the one that can be deduced from the expression. These nodes make it
6565 possible for front-ends to prevent temporaries from being created by
6566 the middle-end on alignment considerations. For that purpose, the
6567 normal operating mode at high-level is to always pass FALSE so that
6568 the ultimate containing object is really returned; moreover, the
6569 associated predicate handled_component_p will always return TRUE
6570 on these nodes, thus indicating that they are essentially handled
6571 by get_inner_reference. TRUE should only be passed when the caller
6572 is scanning the expression in order to build another representation
6573 and specifically knows how to handle these nodes; as such, this is
6574 the normal operating mode in the RTL expanders. */
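/* Worked example (hypothetical, for illustration): given

     struct S { int a; int b : 3; } s;

   calling get_inner_reference on the COMPONENT_REF for s.b returns the
   decl for s, sets *PBITSIZE to 3, *PBITPOS to 32 (assuming 32-bit int
   and no extra padding), *POFFSET to 0, and *PMODE to VOIDmode because
   the field is a bit-field.  */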
6577 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6578 HOST_WIDE_INT *pbitpos, tree *poffset,
6579 enum machine_mode *pmode, int *punsignedp,
6580 int *pvolatilep, bool keep_aligning)
6583 enum machine_mode mode = VOIDmode;
6584 bool blkmode_bitfield = false;
6585 tree offset = size_zero_node;
6586 double_int bit_offset = double_int_zero;
6588 /* First get the mode, signedness, and size. We do this from just the
6589 outermost expression. */
6591 if (TREE_CODE (exp) == COMPONENT_REF)
6593 tree field = TREE_OPERAND (exp, 1);
6594 size_tree = DECL_SIZE (field);
6595 if (!DECL_BIT_FIELD (field))
6596 mode = DECL_MODE (field);
6597 else if (DECL_MODE (field) == BLKmode)
6598 blkmode_bitfield = true;
6599 else if (TREE_THIS_VOLATILE (exp)
6600 && flag_strict_volatile_bitfields > 0)
6601 /* Volatile bitfields should be accessed in the mode of the
6602 field's type, not the mode computed based on the bit size. */
6604 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6606 *punsignedp = DECL_UNSIGNED (field);
6608 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6610 size_tree = TREE_OPERAND (exp, 1);
6611 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6612 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6614 /* For vector types, with the correct size of access, use the mode of the inner type. */
6616 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6617 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6618 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6619 mode = TYPE_MODE (TREE_TYPE (exp));
6623 mode = TYPE_MODE (TREE_TYPE (exp));
6624 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6626 if (mode == BLKmode)
6627 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6629 *pbitsize = GET_MODE_BITSIZE (mode);
6634 if (! host_integerp (size_tree, 1))
6635 mode = BLKmode, *pbitsize = -1;
6637 *pbitsize = tree_low_cst (size_tree, 1);
6640 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6641 and find the ultimate containing object. */
6644 switch (TREE_CODE (exp))
6647 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6652 tree field = TREE_OPERAND (exp, 1);
6653 tree this_offset = component_ref_field_offset (exp);
6655 /* If this field hasn't been filled in yet, don't go past it.
6656 This should only happen when folding expressions made during
6657 type construction. */
6658 if (this_offset == 0)
6661 offset = size_binop (PLUS_EXPR, offset, this_offset);
6662 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6664 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6669 case ARRAY_RANGE_REF:
6671 tree index = TREE_OPERAND (exp, 1);
6672 tree low_bound = array_ref_low_bound (exp);
6673 tree unit_size = array_ref_element_size (exp);
6675 /* We assume all arrays have sizes that are a multiple of a byte.
6676 First subtract the lower bound, if any, in the type of the
6677 index, then convert to sizetype and multiply by the size of
6678 the array element. */
6679 if (! integer_zerop (low_bound))
6680 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6683 offset = size_binop (PLUS_EXPR, offset,
6684 size_binop (MULT_EXPR,
6685 fold_convert (sizetype, index),
6694 bit_offset += double_int::from_uhwi (*pbitsize);
6697 case VIEW_CONVERT_EXPR:
6698 if (keep_aligning && STRICT_ALIGNMENT
6699 && (TYPE_ALIGN (TREE_TYPE (exp))
6700 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6701 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6702 < BIGGEST_ALIGNMENT)
6703 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6704 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6709 /* Hand back the decl for MEM[&decl, off]. */
6710 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6712 tree off = TREE_OPERAND (exp, 1);
6713 if (!integer_zerop (off))
6715 double_int boff, coff = mem_ref_offset (exp);
6716 boff = coff.lshift (BITS_PER_UNIT == 8
6717 ? 3 : exact_log2 (BITS_PER_UNIT));
6720 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6728 /* If any reference in the chain is volatile, the effect is volatile. */
6729 if (TREE_THIS_VOLATILE (exp))
6732 exp = TREE_OPERAND (exp, 0);
6736 /* If OFFSET is constant, see if we can return the whole thing as a
6737 constant bit position. Make sure to handle overflow during this conversion. */
6739 if (TREE_CODE (offset) == INTEGER_CST)
6741 double_int tem = tree_to_double_int (offset);
6742 tem = tem.sext (TYPE_PRECISION (sizetype));
6743 tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
6745 if (tem.fits_shwi ())
6747 *pbitpos = tem.to_shwi ();
6748 *poffset = offset = NULL_TREE;
6752 /* Otherwise, split it up. */
6755 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6756 if (bit_offset.is_negative ())
6759 = double_int::mask (BITS_PER_UNIT == 8
6760 ? 3 : exact_log2 (BITS_PER_UNIT));
6761 double_int tem = bit_offset.and_not (mask);
6762 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6763 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6765 tem = tem.arshift (BITS_PER_UNIT == 8
6766 ? 3 : exact_log2 (BITS_PER_UNIT),
6767 HOST_BITS_PER_DOUBLE_INT);
6768 offset = size_binop (PLUS_EXPR, offset,
6769 double_int_to_tree (sizetype, tem));
6772 *pbitpos = bit_offset.to_shwi ();
6776 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6777 if (mode == VOIDmode
6779 && (*pbitpos % BITS_PER_UNIT) == 0
6780 && (*pbitsize % BITS_PER_UNIT) == 0)
6788 /* Return a tree of sizetype representing the size, in bytes, of the element
6789 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6792 array_ref_element_size (tree exp)
6794 tree aligned_size = TREE_OPERAND (exp, 3);
6795 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6796 location_t loc = EXPR_LOCATION (exp);
6798 /* If a size was specified in the ARRAY_REF, it's the size measured
6799 in alignment units of the element type. So multiply by that value. */
6802 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6803 sizetype from another type of the same width and signedness. */
6804 if (TREE_TYPE (aligned_size) != sizetype)
6805 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6806 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6807 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6810 /* Otherwise, take the size from that of the element type. Substitute
6811 any PLACEHOLDER_EXPR that we have. */
6813 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6816 /* Return a tree representing the lower bound of the array mentioned in
6817 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6820 array_ref_low_bound (tree exp)
6822 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6824 /* If a lower bound is specified in EXP, use it. */
6825 if (TREE_OPERAND (exp, 2))
6826 return TREE_OPERAND (exp, 2);
6828 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6829 substituting for a PLACEHOLDER_EXPR as needed. */
6830 if (domain_type && TYPE_MIN_VALUE (domain_type))
6831 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6833 /* Otherwise, return a zero of the appropriate type. */
6834 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
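/* Illustrative example, assuming a front end with non-zero array bases
   such as Fortran or Ada: for an array declared with bounds 1..10, the
   low bound returned here is 1, so the ARRAY_REF offset computation in
   get_inner_reference subtracts 1 from the index before scaling by the
   element size; for a C array this function simply returns zero.  */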
6837 /* Returns true if REF is an array reference to an array at the end of
6838 a structure. If this is the case, the array may be allocated larger
6839 than its upper bound implies. */
6842 array_at_struct_end_p (tree ref)
6844 if (TREE_CODE (ref) != ARRAY_REF
6845 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6848 while (handled_component_p (ref))
6850 /* If the reference chain contains a component reference to a
6851 non-union type and there follows another field, the reference
6852 is not at the end of a structure. */
6853 if (TREE_CODE (ref) == COMPONENT_REF
6854 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6856 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6857 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6858 nextf = DECL_CHAIN (nextf);
6863 ref = TREE_OPERAND (ref, 0);
6866 /* If the reference is based on a declared entity, the size of the array
6867 is constrained by its given domain. */
6874 /* Return a tree representing the upper bound of the array mentioned in
6875 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6878 array_ref_up_bound (tree exp)
6880 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6882 /* If there is a domain type and it has an upper bound, use it, substituting
6883 for a PLACEHOLDER_EXPR as needed. */
6884 if (domain_type && TYPE_MAX_VALUE (domain_type))
6885 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6887 /* Otherwise fail. */
6891 /* Return a tree representing the offset, in bytes, of the field referenced
6892 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6895 component_ref_field_offset (tree exp)
6897 tree aligned_offset = TREE_OPERAND (exp, 2);
6898 tree field = TREE_OPERAND (exp, 1);
6899 location_t loc = EXPR_LOCATION (exp);
6901 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6902 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
6906 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6907 sizetype from another type of the same width and signedness. */
6908 if (TREE_TYPE (aligned_offset) != sizetype)
6909 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6910 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6911 size_int (DECL_OFFSET_ALIGN (field)
6915 /* Otherwise, take the offset from that of the field. Substitute
6916 any PLACEHOLDER_EXPR that we have. */
6918 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6921 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6923 static unsigned HOST_WIDE_INT
6924 target_align (const_tree target)
6926 /* We might have a chain of nested references with intermediate misaligning
6927 bitfield components, so we need to recurse to find out. */
6929 unsigned HOST_WIDE_INT this_align, outer_align;
6931 switch (TREE_CODE (target))
6937 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6938 outer_align = target_align (TREE_OPERAND (target, 0));
6939 return MIN (this_align, outer_align);
6942 case ARRAY_RANGE_REF:
6943 this_align = TYPE_ALIGN (TREE_TYPE (target));
6944 outer_align = target_align (TREE_OPERAND (target, 0));
6945 return MIN (this_align, outer_align);
6948 case NON_LVALUE_EXPR:
6949 case VIEW_CONVERT_EXPR:
6950 this_align = TYPE_ALIGN (TREE_TYPE (target));
6951 outer_align = target_align (TREE_OPERAND (target, 0));
6952 return MAX (this_align, outer_align);
6955 return TYPE_ALIGN (TREE_TYPE (target));
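/* Example of the recursion (illustrative): for s.arr[i], where s is a
   16-byte-aligned hypothetical struct and arr an array of 4-byte ints,
   the ARRAY_REF case yields MIN (32, target_align (s.arr)), and the
   COMPONENT_REF case in turn combines the field's DECL_ALIGN with the
   alignment of s, so one misaligned bitfield component anywhere in the
   chain correctly lowers the final answer.  */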
6960 /* Given an rtx VALUE that may contain additions and multiplications, return
6961 an equivalent value that just refers to a register, memory, or constant.
6962 This is done by generating instructions to perform the arithmetic and
6963 returning a pseudo-register containing the value.
6965 The returned value may be a REG, SUBREG, MEM or constant. */
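/* A minimal usage sketch with hypothetical operands: given a leftover
   address computation

     rtx sum = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (12));
     rtx val = force_operand (sum, NULL_RTX);

   VAL is guaranteed to be a REG, SUBREG, MEM or constant; here the PLUS
   is emitted as an add insn and VAL is the pseudo holding the result.
   BASE_REG is an assumed pseudo, not a name from this file.  */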
6968 force_operand (rtx value, rtx target)
6971 /* Use subtarget as the target for operand 0 of a binary operation. */
6972 rtx subtarget = get_subtarget (target);
6973 enum rtx_code code = GET_CODE (value);
6975 /* Check for subreg applied to an expression produced by the loop optimizer. */
6977 && !REG_P (SUBREG_REG (value))
6978 && !MEM_P (SUBREG_REG (value)))
6981 = simplify_gen_subreg (GET_MODE (value),
6982 force_reg (GET_MODE (SUBREG_REG (value)),
6983 force_operand (SUBREG_REG (value),
6985 GET_MODE (SUBREG_REG (value)),
6986 SUBREG_BYTE (value));
6987 code = GET_CODE (value);
6990 /* Check for a PIC address load. */
6991 if ((code == PLUS || code == MINUS)
6992 && XEXP (value, 0) == pic_offset_table_rtx
6993 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6994 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6995 || GET_CODE (XEXP (value, 1)) == CONST))
6998 subtarget = gen_reg_rtx (GET_MODE (value));
6999 emit_move_insn (subtarget, value);
7003 if (ARITHMETIC_P (value))
7005 op2 = XEXP (value, 1);
7006 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7008 if (code == MINUS && CONST_INT_P (op2))
7011 op2 = negate_rtx (GET_MODE (value), op2);
7014 /* Check for an addition with OP2 a constant integer and our first
7015 operand a PLUS of a virtual register and something else. In that
7016 case, we want to emit the sum of the virtual register and the
7017 constant first and then add the other value. This allows virtual
7018 register instantiation to simply modify the constant rather than
7019 creating another one around this addition. */
7020 if (code == PLUS && CONST_INT_P (op2)
7021 && GET_CODE (XEXP (value, 0)) == PLUS
7022 && REG_P (XEXP (XEXP (value, 0), 0))
7023 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7024 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7026 rtx temp = expand_simple_binop (GET_MODE (value), code,
7027 XEXP (XEXP (value, 0), 0), op2,
7028 subtarget, 0, OPTAB_LIB_WIDEN);
7029 return expand_simple_binop (GET_MODE (value), code, temp,
7030 force_operand (XEXP (XEXP (value,
7032 target, 0, OPTAB_LIB_WIDEN);
7035 op1 = force_operand (XEXP (value, 0), subtarget);
7036 op2 = force_operand (op2, NULL_RTX);
7040 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7042 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7043 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7044 target, 1, OPTAB_LIB_WIDEN);
7046 return expand_divmod (0,
7047 FLOAT_MODE_P (GET_MODE (value))
7048 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7049 GET_MODE (value), op1, op2, target, 0);
7051 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7054 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7057 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7060 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7061 target, 0, OPTAB_LIB_WIDEN);
7063 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7064 target, 1, OPTAB_LIB_WIDEN);
7067 if (UNARY_P (value))
7070 target = gen_reg_rtx (GET_MODE (value));
7071 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7078 case FLOAT_TRUNCATE:
7079 convert_move (target, op1, code == ZERO_EXTEND);
7084 expand_fix (target, op1, code == UNSIGNED_FIX);
7088 case UNSIGNED_FLOAT:
7089 expand_float (target, op1, code == UNSIGNED_FLOAT);
7093 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7097 #ifdef INSN_SCHEDULING
7098 /* On machines that have insn scheduling, we want all memory references to be
7099 explicit, so we need to deal with such paradoxical SUBREGs. */
7100 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7102 = simplify_gen_subreg (GET_MODE (value),
7103 force_reg (GET_MODE (SUBREG_REG (value)),
7104 force_operand (SUBREG_REG (value),
7106 GET_MODE (SUBREG_REG (value)),
7107 SUBREG_BYTE (value));
7113 /* Subroutine of expand_expr: return nonzero iff there is no way that
7114 EXP can reference X, which is being modified. TOP_P is nonzero if this
7115 call is going to be used to determine whether we need a temporary
7116 for EXP, as opposed to a recursive call to this function.
7118 It is always safe for this routine to return zero since it merely
7119 searches for optimization opportunities. */
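/* Illustrative cases, not exhaustive: safe_from_p (x, exp, 0) returns 0
   when EXP is a VAR_DECL whose DECL_RTL is X itself, or when X and the
   rtx found for EXP are both MEMs that may alias.  A zero answer only
   makes the caller evaluate EXP into a fresh temporary, so being
   conservative here costs performance, never correctness.  */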
7122 safe_from_p (const_rtx x, tree exp, int top_p)
7128 /* If EXP has varying size, we MUST use a target since we currently
7129 have no way of allocating temporaries of variable size
7130 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7131 So we assume here that something at a higher level has prevented a
7132 clash. This is somewhat bogus, but the best we can do. Only
7133 do this when X is BLKmode and when we are at the top level. */
7134 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7135 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7136 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7137 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7138 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7140 && GET_MODE (x) == BLKmode)
7141 /* If X is in the outgoing argument area, it is always safe. */
7143 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7144 || (GET_CODE (XEXP (x, 0)) == PLUS
7145 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7148 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7149 find the underlying pseudo. */
7150 if (GET_CODE (x) == SUBREG)
7153 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7157 /* Now look at our tree code and possibly recurse. */
7158 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7160 case tcc_declaration:
7161 exp_rtl = DECL_RTL_IF_SET (exp);
7167 case tcc_exceptional:
7168 if (TREE_CODE (exp) == TREE_LIST)
7172 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7174 exp = TREE_CHAIN (exp);
7177 if (TREE_CODE (exp) != TREE_LIST)
7178 return safe_from_p (x, exp, 0);
7181 else if (TREE_CODE (exp) == CONSTRUCTOR)
7183 constructor_elt *ce;
7184 unsigned HOST_WIDE_INT idx;
7186 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7187 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7188 || !safe_from_p (x, ce->value, 0))
7192 else if (TREE_CODE (exp) == ERROR_MARK)
7193 return 1; /* An already-visited SAVE_EXPR? */
7198 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
7200 return (TREE_CODE (exp) != DECL_EXPR
7201 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7202 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7203 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7206 case tcc_comparison:
7207 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7212 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7214 case tcc_expression:
7217 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7218 the expression. If it is set, we conflict iff we are that rtx or
7219 both are in memory. Otherwise, we check all operands of the
7220 expression recursively. */
7222 switch (TREE_CODE (exp))
7225 /* If the operand is static or we are static, we can't conflict.
7226 Likewise if we don't conflict with the operand at all. */
7227 if (staticp (TREE_OPERAND (exp, 0))
7228 || TREE_STATIC (exp)
7229 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7232 /* Otherwise, the only way this can conflict is if we are taking
7233 the address of a DECL whose address is part of X, which is very rare. */
7235 exp = TREE_OPERAND (exp, 0);
7238 if (!DECL_RTL_SET_P (exp)
7239 || !MEM_P (DECL_RTL (exp)))
7242 exp_rtl = XEXP (DECL_RTL (exp), 0);
7248 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7249 get_alias_set (exp)))
7254 /* Assume that the call will clobber all hard registers and all of memory. */
7256 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7261 case WITH_CLEANUP_EXPR:
7262 case CLEANUP_POINT_EXPR:
7263 /* Lowered by gimplify.c. */
7267 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7273 /* If we have an rtx, we do not need to scan our operands. */
7277 nops = TREE_OPERAND_LENGTH (exp);
7278 for (i = 0; i < nops; i++)
7279 if (TREE_OPERAND (exp, i) != 0
7280 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7286 /* Should never get a type here. */
7290 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
7294 if (GET_CODE (exp_rtl) == SUBREG)
7296 exp_rtl = SUBREG_REG (exp_rtl);
7298 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7302 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7303 are memory and they conflict. */
7304 return ! (rtx_equal_p (x, exp_rtl)
7305 || (MEM_P (x) && MEM_P (exp_rtl)
7306 && true_dependence (exp_rtl, VOIDmode, x)));
7309 /* If we reach here, it is safe. */
7314 /* Return the highest power of two that EXP is known to be a multiple of.
7315 This is used in updating alignment of MEMs in array references. */
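/* Worked examples (illustrative): for the constant 24 the result is 8,
   the lowest set bit; for i * 12 it is 4, since an unknown i
   contributes a factor of 1 and 4 is the largest power of two dividing
   12; for MIN_EXPR <a, b> it is the smaller of the two operands'
   factors, as computed below.  */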
7317 unsigned HOST_WIDE_INT
7318 highest_pow2_factor (const_tree exp)
7320 unsigned HOST_WIDE_INT c0, c1;
7322 switch (TREE_CODE (exp))
7325 /* We can find the lowest bit that's a one. If the low
7326 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7327 We need to handle this case since we can find it in a COND_EXPR,
7328 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
7329 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
7331 if (TREE_OVERFLOW (exp))
7332 return BIGGEST_ALIGNMENT;
7335 /* Note: tree_low_cst is intentionally not used here;
7336 we don't care about the upper bits. */
7337 c0 = TREE_INT_CST_LOW (exp);
7339 return c0 ? c0 : BIGGEST_ALIGNMENT;
7343 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
7344 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7345 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7346 return MIN (c0, c1);
7349 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7350 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7353 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
7355 if (integer_pow2p (TREE_OPERAND (exp, 1))
7356 && host_integerp (TREE_OPERAND (exp, 1), 1))
7358 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7359 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7360 return MAX (1, c0 / c1);
7365 /* The highest power of two of a bit-and expression is the maximum of
7366 that of its operands. We typically get here for a complex LHS and
7367 a constant negative power of two on the RHS to force an explicit
7368 alignment, so don't bother looking at the LHS. */
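/* For instance (illustrative), (addr & -16) is known to be a multiple
   of 16: the mask clears the low four bits, so only the RHS constant
   matters, exactly as described above.  */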
7369 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7373 return highest_pow2_factor (TREE_OPERAND (exp, 0));
7376 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7379 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7380 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7381 return MIN (c0, c1);
7390 /* Similar, except that the alignment requirements of TARGET are
7391 taken into account. Assume it is at least as aligned as its
7392 type, unless it is a COMPONENT_REF in which case the layout of
7393 the structure gives the alignment. */
7395 static unsigned HOST_WIDE_INT
7396 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7398 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7399 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7401 return MAX (factor, talign);
7404 #ifdef HAVE_conditional_move
7405 /* Convert the tree comparison code TCODE to the rtl one where the
7406 signedness is UNSIGNEDP. */
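/* Mapping sketch (a few representative rows; the switch below covers
   every comparison code): LT_EXPR becomes LTU when UNSIGNEDP and LT
   otherwise, EQ_EXPR maps to EQ regardless of signedness, and the
   floating-point-only codes such as UNORDERED_EXPR map to their rtl
   counterparts like UNORDERED.  */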
7408 static enum rtx_code
7409 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7421 code = unsignedp ? LTU : LT;
7424 code = unsignedp ? LEU : LE;
7427 code = unsignedp ? GTU : GT;
7430 code = unsignedp ? GEU : GE;
7432 case UNORDERED_EXPR:
7464 /* Subroutine of expand_expr. Expand the two operands of a binary
7465 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7466 The value may be stored in TARGET if TARGET is nonzero. The
7467 MODIFIER argument is as documented by expand_expr. */
7470 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7471 enum expand_modifier modifier)
7473 if (! safe_from_p (target, exp1, 1))
7475 if (operand_equal_p (exp0, exp1, 0))
7477 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7478 *op1 = copy_rtx (*op0);
7482 /* If we need to preserve evaluation order, copy exp0 into its own
7483 temporary variable so that it can't be clobbered by exp1. */
7484 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7485 exp0 = save_expr (exp0);
7486 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7487 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7492 /* Return a MEM that contains constant EXP. DEFER is as for
7493 output_constant_def and MODIFIER is as for expand_expr. */
7496 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7500 mem = output_constant_def (exp, defer);
7501 if (modifier != EXPAND_INITIALIZER)
7502 mem = use_anchored_address (mem);
7506 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7507 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7510 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7511 enum expand_modifier modifier, addr_space_t as)
7513 rtx result, subtarget;
7515 HOST_WIDE_INT bitsize, bitpos;
7516 int volatilep, unsignedp;
7517 enum machine_mode mode1;
7519 /* If we are taking the address of a constant and are at the top level,
7520 we have to use output_constant_def since we can't call force_const_mem yet. */
7522 /* ??? This should be considered a front-end bug. We should not be
7523 generating ADDR_EXPR of something that isn't an LVALUE. The only
7524 exception here is STRING_CST. */
7525 if (CONSTANT_CLASS_P (exp))
7527 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7528 if (modifier < EXPAND_SUM)
7529 result = force_operand (result, target);
7533 /* Everything must be something allowed by is_gimple_addressable. */
7534 switch (TREE_CODE (exp))
7537 /* This case will happen via recursion for &a->b. */
7538 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7542 tree tem = TREE_OPERAND (exp, 0);
7543 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7544 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7545 return expand_expr (tem, target, tmode, modifier);
7549 /* Expand the initializer like constants above. */
7550 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7552 if (modifier < EXPAND_SUM)
7553 result = force_operand (result, target);
7557 /* The real part of the complex number is always first, therefore
7558 the address is the same as the address of the parent object. */
7561 inner = TREE_OPERAND (exp, 0);
7565 /* The imaginary part of the complex number is always second.
7566 The expression is therefore always offset by the size of the scalar type. */
7569 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7570 inner = TREE_OPERAND (exp, 0);
7573 case COMPOUND_LITERAL_EXPR:
7574 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7575 rtl_for_decl_init is called on DECL_INITIAL with
7576 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7577 if (modifier == EXPAND_INITIALIZER
7578 && COMPOUND_LITERAL_EXPR_DECL (exp))
7579 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7580 target, tmode, modifier, as);
7583 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7584 expand_expr, as that can have various side effects; LABEL_DECLs for
7585 example, may not have their DECL_RTL set yet. Expand the rtl of
7586 CONSTRUCTORs too, which should yield a memory reference for the
7587 constructor's contents. Assume language specific tree nodes can
7588 be expanded in some interesting way. */
7589 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7591 || TREE_CODE (exp) == CONSTRUCTOR
7592 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7594 result = expand_expr (exp, target, tmode,
7595 modifier == EXPAND_INITIALIZER
7596 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7598 /* If the DECL isn't in memory, then the DECL wasn't properly
7599 marked TREE_ADDRESSABLE, which will be either a front-end
7600 or a tree optimizer bug. */
7602 if (TREE_ADDRESSABLE (exp)
7604 && ! targetm.calls.allocate_stack_slots_for_args())
7606 error ("local frame unavailable (naked function?)");
7610 gcc_assert (MEM_P (result));
7611 result = XEXP (result, 0);
7613 /* ??? Is this needed anymore? */
7615 TREE_USED (exp) = 1;
7617 if (modifier != EXPAND_INITIALIZER
7618 && modifier != EXPAND_CONST_ADDRESS
7619 && modifier != EXPAND_SUM)
7620 result = force_operand (result, target);
7624 /* Pass FALSE as the last argument to get_inner_reference although
7625 we are expanding to RTL. The rationale is that we know how to
7626 handle "aligning nodes" here: we can just bypass them because
7627 they won't change the final object whose address will be returned
7628 (they actually exist only for that purpose). */
7629 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7630 &mode1, &unsignedp, &volatilep, false);
7634 /* We must have made progress. */
7635 gcc_assert (inner != exp);
7637 subtarget = offset || bitpos ? NULL_RTX : target;
7638 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7639 inner alignment, force the inner to be sufficiently aligned. */
7640 if (CONSTANT_CLASS_P (inner)
7641 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7643 inner = copy_node (inner);
7644 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7645 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7646 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7648 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7654 if (modifier != EXPAND_NORMAL)
7655 result = force_operand (result, NULL);
7656 tmp = expand_expr (offset, NULL_RTX, tmode,
7657 modifier == EXPAND_INITIALIZER
7658 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7660 result = convert_memory_address_addr_space (tmode, result, as);
7661 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7663 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7664 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7667 subtarget = bitpos ? NULL_RTX : target;
7668 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7669 1, OPTAB_LIB_WIDEN);
7675 /* Someone beforehand should have rejected taking the address
7676 of such an object. */
7677 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7679 result = convert_memory_address_addr_space (tmode, result, as);
7680 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7681 if (modifier < EXPAND_SUM)
7682 result = force_operand (result, target);
7688 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7689 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7692 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7693 enum expand_modifier modifier)
7695 addr_space_t as = ADDR_SPACE_GENERIC;
7696 enum machine_mode address_mode = Pmode;
7697 enum machine_mode pointer_mode = ptr_mode;
7698 enum machine_mode rmode;
7701 /* Target mode of VOIDmode says "whatever's natural". */
7702 if (tmode == VOIDmode)
7703 tmode = TYPE_MODE (TREE_TYPE (exp));
7705 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7707 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7708 address_mode = targetm.addr_space.address_mode (as);
7709 pointer_mode = targetm.addr_space.pointer_mode (as);
7712 /* We can get called with some Weird Things if the user does silliness
7713 like "(short) &a". In that case, convert_memory_address won't do
7714 the right thing, so ignore the given target mode. */
7715 if (tmode != address_mode && tmode != pointer_mode)
7716 tmode = address_mode;
7718 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7719 tmode, modifier, as);
7721 /* Despite expand_expr's claims concerning ignoring TMODE when not
7722 strictly convenient, stuff breaks if we don't honor it. Note
7723 that combined with the above, we only do this for pointer modes. */
7724 rmode = GET_MODE (result);
7725 if (rmode == VOIDmode)
7728 result = convert_memory_address_addr_space (tmode, result, as);
7733 /* Generate code for computing CONSTRUCTOR EXP.
7734 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7735 is TRUE, instead of creating a temporary variable in memory
7736 NULL is returned and the caller needs to handle it differently. */
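/* Illustrative example: expanding the initializer of

     struct S s = { 0 };   (S being some hypothetical BLKmode struct)

   takes the early exit below: the constructor is TREE_STATIC and
   all_zeros_p, so when a usable TARGET exists it is wiped with a single
   clear_storage call and no temporary is created.  */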
7739 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7740 bool avoid_temp_mem)
7742 tree type = TREE_TYPE (exp);
7743 enum machine_mode mode = TYPE_MODE (type);
7745 /* Try to avoid creating a temporary at all. This is possible
7746 if all of the initializer is zero.
7747 FIXME: try to handle all [0..255] initializers we can handle with memset. */
7749 if (TREE_STATIC (exp)
7750 && !TREE_ADDRESSABLE (exp)
7751 && target != 0 && mode == BLKmode
7752 && all_zeros_p (exp))
7754 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7758 /* All elts simple constants => refer to a constant in memory. But
7759 if this is a non-BLKmode mode, let it store a field at a time
7760 since that should make a CONST_INT or CONST_DOUBLE when we
7761 fold. Likewise, if we have a target we can use, it is best to
7762 store directly into the target unless the type is large enough
7763 that memcpy will be used. If we are making an initializer and
7764 all operands are constant, put it in memory as well.
7766 FIXME: Avoid trying to fill vector constructors piece-meal.
7767 Output them with output_constant_def below unless we're sure
7768 they're zeros. This should go away when vector initializers
7769 are treated like VECTOR_CST instead of arrays. */
7770 if ((TREE_STATIC (exp)
7771 && ((mode == BLKmode
7772 && ! (target != 0 && safe_from_p (target, exp, 1)))
7773 || TREE_ADDRESSABLE (exp)
7774 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7775 && (! MOVE_BY_PIECES_P
7776 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7778 && ! mostly_zeros_p (exp))))
7779 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7780 && TREE_CONSTANT (exp)))
7787 constructor = expand_expr_constant (exp, 1, modifier);
7789 if (modifier != EXPAND_CONST_ADDRESS
7790 && modifier != EXPAND_INITIALIZER
7791 && modifier != EXPAND_SUM)
7792 constructor = validize_mem (constructor);
7797 /* Handle calls that pass values in multiple non-contiguous
7798 locations. The Irix 6 ABI has examples of this. */
7799 if (target == 0 || ! safe_from_p (target, exp, 1)
7800 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7806 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7807 | (TREE_READONLY (exp)
7808 * TYPE_QUAL_CONST))),
7809 TREE_ADDRESSABLE (exp), 1);
7812 store_constructor (exp, target, 0, int_expr_size (exp));
7817 /* expand_expr: generate code for computing expression EXP.
7818 An rtx for the computed value is returned. The value is never null.
7819 In the case of a void EXP, const0_rtx is returned.
7821 The value may be stored in TARGET if TARGET is nonzero.
7822 TARGET is just a suggestion; callers must assume that
7823 the rtx returned may not be the same as TARGET.
7825 If TARGET is CONST0_RTX, it means that the value will be ignored.
7827 If TMODE is not VOIDmode, it suggests generating the
7828 result in mode TMODE. But this is done only when convenient.
7829 Otherwise, TMODE is ignored and the value generated in its natural mode.
7830 TMODE is just a suggestion; callers must assume that
7831 the rtx returned may not have mode TMODE.
7833 Note that TARGET may have neither TMODE nor MODE. In that case, it
7834 probably will not be used.
7836 If MODIFIER is EXPAND_SUM then when EXP is an addition
7837 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7838 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7839 products as above, or REG or MEM, or constant.
7840 Ordinarily in such cases we would output mul or add instructions
7841 and then return a pseudo reg containing the sum.
7843 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7844 it also marks a label as absolutely required (it can't be dead).
7845 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7846 This is used for outputting expressions used in initializers.
7848 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7849 with a constant address even if that address is not normally legitimate.
7850 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7852 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7853 a call parameter. Such targets require special care as we haven't yet
7854 marked TARGET so that it's safe from being trashed by libcalls. We
7855 don't want to use TARGET for anything but the final result;
7856 intermediate values must go elsewhere. Additionally, calls to
7857 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7859 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7860 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7861 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7862 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on recursively. */
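/* Usage sketch with illustrative rtl: expanding &arr[3] for a 4-byte
   element type under EXPAND_SUM may legitimately return the unfinished
   form

     (plus:SI (reg/f:SI 58) (const_int 12))

   leaving it to a later memory_address or force_operand call to load
   it into a pseudo; under EXPAND_NORMAL the addition would already
   have been emitted as insns.  Register number and mode are made up.  */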
7866 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7867 enum expand_modifier modifier, rtx *alt_rtl)
7871 /* Handle ERROR_MARK before anybody tries to access its type. */
7872 if (TREE_CODE (exp) == ERROR_MARK
7873 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7875 ret = CONST0_RTX (tmode);
7876 return ret ? ret : const0_rtx;
7879 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7883 /* Try to expand the conditional expression which is represented by
7884 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7885 return the rtl reg which represents the result. Otherwise return NULL_RTX. */
7889 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7890 tree treeop1 ATTRIBUTE_UNUSED,
7891 tree treeop2 ATTRIBUTE_UNUSED)
7893 #ifdef HAVE_conditional_move
7895 rtx op00, op01, op1, op2;
7896 enum rtx_code comparison_code;
7897 enum machine_mode comparison_mode;
7900 tree type = TREE_TYPE (treeop1);
7901 int unsignedp = TYPE_UNSIGNED (type);
7902 enum machine_mode mode = TYPE_MODE (type);
7903 enum machine_mode orig_mode = mode;
7905 /* If we cannot do a conditional move on the mode, try doing it
7906 with the promoted mode. */
7907 if (!can_conditionally_move_p (mode))
7909 mode = promote_mode (type, mode, &unsignedp);
7910 if (!can_conditionally_move_p (mode))
7912 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7915 temp = assign_temp (type, 0, 1);
7918 expand_operands (treeop1, treeop2,
7919 temp, &op1, &op2, EXPAND_NORMAL);
7921 if (TREE_CODE (treeop0) == SSA_NAME
7922 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7924 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7925 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7926 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7927 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7928 comparison_mode = TYPE_MODE (type);
7929 unsignedp = TYPE_UNSIGNED (type);
7930 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7932 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7934 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7935 enum tree_code cmpcode = TREE_CODE (treeop0);
7936 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7937 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7938 unsignedp = TYPE_UNSIGNED (type);
7939 comparison_mode = TYPE_MODE (type);
7940 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7944 op00 = expand_normal (treeop0);
7946 comparison_code = NE;
7947 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7950 if (GET_MODE (op1) != mode)
7951 op1 = gen_lowpart (mode, op1);
7953 if (GET_MODE (op2) != mode)
7954 op2 = gen_lowpart (mode, op2);
7956 /* Try to emit the conditional move. */
7957 insn = emit_conditional_move (temp, comparison_code,
7958 op00, op01, comparison_mode,
7962 /* If we could do the conditional move, emit the sequence, and return. */
7966 rtx seq = get_insns ();
7969 return convert_modes (orig_mode, mode, temp, 0);
7972 /* Otherwise discard the sequence and fall back to code with branches. */
7980 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7981 enum expand_modifier modifier)
7983 rtx op0, op1, op2, temp;
7986 enum machine_mode mode;
7987 enum tree_code code = ops->code;
7989 rtx subtarget, original_target;
7991 bool reduce_bit_field;
7992 location_t loc = ops->location;
7993 tree treeop0, treeop1, treeop2;
7994 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7995 ? reduce_to_bit_field_precision ((expr), \
8001 mode = TYPE_MODE (type);
8002 unsignedp = TYPE_UNSIGNED (type);
8008 /* We should be called only on simple (binary or unary) expressions,
8009 exactly those that are valid in gimple expressions that aren't
8010 GIMPLE_SINGLE_RHS (or invalid). */
8011 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8012 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8013 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8015 ignore = (target == const0_rtx
8016 || ((CONVERT_EXPR_CODE_P (code)
8017 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8018 && TREE_CODE (type) == VOID_TYPE));
8020 /* We should be called only if we need the result. */
8021 gcc_assert (!ignore);
8023 /* An operation in what may be a bit-field type needs the
8024 result to be reduced to the precision of the bit-field type,
8025 which is narrower than that of the type's mode. */
8026 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8027 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8029 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8032 /* Use subtarget as the target for operand 0 of a binary operation. */
8033 subtarget = get_subtarget (target);
8034 original_target = target;
8038 case NON_LVALUE_EXPR:
8041 if (treeop0 == error_mark_node)
8044 if (TREE_CODE (type) == UNION_TYPE)
8046 tree valtype = TREE_TYPE (treeop0);
8048 /* If both input and output are BLKmode, this conversion isn't doing
8049 anything except possibly changing memory attributes. */
8050 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8052 rtx result = expand_expr (treeop0, target, tmode,
8055 result = copy_rtx (result);
8056 set_mem_attributes (result, type, 0);
8062 if (TYPE_MODE (type) != BLKmode)
8063 target = gen_reg_rtx (TYPE_MODE (type));
8065 target = assign_temp (type, 1, 1);
8069 /* Store data into beginning of memory target. */
8070 store_expr (treeop0,
8071 adjust_address (target, TYPE_MODE (valtype), 0),
8072 modifier == EXPAND_STACK_PARM,
8077 gcc_assert (REG_P (target));
8079 /* Store this field into a union of the proper type. */
8080 store_field (target,
8081 MIN ((int_size_in_bytes (TREE_TYPE
8084 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8085 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8088 /* Return the entire union. */
8092 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8094 op0 = expand_expr (treeop0, target, VOIDmode,
8097 /* If the signedness of the conversion differs and OP0 is
8098 a promoted SUBREG, clear that indication since we now
8099 have to do the proper extension. */
8100 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8101 && GET_CODE (op0) == SUBREG)
8102 SUBREG_PROMOTED_VAR_P (op0) = 0;
8104 return REDUCE_BIT_FIELD (op0);
8107 op0 = expand_expr (treeop0, NULL_RTX, mode,
8108 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8109 if (GET_MODE (op0) == mode)
8112 /* If OP0 is a constant, just convert it into the proper mode. */
8113 else if (CONSTANT_P (op0))
8115 tree inner_type = TREE_TYPE (treeop0);
8116 enum machine_mode inner_mode = GET_MODE (op0);
8118 if (inner_mode == VOIDmode)
8119 inner_mode = TYPE_MODE (inner_type);
8121 if (modifier == EXPAND_INITIALIZER)
8122 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8123 subreg_lowpart_offset (mode,
8126 op0 = convert_modes (mode, inner_mode, op0,
8127 TYPE_UNSIGNED (inner_type));
8130 else if (modifier == EXPAND_INITIALIZER)
8131 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8133 else if (target == 0)
8134 op0 = convert_to_mode (mode, op0,
8135 TYPE_UNSIGNED (TREE_TYPE
8139 convert_move (target, op0,
8140 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8144 return REDUCE_BIT_FIELD (op0);
8146 case ADDR_SPACE_CONVERT_EXPR:
8148 tree treeop0_type = TREE_TYPE (treeop0);
8150 addr_space_t as_from;
8152 gcc_assert (POINTER_TYPE_P (type));
8153 gcc_assert (POINTER_TYPE_P (treeop0_type));
8155 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8156 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8158 /* Conversions between pointers to the same address space should
8159 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8160 gcc_assert (as_to != as_from);
8162 /* Ask target code to handle conversion between pointers
8163 to overlapping address spaces. */
8164 if (targetm.addr_space.subset_p (as_to, as_from)
8165 || targetm.addr_space.subset_p (as_from, as_to))
8167 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8168 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8173 /* For disjoint address spaces, converting anything but
8174 a null pointer invokes undefined behaviour. We simply
8175 always return a null pointer here. */
8176 return CONST0_RTX (mode);
8179 case POINTER_PLUS_EXPR:
8180 /* Even though the sizetype mode and the pointer's mode can be different,
8181 expand is able to handle this correctly and get the correct result out
8182 of the PLUS_EXPR code. */
8183 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8184 if sizetype precision is smaller than pointer precision. */
8185 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8186 treeop1 = fold_convert_loc (loc, type,
8187 fold_convert_loc (loc, ssizetype,
8189 /* If sizetype precision is larger than pointer precision, truncate the
8190 offset to have matching modes. */
8191 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8192 treeop1 = fold_convert_loc (loc, type, treeop1);
8195 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8196 something else, make sure we add the register to the constant and
8197 then to the other thing. This case can occur during strength
8198 reduction and doing it this way will produce better code if the
8199 frame pointer or argument pointer is eliminated.
8201 fold-const.c will ensure that the constant is always in the inner
8202 PLUS_EXPR, so the only case we need to do anything about is if
8203 sp, ap, or fp is our second argument, in which case we must swap
8204 the innermost first argument and our second argument. */
8206 if (TREE_CODE (treeop0) == PLUS_EXPR
8207 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8208 && TREE_CODE (treeop1) == VAR_DECL
8209 && (DECL_RTL (treeop1) == frame_pointer_rtx
8210 || DECL_RTL (treeop1) == stack_pointer_rtx
8211 || DECL_RTL (treeop1) == arg_pointer_rtx))
8216 /* If the result is to be ptr_mode and we are adding an integer to
8217 something, we might be forming a constant. So try to use
8218 plus_constant. If it produces a sum and we can't accept it,
8219 use force_operand. This allows P = &ARR[const] to generate
8220 efficient code on machines where a SYMBOL_REF is not a valid address.
8223 If this is an EXPAND_SUM call, always return the sum. */
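/* Example of the above (illustrative): for P = &ARR[2] with 4-byte
   elements, plus_constant folds the symbol and offset into

     (const:SI (plus:SI (symbol_ref:SI ("ARR")) (const_int 8)))

   which is returned as-is under EXPAND_SUM, or run through
   force_operand first when the target cannot use a SYMBOL_REF
   address directly.  */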
8224 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8225 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8227 if (modifier == EXPAND_STACK_PARM)
8229 if (TREE_CODE (treeop0) == INTEGER_CST
8230 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8231 && TREE_CONSTANT (treeop1))
8235 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8237 /* Use immed_double_const to ensure that the constant is
8238 truncated according to the mode of OP1, then sign extended
8239 to a HOST_WIDE_INT. Using the constant directly can result
8240 in non-canonical RTL in a 64x32 cross compile. */
8242 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8244 TYPE_MODE (TREE_TYPE (treeop1)));
8245 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8246 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8247 op1 = force_operand (op1, target);
8248 return REDUCE_BIT_FIELD (op1);
8251 else if (TREE_CODE (treeop1) == INTEGER_CST
8252 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8253 && TREE_CONSTANT (treeop0))
8257 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8258 (modifier == EXPAND_INITIALIZER
8259 ? EXPAND_INITIALIZER : EXPAND_SUM));
8260 if (! CONSTANT_P (op0))
8262 op1 = expand_expr (treeop1, NULL_RTX,
8263 VOIDmode, modifier);
8264 /* Return a PLUS if modifier says it's OK. */
8265 if (modifier == EXPAND_SUM
8266 || modifier == EXPAND_INITIALIZER)
8267 return simplify_gen_binary (PLUS, mode, op0, op1);
8270 /* Use immed_double_const to ensure that the constant is
8271 truncated according to the mode of OP1, then sign extended
8272 to a HOST_WIDE_INT. Using the constant directly can result
8273 in non-canonical RTL in a 64x32 cross compile. */
8274 constant_part
8275 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8276 (HOST_WIDE_INT) 0,
8277 TYPE_MODE (TREE_TYPE (treeop0)));
8278 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8279 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8280 op0 = force_operand (op0, target);
8281 return REDUCE_BIT_FIELD (op0);
8285 /* Use TER to expand pointer addition of a negated value
8286 as pointer subtraction. */
8287 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8288 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8289 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8290 && TREE_CODE (treeop1) == SSA_NAME
8291 && TYPE_MODE (TREE_TYPE (treeop0))
8292 == TYPE_MODE (TREE_TYPE (treeop1)))
8294 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8297 treeop1 = gimple_assign_rhs1 (def);
8303 /* No sense saving up arithmetic to be done
8304 if it's all in the wrong mode to form part of an address.
8305 And force_operand won't know whether to sign-extend or
8306 truncate. */
8307 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8308 || mode != ptr_mode)
8310 expand_operands (treeop0, treeop1,
8311 subtarget, &op0, &op1, EXPAND_NORMAL);
8312 if (op0 == const0_rtx)
8314 if (op1 == const0_rtx)
8319 expand_operands (treeop0, treeop1,
8320 subtarget, &op0, &op1, modifier);
8321 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8325 /* For initializers, we are allowed to return a MINUS of two
8326 symbolic constants. Here we handle all cases when both operands
8327 are constant. */
8328 /* Handle difference of two symbolic constants,
8329 for the sake of an initializer. */
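/* E.g. a static initializer along the lines of

	static char a[4], b[8];
	static long d = (long) (b - a);

   needs to expand to (minus (symbol_ref b) (symbol_ref a)) with no
   instructions emitted, leaving the difference for the assembler or
   linker to resolve (a sketch; such initializers typically come from
   language extensions rather than strict ISO C). */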
8330 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8331 && really_constant_p (treeop0)
8332 && really_constant_p (treeop1))
8334 expand_operands (treeop0, treeop1,
8335 NULL_RTX, &op0, &op1, modifier);
8337 /* If the last operand is a CONST_INT, use plus_constant of
8338 the negated constant. Else make the MINUS. */
8339 if (CONST_INT_P (op1))
8340 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8341 -INTVAL (op1)));
8342 else
8343 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8346 /* No sense saving up arithmetic to be done
8347 if it's all in the wrong mode to form part of an address.
8348 And force_operand won't know whether to sign-extend or
8349 truncate. */
8350 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8351 || mode != ptr_mode)
8354 expand_operands (treeop0, treeop1,
8355 subtarget, &op0, &op1, modifier);
8357 /* Convert A - const to A + (-const). */
8358 if (CONST_INT_P (op1))
8360 op1 = negate_rtx (mode, op1);
8361 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8366 case WIDEN_MULT_PLUS_EXPR:
8367 case WIDEN_MULT_MINUS_EXPR:
8368 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8369 op2 = expand_normal (treeop2);
8370 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8371 target, unsignedp);
8372 return target;
8374 case WIDEN_MULT_EXPR:
8375 /* If first operand is constant, swap them.
8376 Thus the following special case checks need only
8377 check the second operand. */
8378 if (TREE_CODE (treeop0) == INTEGER_CST)
8385 /* First, check if we have a multiplication of one signed and one
8386 unsigned operand. */
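/* E.g. for

	long long f (int a, unsigned int b) { return (long long) a * b; }

   a target with a signed-by-unsigned widening multiply pattern
   (usmul_widen_optab, e.g. a usmulsidi3 insn) can produce the 64-bit
   product directly; the operands are ordered below so that the
   unsigned one comes first. */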
8387 if (TREE_CODE (treeop1) != INTEGER_CST
8388 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8389 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8391 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8392 this_optab = usmul_widen_optab;
8393 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8394 != CODE_FOR_nothing)
8396 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8397 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8400 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8402 /* op0 and op1 might still be constant, despite the above
8403 != INTEGER_CST check. Handle it. */
8404 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8406 op0 = convert_modes (innermode, mode, op0, true);
8407 op1 = convert_modes (innermode, mode, op1, false);
8408 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8409 target, unsignedp));
8414 /* Check for a multiplication with matching signedness. */
8415 else if ((TREE_CODE (treeop1) == INTEGER_CST
8416 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8417 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8418 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8420 tree op0type = TREE_TYPE (treeop0);
8421 enum machine_mode innermode = TYPE_MODE (op0type);
8422 bool zextend_p = TYPE_UNSIGNED (op0type);
8423 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8424 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8426 if (TREE_CODE (treeop0) != INTEGER_CST)
8428 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8429 != CODE_FOR_nothing)
8431 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8433 /* op0 and op1 might still be constant, despite the above
8434 != INTEGER_CST check. Handle it. */
8435 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8437 widen_mult_const:
8438 op0 = convert_modes (innermode, mode, op0, zextend_p);
8439 op1
8440 = convert_modes (innermode, mode, op1,
8441 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8442 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8443 target, unsignedp));
8446 temp = expand_widening_mult (mode, op0, op1, target,
8447 unsignedp, this_optab);
8448 return REDUCE_BIT_FIELD (temp);
8450 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8451 != CODE_FOR_nothing
8452 && innermode == word_mode)
8455 op0 = expand_normal (treeop0);
8456 if (TREE_CODE (treeop1) == INTEGER_CST)
8457 op1 = convert_modes (innermode, mode,
8458 expand_normal (treeop1),
8459 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8460 else
8461 op1 = expand_normal (treeop1);
8462 /* op0 and op1 might still be constant, despite the above
8463 != INTEGER_CST check. Handle it. */
8464 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8465 goto widen_mult_const;
8466 temp = expand_binop (mode, other_optab, op0, op1, target,
8467 unsignedp, OPTAB_LIB_WIDEN);
8468 hipart = gen_highpart (innermode, temp);
8469 htem = expand_mult_highpart_adjust (innermode, hipart,
8470 op0, op1, hipart,
8471 unsignedp);
8472 if (htem != hipart)
8473 emit_move_insn (hipart, htem);
8474 return REDUCE_BIT_FIELD (temp);
8478 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8479 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8480 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8481 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8485 optab opt = fma_optab;
8488 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8489 function call. */
8490 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8492 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8495 gcc_assert (fn != NULL_TREE);
8496 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8497 return expand_builtin (call_expr, target, subtarget, mode, false);
8500 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8501 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
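/* In outline, the negate folding visible below maps (each arm guarded
   by the corresponding optab check):

	FMA (-a, b, c)	 ->  FNMA (a, b, c)	(= c - a*b)
	FMA (a, b, -c)	 ->  FMS (a, b, c)	(= a*b - c)
	FMA (-a, b, -c)	 ->  FNMS (a, b, c)	(= -a*b - c)

   so an expression like c - a * b can still use a single fused insn. */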
8506 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8509 op0 = expand_normal (gimple_assign_rhs1 (def0));
8510 op2 = expand_normal (gimple_assign_rhs1 (def2));
8513 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8516 op0 = expand_normal (gimple_assign_rhs1 (def0));
8519 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8522 op2 = expand_normal (gimple_assign_rhs1 (def2));
8526 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8528 op2 = expand_normal (treeop2);
8529 op1 = expand_normal (treeop1);
8531 return expand_ternary_op (TYPE_MODE (type), opt,
8532 op0, op1, op2, target, 0);
8536 /* If this is a fixed-point operation, then we cannot use the code
8537 below because "expand_mult" doesn't support sat/no-sat fixed-point
8538 multiplications. */
8539 if (ALL_FIXED_POINT_MODE_P (mode))
8540 goto binop;
8542 /* If first operand is constant, swap them.
8543 Thus the following special case checks need only
8544 check the second operand. */
8545 if (TREE_CODE (treeop0) == INTEGER_CST)
8552 /* Attempt to return something suitable for generating an
8553 indexed address, for machines that support that. */
8555 if (modifier == EXPAND_SUM && mode == ptr_mode
8556 && host_integerp (treeop1, 0))
8558 tree exp1 = treeop1;
8560 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8564 op0 = force_operand (op0, NULL_RTX);
8566 op0 = copy_to_mode_reg (mode, op0);
8568 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8569 gen_int_mode (tree_low_cst (exp1, 0),
8570 TYPE_MODE (TREE_TYPE (exp1)))));
8573 if (modifier == EXPAND_STACK_PARM)
8574 target = 0;
8576 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8577 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8579 case TRUNC_DIV_EXPR:
8580 case FLOOR_DIV_EXPR:
8582 case ROUND_DIV_EXPR:
8583 case EXACT_DIV_EXPR:
8584 /* If this is a fixed-point operation, then we cannot use the code
8585 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8586 divisions. */
8587 if (ALL_FIXED_POINT_MODE_P (mode))
8588 goto binop;
8590 if (modifier == EXPAND_STACK_PARM)
8591 target = 0;
8592 /* Possible optimization: compute the dividend with EXPAND_SUM
8593 then if the divisor is constant can optimize the case
8594 where some terms of the dividend have coeffs divisible by it. */
8595 expand_operands (treeop0, treeop1,
8596 subtarget, &op0, &op1, EXPAND_NORMAL);
8597 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8602 case MULT_HIGHPART_EXPR:
8603 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8604 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8608 case TRUNC_MOD_EXPR:
8609 case FLOOR_MOD_EXPR:
8611 case ROUND_MOD_EXPR:
8612 if (modifier == EXPAND_STACK_PARM)
8613 target = 0;
8614 expand_operands (treeop0, treeop1,
8615 subtarget, &op0, &op1, EXPAND_NORMAL);
8616 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8618 case FIXED_CONVERT_EXPR:
8619 op0 = expand_normal (treeop0);
8620 if (target == 0 || modifier == EXPAND_STACK_PARM)
8621 target = gen_reg_rtx (mode);
8623 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8624 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8625 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8626 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8628 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8631 case FIX_TRUNC_EXPR:
8632 op0 = expand_normal (treeop0);
8633 if (target == 0 || modifier == EXPAND_STACK_PARM)
8634 target = gen_reg_rtx (mode);
8635 expand_fix (target, op0, unsignedp);
8639 op0 = expand_normal (treeop0);
8640 if (target == 0 || modifier == EXPAND_STACK_PARM)
8641 target = gen_reg_rtx (mode);
8642 /* expand_float can't figure out what to do if FROM has VOIDmode.
8643 So give it the correct mode. With -O, cse will optimize this. */
8644 if (GET_MODE (op0) == VOIDmode)
8645 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8647 expand_float (target, op0,
8648 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8652 op0 = expand_expr (treeop0, subtarget,
8653 VOIDmode, EXPAND_NORMAL);
8654 if (modifier == EXPAND_STACK_PARM)
8655 target = 0;
8656 temp = expand_unop (mode,
8657 optab_for_tree_code (NEGATE_EXPR, type,
8661 return REDUCE_BIT_FIELD (temp);
8664 op0 = expand_expr (treeop0, subtarget,
8665 VOIDmode, EXPAND_NORMAL);
8666 if (modifier == EXPAND_STACK_PARM)
8667 target = 0;
8669 /* ABS_EXPR is not valid for complex arguments. */
8670 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8671 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8673 /* Unsigned abs is simply the operand. Testing here means we don't
8674 risk generating incorrect code below. */
8675 if (TYPE_UNSIGNED (type))
8678 return expand_abs (mode, op0, target, unsignedp,
8679 safe_from_p (target, treeop0, 1));
8683 target = original_target;
8684 if (target == 0
8685 || modifier == EXPAND_STACK_PARM
8686 || (MEM_P (target) && MEM_VOLATILE_P (target))
8687 || GET_MODE (target) != mode
8688 || (REG_P (target)
8689 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8690 target = gen_reg_rtx (mode);
8691 expand_operands (treeop0, treeop1,
8692 target, &op0, &op1, EXPAND_NORMAL);
8694 /* First try to do it with a special MIN or MAX instruction.
8695 If that does not win, use a conditional jump to select the proper
8697 this_optab = optab_for_tree_code (code, type, optab_default);
8698 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8703 /* At this point, a MEM target is no longer useful; we will get better
8704 code without it. */
8706 if (! REG_P (target))
8707 target = gen_reg_rtx (mode);
8709 /* If op1 was placed in target, swap op0 and op1. */
8710 if (target != op0 && target == op1)
8717 /* We generate better code and avoid problems with op1 mentioning
8718 target by forcing op1 into a pseudo if it isn't a constant. */
8719 if (! CONSTANT_P (op1))
8720 op1 = force_reg (mode, op1);
8723 enum rtx_code comparison_code;
8726 if (code == MAX_EXPR)
8727 comparison_code = unsignedp ? GEU : GE;
8729 comparison_code = unsignedp ? LEU : LE;
8731 /* Canonicalize to comparisons against 0. */
8732 if (op1 == const1_rtx)
8734 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8735 or (a != 0 ? a : 1) for unsigned.
8736 For MIN we are safe converting (a <= 1 ? a : 1)
8737 into (a <= 0 ? a : 1) */
8738 cmpop1 = const0_rtx;
8739 if (code == MAX_EXPR)
8740 comparison_code = unsignedp ? NE : GT;
8742 if (op1 == constm1_rtx && !unsignedp)
8744 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8745 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8746 cmpop1 = const0_rtx;
8747 if (code == MIN_EXPR)
8748 comparison_code = LT;
8750 #ifdef HAVE_conditional_move
8751 /* Use a conditional move if possible. */
8752 if (can_conditionally_move_p (mode))
8756 /* ??? Same problem as in expmed.c: emit_conditional_move
8757 forces a stack adjustment via compare_from_rtx, and we
8758 lose the stack adjustment if the sequence we are about
8759 to create is discarded. */
8760 do_pending_stack_adjust ();
8764 /* Try to emit the conditional move. */
8765 insn = emit_conditional_move (target, comparison_code,
8770 /* If we could do the conditional move, emit the sequence,
8771 and return. */
8772 if (insn)
8774 rtx seq = get_insns ();
8780 /* Otherwise discard the sequence and fall back to code with
8781 branches. */
8786 emit_move_insn (target, op0);
8788 temp = gen_label_rtx ();
8789 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8790 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8793 emit_move_insn (target, op1);
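/* In outline, the branchy fallback built above is:

	target = op0;
	if (target <comparison_code> cmpop1) goto done;
	target = op1;
     done:

   (a pseudo-code sketch of the emitted sequence). */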
8798 op0 = expand_expr (treeop0, subtarget,
8799 VOIDmode, EXPAND_NORMAL);
8800 if (modifier == EXPAND_STACK_PARM)
8801 target = 0;
8802 /* In case we have to reduce the result to bitfield precision
8803 for an unsigned bitfield, expand this as XOR with a proper constant
8804 instead. */
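/* E.g. for a 3-bit unsigned bit-field value x, ~x is computed as
   x ^ 7: the XOR complements the low three bits and leaves the result
   already truncated to the field's precision, so no separate
   reduction step is needed. */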
8805 if (reduce_bit_field && TYPE_UNSIGNED (type))
8806 temp = expand_binop (mode, xor_optab, op0,
8807 immed_double_int_const
8808 (double_int::mask (TYPE_PRECISION (type)), mode),
8809 target, 1, OPTAB_LIB_WIDEN);
8811 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8815 /* ??? Can optimize bitwise operations with one arg constant.
8816 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8817 and (a bitwise1 b) bitwise2 b (etc)
8818 but that is probably not worthwhile. */
8827 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8828 || (GET_MODE_PRECISION (TYPE_MODE (type))
8829 == TYPE_PRECISION (type)));
8834 /* If this is a fixed-point operation, then we cannot use the code
8835 below because "expand_shift" doesn't support sat/no-sat fixed-point
8836 shifts. */
8837 if (ALL_FIXED_POINT_MODE_P (mode))
8838 goto binop;
8840 if (! safe_from_p (subtarget, treeop1, 1))
8841 subtarget = 0;
8842 if (modifier == EXPAND_STACK_PARM)
8843 target = 0;
8844 op0 = expand_expr (treeop0, subtarget,
8845 VOIDmode, EXPAND_NORMAL);
8846 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8848 if (code == LSHIFT_EXPR)
8849 temp = REDUCE_BIT_FIELD (temp);
8852 /* Could determine the answer when only additive constants differ. Also,
8853 the addition of one can be handled by changing the condition. */
8860 case UNORDERED_EXPR:
8868 temp = do_store_flag (ops,
8869 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8870 tmode != VOIDmode ? tmode : mode);
8874 /* Use a compare and a jump for BLKmode comparisons, or for function
8875 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8877 if ((target == 0
8878 || modifier == EXPAND_STACK_PARM
8879 || ! safe_from_p (target, treeop0, 1)
8880 || ! safe_from_p (target, treeop1, 1)
8881 /* Make sure we don't have a hard reg (such as function's return
8882 value) live across basic blocks, if not optimizing. */
8883 || (!optimize && REG_P (target)
8884 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8885 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8887 emit_move_insn (target, const0_rtx);
8889 op1 = gen_label_rtx ();
8890 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8892 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8893 emit_move_insn (target, constm1_rtx);
8895 emit_move_insn (target, const1_rtx);
8901 /* Get the rtx code of the operands. */
8902 op0 = expand_normal (treeop0);
8903 op1 = expand_normal (treeop1);
8906 target = gen_reg_rtx (TYPE_MODE (type));
8908 /* If target overlaps with op1, then either we need to force
8909 op1 into a pseudo (if target also overlaps with op0),
8910 or write the complex parts in reverse order. */
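/* E.g. expanding c = COMPLEX_EXPR <y, REALPART_EXPR <c>>: here op1
   (the imaginary source) lives in TARGET's real half, so storing the
   real part first would clobber op1 before it is read; either the
   parts are written imaginary-first, or op1 is copied into a fresh
   pseudo when both halves are at risk. */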
8911 switch (GET_CODE (target))
8914 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8916 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8918 complex_expr_force_op1:
8919 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8920 emit_move_insn (temp, op1);
8924 complex_expr_swap_order:
8925 /* Move the imaginary (op1) and real (op0) parts to their
8926 location. */
8927 write_complex_part (target, op1, true);
8928 write_complex_part (target, op0, false);
8934 temp = adjust_address_nv (target,
8935 GET_MODE_INNER (GET_MODE (target)), 0);
8936 if (reg_overlap_mentioned_p (temp, op1))
8938 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8939 temp = adjust_address_nv (target, imode,
8940 GET_MODE_SIZE (imode));
8941 if (reg_overlap_mentioned_p (temp, op0))
8942 goto complex_expr_force_op1;
8943 goto complex_expr_swap_order;
8947 if (reg_overlap_mentioned_p (target, op1))
8949 if (reg_overlap_mentioned_p (target, op0))
8950 goto complex_expr_force_op1;
8951 goto complex_expr_swap_order;
8956 /* Move the real (op0) and imaginary (op1) parts to their location. */
8957 write_complex_part (target, op0, false);
8958 write_complex_part (target, op1, true);
8962 case WIDEN_SUM_EXPR:
8964 tree oprnd0 = treeop0;
8965 tree oprnd1 = treeop1;
8967 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8968 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8969 target, unsignedp);
8970 return target;
8973 case REDUC_MAX_EXPR:
8974 case REDUC_MIN_EXPR:
8975 case REDUC_PLUS_EXPR:
8977 op0 = expand_normal (treeop0);
8978 this_optab = optab_for_tree_code (code, type, optab_default);
8979 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8984 case VEC_LSHIFT_EXPR:
8985 case VEC_RSHIFT_EXPR:
8987 target = expand_vec_shift_expr (ops, target);
8991 case VEC_UNPACK_HI_EXPR:
8992 case VEC_UNPACK_LO_EXPR:
8994 op0 = expand_normal (treeop0);
8995 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8996 target, unsignedp);
8997 gcc_assert (temp);
8998 return temp;
9001 case VEC_UNPACK_FLOAT_HI_EXPR:
9002 case VEC_UNPACK_FLOAT_LO_EXPR:
9004 op0 = expand_normal (treeop0);
9005 /* The signedness is determined from the input operand. */
9006 temp = expand_widen_pattern_expr
9007 (ops, op0, NULL_RTX, NULL_RTX,
9008 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9014 case VEC_WIDEN_MULT_HI_EXPR:
9015 case VEC_WIDEN_MULT_LO_EXPR:
9016 case VEC_WIDEN_MULT_EVEN_EXPR:
9017 case VEC_WIDEN_MULT_ODD_EXPR:
9018 case VEC_WIDEN_LSHIFT_HI_EXPR:
9019 case VEC_WIDEN_LSHIFT_LO_EXPR:
9020 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9021 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9022 target, unsignedp);
9023 gcc_assert (target);
9026 case VEC_PACK_TRUNC_EXPR:
9027 case VEC_PACK_SAT_EXPR:
9028 case VEC_PACK_FIX_TRUNC_EXPR:
9029 mode = TYPE_MODE (TREE_TYPE (treeop0));
9033 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9034 op2 = expand_normal (treeop2);
9036 /* Careful here: if the target doesn't support integral vector modes,
9037 a constant selection vector could wind up smooshed into a normal
9038 integral constant. */
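/* E.g. a V4QI selector { 0, 1, 2, 3 } may arrive here folded into the
   scalar constant 0x03020100 on a little-endian target without QImode
   vector support; the simplify_subreg below reconstitutes the
   CONST_VECTOR that expand_vec_perm expects (byte layout shown is
   illustrative). */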
9039 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9041 tree sel_type = TREE_TYPE (treeop2);
9042 enum machine_mode vmode
9043 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9044 TYPE_VECTOR_SUBPARTS (sel_type));
9045 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9046 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9047 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9050 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9052 temp = expand_vec_perm (mode, op0, op1, op2, target);
9058 tree oprnd0 = treeop0;
9059 tree oprnd1 = treeop1;
9060 tree oprnd2 = treeop2;
9063 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9064 op2 = expand_normal (oprnd2);
9065 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9070 case REALIGN_LOAD_EXPR:
9072 tree oprnd0 = treeop0;
9073 tree oprnd1 = treeop1;
9074 tree oprnd2 = treeop2;
9077 this_optab = optab_for_tree_code (code, type, optab_default);
9078 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9079 op2 = expand_normal (oprnd2);
9080 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9087 /* A COND_EXPR with its type being VOID_TYPE represents a
9088 conditional jump and is handled in
9089 expand_gimple_cond_expr. */
9090 gcc_assert (!VOID_TYPE_P (type));
9092 /* Note that COND_EXPRs whose type is a structure or union
9093 are required to be constructed to contain assignments of
9094 a temporary variable, so that we can evaluate them here
9095 for side effect only. If type is void, we must do likewise. */
9097 gcc_assert (!TREE_ADDRESSABLE (type)
9099 && TREE_TYPE (treeop1) != void_type_node
9100 && TREE_TYPE (treeop2) != void_type_node);
9102 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9106 /* If we are not to produce a result, we have no target. Otherwise,
9107 if a target was specified use it; it will not be used as an
9108 intermediate target unless it is safe. If no target, use a
9109 temporary. */
9111 if (modifier != EXPAND_STACK_PARM
9112 && original_target
9113 && safe_from_p (original_target, treeop0, 1)
9114 && GET_MODE (original_target) == mode
9115 && !MEM_P (original_target))
9116 temp = original_target;
9117 else
9118 temp = assign_temp (type, 0, 1);
9120 do_pending_stack_adjust ();
9122 op0 = gen_label_rtx ();
9123 op1 = gen_label_rtx ();
9124 jumpifnot (treeop0, op0, -1);
9125 store_expr (treeop1, temp,
9126 modifier == EXPAND_STACK_PARM,
9129 emit_jump_insn (gen_jump (op1));
9132 store_expr (treeop2, temp,
9133 modifier == EXPAND_STACK_PARM,
9141 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9148 /* Here to do an ordinary binary operator. */
9150 expand_operands (treeop0, treeop1,
9151 subtarget, &op0, &op1, EXPAND_NORMAL);
9153 this_optab = optab_for_tree_code (code, type, optab_default);
9155 if (modifier == EXPAND_STACK_PARM)
9156 target = 0;
9157 temp = expand_binop (mode, this_optab, op0, op1, target,
9158 unsignedp, OPTAB_LIB_WIDEN);
9160 /* Bitwise operations do not need bitfield reduction as we expect their
9161 operands to be properly truncated. */
9162 if (code == BIT_XOR_EXPR
9163 || code == BIT_AND_EXPR
9164 || code == BIT_IOR_EXPR)
9166 return REDUCE_BIT_FIELD (temp);
9168 #undef REDUCE_BIT_FIELD
9171 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9172 enum expand_modifier modifier, rtx *alt_rtl)
9174 rtx op0, op1, temp, decl_rtl;
9177 enum machine_mode mode;
9178 enum tree_code code = TREE_CODE (exp);
9179 rtx subtarget, original_target;
9182 bool reduce_bit_field;
9183 location_t loc = EXPR_LOCATION (exp);
9184 struct separate_ops ops;
9185 tree treeop0, treeop1, treeop2;
9186 tree ssa_name = NULL_TREE;
9189 type = TREE_TYPE (exp);
9190 mode = TYPE_MODE (type);
9191 unsignedp = TYPE_UNSIGNED (type);
9193 treeop0 = treeop1 = treeop2 = NULL_TREE;
9194 if (!VL_EXP_CLASS_P (exp))
9195 switch (TREE_CODE_LENGTH (code))
9198 case 3: treeop2 = TREE_OPERAND (exp, 2);
9199 case 2: treeop1 = TREE_OPERAND (exp, 1);
9200 case 1: treeop0 = TREE_OPERAND (exp, 0);
9210 ignore = (target == const0_rtx
9211 || ((CONVERT_EXPR_CODE_P (code)
9212 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9213 && TREE_CODE (type) == VOID_TYPE));
9215 /* An operation in what may be a bit-field type needs the
9216 result to be reduced to the precision of the bit-field type,
9217 which is narrower than that of the type's mode. */
9218 reduce_bit_field = (!ignore
9219 && INTEGRAL_TYPE_P (type)
9220 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
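/* E.g. arithmetic on

	struct { unsigned f : 3; } s;

   is carried out in the mode of the underlying word, but only the low
   TYPE_PRECISION (type) == 3 bits are meaningful, so results must be
   truncated back to 3 bits before they can be observed. */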
9222 /* If we are going to ignore this result, we need only do something
9223 if there is a side-effect somewhere in the expression. If there
9224 is, short-circuit the most common cases here. Note that we must
9225 not call expand_expr with anything but const0_rtx in case this
9226 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9230 if (! TREE_SIDE_EFFECTS (exp))
9233 /* Ensure we reference a volatile object even if value is ignored, but
9234 don't do this if all we are doing is taking its address. */
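/* E.g. given "volatile int *p;", the statement "*p;" must still
   perform the load even though the value is discarded, whereas
   "&*p" only forms the address and must not touch memory. */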
9235 if (TREE_THIS_VOLATILE (exp)
9236 && TREE_CODE (exp) != FUNCTION_DECL
9237 && mode != VOIDmode && mode != BLKmode
9238 && modifier != EXPAND_CONST_ADDRESS)
9240 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9246 if (TREE_CODE_CLASS (code) == tcc_unary
9247 || code == BIT_FIELD_REF
9248 || code == COMPONENT_REF
9249 || code == INDIRECT_REF)
9250 return expand_expr (treeop0, const0_rtx, VOIDmode,
9253 else if (TREE_CODE_CLASS (code) == tcc_binary
9254 || TREE_CODE_CLASS (code) == tcc_comparison
9255 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9257 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9258 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9265 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9266 target = 0;
9268 /* Use subtarget as the target for operand 0 of a binary operation. */
9269 subtarget = get_subtarget (target);
9270 original_target = target;
9276 tree function = decl_function_context (exp);
9278 temp = label_rtx (exp);
9279 temp = gen_rtx_LABEL_REF (Pmode, temp);
9281 if (function != current_function_decl
9283 LABEL_REF_NONLOCAL_P (temp) = 1;
9285 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9290 /* ??? ivopts calls expander, without any preparation from
9291 out-of-ssa. So fake instructions as if this was an access to the
9292 base variable. This unnecessarily allocates a pseudo, see how we can
9293 reuse it, if partition base vars have it set already. */
9294 if (!currently_expanding_to_rtl)
9296 tree var = SSA_NAME_VAR (exp);
9297 if (var && DECL_RTL_SET_P (var))
9298 return DECL_RTL (var);
9299 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9300 LAST_VIRTUAL_REGISTER + 1);
9303 g = get_gimple_for_ssa_name (exp);
9304 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9305 if (g == NULL
9306 && modifier == EXPAND_INITIALIZER
9307 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9308 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9309 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9310 g = SSA_NAME_DEF_STMT (exp);
9314 location_t saved_loc = curr_insn_location ();
9316 set_curr_insn_location (gimple_location (g));
9317 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9318 tmode, modifier, NULL);
9319 set_curr_insn_location (saved_loc);
9320 if (REG_P (r) && !REG_EXPR (r))
9321 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9326 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9327 exp = SSA_NAME_VAR (ssa_name);
9328 goto expand_decl_rtl;
9332 /* If a static var's type was incomplete when the decl was written,
9333 but the type is complete now, lay out the decl now. */
9334 if (DECL_SIZE (exp) == 0
9335 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9336 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9337 layout_decl (exp, 0);
9339 /* ... fall through ... */
9343 decl_rtl = DECL_RTL (exp);
9345 gcc_assert (decl_rtl);
9346 decl_rtl = copy_rtx (decl_rtl);
9347 /* Record writes to register variables. */
9348 if (modifier == EXPAND_WRITE
9350 && HARD_REGISTER_P (decl_rtl))
9351 add_to_hard_reg_set (&crtl->asm_clobbers,
9352 GET_MODE (decl_rtl), REGNO (decl_rtl));
9354 /* Ensure variable marked as used even if it doesn't go through
9355 a parser. If it hasn't been used yet, write out an external
9356 definition. */
9357 TREE_USED (exp) = 1;
9359 /* Show we haven't gotten RTL for this yet. */
9362 /* Variables inherited from containing functions should have
9363 been lowered by this point. */
9364 context = decl_function_context (exp);
9365 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9366 || context == current_function_decl
9367 || TREE_STATIC (exp)
9368 || DECL_EXTERNAL (exp)
9369 /* ??? C++ creates functions that are not TREE_STATIC. */
9370 || TREE_CODE (exp) == FUNCTION_DECL);
9372 /* This is the case of an array whose size is to be determined
9373 from its initializer, while the initializer is still being parsed.
9374 ??? We aren't parsing while expanding anymore. */
9376 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9377 temp = validize_mem (decl_rtl);
9379 /* If DECL_RTL is memory, we are in the normal case and the
9380 address is not valid, get the address into a register. */
9382 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9385 *alt_rtl = decl_rtl;
9386 decl_rtl = use_anchored_address (decl_rtl);
9387 if (modifier != EXPAND_CONST_ADDRESS
9388 && modifier != EXPAND_SUM
9389 && !memory_address_addr_space_p (DECL_MODE (exp),
9391 MEM_ADDR_SPACE (decl_rtl)))
9392 temp = replace_equiv_address (decl_rtl,
9393 copy_rtx (XEXP (decl_rtl, 0)));
9396 /* If we got something, return it. But first, set the alignment
9397 if the address is a register. */
9400 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9401 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9406 /* If the mode of DECL_RTL does not match that of the decl,
9407 there are two cases: we are dealing with a BLKmode value
9408 that is returned in a register, or we are dealing with
9409 a promoted value. In the latter case, return a SUBREG
9410 of the wanted mode, but mark it so that we know that it
9411 was already extended. */
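/* E.g. on a target that promotes SImode values to 64-bit registers
   (such as 64-bit MIPS), an SImode decl may live in a DImode reg; we
   then return (subreg:SI (reg:DI n)) with SUBREG_PROMOTED_VAR_P set,
   so later code knows the upper bits already hold a valid extension
   (register number n is illustrative). */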
9412 if (REG_P (decl_rtl)
9413 && DECL_MODE (exp) != BLKmode
9414 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9416 enum machine_mode pmode;
9418 /* Get the signedness to be used for this variable. Ensure we get
9419 the same mode we got when the variable was declared. */
9420 if (code == SSA_NAME
9421 && (g = SSA_NAME_DEF_STMT (ssa_name))
9422 && gimple_code (g) == GIMPLE_CALL)
9424 gcc_assert (!gimple_call_internal_p (g));
9425 pmode = promote_function_mode (type, mode, &unsignedp,
9426 gimple_call_fntype (g),
9430 pmode = promote_decl_mode (exp, &unsignedp);
9431 gcc_assert (GET_MODE (decl_rtl) == pmode);
9433 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9434 SUBREG_PROMOTED_VAR_P (temp) = 1;
9435 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9442 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9443 TREE_INT_CST_HIGH (exp), mode);
9449 tree tmp = NULL_TREE;
9450 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9451 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9452 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9453 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9454 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9455 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9456 return const_vector_from_tree (exp);
9457 if (GET_MODE_CLASS (mode) == MODE_INT)
9459 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9461 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9465 vec<constructor_elt, va_gc> *v;
9467 vec_alloc (v, VECTOR_CST_NELTS (exp));
9468 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9469 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9470 tmp = build_constructor (type, v);
9472 return expand_expr (tmp, ignore ? const0_rtx : target,
9477 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9480 /* If optimized, generate immediate CONST_DOUBLE
9481 which will be turned into memory by reload if necessary.
9483 We used to force a register so that loop.c could see it. But
9484 this does not allow gen_* patterns to perform optimizations with
9485 the constants. It also produces two insns in cases like "x = 1.0;".
9486 On most machines, floating-point constants are not permitted in
9487 many insns, so we'd end up copying it to a register in any case.
9489 Now, we do the copying in expand_binop, if appropriate. */
9490 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9491 TYPE_MODE (TREE_TYPE (exp)));
9494 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9495 TYPE_MODE (TREE_TYPE (exp)));
9498 /* Handle evaluating a complex constant in a CONCAT target. */
9499 if (original_target && GET_CODE (original_target) == CONCAT)
9501 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9504 rtarg = XEXP (original_target, 0);
9505 itarg = XEXP (original_target, 1);
9507 /* Move the real and imaginary parts separately. */
9508 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9509 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9512 emit_move_insn (rtarg, op0);
9514 emit_move_insn (itarg, op1);
9516 return original_target;
9519 /* ... fall through ... */
9522 temp = expand_expr_constant (exp, 1, modifier);
9524 /* temp contains a constant address.
9525 On RISC machines where a constant address isn't valid,
9526 make some insns to get that address into a register. */
9527 if (modifier != EXPAND_CONST_ADDRESS
9528 && modifier != EXPAND_INITIALIZER
9529 && modifier != EXPAND_SUM
9530 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9531 MEM_ADDR_SPACE (temp)))
9532 return replace_equiv_address (temp,
9533 copy_rtx (XEXP (temp, 0)));
9539 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9541 if (!SAVE_EXPR_RESOLVED_P (exp))
9543 /* We can indeed still hit this case, typically via builtin
9544 expanders calling save_expr immediately before expanding
9545 something. Assume this means that we only have to deal
9546 with non-BLKmode values. */
9547 gcc_assert (GET_MODE (ret) != BLKmode);
9549 val = build_decl (curr_insn_location (),
9550 VAR_DECL, NULL, TREE_TYPE (exp));
9551 DECL_ARTIFICIAL (val) = 1;
9552 DECL_IGNORED_P (val) = 1;
9554 TREE_OPERAND (exp, 0) = treeop0;
9555 SAVE_EXPR_RESOLVED_P (exp) = 1;
9557 if (!CONSTANT_P (ret))
9558 ret = copy_to_reg (ret);
9559 SET_DECL_RTL (val, ret);
9567 /* If we don't need the result, just ensure we evaluate any
9568 subexpressions. */
9571 unsigned HOST_WIDE_INT idx;
9574 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9575 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9580 return expand_constructor (exp, target, modifier, false);
9582 case TARGET_MEM_REF:
9585 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9586 struct mem_address addr;
9587 enum insn_code icode;
9590 get_address_description (exp, &addr);
9591 op0 = addr_for_mem_ref (&addr, as, true);
9592 op0 = memory_address_addr_space (mode, op0, as);
9593 temp = gen_rtx_MEM (mode, op0);
9594 set_mem_attributes (temp, exp, 0);
9595 set_mem_addr_space (temp, as);
9596 align = get_object_alignment (exp);
9597 if (modifier != EXPAND_WRITE
9598 && modifier != EXPAND_MEMORY
9600 && align < GET_MODE_ALIGNMENT (mode)
9601 /* If the target does not have special handling for unaligned
9602 loads of mode then it can use regular moves for them. */
9603 && ((icode = optab_handler (movmisalign_optab, mode))
9604 != CODE_FOR_nothing))
9606 struct expand_operand ops[2];
9608 /* We've already validated the memory, and we're creating a
9609 new pseudo destination. The predicates really can't fail,
9610 nor can the generator. */
9611 create_output_operand (&ops[0], NULL_RTX, mode);
9612 create_fixed_operand (&ops[1], temp);
9613 expand_insn (icode, 2, ops);
9614 temp = ops[0].value;
9622 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9623 enum machine_mode address_mode;
9624 tree base = TREE_OPERAND (exp, 0);
9626 enum insn_code icode;
9628 /* Handle expansion of non-aliased memory with non-BLKmode. That
9629 might end up in a register. */
9630 if (mem_ref_refers_to_non_mem_p (exp))
9632 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9633 base = TREE_OPERAND (base, 0);
9635 && host_integerp (TYPE_SIZE (type), 1)
9636 && (GET_MODE_BITSIZE (DECL_MODE (base))
9637 == TREE_INT_CST_LOW (TYPE_SIZE (type))))
9638 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9639 target, tmode, modifier);
9640 if (TYPE_MODE (type) == BLKmode)
9642 temp = assign_stack_temp (DECL_MODE (base),
9643 GET_MODE_SIZE (DECL_MODE (base)));
9644 store_expr (base, temp, 0, false);
9645 temp = adjust_address (temp, BLKmode, offset);
9646 set_mem_size (temp, int_size_in_bytes (type));
9649 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9650 bitsize_int (offset * BITS_PER_UNIT));
9651 return expand_expr (exp, target, tmode, modifier);
9653 address_mode = targetm.addr_space.address_mode (as);
9654 base = TREE_OPERAND (exp, 0);
9655 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9657 tree mask = gimple_assign_rhs2 (def_stmt);
9658 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9659 gimple_assign_rhs1 (def_stmt), mask);
9660 TREE_OPERAND (exp, 0) = base;
9662 align = get_object_alignment (exp);
9663 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9664 op0 = memory_address_addr_space (address_mode, op0, as);
9665 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9668 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9669 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9671 op0 = memory_address_addr_space (mode, op0, as);
9672 temp = gen_rtx_MEM (mode, op0);
9673 set_mem_attributes (temp, exp, 0);
9674 set_mem_addr_space (temp, as);
9675 if (TREE_THIS_VOLATILE (exp))
9676 MEM_VOLATILE_P (temp) = 1;
9677 if (modifier != EXPAND_WRITE
9678 && modifier != EXPAND_MEMORY
9680 && align < GET_MODE_ALIGNMENT (mode))
9682 if ((icode = optab_handler (movmisalign_optab, mode))
9683 != CODE_FOR_nothing)
9685 struct expand_operand ops[2];
9687 /* We've already validated the memory, and we're creating a
9688 new pseudo destination. The predicates really can't fail,
9689 nor can the generator. */
9690 create_output_operand (&ops[0], NULL_RTX, mode);
9691 create_fixed_operand (&ops[1], temp);
9692 expand_insn (icode, 2, ops);
9693 temp = ops[0].value;
9695 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9696 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9697 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9698 true, (modifier == EXPAND_STACK_PARM
9699 ? NULL_RTX : target),
9708 tree array = treeop0;
9709 tree index = treeop1;
9712 /* Fold an expression like: "foo"[2].
9713 This is not done in fold so it won't happen inside &.
9714 Don't fold if this is for wide characters since it's too
9715 difficult to do correctly and this is a very rare case. */
9717 if (modifier != EXPAND_CONST_ADDRESS
9718 && modifier != EXPAND_INITIALIZER
9719 && modifier != EXPAND_MEMORY)
9721 tree t = fold_read_from_constant_string (exp);
9724 return expand_expr (t, target, tmode, modifier);
9727 /* If this is a constant index into a constant array,
9728 just get the value from the array. Handle both the cases when
9729 we have an explicit constructor and when our operand is a variable
9730 that was declared const. */
9732 if (modifier != EXPAND_CONST_ADDRESS
9733 && modifier != EXPAND_INITIALIZER
9734 && modifier != EXPAND_MEMORY
9735 && TREE_CODE (array) == CONSTRUCTOR
9736 && ! TREE_SIDE_EFFECTS (array)
9737 && TREE_CODE (index) == INTEGER_CST)
9739 unsigned HOST_WIDE_INT ix;
9742 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9744 if (tree_int_cst_equal (field, index))
9746 if (!TREE_SIDE_EFFECTS (value))
9747 return expand_expr (fold (value), target, tmode, modifier);
9752 else if (optimize >= 1
9753 && modifier != EXPAND_CONST_ADDRESS
9754 && modifier != EXPAND_INITIALIZER
9755 && modifier != EXPAND_MEMORY
9756 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9757 && TREE_CODE (index) == INTEGER_CST
9758 && (TREE_CODE (array) == VAR_DECL
9759 || TREE_CODE (array) == CONST_DECL)
9760 && (init = ctor_for_folding (array)) != error_mark_node)
9762 if (TREE_CODE (init) == CONSTRUCTOR)
9764 unsigned HOST_WIDE_INT ix;
9767 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9769 if (tree_int_cst_equal (field, index))
9771 if (TREE_SIDE_EFFECTS (value))
9774 if (TREE_CODE (value) == CONSTRUCTOR)
9776 /* If VALUE is a CONSTRUCTOR, this
9777 optimization is only useful if
9778 this doesn't store the CONSTRUCTOR
9779 into memory. If it does, it is more
9780 efficient to just load the data from
9781 the array directly. */
9782 rtx ret = expand_constructor (value, target,
9784 if (ret == NULL_RTX)
9789 expand_expr (fold (value), target, tmode, modifier);
9792 else if (TREE_CODE (init) == STRING_CST)
9794 tree low_bound = array_ref_low_bound (exp);
9795 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9797 /* Optimize the special case of a zero lower bound.
9799 We convert the lower bound to sizetype to avoid problems
9800 with constant folding. E.g. suppose the lower bound is
9801 1 and its mode is QI. Without the conversion
9802 (ARRAY + (INDEX - (unsigned char)1))
9803 becomes
9804 (ARRAY + (-(unsigned char)1) + INDEX)
9805 which becomes
9806 (ARRAY + 255 + INDEX). Oops! */
9807 if (!integer_zerop (low_bound))
9808 index1 = size_diffop_loc (loc, index1,
9809 fold_convert_loc (loc, sizetype,
9812 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9814 tree type = TREE_TYPE (TREE_TYPE (init));
9815 enum machine_mode mode = TYPE_MODE (type);
9817 if (GET_MODE_CLASS (mode) == MODE_INT
9818 && GET_MODE_SIZE (mode) == 1)
9819 return gen_int_mode (TREE_STRING_POINTER (init)
9820 [TREE_INT_CST_LOW (index1)],
9826 goto normal_inner_ref;
9829 /* If the operand is a CONSTRUCTOR, we can just extract the
9830 appropriate field if it is present. */
9831 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9833 unsigned HOST_WIDE_INT idx;
9836 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9838 if (field == treeop1
9839 /* We can normally use the value of the field in the
9840 CONSTRUCTOR. However, if this is a bitfield in
9841 an integral mode that we can fit in a HOST_WIDE_INT,
9842 we must mask only the number of bits in the bitfield,
9843 since this is done implicitly by the constructor. If
9844 the bitfield does not meet either of those conditions,
9845 we can't do this optimization. */
9846 && (! DECL_BIT_FIELD (field)
9847 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9848 && (GET_MODE_PRECISION (DECL_MODE (field))
9849 <= HOST_BITS_PER_WIDE_INT))))
9851 if (DECL_BIT_FIELD (field)
9852 && modifier == EXPAND_STACK_PARM)
9854 op0 = expand_expr (value, target, tmode, modifier);
9855 if (DECL_BIT_FIELD (field))
9857 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9858 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9860 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9862 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9863 imode);
9864 op0 = expand_and (imode, op0, op1, target);
9868 int count = GET_MODE_PRECISION (imode) - bitsize;
9870 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9871 target, 0);
9872 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9873 target, 0);
9880 goto normal_inner_ref;
9883 case ARRAY_RANGE_REF:
9886 enum machine_mode mode1, mode2;
9887 HOST_WIDE_INT bitsize, bitpos;
9889 int volatilep = 0, must_force_mem;
9890 bool packedp = false;
9891 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9892 &mode1, &unsignedp, &volatilep, true);
9893 rtx orig_op0, memloc;
9894 bool mem_attrs_from_type = false;
9896 /* If we got back the original object, something is wrong. Perhaps
9897 we are evaluating an expression too early. In any event, don't
9898 infinitely recurse. */
9899 gcc_assert (tem != exp);
9901 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9902 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9903 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9906 /* If TEM's type is a union of variable size, pass TARGET to the inner
9907 computation, since it will need a temporary and TARGET is known
9908 to suffice. This occurs in unchecked conversion in Ada. */
9911 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9912 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9913 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9915 && modifier != EXPAND_STACK_PARM
9916 ? target : NULL_RTX),
9918 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
9920 /* If the bitfield is volatile, we want to access it in the
9921 field's mode, not the computed mode.
9922 If a MEM has VOIDmode (external with incomplete type),
9923 use BLKmode for it instead. */
9926 if (volatilep && flag_strict_volatile_bitfields > 0)
9927 op0 = adjust_address (op0, mode1, 0);
9928 else if (GET_MODE (op0) == VOIDmode)
9929 op0 = adjust_address (op0, BLKmode, 0);
9933 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9935 /* If we have either an offset, a BLKmode result, or a reference
9936 outside the underlying object, we must force it to memory.
9937 Such a case can occur in Ada if we have unchecked conversion
9938 of an expression from a scalar type to an aggregate type or
9939 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9940 passed a partially uninitialized object or a view-conversion
9941 to a larger size. */
9942 must_force_mem = (offset
9944 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9946 /* Handle CONCAT first. */
9947 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9950 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9953 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9956 op0 = XEXP (op0, 0);
9957 mode2 = GET_MODE (op0);
9959 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9960 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9964 op0 = XEXP (op0, 1);
9966 mode2 = GET_MODE (op0);
9969 /* Otherwise force into memory. */
9973 /* If this is a constant, put it in a register if it is a legitimate
9974 constant and we don't need a memory reference. */
9975 if (CONSTANT_P (op0)
9977 && targetm.legitimate_constant_p (mode2, op0)
9979 op0 = force_reg (mode2, op0);
9981 /* Otherwise, if this is a constant, try to force it to the constant
9982 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9983 is a legitimate constant. */
9984 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9985 op0 = validize_mem (memloc);
9987 /* Otherwise, if this is a constant or the object is not in memory
9988 and needs to be, put it there. */
9989 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9991 tree nt = build_qualified_type (TREE_TYPE (tem),
9992 (TYPE_QUALS (TREE_TYPE (tem))
9993 | TYPE_QUAL_CONST));
9994 memloc = assign_temp (nt, 1, 1);
9995 emit_move_insn (memloc, op0);
9997 mem_attrs_from_type = true;
10002 enum machine_mode address_mode;
10003 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10006 gcc_assert (MEM_P (op0));
10008 address_mode = get_address_mode (op0);
10009 if (GET_MODE (offset_rtx) != address_mode)
10010 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10012 if (GET_MODE (op0) == BLKmode
10013 /* A constant address in OP0 can have VOIDmode; we must
10014 not try to call force_reg in that case. */
10015 && GET_MODE (XEXP (op0, 0)) != VOIDmode
10017 && (bitpos % bitsize) == 0
10018 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10019 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
10021 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10025 op0 = offset_address (op0, offset_rtx,
10026 highest_pow2_factor (offset));
10029 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10030 record its alignment as BIGGEST_ALIGNMENT. */
10031 if (MEM_P (op0) && bitpos == 0 && offset != 0
10032 && is_aligning_offset (offset, tem))
10033 set_mem_align (op0, BIGGEST_ALIGNMENT);
10035 /* Don't forget about volatility even if this is a bitfield. */
10036 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10038 if (op0 == orig_op0)
10039 op0 = copy_rtx (op0);
10041 MEM_VOLATILE_P (op0) = 1;
10044 /* In cases where an aligned union has an unaligned object
10045 as a field, we might be extracting a BLKmode value from
10046 an integer-mode (e.g., SImode) object. Handle this case
10047 by doing the extract into an object as wide as the field
10048 (which we know to be the width of a basic mode), then
10049 storing into memory, and changing the mode to BLKmode. */
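/* E.g. fetching the 3-byte struct member of

	union { int i; struct { char c[3]; } s; } u;

   when U lives in an SImode register: extract the bits into an SImode
   temporary, spill it to a stack slot, and relabel that MEM as
   BLKmode so the caller sees an addressable aggregate (sizes assume a
   4-byte int; illustrative sketch). */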
10050 if (mode1 == VOIDmode
10051 || REG_P (op0) || GET_CODE (op0) == SUBREG
10052 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10053 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10054 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10055 && modifier != EXPAND_CONST_ADDRESS
10056 && modifier != EXPAND_INITIALIZER
10057 && modifier != EXPAND_MEMORY)
10058 /* If the field is volatile, we always want an aligned
10059 access. Do this in the following two situations:
10060 1. the access is not already naturally
10061 aligned, otherwise "normal" (non-bitfield) volatile fields
10062 become non-addressable.
10063 2. the bitsize is narrower than the access size. Need
10064 to extract bitfields from the access. */
10065 || (volatilep && flag_strict_volatile_bitfields > 0
10066 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
10067 || (mode1 != BLKmode
10068 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
10069 /* If the field isn't aligned enough to fetch as a memref,
10070 fetch it as a bit field. */
10071 || (mode1 != BLKmode
10072 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10073 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10075 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10076 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10077 && modifier != EXPAND_MEMORY
10078 && ((modifier == EXPAND_CONST_ADDRESS
10079 || modifier == EXPAND_INITIALIZER)
10081 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10082 || (bitpos % BITS_PER_UNIT != 0)))
10083 /* If the type and the field are a constant size and the
10084 size of the type isn't the same size as the bitfield,
10085 we must use bitfield operations. */
10087 && TYPE_SIZE (TREE_TYPE (exp))
10088 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10089 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10092 enum machine_mode ext_mode = mode;
10094 if (ext_mode == BLKmode
10095 && ! (target != 0 && MEM_P (op0)
10097 && bitpos % BITS_PER_UNIT == 0))
10098 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10100 if (ext_mode == BLKmode)
10103 target = assign_temp (type, 1, 1);
10108 /* In this case, BITPOS must start at a byte boundary and
10109 TARGET, if specified, must be a MEM. */
10110 gcc_assert (MEM_P (op0)
10111 && (!target || MEM_P (target))
10112 && !(bitpos % BITS_PER_UNIT));
10114 emit_block_move (target,
10115 adjust_address (op0, VOIDmode,
10116 bitpos / BITS_PER_UNIT),
10117 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10119 (modifier == EXPAND_STACK_PARM
10120 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10125 op0 = validize_mem (op0);
10127 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10128 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10130 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
10131 (modifier == EXPAND_STACK_PARM
10132 ? NULL_RTX : target),
10133 ext_mode, ext_mode);
10135 /* If the result is a record type and BITSIZE is narrower than
10136 the mode of OP0, an integral mode, and this is a big endian
10137 machine, we must put the field into the high-order bits. */
10138 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10139 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10140 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10141 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10142 GET_MODE_BITSIZE (GET_MODE (op0))
10143 - bitsize, op0, 1);
10145 /* If the result type is BLKmode, store the data into a temporary
10146 of the appropriate type, but with the mode corresponding to the
10147 mode for the data we have (op0's mode). It's tempting to make
10148 this a constant type, since we know it's only being stored once,
10149 but that can cause problems if we are taking the address of this
10150 COMPONENT_REF because the MEM of any reference via that address
10151 will have flags corresponding to the type, which will not
10152 necessarily be constant. */
10153 if (mode == BLKmode)
10157 new_rtx = assign_stack_temp_for_type (ext_mode,
10158 GET_MODE_BITSIZE (ext_mode),
10160 emit_move_insn (new_rtx, op0);
10161 op0 = copy_rtx (new_rtx);
10162 PUT_MODE (op0, BLKmode);
10168 /* If the result is BLKmode, use that to access the object
10169 now as well. */
10170 if (mode == BLKmode)
10171 mode1 = BLKmode;
10173 /* Get a reference to just this component. */
10174 if (modifier == EXPAND_CONST_ADDRESS
10175 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10176 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10178 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10180 if (op0 == orig_op0)
10181 op0 = copy_rtx (op0);
10183 /* If op0 is a temporary because of forcing to memory, pass only the
10184 type to set_mem_attributes so that the original expression is never
10185 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10186 if (mem_attrs_from_type)
10187 set_mem_attributes (op0, type, 0);
10189 set_mem_attributes (op0, exp, 0);
10191 if (REG_P (XEXP (op0, 0)))
10192 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10194 MEM_VOLATILE_P (op0) |= volatilep;
10195 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10196 || modifier == EXPAND_CONST_ADDRESS
10197 || modifier == EXPAND_INITIALIZER)
10201 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10203 convert_move (target, op0, unsignedp);
10208 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10211 /* All valid uses of __builtin_va_arg_pack () are removed during
10212 inlining. */
10213 if (CALL_EXPR_VA_ARG_PACK (exp))
10214 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10216 tree fndecl = get_callee_fndecl (exp), attr;
10219 && (attr = lookup_attribute ("error",
10220 DECL_ATTRIBUTES (fndecl))) != NULL)
10221 error ("%Kcall to %qs declared with attribute error: %s",
10222 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10223 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10225 && (attr = lookup_attribute ("warning",
10226 DECL_ATTRIBUTES (fndecl))) != NULL)
10227 warning_at (tree_nonartificial_location (exp),
10228 0, "%Kcall to %qs declared with attribute warning: %s",
10229 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10230 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10232 /* Check for a built-in function. */
10233 if (fndecl && DECL_BUILT_IN (fndecl))
10235 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10236 return expand_builtin (exp, target, subtarget, tmode, ignore);
10239 return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  enum machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp;
	  int volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,
				   true);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr (tem,
			       (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				    != INTEGER_CST)
				&& modifier != EXPAND_STACK_PARM
				? target : NULL_RTX),
			       VOIDmode,
			       modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}
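
      /* For illustration only (compiled out): storing the operand in its
	 original mode and reloading it in the new mode, as the spill branch
	 above does, is the RTL analogue of memcpy-based type punning at the
	 C level.  A sketch, assuming float and unsigned int are the same
	 size on the host:  */
#if 0
#include <string.h>

static unsigned int
bits_of_float (float f)
{
  unsigned int u;
  memcpy (&u, &f, sizeof u);	/* store in one mode, reload in another */
  return u;
}
#endif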
      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (TYPE_ALIGN_OK (type))
	    {
	      /* ??? Copying the MEM without substantially changing it might
		 run afoul of the code handling volatile memory references in
		 store_expr, which assumes that TARGET is returned unmodified
		 if it has been used.  */
	      op0 = copy_rtx (op0);
	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	    }
	  else if (mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
		   /* If the target does have special handling for unaligned
		      loads of mode then use them.  */
		   && ((icode = optab_handler (movmisalign_optab, mode))
		       != CODE_FOR_nothing))
	    {
	      rtx reg, insn;

	      op0 = adjust_address (op0, mode, 0);
	      /* We've already validated the memory, and we're creating a
		 new pseudo destination.  The predicates really can't
		 fail.  */
	      reg = gen_reg_rtx (mode);

	      /* Nor can the insn generator.  */
	      insn = GEN_FCN (icode) (reg, op0);
	      emit_insn (insn);
	      return reg;
	    }
	  else if (STRICT_ALIGNMENT
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      tree inner_type = TREE_TYPE (treeop0);
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
	      rtx new_rtx
		= assign_stack_temp_for_type (mode, temp_size, type);
	      rtx new_with_op0_mode
		= adjust_address (new_rtx, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (mode)),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new_rtx;
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
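	/* For illustration only (compiled out): the transformation
	   implemented below, written out at the source level.  The struct
	   is hypothetical.  */
#if 0
struct s { unsigned a : 1; unsigned b : 1; };

void
transform (struct s *p)
{
  /* Instead of expanding the read-modify-write   p->a |= p->b;
     we emit the equivalent conditional store:  */
  if (p->b)
    p->a = 1;
  /* and for   p->a &= p->b;   the dual:   if (!p->b) p->a = 0;  */
}
#endif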
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);

    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask = immed_double_int_const (double_int::mask (prec),
					 GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
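
/* For illustration only (compiled out): the two non-constant strategies
   above, at the C level.  Reducing a value to PREC bits is a mask for
   unsigned types and a shift-up/shift-down pair for signed types.  The
   sketch assumes a 32-bit int, 0 < prec < 32, and the common
   arithmetic-right-shift behavior for signed values.  */
#if 0
static unsigned int
reduce_unsigned (unsigned int x, int prec)
{
  return x & ((1u << prec) - 1);	/* expand_and with a mask */
}

static int
reduce_signed (int x, int prec)
{
  int count = 32 - prec;
  return (x << count) >> count;		/* LSHIFT_EXPR then RSHIFT_EXPR */
}
#endif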
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
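
/* For illustration only (compiled out): the tree pattern recognized above,
   (-&exp) & (2**N - 1), is the usual idiom for rounding an address up to a
   large alignment.  A sketch with a hypothetical 64-byte ALIGN (the mask
   must be larger than BIGGEST_ALIGNMENT in bytes for the match to fire):  */
#if 0
#include <stdint.h>

#define ALIGN 64

static void *
align_up (void *p)
{
  uintptr_t off = (-(uintptr_t) p) & (ALIGN - 1);
  return (char *) p + off;	/* now ALIGN-aligned */
}
#endif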
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return array;
    }

  return 0;
}
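
/* For illustration only (compiled out): given the argument tree for
   "hello" + 2 (e.g. from a call such as strlen ("hello" + 2)),
   string_constant returns the STRING_CST "hello" and sets *PTR_OFFSET to
   a sizetype constant 2.  A hedged sketch of a caller, using only the
   documented interface:  */
#if 0
static void
example_caller (tree arg)
{
  tree offset;
  tree str = string_constant (arg, &offset);
  if (str)
    {
      /* TREE_STRING_POINTER (str) + offset addresses the bytes ARG
	 accesses, provided offset is within TREE_STRING_LENGTH (str).  */
    }
}
#endif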
/* Generate code to calculate the value of OPS, an exploded comparison
   expression, using a store-flag instruction, and return an rtx for the
   result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
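
/* For illustration only (compiled out): a store-flag sequence materializes
   a comparison result without a branch,

       r = (a < b);   -->   one scc/cstore instruction

   whereas the set/jump/set fallback mentioned above is the branchy
   source-level equivalent sketched here:  */
#if 0
static int
set_jump_set (int a, int b)
{
  int r = 1;		/* set */
  if (a < b)		/* jump */
    return r;
  r = 0;		/* set */
  return r;
}
#endif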
static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
    }
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
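
  /* For illustration only: the conversions performed by the switch below,
     for signed X:

	 X < 1    becomes  X <= 0	 X >= 1   becomes  X > 0
	 X <= -1  becomes  X < 0	 X > -1   becomes  X >= 0

     Comparisons with zero are what the single-bit tests further down
     look for.  */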
  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
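
  /* For illustration only (compiled out): the shift-and-mask form produced
     by fold_single_bit_test, shown for a test of bit 3:  */
#if 0
static int
bit_test (unsigned int x)
{
  /* (x & 8) != 0   becomes   (x >> 3) & 1,
     (x & 8) == 0   becomes   ((x >> 3) & 1) ^ 1.  */
  return (x >> 3) & 1;
}
#endif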
  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    int default_probability)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
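
  /* For illustration only (compiled out): this is the classic two-sided
     range check collapsed into one unsigned comparison.  After subtracting
     the lower bound LO, an index X lies inside [LO, HI] iff

	 (unsigned) (X - LO) <= (unsigned) (HI - LO)

     because values below LO wrap around to large unsigned numbers.  */
#if 0
static int
in_range (int x, int lo, int hi)
{
  return (unsigned int) (x - lo) <= (unsigned int) (hi - lo);
}
#endif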
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
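
  /* For illustration only: the PLUS/MULT built below computes the address

	 table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

     i.e. ordinary array indexing into the dispatch table, where each
     element is one case-vector entry.  */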
  index = gen_rtx_PLUS
    (Pmode,
     gen_rtx_MULT (Pmode, index,
		   gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE), Pmode)),
     gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
						   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
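
/* For illustration only: with the DWARF-2 unwinder this yields names such
   as "__gxx_personality_v0" for C++ (prefix "gxx") or "__gcc_personality_v0"
   for C, and "__gxx_personality_sj0" under setjmp/longjmp unwinding.  */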
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
#include "gt-expr.h"