/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "common/common-target.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */
#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
			enum machine_mode, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
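
/* A worked example of these heuristics (numbers are illustrative, not
   from any particular target): with MOVE_MAX_PIECES == 8 on a
   strict-alignment target, a 16-byte copy with 32-bit alignment costs

     move_by_pieces_ninsns (16, 32, 9) == 4    (four SImode moves)

   so MOVE_BY_PIECES_P is true whenever MOVE_RATIO exceeds 4, and the
   copy is expanded inline rather than through the memcpy libcall.  */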
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  int regno;
  rtx mem, mem1;
  rtx reg;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
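
/* A minimal usage sketch (X is assumed to be an existing SImode rtx):

     rtx wide = gen_reg_rtx (DImode);
     convert_move (wide, x, 1);

   UNSIGNEDP == 1 selects ZERO_EXTEND, so WIDE receives X zero-extended
   from SImode to DImode; passing 0 would sign-extend instead.  */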
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_PRECISION (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (icode, to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
     {
       /* If we widen from_mode to to_mode and they are in the same class,
	  we won't saturate the result.
	  Otherwise, always saturate the result to play safe.  */
       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	   && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	 expand_fixed_convert (to, from, 0, 0);
       else
	 expand_fixed_convert (to, from, 0, 1);
       return;
     }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 the source does not overlap the target so force it into an
	 isolated register when it might do so.  Likewise for any MEM
	 input, since the conversion sequence might require several
	 references to it and we must ensure we're getting the same
	 value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  int shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = (GET_MODE_PRECISION (to_mode)
			  - GET_MODE_PRECISION (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }
  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */
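
/* For instance, the common pattern of forcing an operand into the mode
   an insn pattern requires (illustrative only):

     if (GET_MODE (op) != SImode)
       op = convert_to_mode (SImode, op, unsignedp);

   This is simply convert_modes with OLDMODE == VOIDmode, as the body
   below shows.  */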
rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
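
/* Concretely: on a 64-bit host, converting (const_int -1) taken from
   SImode to an unsigned TImode value must yield the constant 0xffffffff
   with a zero high-order word; a bare gen_lowpart would instead produce
   all ones.  The double_int path below performs that zero extension.  */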
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = double_int::from_uhwi (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
	val = val.zext (GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (CONST_DOUBLE_AS_INT_P (x)
	      || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION_MODES_P (mode,
							    GET_MODE (x))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= GET_MODE_MASK (oldmode);
	  if (! unsignedp
	      && val_signbit_known_set_p (oldmode, val))
	    val |= ~GET_MODE_MASK (oldmode);

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > max_pieces
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode no wider than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
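
/* E.g. with a 64-bit HOST_WIDE_INT and MOVE_MAX_PIECES == 16 (numbers
   are illustrative), STORE_MAX_PIECES is MIN (16, 16) == 16; with a
   32-bit HOST_WIDE_INT it drops to 8, since an immediate wider than
   two HOST_WIDE_INTs cannot be represented.  */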
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
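
/* A typical call (sketch): once MOVE_BY_PIECES_P has approved a copy
   of constant size, the caller does

     if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   which is exactly how emit_block_move_hints uses this routine below.  */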
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode;
  enum machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode,
					     plus_constant (from_addr_mode,
							    from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode,
					   plus_constant (to_addr_mode,
							  to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (to_addr_mode,
								data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }

  return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
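
/* For instance, L == 7 at 32-bit alignment on a strict-alignment target
   (assuming SImode, HImode and QImode all have usable mov optabs)
   decomposes into one SImode, one HImode and one QImode move:
   3 insns in total.  */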
unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */
static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
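
/* Usage sketch: a plain structure copy with no hints is

     emit_block_move (dst_mem, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);

   which forwards here with EXPECTED_ALIGN == 0 and EXPECTED_SIZE == -1
   (see emit_block_move below).  */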
rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
	 tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
	mark_addressable (y_expr);
      if (x_expr)
	mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
					    method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */
static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */
static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
	{
	  struct expand_operand ops[6];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6);

	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops == 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    {
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */
rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;
void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
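
/* The generated loop has this shape (pseudo-code, illustrative):

       iter = 0;
       goto cmp;
     top:
       *(x + iter) = *(y + iter);     one QImode move per iteration
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;  */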
static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode = get_address_mode (x);
  enum machine_mode y_addr_mode = get_address_mode (y);
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label, REG_BR_PROB_BASE * 90 / 100);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
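
/* For example (illustrative), a 16-byte value returned in two DImode
   registers might be described as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   gen_group_rtx keeps the modes and byte offsets but replaces the two
   hard registers with freshly allocated pseudos.  */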
rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, false, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, false, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */
rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */
void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */
rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode));
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode));
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      shift, tmps[i], 0);
	    }
	  bytelen = adj_bytelen;
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 0, 0, mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */
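
/* Padding example (illustrative): a 6-byte struct arriving in a 64-bit
   register on a big-endian target that returns values at the least
   significant end is left-padded, so PADDING_CORRECTION below becomes
   64 - 6 * 8 = 16 bits and extraction starts 16 bits into the word.  */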
2087 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2089 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2090 rtx src = NULL, dst = NULL;
2091 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2092 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2093 enum machine_mode mode = GET_MODE (srcreg);
2094 enum machine_mode tmode = GET_MODE (target);
2095 enum machine_mode copy_mode;
2097 /* BLKmode registers created in the back-end shouldn't have survived. */
2098 gcc_assert (mode != BLKmode);
2100 /* If the structure doesn't take up a whole number of words, see whether
2101 SRCREG is padded on the left or on the right. If it's on the left,
2102 set PADDING_CORRECTION to the number of bits to skip.
     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
2109 if (bytes % UNITS_PER_WORD != 0
2110 && (targetm.calls.return_in_msb (type)
2112 : BYTES_BIG_ENDIAN))
2114 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
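    /* For example, a 3-byte structure on a 32-bit big-endian target
       that is returned at the least significant end is left-padded,
       so PADDING_CORRECTION becomes 32 - 3 * BITS_PER_UNIT == 8 bits
       to skip before the first data bit of SRCREG.  */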
2116 /* We can use a single move if we have an exact mode for the size. */
2117 else if (MEM_P (target)
2118 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2119 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2120 && bytes == GET_MODE_SIZE (mode))
2122 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2126 /* And if we additionally have the same mode for a register. */
2127 else if (REG_P (target)
2128 && GET_MODE (target) == mode
2129 && bytes == GET_MODE_SIZE (mode))
2131 emit_move_insn (target, srcreg);
2135 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2136 into a new pseudo which is a full word. */
2137 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2139 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in a register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not
     use strict alignment, but it doesn't seem worth the effort at the
     current time.  */
2155 copy_mode = word_mode;
2158 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2159 if (mem_mode != BLKmode)
2160 copy_mode = mem_mode;
2162 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2165 for (bitpos = 0, xbitpos = padding_correction;
2166 bitpos < bytes * BITS_PER_UNIT;
2167 bitpos += bitsize, xbitpos += bitsize)
2169 /* We need a new source operand each time xbitpos is on a
2170 word boundary and when xbitpos == padding_correction
2171 (the first time through). */
2172 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2173 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2175 /* We need a new destination operand each time bitpos is on
2177 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2179 else if (bitpos % BITS_PER_WORD == 0)
2180 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2182 /* Use xbitpos for the source extraction (right justified) and
2183 bitpos for the destination store (left justified). */
2184 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2185 extract_bit_field (src, bitsize,
2186 xbitpos % BITS_PER_WORD, 1, false,
2187 NULL_RTX, copy_mode, copy_mode));
2191 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2192 register if it contains any data, otherwise return null.
2194 This is used on targets that return BLKmode values in registers. */
2197 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2200 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2201 unsigned int bitsize;
2202 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2203 enum machine_mode dst_mode;
2205 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2207 x = expand_normal (src);
2209 bytes = int_size_in_bytes (TREE_TYPE (src));
2213 /* If the structure doesn't take up a whole number of words, see
2214 whether the register value should be padded on the left or on
2215 the right. Set PADDING_CORRECTION to the number of padding
2216 bits needed on the left side.
     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
2223 if (bytes % UNITS_PER_WORD != 0
2224 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2226 : BYTES_BIG_ENDIAN))
2227 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2230 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2231 dst_words = XALLOCAVEC (rtx, n_regs);
2232 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2234 /* Copy the structure BITSIZE bits at a time. */
2235 for (bitpos = 0, xbitpos = padding_correction;
2236 bitpos < bytes * BITS_PER_UNIT;
2237 bitpos += bitsize, xbitpos += bitsize)
2239 /* We need a new destination pseudo each time xbitpos is
2240 on a word boundary and when xbitpos == padding_correction
2241 (the first time through). */
2242 if (xbitpos % BITS_PER_WORD == 0
2243 || xbitpos == padding_correction)
2245 /* Generate an appropriate register. */
2246 dst_word = gen_reg_rtx (word_mode);
2247 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2249 /* Clear the destination before we move anything into it. */
2250 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2253 /* We need a new source operand each time bitpos is on a word
2255 if (bitpos % BITS_PER_WORD == 0)
2256 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2258 /* Use bitpos for the source extraction (left justified) and
2259 xbitpos for the destination store (right justified). */
2260 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2262 extract_bit_field (src_word, bitsize,
2263 bitpos % BITS_PER_WORD, 1, false,
2264 NULL_RTX, word_mode, word_mode));
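  /* As an illustration, a 6-byte value on a 32-bit big-endian target
     returned at the least significant end gets PADDING_CORRECTION ==
     16, so the copy starts at XBITPOS == 16 and the payload fills the
     least significant portion of DST_WORDS[0], right-justified as the
     ABI expects.  */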
2267 if (mode == BLKmode)
2269 /* Find the smallest integer mode large enough to hold the
2270 entire structure. */
2271 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2273 mode = GET_MODE_WIDER_MODE (mode))
2274 /* Have we found a large enough mode? */
2275 if (GET_MODE_SIZE (mode) >= bytes)
2278 /* A suitable mode should have been found. */
2279 gcc_assert (mode != VOIDmode);
2282 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2283 dst_mode = word_mode;
2286 dst = gen_reg_rtx (dst_mode);
2288 for (i = 0; i < n_regs; i++)
2289 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2291 if (mode != dst_mode)
2292 dst = gen_lowpart (mode, dst);
2297 /* Add a USE expression for REG to the (possibly empty) list pointed
2298 to by CALL_FUSAGE. REG must denote a hard register. */
2301 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2303 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2306 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2309 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2310 starting at REGNO. All of these registers must be hard registers. */
2313 use_regs (rtx *call_fusage, int regno, int nregs)
2317 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2319 for (i = 0; i < nregs; i++)
2320 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2323 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2324 PARALLEL REGS. This is for calls that pass values in multiple
2325 non-contiguous locations. The Irix 6 ABI has examples of this. */
2328 use_group_regs (rtx *call_fusage, rtx regs)
2332 for (i = 0; i < XVECLEN (regs, 0); i++)
2334 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2336 /* A NULL entry means the parameter goes both on the stack and in
2337 registers. This can also be a MEM for targets that pass values
2338 partially on the stack and partially in registers. */
2339 if (reg != 0 && REG_P (reg))
2340 use_reg (call_fusage, reg);
2344 /* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */
2349 get_def_for_expr (tree name, enum tree_code code)
2353 if (TREE_CODE (name) != SSA_NAME)
2356 def_stmt = get_gimple_for_ssa_name (name);
2358 || gimple_assign_rhs_code (def_stmt) != code)
2364 #ifdef HAVE_conditional_move
2365 /* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */
2370 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2374 if (TREE_CODE (name) != SSA_NAME)
2377 def_stmt = get_gimple_for_ssa_name (name);
2379 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2387 /* Determine whether the LEN bytes generated by CONSTFUN can be
2388 stored to memory using several move instructions. CONSTFUNDATA is
2389 a pointer which will be passed as argument in every CONSTFUN call.
2390 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2391 a memset operation and false if it's a copy of a constant string.
2392 Return nonzero if a call to store_by_pieces should succeed. */
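/* A typical caller -- e.g. the expansion of a builtin string or
   memory operation -- first asks can_store_by_pieces with its
   constant-generating callback and, only if that succeeds, commits
   with store_by_pieces using the same callback and arguments.  */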
2395 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2396 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2397 void *constfundata, unsigned int align, bool memsetp)
2399 unsigned HOST_WIDE_INT l;
2400 unsigned int max_size;
2401 HOST_WIDE_INT offset = 0;
2402 enum machine_mode mode;
2403 enum insn_code icode;
2405 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2406 rtx cst ATTRIBUTE_UNUSED;
2412 ? SET_BY_PIECES_P (len, align)
2413 : STORE_BY_PIECES_P (len, align)))
2416 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2418 /* We would first store what we can in the largest integer mode, then go to
2419 successively smaller modes. */
2422 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2426 max_size = STORE_MAX_PIECES + 1;
2427 while (max_size > 1 && l > 0)
2429 mode = widest_int_mode_for_size (max_size);
2431 if (mode == VOIDmode)
2434 icode = optab_handler (mov_optab, mode);
2435 if (icode != CODE_FOR_nothing
2436 && align >= GET_MODE_ALIGNMENT (mode))
2438 unsigned int size = GET_MODE_SIZE (mode);
2445 cst = (*constfun) (constfundata, offset, mode);
2446 if (!targetm.legitimate_constant_p (mode, cst))
2456 max_size = GET_MODE_SIZE (mode);
2459 /* The code above should have handled everything. */
2466 /* Generate several move instructions to store LEN bytes generated by
2467 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2468 pointer which will be passed as argument in every CONSTFUN call.
2469 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2470 a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
   mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
   stpcpy.  */
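/* For instance, with LEN == 3 and ENDP == 1 the rtx returned below
   addresses TO plus 3, which is what a mempcpy-style caller wants;
   ENDP == 2 would yield TO plus 2 instead.  */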
2476 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2477 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2478 void *constfundata, unsigned int align, bool memsetp, int endp)
2480 enum machine_mode to_addr_mode = get_address_mode (to);
2481 struct store_by_pieces_d data;
2485 gcc_assert (endp != 2);
2490 ? SET_BY_PIECES_P (len, align)
2491 : STORE_BY_PIECES_P (len, align));
2492 data.constfun = constfun;
2493 data.constfundata = constfundata;
2496 store_by_pieces_1 (&data, align);
2501 gcc_assert (!data.reverse);
2506 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2507 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2509 data.to_addr = copy_to_mode_reg (to_addr_mode,
2510 plus_constant (to_addr_mode,
2514 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2521 to1 = adjust_address (data.to, QImode, data.offset);
2529 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2530 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2533 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2535 struct store_by_pieces_d data;
2540 data.constfun = clear_by_pieces_1;
2541 data.constfundata = NULL;
2544 store_by_pieces_1 (&data, align);
2547 /* Callback routine for clear_by_pieces.
2548 Return const0_rtx unconditionally. */
2551 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2552 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2553 enum machine_mode mode ATTRIBUTE_UNUSED)
2558 /* Subroutine of clear_by_pieces and store_by_pieces.
2559 Generate several move instructions to store LEN bytes of block TO. (A MEM
2560 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2563 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2564 unsigned int align ATTRIBUTE_UNUSED)
2566 enum machine_mode to_addr_mode = get_address_mode (data->to);
2567 rtx to_addr = XEXP (data->to, 0);
2568 unsigned int max_size = STORE_MAX_PIECES + 1;
2569 enum insn_code icode;
2572 data->to_addr = to_addr;
2574 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2575 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2577 data->explicit_inc_to = 0;
2579 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2581 data->offset = data->len;
2583 /* If storing requires more than two move insns,
2584 copy addresses to registers (to make displacements shorter)
2585 and use post-increment if available. */
2586 if (!data->autinc_to
2587 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2589 /* Determine the main mode we'll be using.
2590 MODE might not be used depending on the definitions of the
2591 USE_* macros below. */
2592 enum machine_mode mode ATTRIBUTE_UNUSED
2593 = widest_int_mode_for_size (max_size);
2595 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2597 data->to_addr = copy_to_mode_reg (to_addr_mode,
2598 plus_constant (to_addr_mode,
2601 data->autinc_to = 1;
2602 data->explicit_inc_to = -1;
2605 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2606 && ! data->autinc_to)
2608 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2609 data->autinc_to = 1;
2610 data->explicit_inc_to = 1;
  if (!data->autinc_to && CONSTANT_P (to_addr))
2614 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2617 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2619 /* First store what we can in the largest integer mode, then go to
2620 successively smaller modes. */
2622 while (max_size > 1 && data->len > 0)
2624 enum machine_mode mode = widest_int_mode_for_size (max_size);
2626 if (mode == VOIDmode)
2629 icode = optab_handler (mov_optab, mode);
2630 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2631 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2633 max_size = GET_MODE_SIZE (mode);
2636 /* The code above should have handled everything. */
2637 gcc_assert (!data->len);
2640 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2641 with move instructions for mode MODE. GENFUN is the gen_... function
2642 to make a move insn for that mode. DATA has all the other info. */
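/* For example, on a typical 32-bit target where SImode, HImode and
   QImode all have mov patterns, a 7-byte store with 4-byte alignment
   is emitted as one SImode store, one HImode store and one QImode
   store, as store_by_pieces_1 walks MAX_SIZE down through the integer
   modes.  */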
2645 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2646 struct store_by_pieces_d *data)
2648 unsigned int size = GET_MODE_SIZE (mode);
2651 while (data->len >= size)
2654 data->offset -= size;
2656 if (data->autinc_to)
2657 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2660 to1 = adjust_address (data->to, mode, data->offset);
2662 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2663 emit_insn (gen_add2_insn (data->to_addr,
2664 GEN_INT (-(HOST_WIDE_INT) size)));
2666 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2667 emit_insn ((*genfun) (to1, cst));
2669 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2670 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2672 if (! data->reverse)
2673 data->offset += size;
2679 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2680 its length in bytes. */
2683 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2684 unsigned int expected_align, HOST_WIDE_INT expected_size)
2686 enum machine_mode mode = GET_MODE (object);
2689 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2691 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2692 just move a zero. Otherwise, do this a piece at a time. */
2694 && CONST_INT_P (size)
2695 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2697 rtx zero = CONST0_RTX (mode);
2700 emit_move_insn (object, zero);
2704 if (COMPLEX_MODE_P (mode))
2706 zero = CONST0_RTX (GET_MODE_INNER (mode));
2709 write_complex_part (object, zero, 0);
2710 write_complex_part (object, zero, 1);
2716 if (size == const0_rtx)
2719 align = MEM_ALIGN (object);
2721 if (CONST_INT_P (size)
2722 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2723 clear_by_pieces (object, INTVAL (size), align);
2724 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2725 expected_align, expected_size))
2727 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2728 return set_storage_via_libcall (object, size, const0_rtx,
2729 method == BLOCK_OP_TAILCALL);
2737 clear_storage (rtx object, rtx size, enum block_op_methods method)
2739 return clear_storage_hints (object, size, method, 0, -1);
2743 /* A subroutine of clear_storage. Expand a call to memset.
2744 Return the return value of memset, 0 otherwise. */
2747 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2749 tree call_expr, fn, object_tree, size_tree, val_tree;
2750 enum machine_mode size_mode;
  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those pseudos into a VAR_DECL and use them later.  */
2756 object = copy_addr_to_reg (XEXP (object, 0));
2758 size_mode = TYPE_MODE (sizetype);
2759 size = convert_to_mode (size_mode, size, 1);
2760 size = copy_to_mode_reg (size_mode, size);
2762 /* It is incorrect to use the libcall calling conventions to call
2763 memset in this context. This could be a user call to memset and
2764 the user may wish to examine the return value from memset. For
2765 targets where libcalls and normal calls have different conventions
2766 for returning pointers, we could end up generating incorrect code. */
2768 object_tree = make_tree (ptr_type_node, object);
2769 if (!CONST_INT_P (val))
2770 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2771 size_tree = make_tree (sizetype, size);
2772 val_tree = make_tree (integer_type_node, val);
2774 fn = clear_storage_libcall_fn (true);
2775 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2776 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2778 retval = expand_normal (call_expr);
2783 /* A subroutine of set_storage_via_libcall. Create the tree node
2784 for the function we use for block clears. */
2786 tree block_clear_fn;
2789 init_block_clear_fn (const char *asmspec)
2791 if (!block_clear_fn)
2795 fn = get_identifier ("memset");
2796 args = build_function_type_list (ptr_type_node, ptr_type_node,
2797 integer_type_node, sizetype,
2800 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2801 DECL_EXTERNAL (fn) = 1;
2802 TREE_PUBLIC (fn) = 1;
2803 DECL_ARTIFICIAL (fn) = 1;
2804 TREE_NOTHROW (fn) = 1;
2805 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2806 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2808 block_clear_fn = fn;
2812 set_user_assembler_name (block_clear_fn, asmspec);
2816 clear_storage_libcall_fn (int for_call)
2818 static bool emitted_extern;
2820 if (!block_clear_fn)
2821 init_block_clear_fn (NULL);
2823 if (for_call && !emitted_extern)
2825 emitted_extern = true;
2826 make_decl_rtl (block_clear_fn);
2829 return block_clear_fn;
2832 /* Expand a setmem pattern; return true if successful. */
2835 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2836 unsigned int expected_align, HOST_WIDE_INT expected_size)
2838 /* Try the most limited insn first, because there's no point
2839 including more than one in the machine description unless
2840 the more limited one has some advantage. */
2842 enum machine_mode mode;
2844 if (expected_align < align)
2845 expected_align = align;
2847 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2848 mode = GET_MODE_WIDER_MODE (mode))
2850 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2852 if (code != CODE_FOR_nothing
2853 /* We don't need MODE to be narrower than
2854 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2855 the mode mask, as it is returned by the macro, it will
2856 definitely be less than the actual mode mask. */
2857 && ((CONST_INT_P (size)
2858 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2859 <= (GET_MODE_MASK (mode) >> 1)))
2860 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2862 struct expand_operand ops[6];
2865 nops = insn_data[(int) code].n_generator_args;
2866 gcc_assert (nops == 4 || nops == 6);
2868 create_fixed_operand (&ops[0], object);
2869 /* The check above guarantees that this size conversion is valid. */
2870 create_convert_operand_to (&ops[1], size, mode, true);
2871 create_convert_operand_from (&ops[2], val, byte_mode, true);
2872 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2875 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2876 create_integer_operand (&ops[5], expected_size);
2878 if (maybe_expand_insn (code, nops, ops))
2887 /* Write to one of the components of the complex value CPLX. Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */
2891 write_complex_part (rtx cplx, rtx val, bool imag_p)
2893 enum machine_mode cmode;
2894 enum machine_mode imode;
2897 if (GET_CODE (cplx) == CONCAT)
2899 emit_move_insn (XEXP (cplx, imag_p), val);
2903 cmode = GET_MODE (cplx);
2904 imode = GET_MODE_INNER (cmode);
2905 ibitsize = GET_MODE_BITSIZE (imode);
2907 /* For MEMs simplify_gen_subreg may generate an invalid new address
2908 because, e.g., the original address is considered mode-dependent
2909 by the target, which restricts simplify_subreg from invoking
2910 adjust_address_nv. Instead of preparing fallback support for an
2911 invalid address, we call adjust_address_nv directly. */
2914 emit_move_insn (adjust_address_nv (cplx, imode,
2915 imag_p ? GET_MODE_SIZE (imode) : 0),
2920 /* If the sub-object is at least word sized, then we know that subregging
2921 will work. This special case is important, since store_bit_field
2922 wants to operate on integer modes, and there's rarely an OImode to
2923 correspond to TCmode. */
2924 if (ibitsize >= BITS_PER_WORD
2925 /* For hard regs we have exact predicates. Assume we can split
2926 the original object if it spans an even number of hard regs.
2927 This special case is important for SCmode on 64-bit platforms
2928 where the natural size of floating-point regs is 32-bit. */
2930 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2931 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2933 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2934 imag_p ? GET_MODE_SIZE (imode) : 0);
2937 emit_move_insn (part, val);
2941 /* simplify_gen_subreg may fail for sub-word MEMs. */
2942 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2945 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2948 /* Extract one of the components of the complex value CPLX. Extract the
2949 real part if IMAG_P is false, and the imaginary part if it's true. */
2952 read_complex_part (rtx cplx, bool imag_p)
2954 enum machine_mode cmode, imode;
2957 if (GET_CODE (cplx) == CONCAT)
2958 return XEXP (cplx, imag_p);
2960 cmode = GET_MODE (cplx);
2961 imode = GET_MODE_INNER (cmode);
2962 ibitsize = GET_MODE_BITSIZE (imode);
2964 /* Special case reads from complex constants that got spilled to memory. */
2965 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2967 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2968 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2970 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2971 if (CONSTANT_CLASS_P (part))
2972 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2976 /* For MEMs simplify_gen_subreg may generate an invalid new address
2977 because, e.g., the original address is considered mode-dependent
2978 by the target, which restricts simplify_subreg from invoking
2979 adjust_address_nv. Instead of preparing fallback support for an
2980 invalid address, we call adjust_address_nv directly. */
2982 return adjust_address_nv (cplx, imode,
2983 imag_p ? GET_MODE_SIZE (imode) : 0);
2985 /* If the sub-object is at least word sized, then we know that subregging
2986 will work. This special case is important, since extract_bit_field
2987 wants to operate on integer modes, and there's rarely an OImode to
2988 correspond to TCmode. */
2989 if (ibitsize >= BITS_PER_WORD
2990 /* For hard regs we have exact predicates. Assume we can split
2991 the original object if it spans an even number of hard regs.
2992 This special case is important for SCmode on 64-bit platforms
2993 where the natural size of floating-point regs is 32-bit. */
2995 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2996 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2998 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2999 imag_p ? GET_MODE_SIZE (imode) : 0);
3003 /* simplify_gen_subreg may fail for sub-word MEMs. */
3004 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3007 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3008 true, false, NULL_RTX, imode, imode);
3011 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3012 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3013 represented in NEW_MODE. If FORCE is true, this will never happen, as
3014 we'll force-create a SUBREG if needed. */
3017 emit_move_change_mode (enum machine_mode new_mode,
3018 enum machine_mode old_mode, rtx x, bool force)
3022 if (push_operand (x, GET_MODE (x)))
3024 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3025 MEM_COPY_ATTRIBUTES (ret, x);
3029 /* We don't have to worry about changing the address since the
3030 size in bytes is supposed to be the same. */
3031 if (reload_in_progress)
3033 /* Copy the MEM to change the mode and move any
3034 substitutions from the old MEM to the new one. */
3035 ret = adjust_address_nv (x, new_mode, 0);
3036 copy_replacements (x, ret);
3039 ret = adjust_address (x, new_mode, 0);
3043 /* Note that we do want simplify_subreg's behavior of validating
3044 that the new mode is ok for a hard register. If we were to use
3045 simplify_gen_subreg, we would create the subreg, but would
3046 probably run into the target not being able to implement it. */
  /* Except, of course, when FORCE is true, when this is exactly what
     we want, as is needed for CCmodes on some targets.  */
3050 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3052 ret = simplify_subreg (new_mode, x, old_mode, 0);
3058 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3059 an integer mode of the same size as MODE. Returns the instruction
3060 emitted, or NULL if such a move could not be generated. */
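/* E.g. on a target whose floats are 32 bits wide but which lacks an
   SFmode move pattern, an SFmode assignment can be emitted as an
   SImode move: int_mode_for_mode yields SImode and both operands are
   recast via emit_move_change_mode.  */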
3063 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3065 enum machine_mode imode;
3066 enum insn_code code;
3068 /* There must exist a mode of the exact size we require. */
3069 imode = int_mode_for_mode (mode);
3070 if (imode == BLKmode)
3073 /* The target must support moves in this mode. */
3074 code = optab_handler (mov_optab, imode);
3075 if (code == CODE_FOR_nothing)
3078 x = emit_move_change_mode (imode, mode, x, force);
3081 y = emit_move_change_mode (imode, mode, y, force);
3084 return emit_insn (GEN_FCN (code) (x, y));
3087 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3088 Return an equivalent MEM that does not use an auto-increment. */
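/* As an illustration, a push written as (mem:SI (pre_dec (reg sp)))
   becomes an explicit sp = sp - 4 adjustment followed by a plain
   (mem:SI (reg sp)), assuming 4-byte SImode and no extra
   PUSH_ROUNDING, so callers need not handle the auto-increment
   form.  */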
3091 emit_move_resolve_push (enum machine_mode mode, rtx x)
3093 enum rtx_code code = GET_CODE (XEXP (x, 0));
3094 HOST_WIDE_INT adjust;
3097 adjust = GET_MODE_SIZE (mode);
3098 #ifdef PUSH_ROUNDING
3099 adjust = PUSH_ROUNDING (adjust);
3101 if (code == PRE_DEC || code == POST_DEC)
3103 else if (code == PRE_MODIFY || code == POST_MODIFY)
3105 rtx expr = XEXP (XEXP (x, 0), 1);
3108 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3109 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3110 val = INTVAL (XEXP (expr, 1));
3111 if (GET_CODE (expr) == MINUS)
3113 gcc_assert (adjust == val || adjust == -val);
3117 /* Do not use anti_adjust_stack, since we don't want to update
3118 stack_pointer_delta. */
3119 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3120 GEN_INT (adjust), stack_pointer_rtx,
3121 0, OPTAB_LIB_WIDEN);
3122 if (temp != stack_pointer_rtx)
3123 emit_move_insn (stack_pointer_rtx, temp);
3130 temp = stack_pointer_rtx;
3135 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3141 return replace_equiv_address (x, temp);
3144 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3145 X is known to satisfy push_operand, and MODE is known to be complex.
3146 Returns the last instruction emitted. */
3149 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3151 enum machine_mode submode = GET_MODE_INNER (mode);
3154 #ifdef PUSH_ROUNDING
3155 unsigned int submodesize = GET_MODE_SIZE (submode);
  /* In case we output to the stack, but the size is not one the
     machine can push exactly, we need to use move instructions.  */
3159 if (PUSH_ROUNDING (submodesize) != submodesize)
3161 x = emit_move_resolve_push (mode, x);
3162 return emit_move_insn (x, y);
3166 /* Note that the real part always precedes the imag part in memory
3167 regardless of machine's endianness. */
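  /* With PRE_DEC, for instance, the stack grows downward, so the
     imaginary part is pushed first and the real part second, leaving
     the real part at the lower address.  */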
3168 switch (GET_CODE (XEXP (x, 0)))
3182 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3183 read_complex_part (y, imag_first));
3184 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3185 read_complex_part (y, !imag_first));
3188 /* A subroutine of emit_move_complex. Perform the move from Y to X
3189 via two moves of the parts. Returns the last instruction emitted. */
3192 emit_move_complex_parts (rtx x, rtx y)
3194 /* Show the output dies here. This is necessary for SUBREGs
3195 of pseudos since we cannot track their lifetimes correctly;
3196 hard regs shouldn't appear here except as return values. */
3197 if (!reload_completed && !reload_in_progress
3198 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3201 write_complex_part (x, read_complex_part (y, false), false);
3202 write_complex_part (x, read_complex_part (y, true), true);
3204 return get_last_insn ();
3207 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3208 MODE is known to be complex. Returns the last instruction emitted. */
3211 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3215 /* Need to take special care for pushes, to maintain proper ordering
3216 of the data, and possibly extra padding. */
3217 if (push_operand (x, mode))
3218 return emit_move_complex_push (mode, x, y);
3220 /* See if we can coerce the target into moving both values at once. */
3222 /* Move floating point as parts. */
3223 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3224 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3226 /* Not possible if the values are inherently not adjacent. */
3227 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3229 /* Is possible if both are registers (or subregs of registers). */
3230 else if (register_operand (x, mode) && register_operand (y, mode))
3232 /* If one of the operands is a memory, and alignment constraints
3233 are friendly enough, we may be able to do combined memory operations.
3234 We do not attempt this if Y is a constant because that combination is
3235 usually better with the by-parts thing below. */
3236 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3237 && (!STRICT_ALIGNMENT
3238 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3247 /* For memory to memory moves, optimal behavior can be had with the
3248 existing block move logic. */
3249 if (MEM_P (x) && MEM_P (y))
3251 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3252 BLOCK_OP_NO_LIBCALL);
3253 return get_last_insn ();
3256 ret = emit_move_via_integer (mode, x, y, true);
3261 return emit_move_complex_parts (x, y);
3264 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3265 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3268 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3272 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3275 enum insn_code code = optab_handler (mov_optab, CCmode);
3276 if (code != CODE_FOR_nothing)
3278 x = emit_move_change_mode (CCmode, mode, x, true);
3279 y = emit_move_change_mode (CCmode, mode, y, true);
3280 return emit_insn (GEN_FCN (code) (x, y));
3284 /* Otherwise, find the MODE_INT mode of the same width. */
3285 ret = emit_move_via_integer (mode, x, y, false);
3286 gcc_assert (ret != NULL);
3290 /* Return true if word I of OP lies entirely in the
3291 undefined bits of a paradoxical subreg. */
3294 undefined_operand_subword_p (const_rtx op, int i)
3296 enum machine_mode innermode, innermostmode;
3298 if (GET_CODE (op) != SUBREG)
3300 innermode = GET_MODE (op);
3301 innermostmode = GET_MODE (SUBREG_REG (op));
3302 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
3307 if (SUBREG_BYTE (op) == 0
3308 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
3311 if (WORDS_BIG_ENDIAN)
3312 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3313 if (BYTES_BIG_ENDIAN)
3314 offset += difference % UNITS_PER_WORD;
3316 if (offset >= GET_MODE_SIZE (innermostmode)
3317 || offset <= -GET_MODE_SIZE (word_mode))
3322 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3323 MODE is any multi-word or full-word mode that lacks a move_insn
3324 pattern. Note that you will get better code if you define such
3325 patterns, even if they must turn into multiple assembler instructions. */
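/* E.g. a DImode move on a 32-bit target without a DImode mov pattern
   is emitted here as two SImode word moves, with operand_subword
   providing each half.  */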
3328 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3335 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3337 /* If X is a push on the stack, do the push now and replace
3338 X with a reference to the stack pointer. */
3339 if (push_operand (x, mode))
3340 x = emit_move_resolve_push (mode, x);
3342 /* If we are in reload, see if either operand is a MEM whose address
3343 is scheduled for replacement. */
3344 if (reload_in_progress && MEM_P (x)
3345 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3346 x = replace_equiv_address_nv (x, inner);
3347 if (reload_in_progress && MEM_P (y)
3348 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3349 y = replace_equiv_address_nv (y, inner);
3353 need_clobber = false;
3355 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3358 rtx xpart = operand_subword (x, i, 1, mode);
3361 /* Do not generate code for a move if it would come entirely
3362 from the undefined bits of a paradoxical subreg. */
3363 if (undefined_operand_subword_p (y, i))
3366 ypart = operand_subword (y, i, 1, mode);
3368 /* If we can't get a part of Y, put Y into memory if it is a
3369 constant. Otherwise, force it into a register. Then we must
3370 be able to get a part of Y. */
3371 if (ypart == 0 && CONSTANT_P (y))
3373 y = use_anchored_address (force_const_mem (mode, y));
3374 ypart = operand_subword (y, i, 1, mode);
3376 else if (ypart == 0)
3377 ypart = operand_subword_force (y, i, mode);
3379 gcc_assert (xpart && ypart);
3381 need_clobber |= (GET_CODE (xpart) == SUBREG);
3383 last_insn = emit_move_insn (xpart, ypart);
3389 /* Show the output dies here. This is necessary for SUBREGs
3390 of pseudos since we cannot track their lifetimes correctly;
3391 hard regs shouldn't appear here except as return values.
3392 We never want to emit such a clobber after reload. */
3394 && ! (reload_in_progress || reload_completed)
3395 && need_clobber != 0)
3403 /* Low level part of emit_move_insn.
3404 Called just like emit_move_insn, but assumes X and Y
3405 are basically valid. */
3408 emit_move_insn_1 (rtx x, rtx y)
3410 enum machine_mode mode = GET_MODE (x);
3411 enum insn_code code;
3413 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3415 code = optab_handler (mov_optab, mode);
3416 if (code != CODE_FOR_nothing)
3417 return emit_insn (GEN_FCN (code) (x, y));
3419 /* Expand complex moves by moving real part and imag part. */
3420 if (COMPLEX_MODE_P (mode))
3421 return emit_move_complex (mode, x, y);
3423 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3424 || ALL_FIXED_POINT_MODE_P (mode))
3426 rtx result = emit_move_via_integer (mode, x, y, true);
3428 /* If we can't find an integer mode, use multi words. */
3432 return emit_move_multi_word (mode, x, y);
3435 if (GET_MODE_CLASS (mode) == MODE_CC)
3436 return emit_move_ccmode (mode, x, y);
3438 /* Try using a move pattern for the corresponding integer mode. This is
3439 only safe when simplify_subreg can convert MODE constants into integer
3440 constants. At present, it can only do this reliably if the value
3441 fits within a HOST_WIDE_INT. */
3442 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3444 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3448 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3453 return emit_move_multi_word (mode, x, y);
3456 /* Generate code to copy Y into X.
3457 Both Y and X must have the same mode, except that
3458 Y can be a constant with VOIDmode.
3459 This mode cannot be BLKmode; use emit_block_move for that.
3461 Return the last instruction emitted. */
3464 emit_move_insn (rtx x, rtx y)
3466 enum machine_mode mode = GET_MODE (x);
3467 rtx y_cst = NULL_RTX;
3470 gcc_assert (mode != BLKmode
3471 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3476 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3477 && (last_insn = compress_float_constant (x, y)))
3482 if (!targetm.legitimate_constant_p (mode, y))
3484 y = force_const_mem (mode, y);
3486 /* If the target's cannot_force_const_mem prevented the spill,
3487 assume that the target's move expanders will also take care
3488 of the non-legitimate constant. */
3492 y = use_anchored_address (y);
  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
3499 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3501 && ! push_operand (x, GET_MODE (x))))
3502 x = validize_mem (x);
3505 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3506 MEM_ADDR_SPACE (y)))
3507 y = validize_mem (y);
3509 gcc_assert (mode != BLKmode);
3511 last_insn = emit_move_insn_1 (x, y);
3513 if (y_cst && REG_P (x)
3514 && (set = single_set (last_insn)) != NULL_RTX
3515 && SET_DEST (set) == x
3516 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3517 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3522 /* If Y is representable exactly in a narrower mode, and the target can
3523 perform the extension directly from constant or memory, then emit the
3524 move as an extension. */
3527 compress_float_constant (rtx x, rtx y)
3529 enum machine_mode dstmode = GET_MODE (x);
3530 enum machine_mode orig_srcmode = GET_MODE (y);
3531 enum machine_mode srcmode;
3533 int oldcost, newcost;
3534 bool speed = optimize_insn_for_speed_p ();
3536 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3538 if (targetm.legitimate_constant_p (dstmode, y))
3539 oldcost = set_src_cost (y, speed);
3541 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3543 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3544 srcmode != orig_srcmode;
3545 srcmode = GET_MODE_WIDER_MODE (srcmode))
3548 rtx trunc_y, last_insn;
3550 /* Skip if the target can't extend this way. */
3551 ic = can_extend_p (dstmode, srcmode, 0);
3552 if (ic == CODE_FOR_nothing)
3555 /* Skip if the narrowed value isn't exact. */
3556 if (! exact_real_truncate (srcmode, &r))
3559 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3561 if (targetm.legitimate_constant_p (srcmode, trunc_y))
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
3565 if (!insn_operand_matches (ic, 1, trunc_y))
3567 /* This is valid, but may not be cheaper than the original. */
3568 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3570 if (oldcost < newcost)
3573 else if (float_extend_from_mem[dstmode][srcmode])
3575 trunc_y = force_const_mem (srcmode, trunc_y);
3576 /* This is valid, but may not be cheaper than the original. */
3577 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3579 if (oldcost < newcost)
3581 trunc_y = validize_mem (trunc_y);
3586 /* For CSE's benefit, force the compressed constant pool entry
3587 into a new pseudo. This constant may be used in different modes,
3588 and if not, combine will put things back together for us. */
3589 trunc_y = force_reg (srcmode, trunc_y);
3590 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3591 last_insn = get_last_insn ();
3594 set_unique_reg_note (last_insn, REG_EQUAL, y);
3602 /* Pushing data onto the stack. */
3604 /* Push a block of length SIZE (perhaps variable)
3605 and return an rtx to address the beginning of the block.
3606 The value may be virtual_outgoing_args_rtx.
3608 EXTRA is the number of bytes of padding to push in addition to SIZE.
3609 BELOW nonzero means this padding comes at low addresses;
3610 otherwise, the padding comes at high addresses. */
3613 push_block (rtx size, int extra, int below)
3617 size = convert_modes (Pmode, ptr_mode, size, 1);
3618 if (CONSTANT_P (size))
3619 anti_adjust_stack (plus_constant (Pmode, size, extra));
3620 else if (REG_P (size) && extra == 0)
3621 anti_adjust_stack (size);
3624 temp = copy_to_mode_reg (Pmode, size);
3626 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3627 temp, 0, OPTAB_LIB_WIDEN);
3628 anti_adjust_stack (temp);
3631 #ifndef STACK_GROWS_DOWNWARD
3637 temp = virtual_outgoing_args_rtx;
3638 if (extra != 0 && below)
3639 temp = plus_constant (Pmode, temp, extra);
3643 if (CONST_INT_P (size))
3644 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3645 -INTVAL (size) - (below ? 0 : extra));
3646 else if (extra != 0 && !below)
3647 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3648 negate_rtx (Pmode, plus_constant (Pmode, size,
3651 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3652 negate_rtx (Pmode, size));
3655 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3658 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3661 mem_autoinc_base (rtx mem)
3665 rtx addr = XEXP (mem, 0);
3666 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3667 return XEXP (addr, 0);
3672 /* A utility routine used here, in reload, and in try_split. The insns
3673 after PREV up to and including LAST are known to adjust the stack,
3674 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3675 placing notes as appropriate. PREV may be NULL, indicating the
3676 entire insn sequence prior to LAST should be scanned.
3678 The set of allowed stack pointer modifications is small:
3679 (1) One or more auto-inc style memory references (aka pushes),
3680 (2) One or more addition/subtraction with the SP as destination,
3681 (3) A single move insn with the SP as destination,
3682 (4) A call_pop insn,
3683 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3685 Insns in the sequence that do not modify the SP are ignored,
3686 except for noreturn calls.
3688 The return value is the amount of adjustment that can be trivially
3689 verified, via immediate operand or auto-inc. If the adjustment
   cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */
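/* For instance, an insn performing sp = sp + 16 is verified as 16,
   while a push through (pre_dec sp) of a 4-byte value is verified
   as -4.  */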
3693 find_args_size_adjust (rtx insn)
3698 pat = PATTERN (insn);
3701 /* Look for a call_pop pattern. */
3704 /* We have to allow non-call_pop patterns for the case
3705 of emit_single_push_insn of a TLS address. */
3706 if (GET_CODE (pat) != PARALLEL)
3709 /* All call_pop have a stack pointer adjust in the parallel.
3710 The call itself is always first, and the stack adjust is
3711 usually last, so search from the end. */
3712 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3714 set = XVECEXP (pat, 0, i);
3715 if (GET_CODE (set) != SET)
3717 dest = SET_DEST (set);
3718 if (dest == stack_pointer_rtx)
3721 /* We'd better have found the stack pointer adjust. */
3724 /* Fall through to process the extracted SET and DEST
3725 as if it was a standalone insn. */
3727 else if (GET_CODE (pat) == SET)
3729 else if ((set = single_set (insn)) != NULL)
3731 else if (GET_CODE (pat) == PARALLEL)
3733 /* ??? Some older ports use a parallel with a stack adjust
3734 and a store for a PUSH_ROUNDING pattern, rather than a
3735 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3736 /* ??? See h8300 and m68k, pushqi1. */
3737 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3739 set = XVECEXP (pat, 0, i);
3740 if (GET_CODE (set) != SET)
3742 dest = SET_DEST (set);
3743 if (dest == stack_pointer_rtx)
3746 /* We do not expect an auto-inc of the sp in the parallel. */
3747 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3748 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3749 != stack_pointer_rtx);
3757 dest = SET_DEST (set);
3759 /* Look for direct modifications of the stack pointer. */
3760 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3762 /* Look for a trivial adjustment, otherwise assume nothing. */
3763 /* Note that the SPU restore_stack_block pattern refers to
3764 the stack pointer in V4SImode. Consider that non-trivial. */
3765 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3766 && GET_CODE (SET_SRC (set)) == PLUS
3767 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3768 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3769 return INTVAL (XEXP (SET_SRC (set), 1));
3770 /* ??? Reload can generate no-op moves, which will be cleaned
3771 up later. Recognize it and continue searching. */
3772 else if (rtx_equal_p (dest, SET_SRC (set)))
3775 return HOST_WIDE_INT_MIN;
3781 /* Otherwise only think about autoinc patterns. */
3782 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3785 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3786 != stack_pointer_rtx);
3788 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3789 mem = SET_SRC (set);
3793 addr = XEXP (mem, 0);
3794 switch (GET_CODE (addr))
3798 return GET_MODE_SIZE (GET_MODE (mem));
3801 return -GET_MODE_SIZE (GET_MODE (mem));
3804 addr = XEXP (addr, 1);
3805 gcc_assert (GET_CODE (addr) == PLUS);
3806 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3807 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3808 return INTVAL (XEXP (addr, 1));
3816 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3818 int args_size = end_args_size;
3819 bool saw_unknown = false;
3822 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3824 HOST_WIDE_INT this_delta;
3826 if (!NONDEBUG_INSN_P (insn))
3829 this_delta = find_args_size_adjust (insn);
3830 if (this_delta == 0)
3833 || ACCUMULATE_OUTGOING_ARGS
3834 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3838 gcc_assert (!saw_unknown);
3839 if (this_delta == HOST_WIDE_INT_MIN)
3842 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3843 #ifdef STACK_GROWS_DOWNWARD
3844 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3846 args_size -= this_delta;
3849 return saw_unknown ? INT_MIN : args_size;
3852 #ifdef PUSH_ROUNDING
3853 /* Emit single push insn. */
3856 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3859 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3861 enum insn_code icode;
3863 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise fall back to the
     old way of handing a MEM that represents the push operation to
     the move expander.  */
3866 icode = optab_handler (push_optab, mode);
3867 if (icode != CODE_FOR_nothing)
3869 struct expand_operand ops[1];
3871 create_input_operand (&ops[0], x, mode);
3872 if (maybe_expand_insn (icode, 1, ops))
3875 if (GET_MODE_SIZE (mode) == rounded_size)
3876 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     the information.  */
3881 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3883 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3884 HOST_WIDE_INT offset;
3886 emit_move_insn (stack_pointer_rtx,
3887 expand_binop (Pmode,
3888 #ifdef STACK_GROWS_DOWNWARD
3894 GEN_INT (rounded_size),
3895 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3897 offset = (HOST_WIDE_INT) padding_size;
3898 #ifdef STACK_GROWS_DOWNWARD
3899 if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
3902 offset += (HOST_WIDE_INT) rounded_size;
3904 if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
3907 offset -= (HOST_WIDE_INT) rounded_size;
3909 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
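	  /* Worked example: pushing a QImode value where PUSH_ROUNDING
	     rounds 1 byte up to 4 gives PADDING_SIZE == 3; on a
	     downward-growing stack with a PRE_DEC push code, sp is
	     dropped by 4 and the byte is stored at sp + 3, leaving the
	     three pad bytes below the value.  */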
3913 #ifdef STACK_GROWS_DOWNWARD
3914 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3915 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3916 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3918 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3919 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3920 GEN_INT (rounded_size));
3922 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3925 dest = gen_rtx_MEM (mode, dest_addr);
3929 set_mem_attributes (dest, type, 1);
3931 if (flag_optimize_sibling_calls)
3932 /* Function incoming arguments may overlap with sibling call
3933 outgoing arguments and we cannot allow reordering of reads
3934 from function arguments with stores to outgoing arguments
3935 of sibling calls. */
3936 set_mem_alias_set (dest, 0);
3938 emit_move_insn (dest, x);
3941 /* Emit and annotate a single push insn. */
3944 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3946 int delta, old_delta = stack_pointer_delta;
3947 rtx prev = get_last_insn ();
3950 emit_single_push_insn_1 (mode, x, type);
3952 last = get_last_insn ();
3954 /* Notice the common case where we emitted exactly one insn. */
3955 if (PREV_INSN (last) == prev)
3957 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3961 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3962 gcc_assert (delta == INT_MIN || delta == old_delta);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
3970 SIZE is an rtx for the size of data to be copied (in bytes),
3971 needed only if X is BLKmode.
3973 ALIGN (in bits) is maximum alignment we can assume.
3975 If PARTIAL and REG are both nonzero, then copy that many of the first
3976 bytes of X into registers starting with REG, and push the rest of X.
3977 The amount of space pushed is decreased by PARTIAL bytes.
3978 REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.
3983 EXTRA is the amount in bytes of extra space to leave next to this arg.
3984 This is ignored if an argument block has already been allocated.
3986 On a machine that lacks real push insns, ARGS_ADDR is the address of
3987 the bottom of the argument block for this call. We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.
3991 ARGS_SO_FAR is the size of args previously pushed for this call.
3993 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3994 for arguments passed in registers. If nonzero, it will be the number
3995 of bytes required. */
3998 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3999 unsigned int align, int partial, rtx reg, int extra,
4000 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4004 enum direction stack_direction
4005 #ifdef STACK_GROWS_DOWNWARD
4011 /* Decide where to pad the argument: `downward' for below,
4012 `upward' for above, or `none' for don't pad it.
4013 Default is below for small data on big-endian machines; else above. */
4014 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4016 /* Invert direction if stack is post-decrement.
4018 if (STACK_PUSH_CODE == POST_DEC)
4019 if (where_pad != none)
4020 where_pad = (where_pad == downward ? upward : downward);
4025 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4027 /* Copy a block into the stack, entirely or partially. */
4034 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4035 used = partial - offset;
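      /* E.g. with PARM_BOUNDARY == 32 and PARTIAL == 6, OFFSET is
	 6 % 4 == 2 and USED is 4: four whole bytes are covered by
	 registers, while the 2-byte remainder still occupies stack
	 space that is simply not stored to.  */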
4037 if (mode != BLKmode)
4039 /* A value is to be stored in an insufficiently aligned
4040 stack slot; copy via a suitably aligned slot if
4042 size = GEN_INT (GET_MODE_SIZE (mode));
4043 if (!MEM_P (xinner))
4045 temp = assign_temp (type, 1, 1);
4046 emit_move_insn (temp, xinner);
4053 /* USED is now the # of bytes we need not copy to the stack
4054 because registers will take care of them. */
4057 xinner = adjust_address (xinner, BLKmode, used);
4059 /* If the partial register-part of the arg counts in its stack size,
4060 skip the part of stack space corresponding to the registers.
4061 Otherwise, start copying to the beginning of the stack space,
4062 by setting SKIP to 0. */
4063 skip = (reg_parm_stack_space == 0) ? 0 : used;
4065 #ifdef PUSH_ROUNDING
4066 /* Do it with several push insns if that doesn't take lots of insns
4067 and if there is no difficulty with push insns that skip bytes
4068 on the stack for alignment purposes. */
4071 && CONST_INT_P (size)
4073 && MEM_ALIGN (xinner) >= align
4074 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4075 /* Here we avoid the case of a structure whose weak alignment
4076 forces many pushes of a small amount of data,
4077 and such small pushes do rounding that causes trouble. */
4078 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4079 || align >= BIGGEST_ALIGNMENT
4080 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4081 == (align / BITS_PER_UNIT)))
4082 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4084 /* Push padding now if padding above and stack grows down,
4085 or if padding below and stack grows up.
4086 But if space already allocated, this has already been done. */
4087 if (extra && args_addr == 0
4088 && where_pad != none && where_pad != stack_direction)
4089 anti_adjust_stack (GEN_INT (extra));
4091 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4094 #endif /* PUSH_ROUNDING */
4098 /* Otherwise make space on the stack and copy the data
4099 to the address of that space. */
4101 /* Deduct words put into registers from the size we must copy. */
4104 if (CONST_INT_P (size))
4105 size = GEN_INT (INTVAL (size) - used);
4107 size = expand_binop (GET_MODE (size), sub_optab, size,
4108 GEN_INT (used), NULL_RTX, 0,
4112 /* Get the address of the stack space.
4113 In this case, we do not deal with EXTRA separately.
4114 A single stack adjust will do. */
4117 temp = push_block (size, extra, where_pad == downward);
4120 else if (CONST_INT_P (args_so_far))
4121 temp = memory_address (BLKmode,
4122 plus_constant (Pmode, args_addr,
4123 skip + INTVAL (args_so_far)));
4125 temp = memory_address (BLKmode,
4126 plus_constant (Pmode,
4127 gen_rtx_PLUS (Pmode,
4132 if (!ACCUMULATE_OUTGOING_ARGS)
4134 /* If the source is referenced relative to the stack pointer,
4135 copy it to another register to stabilize it. We do not need
4136 to do this if we know that we won't be changing sp. */
4138 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4139 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4140 temp = copy_to_reg (temp);
4143 target = gen_rtx_MEM (BLKmode, temp);
4145 /* We do *not* set_mem_attributes here, because incoming arguments
4146 may overlap with sibling call outgoing arguments and we cannot
4147 allow reordering of reads from function arguments with stores
4148 to outgoing arguments of sibling calls. We do, however, want
4149 to record the alignment of the stack slot. */
      /* ALIGN may well be stricter than TYPE's alignment, e.g. due to
	 PARM_BOUNDARY.  Assume the caller isn't lying.  */
4152 set_mem_align (target, align);
4154 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4157 else if (partial > 0)
4159 /* Scalar partly in registers. */
4161 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4164 /* # bytes of start of argument
4165 that we must make space for but need not store. */
4166 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4167 int args_offset = INTVAL (args_so_far);
4170 /* Push padding now if padding above and stack grows down,
4171 or if padding below and stack grows up.
4172 But if space already allocated, this has already been done. */
4173 if (extra && args_addr == 0
4174 && where_pad != none && where_pad != stack_direction)
4175 anti_adjust_stack (GEN_INT (extra));
4177 /* If we make space by pushing it, we might as well push
4178 the real data. Otherwise, we can leave OFFSET nonzero
4179 and leave the space uninitialized. */
4183 /* Now NOT_STACK gets the number of words that we don't need to
4184 allocate on the stack. Convert OFFSET to words too. */
4185 not_stack = (partial - offset) / UNITS_PER_WORD;
4186 offset /= UNITS_PER_WORD;
4188 /* If the partial register-part of the arg counts in its stack size,
4189 skip the part of stack space corresponding to the registers.
4190 Otherwise, start copying to the beginning of the stack space,
4191 by setting SKIP to 0. */
4192 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4194 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4195 x = validize_mem (force_const_mem (mode, x));
4197 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4198 SUBREGs of such registers are not allowed. */
4199 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4200 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4201 x = copy_to_reg (x);
4203 /* Loop over all the words allocated on the stack for this arg. */
4204 /* We can do it by words, because any scalar bigger than a word
4205 has a size a multiple of a word. */
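/* For instance, a 64-bit scalar on a 32-bit-word target occupies
   exactly two words and can be pushed with two word_mode pushes;
   the loop direction below is chosen to match the order in which
   the target's pushes consume stack words.  */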
4206 #ifndef PUSH_ARGS_REVERSED
4207 for (i = not_stack; i < size; i++)
4209 for (i = size - 1; i >= not_stack; i--)
4211 if (i >= not_stack + offset)
4212 emit_push_insn (operand_subword_force (x, i, mode),
4213 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4215 GEN_INT (args_offset + ((i - not_stack + skip)
4217 reg_parm_stack_space, alignment_pad);
4224 /* Push padding now if padding above and stack grows down,
4225 or if padding below and stack grows up.
4226 But if space is already allocated, this has already been done. */
4227 if (extra && args_addr == 0
4228 && where_pad != none && where_pad != stack_direction)
4229 anti_adjust_stack (GEN_INT (extra));
4231 #ifdef PUSH_ROUNDING
4232 if (args_addr == 0 && PUSH_ARGS)
4233 emit_single_push_insn (mode, x, type);
4237 if (CONST_INT_P (args_so_far))
4239 = memory_address (mode,
4240 plus_constant (Pmode, args_addr,
4241 INTVAL (args_so_far)));
4243 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4245 dest = gen_rtx_MEM (mode, addr);
4247 /* We do *not* set_mem_attributes here, because incoming arguments
4248 may overlap with sibling call outgoing arguments and we cannot
4249 allow reordering of reads from function arguments with stores
4250 to outgoing arguments of sibling calls. We do, however, want
4251 to record the alignment of the stack slot. */
4252 /* ALIGN may well be better aligned than TYPE, e.g. due to
4253 PARM_BOUNDARY. Assume the caller isn't lying. */
4254 set_mem_align (dest, align);
4256 emit_move_insn (dest, x);
4260 /* If part should go in registers, copy that part
4261 into the appropriate registers. Do this now, at the end,
4262 since mem-to-mem copies above may do function calls. */
4263 if (partial > 0 && reg != 0)
4265 /* Handle calls that pass values in multiple non-contiguous locations.
4266 The Irix 6 ABI has examples of this. */
4267 if (GET_CODE (reg) == PARALLEL)
4268 emit_group_load (reg, x, type, -1);
4271 gcc_assert (partial % UNITS_PER_WORD == 0);
4272 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4276 if (extra && args_addr == 0 && where_pad == stack_direction)
4277 anti_adjust_stack (GEN_INT (extra));
4279 if (alignment_pad && args_addr == 0)
4280 anti_adjust_stack (alignment_pad);
4283 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.  */
4287 get_subtarget (rtx x)
4291 /* Only registers can be subtargets. */
4293 /* Don't use hard regs to avoid extending their life. */
4294 || REGNO (x) < FIRST_PSEUDO_REGISTER
4298 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4299 FIELD is a bitfield. Returns true if the optimization was successful,
4300 and there's nothing else to do. */
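/* A sketch of the kind of source this matches, with a hypothetical
   layout:

     struct S { unsigned a : 4; unsigned b : 28; } s;
     s.b |= 2;

   Rather than extracting B, computing the IOR and inserting the
   result back, we can often apply the operation directly to the
   word that contains the bitfield.  */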
4303 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4304 unsigned HOST_WIDE_INT bitpos,
4305 unsigned HOST_WIDE_INT bitregion_start,
4306 unsigned HOST_WIDE_INT bitregion_end,
4307 enum machine_mode mode1, rtx str_rtx,
4310 enum machine_mode str_mode = GET_MODE (str_rtx);
4311 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4316 enum tree_code code;
4318 if (mode1 != VOIDmode
4319 || bitsize >= BITS_PER_WORD
4320 || str_bitsize > BITS_PER_WORD
4321 || TREE_SIDE_EFFECTS (to)
4322 || TREE_THIS_VOLATILE (to))
4326 if (TREE_CODE (src) != SSA_NAME)
4328 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4331 srcstmt = get_gimple_for_ssa_name (src);
4333 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4336 code = gimple_assign_rhs_code (srcstmt);
4338 op0 = gimple_assign_rhs1 (srcstmt);
4340 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4341 to find its initialization. Hopefully the initialization will
4342 be from a bitfield load. */
4343 if (TREE_CODE (op0) == SSA_NAME)
4345 gimple op0stmt = get_gimple_for_ssa_name (op0);
4347 /* We want to eventually have OP0 be the same as TO, which
4348 should be a bitfield. */
4350 || !is_gimple_assign (op0stmt)
4351 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4353 op0 = gimple_assign_rhs1 (op0stmt);
4356 op1 = gimple_assign_rhs2 (srcstmt);
4358 if (!operand_equal_p (to, op0, 0))
4361 if (MEM_P (str_rtx))
4363 unsigned HOST_WIDE_INT offset1;
4365 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4366 str_mode = word_mode;
4367 str_mode = get_best_mode (bitsize, bitpos,
4368 bitregion_start, bitregion_end,
4369 MEM_ALIGN (str_rtx), str_mode, 0);
4370 if (str_mode == VOIDmode)
4372 str_bitsize = GET_MODE_BITSIZE (str_mode);
4375 bitpos %= str_bitsize;
4376 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4377 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4379 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4382 /* If the bit field covers the whole REG/MEM, store_field
4383 will likely generate better code. */
4384 if (bitsize >= str_bitsize)
4387 /* We can't handle fields split across multiple entities. */
4388 if (bitpos + bitsize > str_bitsize)
4391 if (BYTES_BIG_ENDIAN)
4392 bitpos = str_bitsize - bitpos - bitsize;
4398 /* For now, just optimize the case of the topmost bitfield,
4399 where we don't need to do any masking, and also
4400 1-bit bitfields where xor can be used.
4401 We might win by one instruction for the other bitfields
4402 too if insv/extv instructions aren't used, so that
4403 can be added later. */
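/* E.g. with a hypothetical little-endian layout,

     struct S { unsigned lo : 24; unsigned hi : 8; } s;
     s.hi += 1;

   adds (1 << 24) to the containing word; the carry out of the top
   bit simply falls off, so no masking is needed.  Likewise adding
   to a 1-bit field just toggles its bit, which xor can do.  */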
4404 if (bitpos + bitsize != str_bitsize
4405 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4408 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4409 value = convert_modes (str_mode,
4410 TYPE_MODE (TREE_TYPE (op1)), value,
4411 TYPE_UNSIGNED (TREE_TYPE (op1)));
4413 /* We may be accessing data outside the field, which means
4414 we can alias adjacent data. */
4415 if (MEM_P (str_rtx))
4417 str_rtx = shallow_copy_rtx (str_rtx);
4418 set_mem_alias_set (str_rtx, 0);
4419 set_mem_expr (str_rtx, 0);
4422 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4423 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4425 value = expand_and (str_mode, value, const1_rtx, NULL);
4428 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4429 result = expand_binop (str_mode, binop, str_rtx,
4430 value, str_rtx, 1, OPTAB_WIDEN);
4431 if (result != str_rtx)
4432 emit_move_insn (str_rtx, result);
4437 if (TREE_CODE (op1) != INTEGER_CST)
4439 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4440 value = convert_modes (str_mode,
4441 TYPE_MODE (TREE_TYPE (op1)), value,
4442 TYPE_UNSIGNED (TREE_TYPE (op1)));
4444 /* We may be accessing data outside the field, which means
4445 we can alias adjacent data. */
4446 if (MEM_P (str_rtx))
4448 str_rtx = shallow_copy_rtx (str_rtx);
4449 set_mem_alias_set (str_rtx, 0);
4450 set_mem_expr (str_rtx, 0);
4453 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4454 if (bitpos + bitsize != str_bitsize)
4456 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
4457 value = expand_and (str_mode, value, mask, NULL_RTX);
4459 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4460 result = expand_binop (str_mode, binop, str_rtx,
4461 value, str_rtx, 1, OPTAB_WIDEN);
4462 if (result != str_rtx)
4463 emit_move_insn (str_rtx, result);
4473 /* In the C++ memory model, consecutive bit fields in a structure are
4474 considered one memory location.
4476 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4477 returns the bit range of consecutive bits in which this COMPONENT_REF
4478 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4479 and *OFFSET may be adjusted in the process.
4481 If the access does not need to be restricted, 0 is returned in both
4482 *BITSTART and *BITEND. */
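/* For example, given

     struct S { char a; int b : 7; int c : 9; char d; };

   B and C together form a single memory location: a store to C may
   rewrite the bits of B, but must not touch A or D.  The range
   returned here is the span covered by DECL_BIT_FIELD_REPRESENTATIVE
   for that run of bitfields.  */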
4485 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4486 unsigned HOST_WIDE_INT *bitend,
4488 HOST_WIDE_INT *bitpos,
4491 HOST_WIDE_INT bitoffset;
4494 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4496 field = TREE_OPERAND (exp, 1);
4497 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4498 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4499 need to limit the range we can access. */
4502 *bitstart = *bitend = 0;
4506 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4507 part of a larger bit field, then the representative does not serve any
4508 useful purpose. This can occur in Ada. */
4509 if (handled_component_p (TREE_OPERAND (exp, 0)))
4511 enum machine_mode rmode;
4512 HOST_WIDE_INT rbitsize, rbitpos;
4516 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4517 &roffset, &rmode, &unsignedp, &volatilep, false);
4518 if ((rbitpos % BITS_PER_UNIT) != 0)
4520 *bitstart = *bitend = 0;
4525 /* Compute the adjustment to bitpos from the offset of the field
4526 relative to the representative. DECL_FIELD_OFFSET of field and
4527 repr are the same by construction if they are not constants,
4528 see finish_bitfield_layout. */
4529 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4530 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4531 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4532 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4535 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4536 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4538 /* If the adjustment is larger than bitpos, we would have a negative bit
4539 position for the lower bound and this may wreak havoc later. This can
4540 occur only if we have a non-null offset, so adjust offset and bitpos
4541 to make the lower bound non-negative. */
4542 if (bitoffset > *bitpos)
4544 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4546 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4547 gcc_assert (*offset != NULL_TREE);
4551 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4555 *bitstart = *bitpos - bitoffset;
4557 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4560 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4561 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4562 DECL_RTL was not set yet, return NORTL. */
4565 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4567 if (TREE_CODE (addr) != ADDR_EXPR)
4570 tree base = TREE_OPERAND (addr, 0);
4573 || TREE_ADDRESSABLE (base)
4574 || DECL_MODE (base) == BLKmode)
4577 if (!DECL_RTL_SET_P (base))
4580 return (!MEM_P (DECL_RTL (base)));
4583 /* Returns true if the MEM_REF REF refers to an object that does not
4584 reside in memory and has non-BLKmode. */
4587 mem_ref_refers_to_non_mem_p (tree ref)
4589 tree base = TREE_OPERAND (ref, 0);
4590 return addr_expr_of_non_mem_decl_p_1 (base, false);
4593 /* Return TRUE iff OP is an ADDR_EXPR of a DECL that's not
4594 addressable. This is very much like mem_ref_refers_to_non_mem_p,
4595 but instead of the MEM_REF, it takes its base, and it doesn't
4596 assume a DECL is in memory just because its RTL is not set yet. */
4599 addr_expr_of_non_mem_decl_p (tree op)
4601 return addr_expr_of_non_mem_decl_p_1 (op, true);
4604 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4605 is true, try generating a nontemporal store. */
4608 expand_assignment (tree to, tree from, bool nontemporal)
4612 enum machine_mode mode;
4614 enum insn_code icode;
4616 /* Don't crash if the lhs of the assignment was erroneous. */
4617 if (TREE_CODE (to) == ERROR_MARK)
4619 expand_normal (from);
4623 /* Optimize away no-op moves without side-effects. */
4624 if (operand_equal_p (to, from, 0))
4627 /* Handle misaligned stores. */
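/* A sketch of a store that can take this path on a strict-alignment
   target, assuming the access is represented as a MEM_REF whose
   alignment is below the mode's:

     typedef int unaligned_int __attribute__ ((aligned (1)));
     void f (unaligned_int *p, int v) { *p = v; }

   When movmisalign_optab has a handler for the mode we use it;
   otherwise we fall back to a bitfield store.  */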
4628 mode = TYPE_MODE (TREE_TYPE (to));
4629 if ((TREE_CODE (to) == MEM_REF
4630 || TREE_CODE (to) == TARGET_MEM_REF)
4632 && !mem_ref_refers_to_non_mem_p (to)
4633 && ((align = get_object_alignment (to))
4634 < GET_MODE_ALIGNMENT (mode))
4635 && (((icode = optab_handler (movmisalign_optab, mode))
4636 != CODE_FOR_nothing)
4637 || SLOW_UNALIGNED_ACCESS (mode, align)))
4641 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4642 reg = force_not_mem (reg);
4643 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4645 if (icode != CODE_FOR_nothing)
4647 struct expand_operand ops[2];
4649 create_fixed_operand (&ops[0], mem);
4650 create_input_operand (&ops[1], reg, mode);
4651 /* The movmisalign<mode> pattern cannot fail, else the assignment
4652 would silently be omitted. */
4653 expand_insn (icode, 2, ops);
4656 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4657 0, 0, 0, mode, reg);
4661 /* Assignment of a structure component needs special treatment
4662 if the structure component's rtx is not simply a MEM.
4663 Assignment of an array element at a constant index, and assignment of
4664 an array element in an unaligned packed structure field, has the same
4665 problem. Same for (partially) storing into a non-memory object. */
4666 if (handled_component_p (to)
4667 || (TREE_CODE (to) == MEM_REF
4668 && mem_ref_refers_to_non_mem_p (to))
4669 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4671 enum machine_mode mode1;
4672 HOST_WIDE_INT bitsize, bitpos;
4673 unsigned HOST_WIDE_INT bitregion_start = 0;
4674 unsigned HOST_WIDE_INT bitregion_end = 0;
4683 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4684 &unsignedp, &volatilep, true);
4686 if (TREE_CODE (to) == COMPONENT_REF
4687 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4688 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4690 /* If we are going to use store_bit_field and extract_bit_field,
4691 make sure to_rtx will be safe for multiple use. */
4692 mode = TYPE_MODE (TREE_TYPE (tem));
4693 if (TREE_CODE (tem) == MEM_REF
4695 && ((align = get_object_alignment (tem))
4696 < GET_MODE_ALIGNMENT (mode))
4697 && ((icode = optab_handler (movmisalign_optab, mode))
4698 != CODE_FOR_nothing))
4700 struct expand_operand ops[2];
4703 to_rtx = gen_reg_rtx (mode);
4704 mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4706 /* If the misaligned store doesn't overwrite all bits, perform
4707 a read-modify-write (rmw) cycle on MEM.  */
4708 if (bitsize != GET_MODE_BITSIZE (mode))
4710 create_input_operand (&ops[0], to_rtx, mode);
4711 create_fixed_operand (&ops[1], mem);
4712 /* The movmisalign<mode> pattern cannot fail, else the assignment
4713 would silently be omitted. */
4714 expand_insn (icode, 2, ops);
4716 mem = copy_rtx (mem);
4722 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4725 /* If the bitfield is volatile, we want to access it in the
4726 field's mode, not the computed mode.
4727 If a MEM has VOIDmode (external with incomplete type),
4728 use BLKmode for it instead. */
4731 if (volatilep && flag_strict_volatile_bitfields > 0)
4732 to_rtx = adjust_address (to_rtx, mode1, 0);
4733 else if (GET_MODE (to_rtx) == VOIDmode)
4734 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4739 enum machine_mode address_mode;
4742 if (!MEM_P (to_rtx))
4744 /* We can get constant negative offsets into arrays with broken
4745 user code. Translate this to a trap instead of ICEing. */
4746 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4747 expand_builtin_trap ();
4748 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4751 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4752 address_mode = get_address_mode (to_rtx);
4753 if (GET_MODE (offset_rtx) != address_mode)
4754 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4756 /* A constant address in TO_RTX can have VOIDmode; we must not try
4757 to call force_reg for that case, so avoid it.  */
4759 && GET_MODE (to_rtx) == BLKmode
4760 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4762 && (bitpos % bitsize) == 0
4763 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4764 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4766 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4770 to_rtx = offset_address (to_rtx, offset_rtx,
4771 highest_pow2_factor_for_target (to,
4775 /* No action is needed if the target is not a memory and the field
4776 lies completely outside that target. This can occur if the source
4777 code contains an out-of-bounds access to a small array. */
4779 && GET_MODE (to_rtx) != BLKmode
4780 && (unsigned HOST_WIDE_INT) bitpos
4781 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4783 expand_normal (from);
4786 /* Handle expand_expr of a complex value returning a CONCAT. */
4787 else if (GET_CODE (to_rtx) == CONCAT)
4789 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4790 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4792 && bitsize == mode_bitsize)
4793 result = store_expr (from, to_rtx, false, nontemporal);
4794 else if (bitsize == mode_bitsize / 2
4795 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4796 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4798 else if (bitpos + bitsize <= mode_bitsize / 2)
4799 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4800 bitregion_start, bitregion_end,
4802 get_alias_set (to), nontemporal);
4803 else if (bitpos >= mode_bitsize / 2)
4804 result = store_field (XEXP (to_rtx, 1), bitsize,
4805 bitpos - mode_bitsize / 2,
4806 bitregion_start, bitregion_end,
4808 get_alias_set (to), nontemporal);
4809 else if (bitpos == 0 && bitsize == mode_bitsize)
4812 result = expand_normal (from);
4813 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4814 TYPE_MODE (TREE_TYPE (from)), 0);
4815 emit_move_insn (XEXP (to_rtx, 0),
4816 read_complex_part (from_rtx, false));
4817 emit_move_insn (XEXP (to_rtx, 1),
4818 read_complex_part (from_rtx, true));
4822 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4823 GET_MODE_SIZE (GET_MODE (to_rtx)));
4824 write_complex_part (temp, XEXP (to_rtx, 0), false);
4825 write_complex_part (temp, XEXP (to_rtx, 1), true);
4826 result = store_field (temp, bitsize, bitpos,
4827 bitregion_start, bitregion_end,
4829 get_alias_set (to), nontemporal);
4830 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4831 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4838 /* If the field is at offset zero, we could have been given the
4839 DECL_RTX of the parent struct. Don't munge it. */
4840 to_rtx = shallow_copy_rtx (to_rtx);
4842 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4844 /* Deal with volatile and readonly fields. The former is only
4845 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4847 MEM_VOLATILE_P (to_rtx) = 1;
4850 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4851 bitregion_start, bitregion_end,
4856 result = store_field (to_rtx, bitsize, bitpos,
4857 bitregion_start, bitregion_end,
4859 get_alias_set (to), nontemporal);
4864 struct expand_operand ops[2];
4866 create_fixed_operand (&ops[0], mem);
4867 create_input_operand (&ops[1], to_rtx, mode);
4868 /* The movmisalign<mode> pattern cannot fail, else the assignment
4869 would silently be omitted. */
4870 expand_insn (icode, 2, ops);
4874 preserve_temp_slots (result);
4879 /* If the rhs is a function call and its value is not an aggregate,
4880 call the function before we start to compute the lhs.
4881 This is needed for correct code for cases such as
4882 val = setjmp (buf) on machines where reference to val
4883 requires loading up part of an address in a separate insn.
4885 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4886 since it might be a promoted variable where the zero- or sign- extension
4887 needs to be done. Handling this in the normal way is safe because no
4888 computation is done before the call. The same is true for SSA names. */
4889 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4890 && COMPLETE_TYPE_P (TREE_TYPE (from))
4891 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4892 && ! (((TREE_CODE (to) == VAR_DECL
4893 || TREE_CODE (to) == PARM_DECL
4894 || TREE_CODE (to) == RESULT_DECL)
4895 && REG_P (DECL_RTL (to)))
4896 || TREE_CODE (to) == SSA_NAME))
4901 value = expand_normal (from);
4903 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4905 /* Handle calls that return values in multiple non-contiguous locations.
4906 The Irix 6 ABI has examples of this. */
4907 if (GET_CODE (to_rtx) == PARALLEL)
4909 if (GET_CODE (value) == PARALLEL)
4910 emit_group_move (to_rtx, value);
4912 emit_group_load (to_rtx, value, TREE_TYPE (from),
4913 int_size_in_bytes (TREE_TYPE (from)));
4915 else if (GET_CODE (value) == PARALLEL)
4916 emit_group_store (to_rtx, value, TREE_TYPE (from),
4917 int_size_in_bytes (TREE_TYPE (from)));
4918 else if (GET_MODE (to_rtx) == BLKmode)
4920 /* Handle calls that return BLKmode values in registers. */
4922 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4924 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4928 if (POINTER_TYPE_P (TREE_TYPE (to)))
4929 value = convert_memory_address_addr_space
4930 (GET_MODE (to_rtx), value,
4931 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4933 emit_move_insn (to_rtx, value);
4935 preserve_temp_slots (to_rtx);
4940 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4941 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4943 /* Don't move directly into a return register. */
4944 if (TREE_CODE (to) == RESULT_DECL
4945 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4951 /* If the source is itself a return value, it still is in a pseudo at
4952 this point so we can move it back to the return register directly. */
4954 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
4955 && TREE_CODE (from) != CALL_EXPR)
4956 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4958 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4960 /* Handle calls that return values in multiple non-contiguous locations.
4961 The Irix 6 ABI has examples of this. */
4962 if (GET_CODE (to_rtx) == PARALLEL)
4964 if (GET_CODE (temp) == PARALLEL)
4965 emit_group_move (to_rtx, temp);
4967 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4968 int_size_in_bytes (TREE_TYPE (from)));
4971 emit_move_insn (to_rtx, temp);
4973 preserve_temp_slots (to_rtx);
4978 /* In case we are returning the contents of an object which overlaps
4979 the place the value is being stored, use a safe function when copying
4980 a value through a pointer into a structure value return block. */
4981 if (TREE_CODE (to) == RESULT_DECL
4982 && TREE_CODE (from) == INDIRECT_REF
4983 && ADDR_SPACE_GENERIC_P
4984 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4985 && refs_may_alias_p (to, from)
4986 && cfun->returns_struct
4987 && !cfun->returns_pcc_struct)
4992 size = expr_size (from);
4993 from_rtx = expand_normal (from);
4995 emit_library_call (memmove_libfunc, LCT_NORMAL,
4996 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4997 XEXP (from_rtx, 0), Pmode,
4998 convert_to_mode (TYPE_MODE (sizetype),
4999 size, TYPE_UNSIGNED (sizetype)),
5000 TYPE_MODE (sizetype));
5002 preserve_temp_slots (to_rtx);
5007 /* Compute FROM and store the value in the rtx we got. */
5010 result = store_expr (from, to_rtx, 0, nontemporal);
5011 preserve_temp_slots (result);
5016 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5017 succeeded, false otherwise. */
5020 emit_storent_insn (rtx to, rtx from)
5022 struct expand_operand ops[2];
5023 enum machine_mode mode = GET_MODE (to);
5024 enum insn_code code = optab_handler (storent_optab, mode);
5026 if (code == CODE_FOR_nothing)
5029 create_fixed_operand (&ops[0], to);
5030 create_input_operand (&ops[1], from, mode);
5031 return maybe_expand_insn (code, 2, ops);
5034 /* Generate code for computing expression EXP,
5035 and storing the value into TARGET.
5037 If the mode is BLKmode then we may return TARGET itself.
5038 It turns out that in BLKmode it doesn't cause a problem,
5039 because C has no operators that could combine two different
5040 assignments into the same BLKmode object with different values
5041 with no sequence point.  Will other languages need this to be more thorough?
5044 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5045 stack, and block moves may need to be treated specially.
5047 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5050 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5053 rtx alt_rtl = NULL_RTX;
5054 location_t loc = curr_insn_location ();
5056 if (VOID_TYPE_P (TREE_TYPE (exp)))
5058 /* C++ can generate ?: expressions with a throw expression in one
5059 branch and an rvalue in the other. Here, we resolve attempts to
5060 store the throw expression's nonexistent result. */
5061 gcc_assert (!call_param_p);
5062 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5065 if (TREE_CODE (exp) == COMPOUND_EXPR)
5067 /* Perform first part of compound expression, then assign from second part.  */
5069 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5070 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5071 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5074 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5076 /* For conditional expression, get safe form of the target. Then
5077 test the condition, doing the appropriate assignment on either
5078 side. This avoids the creation of unnecessary temporaries.
5079 For non-BLKmode, it is more efficient not to do this. */
5081 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5083 do_pending_stack_adjust ();
5085 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5086 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5088 emit_jump_insn (gen_jump (lab2));
5091 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5098 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5099 /* If this is a scalar in a register that is stored in a wider mode
5100 than the declared mode, compute the result into its declared mode
5101 and then convert to the wider mode.  Our value is the computed expression.  */
5104 rtx inner_target = 0;
5106 /* We can do the conversion inside EXP, which will often result
5107 in some optimizations. Do the conversion in two steps: first
5108 change the signedness, if needed, then the extend. But don't
5109 do this if the type of EXP is a subtype of something else
5110 since then the conversion might involve more than just
5111 converting modes. */
5112 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5113 && TREE_TYPE (TREE_TYPE (exp)) == 0
5114 && GET_MODE_PRECISION (GET_MODE (target))
5115 == TYPE_PRECISION (TREE_TYPE (exp)))
5117 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5118 != SUBREG_PROMOTED_UNSIGNED_P (target))
5120 /* Some types, e.g. Fortran's logical*4, won't have a signed
5121 version, so use the mode instead. */
5123 = (signed_or_unsigned_type_for
5124 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5126 ntype = lang_hooks.types.type_for_mode
5127 (TYPE_MODE (TREE_TYPE (exp)),
5128 SUBREG_PROMOTED_UNSIGNED_P (target));
5130 exp = fold_convert_loc (loc, ntype, exp);
5133 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5134 (GET_MODE (SUBREG_REG (target)),
5135 SUBREG_PROMOTED_UNSIGNED_P (target)),
5138 inner_target = SUBREG_REG (target);
5141 temp = expand_expr (exp, inner_target, VOIDmode,
5142 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5144 /* If TEMP is a VOIDmode constant, use convert_modes to make
5145 sure that we properly convert it. */
5146 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5148 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5149 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5150 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5151 GET_MODE (target), temp,
5152 SUBREG_PROMOTED_UNSIGNED_P (target));
5155 convert_move (SUBREG_REG (target), temp,
5156 SUBREG_PROMOTED_UNSIGNED_P (target));
5160 else if ((TREE_CODE (exp) == STRING_CST
5161 || (TREE_CODE (exp) == MEM_REF
5162 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5163 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5165 && integer_zerop (TREE_OPERAND (exp, 1))))
5166 && !nontemporal && !call_param_p
5169 /* Optimize initialization of an array with a STRING_CST. */
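/* Roughly: given
     char buf[16] = "abc";
   we can store the string bytes (including the terminating NUL) by
   pieces and then clear the remaining bytes of BUF, instead of
   emitting a block copy from a constant pool entry.  */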
5170 HOST_WIDE_INT exp_len, str_copy_len;
5172 tree str = TREE_CODE (exp) == STRING_CST
5173 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5175 exp_len = int_expr_size (exp);
5179 if (TREE_STRING_LENGTH (str) <= 0)
5182 str_copy_len = strlen (TREE_STRING_POINTER (str));
5183 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5186 str_copy_len = TREE_STRING_LENGTH (str);
5187 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5188 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5190 str_copy_len += STORE_MAX_PIECES - 1;
5191 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5193 str_copy_len = MIN (str_copy_len, exp_len);
5194 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5195 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5196 MEM_ALIGN (target), false))
5201 dest_mem = store_by_pieces (dest_mem,
5202 str_copy_len, builtin_strncpy_read_str,
5204 TREE_STRING_POINTER (str)),
5205 MEM_ALIGN (target), false,
5206 exp_len > str_copy_len ? 1 : 0);
5207 if (exp_len > str_copy_len)
5208 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5209 GEN_INT (exp_len - str_copy_len),
5218 /* If we want to use a nontemporal store, force the value to register first.  */
5220 tmp_target = nontemporal ? NULL_RTX : target;
5221 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5223 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5227 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5228 the same as that of TARGET, adjust the constant. This is needed, for
5229 example, in case it is a CONST_DOUBLE and we want only a word-sized value.  */
5231 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5232 && TREE_CODE (exp) != ERROR_MARK
5233 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5234 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5235 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5237 /* If value was not generated in the target, store it there.
5238 Convert the value to TARGET's type first if necessary and emit the
5239 pending incrementations that have been queued when expanding EXP.
5240 Note that we cannot emit the whole queue blindly because this will
5241 effectively disable the POST_INC optimization later.
5243 If TEMP and TARGET compare equal according to rtx_equal_p, but
5244 one or both of them are volatile memory refs, we have to distinguish
5246 - expand_expr has used TARGET. In this case, we must not generate
5247 another copy.  This can be detected by TARGET being equal according to == .
5249 - expand_expr has not used TARGET - that means that the source just
5250 happens to have the same RTX form. Since temp will have been created
5251 by expand_expr, it will compare unequal according to == .
5252 We must generate a copy in this case, to reach the correct number
5253 of volatile memory references. */
5255 if ((! rtx_equal_p (temp, target)
5256 || (temp != target && (side_effects_p (temp)
5257 || side_effects_p (target))))
5258 && TREE_CODE (exp) != ERROR_MARK
5259 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5260 but TARGET is not a valid memory reference, TEMP will differ
5261 from TARGET although it is really the same location. */
5263 && rtx_equal_p (alt_rtl, target)
5264 && !side_effects_p (alt_rtl)
5265 && !side_effects_p (target))
5266 /* If there's nothing to copy, don't bother. Don't call
5267 expr_size unless necessary, because some front-ends' (C++)
5268 expr_size hook must not be given objects that are not
5269 supposed to be bit-copied or bit-initialized. */
5270 && expr_size (exp) != const0_rtx)
5272 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5274 if (GET_MODE (target) == BLKmode)
5276 /* Handle calls that return BLKmode values in registers. */
5277 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5278 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5280 store_bit_field (target,
5281 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5282 0, 0, 0, GET_MODE (temp), temp);
5285 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5288 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5290 /* Handle copying a string constant into an array. The string
5291 constant may be shorter than the array. So copy just the string's
5292 actual length, and clear the rest. First get the size of the data
5293 type of the string, which is actually the size of the target. */
5294 rtx size = expr_size (exp);
5296 if (CONST_INT_P (size)
5297 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5298 emit_block_move (target, temp, size,
5300 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5303 enum machine_mode pointer_mode
5304 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5305 enum machine_mode address_mode = get_address_mode (target);
5307 /* Compute the size of the data to copy from the string. */
5309 = size_binop_loc (loc, MIN_EXPR,
5310 make_tree (sizetype, size),
5311 size_int (TREE_STRING_LENGTH (exp)));
5313 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5315 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5318 /* Copy that much. */
5319 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5320 TYPE_UNSIGNED (sizetype));
5321 emit_block_move (target, temp, copy_size_rtx,
5323 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5325 /* Figure out how much is left in TARGET that we have to clear.
5326 Do all calculations in pointer_mode. */
5327 if (CONST_INT_P (copy_size_rtx))
5329 size = plus_constant (address_mode, size,
5330 -INTVAL (copy_size_rtx));
5331 target = adjust_address (target, BLKmode,
5332 INTVAL (copy_size_rtx));
5336 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5337 copy_size_rtx, NULL_RTX, 0,
5340 if (GET_MODE (copy_size_rtx) != address_mode)
5341 copy_size_rtx = convert_to_mode (address_mode,
5343 TYPE_UNSIGNED (sizetype));
5345 target = offset_address (target, copy_size_rtx,
5346 highest_pow2_factor (copy_size));
5347 label = gen_label_rtx ();
5348 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5349 GET_MODE (size), 0, label);
5352 if (size != const0_rtx)
5353 clear_storage (target, size, BLOCK_OP_NORMAL);
5359 /* Handle calls that return values in multiple non-contiguous locations.
5360 The Irix 6 ABI has examples of this. */
5361 else if (GET_CODE (target) == PARALLEL)
5363 if (GET_CODE (temp) == PARALLEL)
5364 emit_group_move (target, temp);
5366 emit_group_load (target, temp, TREE_TYPE (exp),
5367 int_size_in_bytes (TREE_TYPE (exp)));
5369 else if (GET_CODE (temp) == PARALLEL)
5370 emit_group_store (target, temp, TREE_TYPE (exp),
5371 int_size_in_bytes (TREE_TYPE (exp)));
5372 else if (GET_MODE (temp) == BLKmode)
5373 emit_block_move (target, temp, expr_size (exp),
5375 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5376 /* If we emit a nontemporal store, there is nothing else to do. */
5377 else if (nontemporal && emit_storent_insn (target, temp))
5381 temp = force_operand (temp, target);
5383 emit_move_insn (target, temp);
5390 /* Return true if field F of structure TYPE is a flexible array. */
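/* The classic C99 form, for reference:

     struct s { int n; char data[]; };

   DATA is a flexible array member: it must be the last field, its
   domain has a zero lower bound and no upper bound, and the
   enclosing structure must still have a computable size.  */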
5393 flexible_array_member_p (const_tree f, const_tree type)
5398 return (DECL_CHAIN (f) == NULL
5399 && TREE_CODE (tf) == ARRAY_TYPE
5401 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5402 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5403 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5404 && int_size_in_bytes (type) >= 0);
5407 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5408 must have in order for it to completely initialize a value of type TYPE.
5409 Return -1 if the number isn't known.
5411 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
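/* E.g. given
     struct { int a; int b[3]; } x;
   a complete constructor needs 2 top-level elements (FOR_CTOR_P),
   while the scalar estimate (!FOR_CTOR_P) is 4: A plus the three
   elements of B.  */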
5413 static HOST_WIDE_INT
5414 count_type_elements (const_tree type, bool for_ctor_p)
5416 switch (TREE_CODE (type))
5422 nelts = array_type_nelts (type);
5423 if (nelts && host_integerp (nelts, 1))
5425 unsigned HOST_WIDE_INT n;
5427 n = tree_low_cst (nelts, 1) + 1;
5428 if (n == 0 || for_ctor_p)
5431 return n * count_type_elements (TREE_TYPE (type), false);
5433 return for_ctor_p ? -1 : 1;
5438 unsigned HOST_WIDE_INT n;
5442 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5443 if (TREE_CODE (f) == FIELD_DECL)
5446 n += count_type_elements (TREE_TYPE (f), false);
5447 else if (!flexible_array_member_p (f, type))
5448 /* Don't count flexible arrays, which are not supposed
5449 to be initialized. */
5457 case QUAL_UNION_TYPE:
5462 gcc_assert (!for_ctor_p);
5463 /* Estimate the number of scalars in each field and pick the
5464 maximum. Other estimates would do instead; the idea is simply
5465 to make sure that the estimate is not sensitive to the ordering of the fields.  */
5468 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5469 if (TREE_CODE (f) == FIELD_DECL)
5471 m = count_type_elements (TREE_TYPE (f), false);
5472 /* If the field doesn't span the whole union, add an extra
5473 scalar for the rest. */
5474 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5475 TYPE_SIZE (type)) != 1)
5487 return TYPE_VECTOR_SUBPARTS (type);
5491 case FIXED_POINT_TYPE:
5496 case REFERENCE_TYPE:
5512 /* Helper for categorize_ctor_elements. Identical interface. */
5515 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5516 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5518 unsigned HOST_WIDE_INT idx;
5519 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5520 tree value, purpose, elt_type;
5522 /* Whether CTOR is a valid constant initializer, in accordance with what
5523 initializer_constant_valid_p does. If inferred from the constructor
5524 elements, true until proven otherwise. */
5525 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5526 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5531 elt_type = NULL_TREE;
5533 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5535 HOST_WIDE_INT mult = 1;
5537 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5539 tree lo_index = TREE_OPERAND (purpose, 0);
5540 tree hi_index = TREE_OPERAND (purpose, 1);
5542 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5543 mult = (tree_low_cst (hi_index, 1)
5544 - tree_low_cst (lo_index, 1) + 1);
5547 elt_type = TREE_TYPE (value);
5549 switch (TREE_CODE (value))
5553 HOST_WIDE_INT nz = 0, ic = 0;
5555 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5558 nz_elts += mult * nz;
5559 init_elts += mult * ic;
5561 if (const_from_elts_p && const_p)
5562 const_p = const_elt_p;
5569 if (!initializer_zerop (value))
5575 nz_elts += mult * TREE_STRING_LENGTH (value);
5576 init_elts += mult * TREE_STRING_LENGTH (value);
5580 if (!initializer_zerop (TREE_REALPART (value)))
5582 if (!initializer_zerop (TREE_IMAGPART (value)))
5590 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5592 tree v = VECTOR_CST_ELT (value, i);
5593 if (!initializer_zerop (v))
5602 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5603 nz_elts += mult * tc;
5604 init_elts += mult * tc;
5606 if (const_from_elts_p && const_p)
5607 const_p = initializer_constant_valid_p (value, elt_type)
5614 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5615 num_fields, elt_type))
5616 *p_complete = false;
5618 *p_nz_elts += nz_elts;
5619 *p_init_elts += init_elts;
5624 /* Examine CTOR to discover:
5625 * how many scalar fields are set to nonzero values,
5626 and place it in *P_NZ_ELTS;
5627 * how many scalar fields in total are in CTOR,
5628 and place it in *P_INIT_ELTS;
5629 * whether the constructor is complete -- in the sense that every
5630 meaningful byte is explicitly given a value --
5631 and place it in *P_COMPLETE.
5633 Return whether or not CTOR is a valid static constant initializer, the same
5634 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5637 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5638 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5644 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5647 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5648 of which had type LAST_TYPE. Each element was itself a complete
5649 initializer, in the sense that every meaningful byte was explicitly
5650 given a value.  Return true if the same is true for the constructor as a whole.  */
5654 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5655 const_tree last_type)
5657 if (TREE_CODE (type) == UNION_TYPE
5658 || TREE_CODE (type) == QUAL_UNION_TYPE)
5663 gcc_assert (num_elts == 1 && last_type);
5665 /* ??? We could look at each element of the union, and find the
5666 largest element. Which would avoid comparing the size of the
5667 initialized element against any tail padding in the union.
5668 Doesn't seem worth the effort... */
5669 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5672 return count_type_elements (type, true) == num_elts;
5675 /* Return 1 if EXP contains mostly (3/4) zeros. */
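/* E.g.
     int v[8] = { 0, 0, 0, 0, 0, 0, 0, 1 };
   is mostly zeros: the constructor is complete and only one of its
   eight scalars is nonzero (1 < 8 / 4).  */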
5678 mostly_zeros_p (const_tree exp)
5680 if (TREE_CODE (exp) == CONSTRUCTOR)
5682 HOST_WIDE_INT nz_elts, init_elts;
5685 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5686 return !complete_p || nz_elts < init_elts / 4;
5689 return initializer_zerop (exp);
5692 /* Return 1 if EXP contains all zeros. */
5695 all_zeros_p (const_tree exp)
5697 if (TREE_CODE (exp) == CONSTRUCTOR)
5699 HOST_WIDE_INT nz_elts, init_elts;
5702 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5703 return nz_elts == 0;
5706 return initializer_zerop (exp);
5709 /* Helper function for store_constructor.
5710 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5711 CLEARED is as for store_constructor.
5712 ALIAS_SET is the alias set to use for any stores.
5714 This provides a recursive shortcut back to store_constructor when it isn't
5715 necessary to go through store_field. This is so that we can pass through
5716 the cleared field to let store_constructor know that we may not have to
5717 clear a substructure if the outer structure has already been cleared. */
5720 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5721 HOST_WIDE_INT bitpos, enum machine_mode mode,
5722 tree exp, int cleared, alias_set_type alias_set)
5724 if (TREE_CODE (exp) == CONSTRUCTOR
5725 /* We can only call store_constructor recursively if the size and
5726 bit position are on a byte boundary. */
5727 && bitpos % BITS_PER_UNIT == 0
5728 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5729 /* If we have a nonzero bitpos for a register target, then we just
5730 let store_field do the bitfield handling. This is unlikely to
5731 generate unnecessary clear instructions anyway.  */
5732 && (bitpos == 0 || MEM_P (target)))
5736 = adjust_address (target,
5737 GET_MODE (target) == BLKmode
5739 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5740 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5743 /* Update the alias set, if required. */
5744 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5745 && MEM_ALIAS_SET (target) != 0)
5747 target = copy_rtx (target);
5748 set_mem_alias_set (target, alias_set);
5751 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5754 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5757 /* Store the value of constructor EXP into the rtx TARGET.
5758 TARGET is either a REG or a MEM; we know it cannot conflict, since
5759 safe_from_p has been called.
5760 CLEARED is true if TARGET is known to have been zero'd.
5761 SIZE is the number of bytes of TARGET we are allowed to modify: this
5762 may not be the same as the size of EXP if we are assigning to a field
5763 which has been packed to exclude padding bits. */
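/* For example, given
     int a[100] = { [3] = 7 };
   it is cheaper to clear the whole array with a single clear_storage
   call and then store the one nonzero element than to emit a
   hundred separate stores.  */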
5766 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5768 tree type = TREE_TYPE (exp);
5769 #ifdef WORD_REGISTER_OPERATIONS
5770 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5773 switch (TREE_CODE (type))
5777 case QUAL_UNION_TYPE:
5779 unsigned HOST_WIDE_INT idx;
5782 /* If size is zero or the target is already cleared, do nothing. */
5783 if (size == 0 || cleared)
5785 /* We either clear the aggregate or indicate the value is dead. */
5786 else if ((TREE_CODE (type) == UNION_TYPE
5787 || TREE_CODE (type) == QUAL_UNION_TYPE)
5788 && ! CONSTRUCTOR_ELTS (exp))
5789 /* If the constructor is empty, clear the union. */
5791 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5795 /* If we are building a static constructor into a register,
5796 set the initial value as zero so we can fold the value into
5797 a constant. But if more than one register is involved,
5798 this probably loses. */
5799 else if (REG_P (target) && TREE_STATIC (exp)
5800 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5802 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5806 /* If the constructor has fewer fields than the structure or
5807 if we are initializing the structure to mostly zeros, clear
5808 the whole structure first. Don't do this if TARGET is a
5809 register whose mode size isn't equal to SIZE since
5810 clear_storage can't handle this case. */
5812 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5813 != fields_length (type))
5814 || mostly_zeros_p (exp))
5816 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5819 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5823 if (REG_P (target) && !cleared)
5824 emit_clobber (target);
5826 /* Store each element of the constructor into the
5827 corresponding field of TARGET. */
5828 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5830 enum machine_mode mode;
5831 HOST_WIDE_INT bitsize;
5832 HOST_WIDE_INT bitpos = 0;
5834 rtx to_rtx = target;
5836 /* Just ignore missing fields. We cleared the whole
5837 structure, above, if any fields are missing. */
5841 if (cleared && initializer_zerop (value))
5844 if (host_integerp (DECL_SIZE (field), 1))
5845 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5849 mode = DECL_MODE (field);
5850 if (DECL_BIT_FIELD (field))
5853 offset = DECL_FIELD_OFFSET (field);
5854 if (host_integerp (offset, 0)
5855 && host_integerp (bit_position (field), 0))
5857 bitpos = int_bit_position (field);
5861 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5865 enum machine_mode address_mode;
5869 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5870 make_tree (TREE_TYPE (exp),
5873 offset_rtx = expand_normal (offset);
5874 gcc_assert (MEM_P (to_rtx));
5876 address_mode = get_address_mode (to_rtx);
5877 if (GET_MODE (offset_rtx) != address_mode)
5878 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5880 to_rtx = offset_address (to_rtx, offset_rtx,
5881 highest_pow2_factor (offset));
5884 #ifdef WORD_REGISTER_OPERATIONS
5885 /* If this initializes a field that is smaller than a
5886 word, at the start of a word, try to widen it to a full
5887 word. This special case allows us to output C++ member
5888 function initializations in a form that the optimizers can understand.  */
5891 && bitsize < BITS_PER_WORD
5892 && bitpos % BITS_PER_WORD == 0
5893 && GET_MODE_CLASS (mode) == MODE_INT
5894 && TREE_CODE (value) == INTEGER_CST
5896 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5898 tree type = TREE_TYPE (value);
5900 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5902 type = lang_hooks.types.type_for_mode
5903 (word_mode, TYPE_UNSIGNED (type));
5904 value = fold_convert (type, value);
5907 if (BYTES_BIG_ENDIAN)
5909 = fold_build2 (LSHIFT_EXPR, type, value,
5910 build_int_cst (type,
5911 BITS_PER_WORD - bitsize));
5912 bitsize = BITS_PER_WORD;
5917 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5918 && DECL_NONADDRESSABLE_P (field))
5920 to_rtx = copy_rtx (to_rtx);
5921 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5924 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5926 get_alias_set (TREE_TYPE (field)));
5933 unsigned HOST_WIDE_INT i;
5936 tree elttype = TREE_TYPE (type);
5938 HOST_WIDE_INT minelt = 0;
5939 HOST_WIDE_INT maxelt = 0;
5941 domain = TYPE_DOMAIN (type);
5942 const_bounds_p = (TYPE_MIN_VALUE (domain)
5943 && TYPE_MAX_VALUE (domain)
5944 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5945 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5947 /* If we have constant bounds for the range of the type, get them. */
5950 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5951 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5954 /* If the constructor has fewer elements than the array, clear
5955 the whole array first.  Similarly if this is a static
5956 constructor of a non-BLKmode object. */
5959 else if (REG_P (target) && TREE_STATIC (exp))
5963 unsigned HOST_WIDE_INT idx;
5965 HOST_WIDE_INT count = 0, zero_count = 0;
5966 need_to_clear = ! const_bounds_p;
5968 /* This loop is a more accurate version of the loop in
5969 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5970 is also needed to check for missing elements. */
5971 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5973 HOST_WIDE_INT this_node_count;
5978 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5980 tree lo_index = TREE_OPERAND (index, 0);
5981 tree hi_index = TREE_OPERAND (index, 1);
5983 if (! host_integerp (lo_index, 1)
5984 || ! host_integerp (hi_index, 1))
5990 this_node_count = (tree_low_cst (hi_index, 1)
5991 - tree_low_cst (lo_index, 1) + 1);
5994 this_node_count = 1;
5996 count += this_node_count;
5997 if (mostly_zeros_p (value))
5998 zero_count += this_node_count;
6001 /* Clear the entire array first if there are any missing
6002 elements, or if the incidence of zero elements is >= 75%.  */
6005 && (count < maxelt - minelt + 1
6006 || 4 * zero_count >= 3 * count))
6010 if (need_to_clear && size > 0)
6013 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6015 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6019 if (!cleared && REG_P (target))
6020 /* Inform later passes that the old value is dead. */
6021 emit_clobber (target);
6023 /* Store each element of the constructor into the
6024 corresponding element of TARGET, determined by counting the elements.  */
6026 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6028 enum machine_mode mode;
6029 HOST_WIDE_INT bitsize;
6030 HOST_WIDE_INT bitpos;
6031 rtx xtarget = target;
6033 if (cleared && initializer_zerop (value))
6036 mode = TYPE_MODE (elttype);
6037 if (mode == BLKmode)
6038 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6039 ? tree_low_cst (TYPE_SIZE (elttype), 1)
6042 bitsize = GET_MODE_BITSIZE (mode);
6044 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6046 tree lo_index = TREE_OPERAND (index, 0);
6047 tree hi_index = TREE_OPERAND (index, 1);
6048 rtx index_r, pos_rtx;
6049 HOST_WIDE_INT lo, hi, count;
6052 /* If the range is constant and "small", unroll the loop. */
6054 && host_integerp (lo_index, 0)
6055 && host_integerp (hi_index, 0)
6056 && (lo = tree_low_cst (lo_index, 0),
6057 hi = tree_low_cst (hi_index, 0),
6058 count = hi - lo + 1,
6061 || (host_integerp (TYPE_SIZE (elttype), 1)
6062 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6065 lo -= minelt; hi -= minelt;
6066 for (; lo <= hi; lo++)
6068 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6071 && !MEM_KEEP_ALIAS_SET_P (target)
6072 && TREE_CODE (type) == ARRAY_TYPE
6073 && TYPE_NONALIASED_COMPONENT (type))
6075 target = copy_rtx (target);
6076 MEM_KEEP_ALIAS_SET_P (target) = 1;
6079 store_constructor_field
6080 (target, bitsize, bitpos, mode, value, cleared,
6081 get_alias_set (elttype));
6086 rtx loop_start = gen_label_rtx ();
6087 rtx loop_end = gen_label_rtx ();
6090 expand_normal (hi_index);
6092 index = build_decl (EXPR_LOCATION (exp),
6093 VAR_DECL, NULL_TREE, domain);
6094 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6095 SET_DECL_RTL (index, index_r);
6096 store_expr (lo_index, index_r, 0, false);
6098 /* Build the head of the loop. */
6099 do_pending_stack_adjust ();
6100 emit_label (loop_start);
6102 /* Assign value to element index. */
6104 fold_convert (ssizetype,
6105 fold_build2 (MINUS_EXPR,
6108 TYPE_MIN_VALUE (domain)));
6111 size_binop (MULT_EXPR, position,
6112 fold_convert (ssizetype,
6113 TYPE_SIZE_UNIT (elttype)));
6115 pos_rtx = expand_normal (position);
6116 xtarget = offset_address (target, pos_rtx,
6117 highest_pow2_factor (position));
6118 xtarget = adjust_address (xtarget, mode, 0);
6119 if (TREE_CODE (value) == CONSTRUCTOR)
6120 store_constructor (value, xtarget, cleared,
6121 bitsize / BITS_PER_UNIT);
6123 store_expr (value, xtarget, 0, false);
6125 /* Generate a conditional jump to exit the loop. */
6126 exit_cond = build2 (LT_EXPR, integer_type_node,
6128 jumpif (exit_cond, loop_end, -1);
6130 /* Update the loop counter, and jump to the head of the loop.  */
6132 expand_assignment (index,
6133 build2 (PLUS_EXPR, TREE_TYPE (index),
6134 index, integer_one_node),
6137 emit_jump (loop_start);
6139 /* Build the end of the loop. */
6140 emit_label (loop_end);
6143 else if ((index != 0 && ! host_integerp (index, 0))
6144 || ! host_integerp (TYPE_SIZE (elttype), 1))
6149 index = ssize_int (1);
6152 index = fold_convert (ssizetype,
6153 fold_build2 (MINUS_EXPR,
6156 TYPE_MIN_VALUE (domain)));
6159 size_binop (MULT_EXPR, index,
6160 fold_convert (ssizetype,
6161 TYPE_SIZE_UNIT (elttype)));
6162 xtarget = offset_address (target,
6163 expand_normal (position),
6164 highest_pow2_factor (position));
6165 xtarget = adjust_address (xtarget, mode, 0);
6166 store_expr (value, xtarget, 0, false);
6171 bitpos = ((tree_low_cst (index, 0) - minelt)
6172 * tree_low_cst (TYPE_SIZE (elttype), 1));
6174 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6176 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6177 && TREE_CODE (type) == ARRAY_TYPE
6178 && TYPE_NONALIASED_COMPONENT (type))
6180 target = copy_rtx (target);
6181 MEM_KEEP_ALIAS_SET_P (target) = 1;
6183 store_constructor_field (target, bitsize, bitpos, mode, value,
6184 cleared, get_alias_set (elttype));
6192 unsigned HOST_WIDE_INT idx;
6193 constructor_elt *ce;
6196 int icode = CODE_FOR_nothing;
6197 tree elttype = TREE_TYPE (type);
6198 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6199 enum machine_mode eltmode = TYPE_MODE (elttype);
6200 HOST_WIDE_INT bitsize;
6201 HOST_WIDE_INT bitpos;
6202 rtvec vector = NULL;
6204 alias_set_type alias;
6206 gcc_assert (eltmode != BLKmode);
6208 n_elts = TYPE_VECTOR_SUBPARTS (type);
6209 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6211 enum machine_mode mode = GET_MODE (target);
6213 icode = (int) optab_handler (vec_init_optab, mode);
6214 if (icode != CODE_FOR_nothing)
6218 vector = rtvec_alloc (n_elts);
6219 for (i = 0; i < n_elts; i++)
6220 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6224 /* If the constructor has fewer elements than the vector,
6225 clear the whole vector first.  Similarly if this is a static
6226 constructor of a non-BLKmode object. */
6229 else if (REG_P (target) && TREE_STATIC (exp))
6233 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6236 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6238 int n_elts_here = tree_low_cst
6239 (int_const_binop (TRUNC_DIV_EXPR,
6240 TYPE_SIZE (TREE_TYPE (value)),
6241 TYPE_SIZE (elttype)), 1);
6243 count += n_elts_here;
6244 if (mostly_zeros_p (value))
6245 zero_count += n_elts_here;
6248 /* Clear the entire vector first if there are any missing elements,
6249 or if the incidence of zero elements is >= 75%. */
6250 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6253 if (need_to_clear && size > 0 && !vector)
6256 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6258 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6262 /* Inform later passes that the old value is dead. */
6263 if (!cleared && !vector && REG_P (target))
6264 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6267 alias = MEM_ALIAS_SET (target);
6269 alias = get_alias_set (elttype);
6271 /* Store each element of the constructor into the corresponding
6272 element of TARGET, determined by counting the elements. */
6273 for (idx = 0, i = 0;
6274 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6275 idx++, i += bitsize / elt_size)
6277 HOST_WIDE_INT eltpos;
6278 tree value = ce->value;
6280 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6281 if (cleared && initializer_zerop (value))
6285 eltpos = tree_low_cst (ce->index, 1);
6291 /* Vector CONSTRUCTORs should only be built from smaller
6292 vectors in the case of BLKmode vectors. */
6293 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6294 RTVEC_ELT (vector, eltpos)
6295 = expand_normal (value);
6299 enum machine_mode value_mode =
6300 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6301 ? TYPE_MODE (TREE_TYPE (value))
6303 bitpos = eltpos * elt_size;
6304 store_constructor_field (target, bitsize, bitpos, value_mode,
6305 value, cleared, alias);
6310 emit_insn (GEN_FCN (icode)
6312 gen_rtx_PARALLEL (GET_MODE (target), vector)));
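/* Editorial illustration (not part of GCC; a source-level sketch):
   a vector constructor such as

       typedef int v4si __attribute__ ((vector_size (16)));
       v4si v = { a, b, c, d };

   reaches the vector arm above.  When the target defines a vec_init
   pattern for V4SImode, the four expanded elements are collected in
   VECTOR and emitted as one (parallel [...]) operand of that pattern,
   instead of four separate store_constructor_field calls.  */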
6321 /* Store the value of EXP (an expression tree)
6322 into a subfield of TARGET which has mode MODE and occupies
6323 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6324 If MODE is VOIDmode, it means that we are storing into a bit-field.
6326 BITREGION_START is bitpos of the first bitfield in this region.
6327 BITREGION_END is the bitpos of the ending bitfield in this region.
6328 These two fields are 0, if the C++ memory model does not apply,
6329 or we are not interested in keeping track of bitfield regions.
6331 Always return const0_rtx unless we have something particular to
6332 return.
6334 ALIAS_SET is the alias set for the destination. This value will
6335 (in general) be different from that for TARGET, since TARGET is a
6336 reference to the containing structure.
6338 If NONTEMPORAL is true, try generating a nontemporal store. */
6341 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6342 unsigned HOST_WIDE_INT bitregion_start,
6343 unsigned HOST_WIDE_INT bitregion_end,
6344 enum machine_mode mode, tree exp,
6345 alias_set_type alias_set, bool nontemporal)
6347 if (TREE_CODE (exp) == ERROR_MARK)
6350 /* If we have nothing to store, do nothing unless the expression has
6351 side-effects. */
6352 if (bitsize == 0)
6353 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6355 if (GET_CODE (target) == CONCAT)
6357 /* We're storing into a struct containing a single __complex. */
6359 gcc_assert (!bitpos);
6360 return store_expr (exp, target, 0, nontemporal);
6363 /* If the structure is in a register or if the component
6364 is a bit field, we cannot use addressing to access it.
6365 Use bit-field techniques or SUBREG to store in it. */
6367 if (mode == VOIDmode
6368 || (mode != BLKmode && ! direct_store[(int) mode]
6369 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6370 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6372 || GET_CODE (target) == SUBREG
6373 /* If the field isn't aligned enough to store as an ordinary memref,
6374 store it as a bit field. */
6376 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6377 || bitpos % GET_MODE_ALIGNMENT (mode))
6378 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6379 || (bitpos % BITS_PER_UNIT != 0)))
6380 || (bitsize >= 0 && mode != BLKmode
6381 && GET_MODE_BITSIZE (mode) > bitsize)
6382 /* If the RHS and field are a constant size and the size of the
6383 RHS isn't the same size as the bitfield, we must use bitfield
6384 operations. */
6385 || (bitsize >= 0
6386 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6387 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6388 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6389 decl we must use bitfield operations. */
6391 && TREE_CODE (exp) == MEM_REF
6392 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6393 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6394 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6395 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6400 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6401 implies a mask operation. If the precision is the same size as
6402 the field we're storing into, that mask is redundant. This is
6403 particularly common with bit field assignments generated by the
6404 C++ front end. */
6405 nop_def = get_def_for_expr (exp, NOP_EXPR);
6408 tree type = TREE_TYPE (exp);
6409 if (INTEGRAL_TYPE_P (type)
6410 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6411 && bitsize == TYPE_PRECISION (type))
6413 tree op = gimple_assign_rhs1 (nop_def);
6414 type = TREE_TYPE (op);
6415 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6420 temp = expand_normal (exp);
6422 /* If BITSIZE is narrower than the size of the type of EXP
6423 we will be narrowing TEMP. Normally, what's wanted are the
6424 low-order bits. However, if EXP's type is a record and this is
6425 a big-endian machine, we want the upper BITSIZE bits. */
6426 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6427 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6428 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6429 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6430 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6433 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6434 if (mode != VOIDmode && mode != BLKmode
6435 && mode != TYPE_MODE (TREE_TYPE (exp)))
6436 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6438 /* If the modes of TEMP and TARGET are both BLKmode, both
6439 must be in memory and BITPOS must be aligned on a byte
6440 boundary. If so, we simply do a block copy. Likewise
6441 for a BLKmode-like TARGET. */
6442 if (GET_MODE (temp) == BLKmode
6443 && (GET_MODE (target) == BLKmode
6445 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6446 && (bitpos % BITS_PER_UNIT) == 0
6447 && (bitsize % BITS_PER_UNIT) == 0)))
6449 gcc_assert (MEM_P (target) && MEM_P (temp)
6450 && (bitpos % BITS_PER_UNIT) == 0);
6452 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6453 emit_block_move (target, temp,
6454 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6461 /* Handle calls that return values in multiple non-contiguous locations.
6462 The Irix 6 ABI has examples of this. */
6463 if (GET_CODE (temp) == PARALLEL)
6465 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6467 if (mode == BLKmode)
6468 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6469 temp_target = gen_reg_rtx (mode);
6470 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6473 else if (mode == BLKmode)
6475 /* Handle calls that return BLKmode values in registers. */
6476 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6478 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6479 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6484 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6486 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6487 temp_target = gen_reg_rtx (mode);
6489 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6490 false, temp_target, mode, mode);
6495 /* Store the value in the bitfield. */
6496 store_bit_field (target, bitsize, bitpos,
6497 bitregion_start, bitregion_end,
6504 /* Now build a reference to just the desired component. */
6505 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6507 if (to_rtx == target)
6508 to_rtx = copy_rtx (to_rtx);
6510 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6511 set_mem_alias_set (to_rtx, alias_set);
6513 return store_expr (exp, to_rtx, 0, nontemporal);
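#if 0 /* Editorial illustration, not compiled: a source-level store that
	 must take the bit-field path of store_field above.  */
struct bf { unsigned int f : 3; unsigned int g : 5; };

static void
example_bitfield_store (struct bf *p)
{
  /* This access reaches store_field with MODE == VOIDmode, so it cannot
     be done as an ordinary memref and goes through store_bit_field.  */
  p->f = 5;
}
#endif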
6517 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6518 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6519 codes and find the ultimate containing object, which we return.
6521 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6522 bit position, and *PUNSIGNEDP to the signedness of the field.
6523 If the position of the field is variable, we store a tree
6524 giving the variable offset (in units) in *POFFSET.
6525 This offset is in addition to the bit position.
6526 If the position is not variable, we store 0 in *POFFSET.
6528 If any of the extraction expressions is volatile,
6529 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6531 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6532 Otherwise, it is a mode that can be used to access the field.
6534 If the field describes a variable-sized object, *PMODE is set to
6535 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6536 this case, but the address of the object can be found.
6538 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6539 look through nodes that serve as markers of a greater alignment than
6540 the one that can be deduced from the expression. These nodes make it
6541 possible for front-ends to prevent temporaries from being created by
6542 the middle-end on alignment considerations. For that purpose, the
6543 normal operating mode at high-level is to always pass FALSE so that
6544 the ultimate containing object is really returned; moreover, the
6545 associated predicate handled_component_p will always return TRUE
6546 on these nodes, thus indicating that they are essentially handled
6547 by get_inner_reference. TRUE should only be passed when the caller
6548 is scanning the expression in order to build another representation
6549 and specifically knows how to handle these nodes; as such, this is
6550 the normal operating mode in the RTL expanders. */
6553 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6554 HOST_WIDE_INT *pbitpos, tree *poffset,
6555 enum machine_mode *pmode, int *punsignedp,
6556 int *pvolatilep, bool keep_aligning)
6559 enum machine_mode mode = VOIDmode;
6560 bool blkmode_bitfield = false;
6561 tree offset = size_zero_node;
6562 double_int bit_offset = double_int_zero;
6564 /* First get the mode, signedness, and size. We do this from just the
6565 outermost expression. */
6567 if (TREE_CODE (exp) == COMPONENT_REF)
6569 tree field = TREE_OPERAND (exp, 1);
6570 size_tree = DECL_SIZE (field);
6571 if (!DECL_BIT_FIELD (field))
6572 mode = DECL_MODE (field);
6573 else if (DECL_MODE (field) == BLKmode)
6574 blkmode_bitfield = true;
6575 else if (TREE_THIS_VOLATILE (exp)
6576 && flag_strict_volatile_bitfields > 0)
6577 /* Volatile bitfields should be accessed in the mode of the
6578 field's type, not the mode computed based on the bit
6579 size. */
6580 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6582 *punsignedp = DECL_UNSIGNED (field);
6584 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6586 size_tree = TREE_OPERAND (exp, 1);
6587 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6588 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6590 /* For vector types, with the correct size of access, use the mode of
6591 the inner type. */
6592 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6593 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6594 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6595 mode = TYPE_MODE (TREE_TYPE (exp));
6599 mode = TYPE_MODE (TREE_TYPE (exp));
6600 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6602 if (mode == BLKmode)
6603 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6604 else
6605 *pbitsize = GET_MODE_BITSIZE (mode);
6610 if (! host_integerp (size_tree, 1))
6611 mode = BLKmode, *pbitsize = -1;
6612 else
6613 *pbitsize = tree_low_cst (size_tree, 1);
6616 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6617 and find the ultimate containing object. */
6620 switch (TREE_CODE (exp))
6623 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6628 tree field = TREE_OPERAND (exp, 1);
6629 tree this_offset = component_ref_field_offset (exp);
6631 /* If this field hasn't been filled in yet, don't go past it.
6632 This should only happen when folding expressions made during
6633 type construction. */
6634 if (this_offset == 0)
6637 offset = size_binop (PLUS_EXPR, offset, this_offset);
6638 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6640 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6645 case ARRAY_RANGE_REF:
6647 tree index = TREE_OPERAND (exp, 1);
6648 tree low_bound = array_ref_low_bound (exp);
6649 tree unit_size = array_ref_element_size (exp);
6651 /* We assume all arrays have sizes that are a multiple of a byte.
6652 First subtract the lower bound, if any, in the type of the
6653 index, then convert to sizetype and multiply by the size of
6654 the array element. */
6655 if (! integer_zerop (low_bound))
6656 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6659 offset = size_binop (PLUS_EXPR, offset,
6660 size_binop (MULT_EXPR,
6661 fold_convert (sizetype, index),
6670 bit_offset += double_int::from_uhwi (*pbitsize);
6673 case VIEW_CONVERT_EXPR:
6674 if (keep_aligning && STRICT_ALIGNMENT
6675 && (TYPE_ALIGN (TREE_TYPE (exp))
6676 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6677 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6678 < BIGGEST_ALIGNMENT)
6679 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6680 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6685 /* Hand back the decl for MEM[&decl, off]. */
6686 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6688 tree off = TREE_OPERAND (exp, 1);
6689 if (!integer_zerop (off))
6691 double_int boff, coff = mem_ref_offset (exp);
6692 boff = coff.alshift (BITS_PER_UNIT == 8
6693 ? 3 : exact_log2 (BITS_PER_UNIT),
6694 HOST_BITS_PER_DOUBLE_INT);
6697 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6705 /* If any reference in the chain is volatile, the effect is volatile. */
6706 if (TREE_THIS_VOLATILE (exp))
6709 exp = TREE_OPERAND (exp, 0);
6713 /* If OFFSET is constant, see if we can return the whole thing as a
6714 constant bit position. Make sure to handle overflow during
6715 this conversion. */
6716 if (TREE_CODE (offset) == INTEGER_CST)
6718 double_int tem = tree_to_double_int (offset);
6719 tem = tem.sext (TYPE_PRECISION (sizetype));
6720 tem = tem.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT),
6721 HOST_BITS_PER_DOUBLE_INT);
6723 if (tem.fits_shwi ())
6725 *pbitpos = tem.to_shwi ();
6726 *poffset = offset = NULL_TREE;
6730 /* Otherwise, split it up. */
6733 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6734 if (bit_offset.is_negative ())
6737 = double_int::mask (BITS_PER_UNIT == 8
6738 ? 3 : exact_log2 (BITS_PER_UNIT));
6739 double_int tem = bit_offset.and_not (mask);
6740 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6741 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6743 tem = tem.arshift (BITS_PER_UNIT == 8
6744 ? 3 : exact_log2 (BITS_PER_UNIT),
6745 HOST_BITS_PER_DOUBLE_INT);
6746 offset = size_binop (PLUS_EXPR, offset,
6747 double_int_to_tree (sizetype, tem));
6750 *pbitpos = bit_offset.to_shwi ();
6754 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6755 if (mode == VOIDmode
6757 && (*pbitpos % BITS_PER_UNIT) == 0
6758 && (*pbitsize % BITS_PER_UNIT) == 0)
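/* Editorial illustration (assuming 32-bit int and 8-bit bytes): for

       struct s { int x; int a[10]; } v;
       ... v.a[i] ...

   get_inner_reference returns V as the ultimate containing object with
   *PBITSIZE == 32 and the constant part of the displacement (the offset
   of A within the struct) in *PBITPOS, while the variable part of the
   address, the tree i * 4, comes back in *POFFSET.  */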
6766 /* Return a tree of sizetype representing the size, in bytes, of the element
6767 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6770 array_ref_element_size (tree exp)
6772 tree aligned_size = TREE_OPERAND (exp, 3);
6773 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6774 location_t loc = EXPR_LOCATION (exp);
6776 /* If a size was specified in the ARRAY_REF, it's the size measured
6777 in alignment units of the element type. So multiply by that value. */
6780 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6781 sizetype from another type of the same width and signedness. */
6782 if (TREE_TYPE (aligned_size) != sizetype)
6783 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6784 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6785 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6788 /* Otherwise, take the size from that of the element type. Substitute
6789 any PLACEHOLDER_EXPR that we have. */
6791 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6794 /* Return a tree representing the lower bound of the array mentioned in
6795 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6798 array_ref_low_bound (tree exp)
6800 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6802 /* If a lower bound is specified in EXP, use it. */
6803 if (TREE_OPERAND (exp, 2))
6804 return TREE_OPERAND (exp, 2);
6806 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6807 substituting for a PLACEHOLDER_EXPR as needed. */
6808 if (domain_type && TYPE_MIN_VALUE (domain_type))
6809 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6811 /* Otherwise, return a zero of the appropriate type. */
6812 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6815 /* Returns true if REF is an array reference to an array at the end of
6816 a structure. If this is the case, the array may be allocated larger
6817 than its upper bound implies. */
6820 array_at_struct_end_p (tree ref)
6822 if (TREE_CODE (ref) != ARRAY_REF
6823 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6826 while (handled_component_p (ref))
6828 /* If the reference chain contains a component reference to a
6829 non-union type and another field follows it, the reference
6830 is not at the end of a structure. */
6831 if (TREE_CODE (ref) == COMPONENT_REF
6832 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6834 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6835 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6836 nextf = DECL_CHAIN (nextf);
6841 ref = TREE_OPERAND (ref, 0);
6844 /* If the reference is based on a declared entity, the size of the array
6845 is constrained by its given domain. */
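/* Editorial illustration: the classic "struct hack" is the case this
   predicate looks for.  Given

       struct str { int len; char contents[1]; };

   a reference STR->CONTENTS[i] is an array reference at the end of the
   structure, so the object may have been allocated larger than the
   declared domain of CONTENTS implies and that bound must not be
   trusted.  */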
6852 /* Return a tree representing the upper bound of the array mentioned in
6853 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6856 array_ref_up_bound (tree exp)
6858 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6860 /* If there is a domain type and it has an upper bound, use it, substituting
6861 for a PLACEHOLDER_EXPR as needed. */
6862 if (domain_type && TYPE_MAX_VALUE (domain_type))
6863 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6865 /* Otherwise fail. */
6869 /* Return a tree representing the offset, in bytes, of the field referenced
6870 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6873 component_ref_field_offset (tree exp)
6875 tree aligned_offset = TREE_OPERAND (exp, 2);
6876 tree field = TREE_OPERAND (exp, 1);
6877 location_t loc = EXPR_LOCATION (exp);
6879 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6880 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6881 value. */
6884 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6885 sizetype from another type of the same width and signedness. */
6886 if (TREE_TYPE (aligned_offset) != sizetype)
6887 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6888 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6889 size_int (DECL_OFFSET_ALIGN (field)
6893 /* Otherwise, take the offset from that of the field. Substitute
6894 any PLACEHOLDER_EXPR that we have. */
6896 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
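/* Editorial worked example (assuming a typical layout with 4-byte
   aligned int): for

       struct s { char c; int i; } v;

   component_ref_field_offset on V.I yields the byte offset 4; any
   remaining sub-byte displacement stays in DECL_FIELD_BIT_OFFSET and is
   deliberately not included here.  */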
6899 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6901 static unsigned HOST_WIDE_INT
6902 target_align (const_tree target)
6904 /* We might have a chain of nested references with intermediate
6905 misaligning bit-field components, so we need to recurse to find out. */
6907 unsigned HOST_WIDE_INT this_align, outer_align;
6909 switch (TREE_CODE (target))
6915 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6916 outer_align = target_align (TREE_OPERAND (target, 0));
6917 return MIN (this_align, outer_align);
6920 case ARRAY_RANGE_REF:
6921 this_align = TYPE_ALIGN (TREE_TYPE (target));
6922 outer_align = target_align (TREE_OPERAND (target, 0));
6923 return MIN (this_align, outer_align);
6926 case NON_LVALUE_EXPR:
6927 case VIEW_CONVERT_EXPR:
6928 this_align = TYPE_ALIGN (TREE_TYPE (target));
6929 outer_align = target_align (TREE_OPERAND (target, 0));
6930 return MAX (this_align, outer_align);
6933 return TYPE_ALIGN (TREE_TYPE (target));
6938 /* Given an rtx VALUE that may contain additions and multiplications, return
6939 an equivalent value that just refers to a register, memory, or constant.
6940 This is done by generating instructions to perform the arithmetic and
6941 returning a pseudo-register containing the value.
6943 The returned value may be a REG, SUBREG, MEM or constant. */
6946 force_operand (rtx value, rtx target)
6949 /* Use subtarget as the target for operand 0 of a binary operation. */
6950 rtx subtarget = get_subtarget (target);
6951 enum rtx_code code = GET_CODE (value);
6953 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6955 && !REG_P (SUBREG_REG (value))
6956 && !MEM_P (SUBREG_REG (value)))
6959 = simplify_gen_subreg (GET_MODE (value),
6960 force_reg (GET_MODE (SUBREG_REG (value)),
6961 force_operand (SUBREG_REG (value),
6963 GET_MODE (SUBREG_REG (value)),
6964 SUBREG_BYTE (value));
6965 code = GET_CODE (value);
6968 /* Check for a PIC address load. */
6969 if ((code == PLUS || code == MINUS)
6970 && XEXP (value, 0) == pic_offset_table_rtx
6971 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6972 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6973 || GET_CODE (XEXP (value, 1)) == CONST))
6976 subtarget = gen_reg_rtx (GET_MODE (value));
6977 emit_move_insn (subtarget, value);
6981 if (ARITHMETIC_P (value))
6983 op2 = XEXP (value, 1);
6984 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6986 if (code == MINUS && CONST_INT_P (op2))
6989 op2 = negate_rtx (GET_MODE (value), op2);
6992 /* Check for an addition with OP2 a constant integer and our first
6993 operand a PLUS of a virtual register and something else. In that
6994 case, we want to emit the sum of the virtual register and the
6995 constant first and then add the other value. This allows virtual
6996 register instantiation to simply modify the constant rather than
6997 creating another one around this addition. */
6998 if (code == PLUS && CONST_INT_P (op2)
6999 && GET_CODE (XEXP (value, 0)) == PLUS
7000 && REG_P (XEXP (XEXP (value, 0), 0))
7001 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7002 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7004 rtx temp = expand_simple_binop (GET_MODE (value), code,
7005 XEXP (XEXP (value, 0), 0), op2,
7006 subtarget, 0, OPTAB_LIB_WIDEN);
7007 return expand_simple_binop (GET_MODE (value), code, temp,
7008 force_operand (XEXP (XEXP (value,
7010 target, 0, OPTAB_LIB_WIDEN);
7013 op1 = force_operand (XEXP (value, 0), subtarget);
7014 op2 = force_operand (op2, NULL_RTX);
7018 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7020 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7021 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7022 target, 1, OPTAB_LIB_WIDEN);
7024 return expand_divmod (0,
7025 FLOAT_MODE_P (GET_MODE (value))
7026 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7027 GET_MODE (value), op1, op2, target, 0);
7029 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7032 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7035 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7038 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7039 target, 0, OPTAB_LIB_WIDEN);
7041 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7042 target, 1, OPTAB_LIB_WIDEN);
7045 if (UNARY_P (value))
7048 target = gen_reg_rtx (GET_MODE (value));
7049 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7056 case FLOAT_TRUNCATE:
7057 convert_move (target, op1, code == ZERO_EXTEND);
7062 expand_fix (target, op1, code == UNSIGNED_FIX);
7066 case UNSIGNED_FLOAT:
7067 expand_float (target, op1, code == UNSIGNED_FLOAT);
7071 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7075 #ifdef INSN_SCHEDULING
7076 /* On machines that have insn scheduling, we want all memory references to be
7077 explicit, so we need to deal with such paradoxical SUBREGs. */
7078 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7080 = simplify_gen_subreg (GET_MODE (value),
7081 force_reg (GET_MODE (SUBREG_REG (value)),
7082 force_operand (SUBREG_REG (value),
7084 GET_MODE (SUBREG_REG (value)),
7085 SUBREG_BYTE (value));
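/* Editorial illustration: given VALUE == (plus:SI (mult:SI (reg:SI 60)
   (const_int 4)) (const_int 8)), force_operand expands the
   multiplication and the addition through expand_mult and
   expand_simple_binop and returns a pseudo register holding the result,
   so the caller only ever sees a REG, SUBREG, MEM or constant.  */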
7091 /* Subroutine of expand_expr: return nonzero iff there is no way that
7092 EXP can reference X, which is being modified. TOP_P is nonzero if this
7093 call is going to be used to determine whether we need a temporary
7094 for EXP, as opposed to a recursive call to this function.
7096 It is always safe for this routine to return zero since it merely
7097 searches for optimization opportunities. */
7100 safe_from_p (const_rtx x, tree exp, int top_p)
7106 /* If EXP has varying size, we MUST use a target since we currently
7107 have no way of allocating temporaries of variable size
7108 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7109 So we assume here that something at a higher level has prevented a
7110 clash. This is somewhat bogus, but the best we can do. Only
7111 do this when X is BLKmode and when we are at the top level. */
7112 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7113 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7114 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7115 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7116 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7118 && GET_MODE (x) == BLKmode)
7119 /* If X is in the outgoing argument area, it is always safe. */
7121 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7122 || (GET_CODE (XEXP (x, 0)) == PLUS
7123 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7126 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7127 find the underlying pseudo. */
7128 if (GET_CODE (x) == SUBREG)
7131 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7135 /* Now look at our tree code and possibly recurse. */
7136 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7138 case tcc_declaration:
7139 exp_rtl = DECL_RTL_IF_SET (exp);
7145 case tcc_exceptional:
7146 if (TREE_CODE (exp) == TREE_LIST)
7150 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7152 exp = TREE_CHAIN (exp);
7155 if (TREE_CODE (exp) != TREE_LIST)
7156 return safe_from_p (x, exp, 0);
7159 else if (TREE_CODE (exp) == CONSTRUCTOR)
7161 constructor_elt *ce;
7162 unsigned HOST_WIDE_INT idx;
7164 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7165 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7166 || !safe_from_p (x, ce->value, 0))
7170 else if (TREE_CODE (exp) == ERROR_MARK)
7171 return 1; /* An already-visited SAVE_EXPR? */
7176 /* The only case we look at here is the DECL_INITIAL inside a
7177 DECL_EXPR. */
7178 return (TREE_CODE (exp) != DECL_EXPR
7179 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7180 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7181 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7184 case tcc_comparison:
7185 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7190 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7192 case tcc_expression:
7195 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7196 the expression. If it is set, we conflict iff we are that rtx or
7197 both are in memory. Otherwise, we check all operands of the
7198 expression recursively. */
7200 switch (TREE_CODE (exp))
7203 /* If the operand is static or we are static, we can't conflict.
7204 Likewise if we don't conflict with the operand at all. */
7205 if (staticp (TREE_OPERAND (exp, 0))
7206 || TREE_STATIC (exp)
7207 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7210 /* Otherwise, the only way this can conflict is if we are taking
7211 the address of a DECL whose address is part of X, which is
7212 very rare. */
7213 exp = TREE_OPERAND (exp, 0);
7216 if (!DECL_RTL_SET_P (exp)
7217 || !MEM_P (DECL_RTL (exp)))
7220 exp_rtl = XEXP (DECL_RTL (exp), 0);
7226 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7227 get_alias_set (exp)))
7232 /* Assume that the call will clobber all hard registers and
7233 all of memory. */
7234 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7239 case WITH_CLEANUP_EXPR:
7240 case CLEANUP_POINT_EXPR:
7241 /* Lowered by gimplify.c. */
7245 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7251 /* If we have an rtx, we do not need to scan our operands. */
7255 nops = TREE_OPERAND_LENGTH (exp);
7256 for (i = 0; i < nops; i++)
7257 if (TREE_OPERAND (exp, i) != 0
7258 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7264 /* Should never get a type here. */
7268 /* If we have an rtl, find any enclosed object. Then see if we conflict
7269 with it. */
7270 if (exp_rtl)
7271 {
7272 if (GET_CODE (exp_rtl) == SUBREG)
7274 exp_rtl = SUBREG_REG (exp_rtl);
7276 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7280 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7281 are memory and they conflict. */
7282 return ! (rtx_equal_p (x, exp_rtl)
7283 || (MEM_P (x) && MEM_P (exp_rtl)
7284 && true_dependence (exp_rtl, VOIDmode, x)));
7287 /* If we reach here, it is safe. */
7292 /* Return the highest power of two that EXP is known to be a multiple of.
7293 This is used in updating alignment of MEMs in array references. */
7295 unsigned HOST_WIDE_INT
7296 highest_pow2_factor (const_tree exp)
7298 unsigned HOST_WIDE_INT c0, c1;
7300 switch (TREE_CODE (exp))
7303 /* We can find the lowest bit that's a one. If the low
7304 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7305 We need to handle this case since we can find it in a COND_EXPR,
7306 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
7307 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7308 later ICE. */
7309 if (TREE_OVERFLOW (exp))
7310 return BIGGEST_ALIGNMENT;
7313 /* Note: tree_low_cst is intentionally not used here,
7314 we don't care about the upper bits. */
7315 c0 = TREE_INT_CST_LOW (exp);
7317 return c0 ? c0 : BIGGEST_ALIGNMENT;
7321 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
7322 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7323 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7324 return MIN (c0, c1);
7327 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7328 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7331 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
7333 if (integer_pow2p (TREE_OPERAND (exp, 1))
7334 && host_integerp (TREE_OPERAND (exp, 1), 1))
7336 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7337 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7338 return MAX (1, c0 / c1);
7343 /* The highest power of two of a bit-and expression is the maximum of
7344 that of its operands. We typically get here for a complex LHS and
7345 a constant negative power of two on the RHS to force an explicit
7346 alignment, so don't bother looking at the LHS. */
7347 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7351 return highest_pow2_factor (TREE_OPERAND (exp, 0));
7354 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7357 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7358 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7359 return MIN (c0, c1);
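/* Editorial worked example: highest_pow2_factor (24) == 8, because
   24 & -24 isolates the lowest set bit.  For i * 12 + 4 the MULT_EXPR
   case yields 1 * 4 == 4 (an SSA name contributes the default factor 1),
   and the PLUS_EXPR case returns MIN (4, 4) == 4, a guarantee that holds
   whatever value i has.  */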
7368 /* Similar, except that the alignment requirements of TARGET are
7369 taken into account. Assume it is at least as aligned as its
7370 type, unless it is a COMPONENT_REF in which case the layout of
7371 the structure gives the alignment. */
7373 static unsigned HOST_WIDE_INT
7374 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7376 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7377 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7379 return MAX (factor, talign);
7382 #ifdef HAVE_conditional_move
7383 /* Convert the tree comparison code TCODE to the rtl one where the
7384 signedness is UNSIGNEDP. */
7386 static enum rtx_code
7387 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7399 code = unsignedp ? LTU : LT;
7402 code = unsignedp ? LEU : LE;
7405 code = unsignedp ? GTU : GT;
7408 code = unsignedp ? GEU : GE;
7410 case UNORDERED_EXPR:
7442 /* Subroutine of expand_expr. Expand the two operands of a binary
7443 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7444 The value may be stored in TARGET if TARGET is nonzero. The
7445 MODIFIER argument is as documented by expand_expr. */
7448 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7449 enum expand_modifier modifier)
7451 if (! safe_from_p (target, exp1, 1))
7453 if (operand_equal_p (exp0, exp1, 0))
7455 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7456 *op1 = copy_rtx (*op0);
7460 /* If we need to preserve evaluation order, copy exp0 into its own
7461 temporary variable so that it can't be clobbered by exp1. */
7462 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7463 exp0 = save_expr (exp0);
7464 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7465 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
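/* Editorial illustration: for an expression such as X + X the two
   operands are operand_equal_p, so the code above expands X only once
   and hands back a copy_rtx of the result as *OP1, avoiding a duplicate
   (and possibly side-effecting) second expansion.  */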
7470 /* Return a MEM that contains constant EXP. DEFER is as for
7471 output_constant_def and MODIFIER is as for expand_expr. */
7474 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7478 mem = output_constant_def (exp, defer);
7479 if (modifier != EXPAND_INITIALIZER)
7480 mem = use_anchored_address (mem);
7484 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7485 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7488 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7489 enum expand_modifier modifier, addr_space_t as)
7491 rtx result, subtarget;
7493 HOST_WIDE_INT bitsize, bitpos;
7494 int volatilep, unsignedp;
7495 enum machine_mode mode1;
7497 /* If we are taking the address of a constant and are at the top level,
7498 we have to use output_constant_def since we can't call force_const_mem
7499 yet. */
7500 /* ??? This should be considered a front-end bug. We should not be
7501 generating ADDR_EXPR of something that isn't an LVALUE. The only
7502 exception here is STRING_CST. */
7503 if (CONSTANT_CLASS_P (exp))
7505 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7506 if (modifier < EXPAND_SUM)
7507 result = force_operand (result, target);
7511 /* Everything must be something allowed by is_gimple_addressable. */
7512 switch (TREE_CODE (exp))
7515 /* This case will happen via recursion for &a->b. */
7516 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7520 tree tem = TREE_OPERAND (exp, 0);
7521 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7522 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7523 return expand_expr (tem, target, tmode, modifier);
7527 /* Expand the initializer like constants above. */
7528 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7530 if (modifier < EXPAND_SUM)
7531 result = force_operand (result, target);
7535 /* The real part of the complex number is always first, therefore
7536 the address is the same as the address of the parent object. */
7537 offset = 0;
7538 bitpos = 0;
7539 inner = TREE_OPERAND (exp, 0);
7543 /* The imaginary part of the complex number is always second.
7544 The expression is therefore always offset by the size of the
7545 scalar type. */
7546 offset = 0;
7547 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7548 inner = TREE_OPERAND (exp, 0);
7552 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7553 expand_expr, as that can have various side effects; LABEL_DECLs for
7554 example, may not have their DECL_RTL set yet. Expand the rtl of
7555 CONSTRUCTORs too, which should yield a memory reference for the
7556 constructor's contents. Assume language specific tree nodes can
7557 be expanded in some interesting way. */
7558 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7560 || TREE_CODE (exp) == CONSTRUCTOR
7561 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7563 result = expand_expr (exp, target, tmode,
7564 modifier == EXPAND_INITIALIZER
7565 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7567 /* If the DECL isn't in memory, then the DECL wasn't properly
7568 marked TREE_ADDRESSABLE, which will be either a front-end
7569 or a tree optimizer bug. */
7571 if (TREE_ADDRESSABLE (exp)
7573 && ! targetm.calls.allocate_stack_slots_for_args())
7575 error ("local frame unavailable (naked function?)");
7579 gcc_assert (MEM_P (result));
7580 result = XEXP (result, 0);
7582 /* ??? Is this needed anymore? */
7584 TREE_USED (exp) = 1;
7586 if (modifier != EXPAND_INITIALIZER
7587 && modifier != EXPAND_CONST_ADDRESS
7588 && modifier != EXPAND_SUM)
7589 result = force_operand (result, target);
7593 /* Pass FALSE as the last argument to get_inner_reference although
7594 we are expanding to RTL. The rationale is that we know how to
7595 handle "aligning nodes" here: we can just bypass them because
7596 they won't change the final object whose address will be returned
7597 (they actually exist only for that purpose). */
7598 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7599 &mode1, &unsignedp, &volatilep, false);
7603 /* We must have made progress. */
7604 gcc_assert (inner != exp);
7606 subtarget = offset || bitpos ? NULL_RTX : target;
7607 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7608 inner alignment, force the inner to be sufficiently aligned. */
7609 if (CONSTANT_CLASS_P (inner)
7610 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7612 inner = copy_node (inner);
7613 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7614 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7615 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7617 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7623 if (modifier != EXPAND_NORMAL)
7624 result = force_operand (result, NULL);
7625 tmp = expand_expr (offset, NULL_RTX, tmode,
7626 modifier == EXPAND_INITIALIZER
7627 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7629 result = convert_memory_address_addr_space (tmode, result, as);
7630 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7632 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7633 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7636 subtarget = bitpos ? NULL_RTX : target;
7637 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7638 1, OPTAB_LIB_WIDEN);
7644 /* Someone beforehand should have rejected taking the address
7645 of such an object. */
7646 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7648 result = convert_memory_address_addr_space (tmode, result, as);
7649 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7650 if (modifier < EXPAND_SUM)
7651 result = force_operand (result, target);
7657 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7658 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7661 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7662 enum expand_modifier modifier)
7664 addr_space_t as = ADDR_SPACE_GENERIC;
7665 enum machine_mode address_mode = Pmode;
7666 enum machine_mode pointer_mode = ptr_mode;
7667 enum machine_mode rmode;
7670 /* Target mode of VOIDmode says "whatever's natural". */
7671 if (tmode == VOIDmode)
7672 tmode = TYPE_MODE (TREE_TYPE (exp));
7674 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7676 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7677 address_mode = targetm.addr_space.address_mode (as);
7678 pointer_mode = targetm.addr_space.pointer_mode (as);
7681 /* We can get called with some Weird Things if the user does silliness
7682 like "(short) &a". In that case, convert_memory_address won't do
7683 the right thing, so ignore the given target mode. */
7684 if (tmode != address_mode && tmode != pointer_mode)
7685 tmode = address_mode;
7687 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7688 tmode, modifier, as);
7690 /* Despite expand_expr's claims concerning ignoring TMODE when not
7691 strictly convenient, stuff breaks if we don't honor it. Note
7692 that combined with the above, we only do this for pointer modes. */
7693 rmode = GET_MODE (result);
7694 if (rmode == VOIDmode)
7697 result = convert_memory_address_addr_space (tmode, result, as);
7702 /* Generate code for computing CONSTRUCTOR EXP.
7703 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7704 is TRUE, instead of creating a temporary variable in memory,
7705 NULL is returned and the caller needs to handle it differently. */
7708 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7709 bool avoid_temp_mem)
7711 tree type = TREE_TYPE (exp);
7712 enum machine_mode mode = TYPE_MODE (type);
7714 /* Try to avoid creating a temporary at all. This is possible
7715 if all of the initializer is zero.
7716 FIXME: try to handle all [0..255] initializers we can handle
7717 with memset. */
7718 if (TREE_STATIC (exp)
7719 && !TREE_ADDRESSABLE (exp)
7720 && target != 0 && mode == BLKmode
7721 && all_zeros_p (exp))
7723 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7727 /* All elts simple constants => refer to a constant in memory. But
7728 if this is a non-BLKmode mode, let it store a field at a time
7729 since that should make a CONST_INT or CONST_DOUBLE when we
7730 fold. Likewise, if we have a target we can use, it is best to
7731 store directly into the target unless the type is large enough
7732 that memcpy will be used. If we are making an initializer and
7733 all operands are constant, put it in memory as well.
7735 FIXME: Avoid trying to fill vector constructors piece-meal.
7736 Output them with output_constant_def below unless we're sure
7737 they're zeros. This should go away when vector initializers
7738 are treated like VECTOR_CST instead of arrays. */
7739 if ((TREE_STATIC (exp)
7740 && ((mode == BLKmode
7741 && ! (target != 0 && safe_from_p (target, exp, 1)))
7742 || TREE_ADDRESSABLE (exp)
7743 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7744 && (! MOVE_BY_PIECES_P
7745 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7747 && ! mostly_zeros_p (exp))))
7748 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7749 && TREE_CONSTANT (exp)))
7756 constructor = expand_expr_constant (exp, 1, modifier);
7758 if (modifier != EXPAND_CONST_ADDRESS
7759 && modifier != EXPAND_INITIALIZER
7760 && modifier != EXPAND_SUM)
7761 constructor = validize_mem (constructor);
7766 /* Handle calls that pass values in multiple non-contiguous
7767 locations. The Irix 6 ABI has examples of this. */
7768 if (target == 0 || ! safe_from_p (target, exp, 1)
7769 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7775 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7776 | (TREE_READONLY (exp)
7777 * TYPE_QUAL_CONST))),
7778 TREE_ADDRESSABLE (exp), 1);
7781 store_constructor (exp, target, 0, int_expr_size (exp));
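/* Editorial illustration: for an all-zero aggregate initializer such as

       struct big { int a[64]; } v = { 0 };

   the all_zeros_p shortcut above applies (the constructor is TREE_STATIC
   and the target is BLKmode memory), so a single clear_storage call
   (typically a memset or a clearing loop) replaces 64 individual zero
   stores.  */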
7786 /* expand_expr: generate code for computing expression EXP.
7787 An rtx for the computed value is returned. The value is never null.
7788 In the case of a void EXP, const0_rtx is returned.
7790 The value may be stored in TARGET if TARGET is nonzero.
7791 TARGET is just a suggestion; callers must assume that
7792 the rtx returned may not be the same as TARGET.
7794 If TARGET is CONST0_RTX, it means that the value will be ignored.
7796 If TMODE is not VOIDmode, it suggests generating the
7797 result in mode TMODE. But this is done only when convenient.
7798 Otherwise, TMODE is ignored and the value generated in its natural mode.
7799 TMODE is just a suggestion; callers must assume that
7800 the rtx returned may not have mode TMODE.
7802 Note that TARGET may have neither TMODE nor MODE. In that case, it
7803 probably will not be used.
7805 If MODIFIER is EXPAND_SUM then when EXP is an addition
7806 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7807 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7808 products as above, or REG or MEM, or constant.
7809 Ordinarily in such cases we would output mul or add instructions
7810 and then return a pseudo reg containing the sum.
7812 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7813 it also marks a label as absolutely required (it can't be dead).
7814 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7815 This is used for outputting expressions used in initializers.
7817 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7818 with a constant address even if that address is not normally legitimate.
7819 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7821 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7822 a call parameter. Such targets require special care as we haven't yet
7823 marked TARGET so that it's safe from being trashed by libcalls. We
7824 don't want to use TARGET for anything but the final result;
7825 Intermediate values must go elsewhere. Additionally, calls to
7826 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7828 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7829 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7830 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7831 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7832 recursively. */
7835 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7836 enum expand_modifier modifier, rtx *alt_rtl)
7840 /* Handle ERROR_MARK before anybody tries to access its type. */
7841 if (TREE_CODE (exp) == ERROR_MARK
7842 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7844 ret = CONST0_RTX (tmode);
7845 return ret ? ret : const0_rtx;
7848 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7852 /* Try to expand the conditional expression which is represented by
7853 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7854 return the rtl reg which represents the result. Otherwise return
7855 NULL_RTX. */
7858 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7859 tree treeop1 ATTRIBUTE_UNUSED,
7860 tree treeop2 ATTRIBUTE_UNUSED)
7862 #ifdef HAVE_conditional_move
7864 rtx op00, op01, op1, op2;
7865 enum rtx_code comparison_code;
7866 enum machine_mode comparison_mode;
7869 tree type = TREE_TYPE (treeop1);
7870 int unsignedp = TYPE_UNSIGNED (type);
7871 enum machine_mode mode = TYPE_MODE (type);
7873 /* If we cannot do a conditional move on the mode, try doing it
7874 with the promoted mode. */
7875 if (!can_conditionally_move_p (mode))
7877 mode = promote_mode (type, mode, &unsignedp);
7878 if (!can_conditionally_move_p (mode))
7880 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7883 temp = assign_temp (type, 0, 1);
7886 expand_operands (treeop1, treeop2,
7887 temp, &op1, &op2, EXPAND_NORMAL);
7889 if (TREE_CODE (treeop0) == SSA_NAME
7890 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7892 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7893 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7894 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7895 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7896 comparison_mode = TYPE_MODE (type);
7897 unsignedp = TYPE_UNSIGNED (type);
7898 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7900 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7902 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7903 enum tree_code cmpcode = TREE_CODE (treeop0);
7904 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7905 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7906 unsignedp = TYPE_UNSIGNED (type);
7907 comparison_mode = TYPE_MODE (type);
7908 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7912 op00 = expand_normal (treeop0);
7914 comparison_code = NE;
7915 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7918 if (GET_MODE (op1) != mode)
7919 op1 = gen_lowpart (mode, op1);
7921 if (GET_MODE (op2) != mode)
7922 op2 = gen_lowpart (mode, op2);
7924 /* Try to emit the conditional move. */
7925 insn = emit_conditional_move (temp, comparison_code,
7926 op00, op01, comparison_mode,
7930 /* If we could do the conditional move, emit the sequence,
7931 and return. */
7932 if (insn)
7933 {
7934 rtx seq = get_insns ();
7940 /* Otherwise discard the sequence and fall back to code with
7941 branches. */
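/* Editorial illustration: on a HAVE_conditional_move target, a statement
   such as R = A < B ? C : D reaches the function above; A < B is turned
   into COMPARISON_CODE == LT (or LTU for unsigned operands), C and D are
   expanded into OP1 and OP2, and emit_conditional_move produces a single
   conditional-move instruction in place of a branch diamond.  */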
7948 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7949 enum expand_modifier modifier)
7951 rtx op0, op1, op2, temp;
7954 enum machine_mode mode;
7955 enum tree_code code = ops->code;
7957 rtx subtarget, original_target;
7959 bool reduce_bit_field;
7960 location_t loc = ops->location;
7961 tree treeop0, treeop1, treeop2;
7962 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7963 ? reduce_to_bit_field_precision ((expr), \
7969 mode = TYPE_MODE (type);
7970 unsignedp = TYPE_UNSIGNED (type);
7976 /* We should be called only on simple (binary or unary) expressions,
7977 exactly those that are valid in gimple expressions that aren't
7978 GIMPLE_SINGLE_RHS (or invalid). */
7979 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7980 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7981 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7983 ignore = (target == const0_rtx
7984 || ((CONVERT_EXPR_CODE_P (code)
7985 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7986 && TREE_CODE (type) == VOID_TYPE));
7988 /* We should be called only if we need the result. */
7989 gcc_assert (!ignore);
7991 /* An operation in what may be a bit-field type needs the
7992 result to be reduced to the precision of the bit-field type,
7993 which is narrower than that of the type's mode. */
7994 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7995 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7997 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8000 /* Use subtarget as the target for operand 0 of a binary operation. */
8001 subtarget = get_subtarget (target);
8002 original_target = target;
8006 case NON_LVALUE_EXPR:
8009 if (treeop0 == error_mark_node)
8012 if (TREE_CODE (type) == UNION_TYPE)
8014 tree valtype = TREE_TYPE (treeop0);
8016 /* If both input and output are BLKmode, this conversion isn't doing
8017 anything except possibly changing memory attribute. */
8018 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8020 rtx result = expand_expr (treeop0, target, tmode,
8023 result = copy_rtx (result);
8024 set_mem_attributes (result, type, 0);
8030 if (TYPE_MODE (type) != BLKmode)
8031 target = gen_reg_rtx (TYPE_MODE (type));
8033 target = assign_temp (type, 1, 1);
8037 /* Store data into beginning of memory target. */
8038 store_expr (treeop0,
8039 adjust_address (target, TYPE_MODE (valtype), 0),
8040 modifier == EXPAND_STACK_PARM,
8045 gcc_assert (REG_P (target));
8047 /* Store this field into a union of the proper type. */
8048 store_field (target,
8049 MIN ((int_size_in_bytes (TREE_TYPE
8052 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8053 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8056 /* Return the entire union. */
8060 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8062 op0 = expand_expr (treeop0, target, VOIDmode,
8065 /* If the signedness of the conversion differs and OP0 is
8066 a promoted SUBREG, clear that indication since we now
8067 have to do the proper extension. */
8068 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8069 && GET_CODE (op0) == SUBREG)
8070 SUBREG_PROMOTED_VAR_P (op0) = 0;
8072 return REDUCE_BIT_FIELD (op0);
8075 op0 = expand_expr (treeop0, NULL_RTX, mode,
8076 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8077 if (GET_MODE (op0) == mode)
8080 /* If OP0 is a constant, just convert it into the proper mode. */
8081 else if (CONSTANT_P (op0))
8083 tree inner_type = TREE_TYPE (treeop0);
8084 enum machine_mode inner_mode = GET_MODE (op0);
8086 if (inner_mode == VOIDmode)
8087 inner_mode = TYPE_MODE (inner_type);
8089 if (modifier == EXPAND_INITIALIZER)
8090 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8091 subreg_lowpart_offset (mode,
8094 op0 = convert_modes (mode, inner_mode, op0,
8095 TYPE_UNSIGNED (inner_type));
8098 else if (modifier == EXPAND_INITIALIZER)
8099 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8101 else if (target == 0)
8102 op0 = convert_to_mode (mode, op0,
8103 TYPE_UNSIGNED (TREE_TYPE
8107 convert_move (target, op0,
8108 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8112 return REDUCE_BIT_FIELD (op0);
8114 case ADDR_SPACE_CONVERT_EXPR:
8116 tree treeop0_type = TREE_TYPE (treeop0);
8118 addr_space_t as_from;
8120 gcc_assert (POINTER_TYPE_P (type));
8121 gcc_assert (POINTER_TYPE_P (treeop0_type));
8123 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8124 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8126 /* Conversions between pointers to the same address space should
8127 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8128 gcc_assert (as_to != as_from);
8130 /* Ask target code to handle conversion between pointers
8131 to overlapping address spaces. */
8132 if (targetm.addr_space.subset_p (as_to, as_from)
8133 || targetm.addr_space.subset_p (as_from, as_to))
8135 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8136 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8141 /* For disjoint address spaces, converting anything but
8142 a null pointer invokes undefined behaviour. We simply
8143 always return a null pointer here. */
8144 return CONST0_RTX (mode);
8147 case POINTER_PLUS_EXPR:
8148 /* Even though the sizetype mode and the pointer's mode can be different,
8149 expand is able to handle this correctly and get the correct result out
8150 of the PLUS_EXPR code. */
8151 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8152 if sizetype precision is smaller than pointer precision. */
8153 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8154 treeop1 = fold_convert_loc (loc, type,
8155 fold_convert_loc (loc, ssizetype,
8157 /* If sizetype precision is larger than pointer precision, truncate the
8158 offset to have matching modes. */
8159 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8160 treeop1 = fold_convert_loc (loc, type, treeop1);
8163 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8164 something else, make sure we add the register to the constant and
8165 then to the other thing. This case can occur during strength
8166 reduction and doing it this way will produce better code if the
8167 frame pointer or argument pointer is eliminated.
8169 fold-const.c will ensure that the constant is always in the inner
8170 PLUS_EXPR, so the only case we need to do anything about is if
8171 sp, ap, or fp is our second argument, in which case we must swap
8172 the innermost first argument and our second argument. */
8174 if (TREE_CODE (treeop0) == PLUS_EXPR
8175 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8176 && TREE_CODE (treeop1) == VAR_DECL
8177 && (DECL_RTL (treeop1) == frame_pointer_rtx
8178 || DECL_RTL (treeop1) == stack_pointer_rtx
8179 || DECL_RTL (treeop1) == arg_pointer_rtx))
8184 /* If the result is to be ptr_mode and we are adding an integer to
8185 something, we might be forming a constant. So try to use
8186 plus_constant. If it produces a sum and we can't accept it,
8187 use force_operand. This allows P = &ARR[const] to generate
8188 efficient code on machines where a SYMBOL_REF is not a valid
8189 address.
8191 If this is an EXPAND_SUM call, always return the sum. */
8192 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8193 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8195 if (modifier == EXPAND_STACK_PARM)
8197 if (TREE_CODE (treeop0) == INTEGER_CST
8198 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8199 && TREE_CONSTANT (treeop1))
8203 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8205 /* Use immed_double_const to ensure that the constant is
8206 truncated according to the mode of OP1, then sign extended
8207 to a HOST_WIDE_INT. Using the constant directly can result
8208 in non-canonical RTL in a 64x32 cross compile. */
8210 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8212 TYPE_MODE (TREE_TYPE (treeop1)));
8213 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8214 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8215 op1 = force_operand (op1, target);
8216 return REDUCE_BIT_FIELD (op1);
8219 else if (TREE_CODE (treeop1) == INTEGER_CST
8220 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8221 && TREE_CONSTANT (treeop0))
8222 {
8223 rtx constant_part;
8225 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8226 (modifier == EXPAND_INITIALIZER
8227 ? EXPAND_INITIALIZER : EXPAND_SUM));
8228 if (! CONSTANT_P (op0))
8229 {
8230 op1 = expand_expr (treeop1, NULL_RTX,
8231 VOIDmode, modifier);
8232 /* Return a PLUS if modifier says it's OK.  */
8233 if (modifier == EXPAND_SUM
8234 || modifier == EXPAND_INITIALIZER)
8235 return simplify_gen_binary (PLUS, mode, op0, op1);
8236 goto binop2;
8237 }
8238 /* Use immed_double_const to ensure that the constant is
8239 truncated according to the mode of OP0, then sign extended
8240 to a HOST_WIDE_INT.  Using the constant directly can result
8241 in non-canonical RTL in a 64x32 cross compile.  */
8242 constant_part
8243 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8244 (HOST_WIDE_INT) 0,
8245 TYPE_MODE (TREE_TYPE (treeop0)));
8246 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8247 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8248 op0 = force_operand (op0, target);
8249 return REDUCE_BIT_FIELD (op0);
8250 }
8253 /* Use TER to expand pointer addition of a negated value
8254 as pointer subtraction. */
8255 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8256 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8257 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8258 && TREE_CODE (treeop1) == SSA_NAME
8259 && TYPE_MODE (TREE_TYPE (treeop0))
8260 == TYPE_MODE (TREE_TYPE (treeop1)))
8261 {
8262 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8263 if (def)
8264 {
8265 treeop1 = gimple_assign_rhs1 (def);
8266 code = MINUS_EXPR;
8267 goto do_minus;
8268 }
8269 }
8271 /* No sense saving up arithmetic to be done
8272 if it's all in the wrong mode to form part of an address.
8273 And force_operand won't know whether to sign-extend or
8274 zero-extend.  */
8275 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8276 || mode != ptr_mode)
8277 {
8278 expand_operands (treeop0, treeop1,
8279 subtarget, &op0, &op1, EXPAND_NORMAL);
8280 if (op0 == const0_rtx)
8281 return op1;
8282 if (op1 == const0_rtx)
8283 return op0;
8284 goto binop2;
8285 }
8287 expand_operands (treeop0, treeop1,
8288 subtarget, &op0, &op1, modifier);
8289 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8291 case MINUS_EXPR:
8292 do_minus:
8293 /* For initializers, we are allowed to return a MINUS of two
8294 symbolic constants.  Here we handle all cases when both operands
8295 are constant.  */
8296 /* Handle difference of two symbolic constants,
8297 for the sake of an initializer.  */
8298 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8299 && really_constant_p (treeop0)
8300 && really_constant_p (treeop1))
8301 {
8302 expand_operands (treeop0, treeop1,
8303 NULL_RTX, &op0, &op1, modifier);
8305 /* If the last operand is a CONST_INT, use plus_constant of
8306 the negated constant.  Else make the MINUS.  */
8307 if (CONST_INT_P (op1))
8308 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8309 -INTVAL (op1)));
8310 else
8311 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8312 }
8314 /* No sense saving up arithmetic to be done
8315 if it's all in the wrong mode to form part of an address.
8316 And force_operand won't know whether to sign-extend or
8317 zero-extend.  */
8318 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8319 || mode != ptr_mode)
8320 goto binop;
8322 expand_operands (treeop0, treeop1,
8323 subtarget, &op0, &op1, modifier);
8325 /* Convert A - const to A + (-const).  */
8326 if (CONST_INT_P (op1))
8327 {
8328 op1 = negate_rtx (mode, op1);
8329 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8330 }
8332 goto binop2;
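/* E.g. "p - 4" reaches this point as op0 = p, op1 = (const_int 4);
   rewriting it as p + (const_int -4) lets the address-arithmetic
   path above (plus_constant and the EXPAND_SUM handling) treat
   addition and subtraction of constants uniformly.  */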
8334 case WIDEN_MULT_PLUS_EXPR:
8335 case WIDEN_MULT_MINUS_EXPR:
8336 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8337 op2 = expand_normal (treeop2);
8338 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8339 target, unsignedp);
8340 return target;
8342 case WIDEN_MULT_EXPR:
8343 /* If first operand is constant, swap them.
8344 Thus the following special case checks need only
8345 check the second operand. */
8346 if (TREE_CODE (treeop0) == INTEGER_CST)
8347 {
8348 tree t1 = treeop0;
8349 treeop0 = treeop1;
8350 treeop1 = t1;
8351 }
8353 /* First, check if we have a multiplication of one signed and one
8354 unsigned operand. */
8355 if (TREE_CODE (treeop1) != INTEGER_CST
8356 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8357 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8359 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8360 this_optab = usmul_widen_optab;
8361 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8362 != CODE_FOR_nothing)
8363 {
8364 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8365 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8366 EXPAND_NORMAL);
8367 else
8368 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8369 EXPAND_NORMAL);
8370 goto binop3;
8371 }
8372 }
8373 /* Check for a multiplication with matching signedness. */
8374 else if ((TREE_CODE (treeop1) == INTEGER_CST
8375 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8376 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8377 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8379 tree op0type = TREE_TYPE (treeop0);
8380 enum machine_mode innermode = TYPE_MODE (op0type);
8381 bool zextend_p = TYPE_UNSIGNED (op0type);
8382 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8383 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8385 if (TREE_CODE (treeop0) != INTEGER_CST)
8386 {
8387 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8388 != CODE_FOR_nothing)
8390 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8392 temp = expand_widening_mult (mode, op0, op1, target,
8393 unsignedp, this_optab);
8394 return REDUCE_BIT_FIELD (temp);
8396 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8397 != CODE_FOR_nothing
8398 && innermode == word_mode)
8399 {
8400 rtx htem, hipart;
8401 op0 = expand_normal (treeop0);
8402 if (TREE_CODE (treeop1) == INTEGER_CST)
8403 op1 = convert_modes (innermode, mode,
8404 expand_normal (treeop1), unsignedp);
8405 else
8406 op1 = expand_normal (treeop1);
8407 temp = expand_binop (mode, other_optab, op0, op1, target,
8408 unsignedp, OPTAB_LIB_WIDEN);
8409 hipart = gen_highpart (innermode, temp);
8410 htem = expand_mult_highpart_adjust (innermode, hipart,
8411 op0, op1, hipart,
8412 unsignedp);
8413 if (htem != hipart)
8414 emit_move_insn (hipart, htem);
8415 return REDUCE_BIT_FIELD (temp);
8416 }
8417 }
8419 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8420 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8421 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8422 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
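/* A typical case reaching the widening path: with 32-bit int and
   64-bit long long,

       long long p = (long long) a * (long long) b;

   is expanded with a single 32x32->64 widening multiply (e.g. a
   mulsidi3-style pattern, if the target provides one) instead of
   extending both operands and doing a full 64x64 multiply; the
   usmul_widen_optab branch above covers one signed and one unsigned
   input.  */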
8424 case FMA_EXPR:
8425 {
8426 optab opt = fma_optab;
8427 gimple def0, def2;
8429 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8430 call.  */
8431 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8432 {
8433 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8434 tree call_expr;
8436 gcc_assert (fn != NULL_TREE);
8437 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8438 return expand_builtin (call_expr, target, subtarget, mode, false);
8439 }
8441 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8442 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8444 op0 = op2 = NULL;
8446 if (def0 && def2
8447 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8448 {
8449 opt = fnms_optab;
8450 op0 = expand_normal (gimple_assign_rhs1 (def0));
8451 op2 = expand_normal (gimple_assign_rhs1 (def2));
8452 }
8453 else if (def0
8454 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8455 {
8456 opt = fnma_optab;
8457 op0 = expand_normal (gimple_assign_rhs1 (def0));
8458 }
8459 else if (def2
8460 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8461 {
8462 opt = fms_optab;
8463 op2 = expand_normal (gimple_assign_rhs1 (def2));
8464 }
8466 if (op0 == NULL)
8467 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8468 if (op2 == NULL)
8469 op2 = expand_normal (treeop2);
8470 op1 = expand_normal (treeop1);
8472 return expand_ternary_op (TYPE_MODE (type), opt,
8473 op0, op1, op2, target, 0);
8474 }
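/* E.g. if treeop0 is defined by the SSA statement "-x", the form
   "(-x)*y + z" is routed to the fnma optab above; likewise a negated
   addend selects fms and both negations together select fnms.  When
   even plain fma is unsupported, the __builtin_fma call built earlier
   ends up as a libm call.  */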
8476 case MULT_EXPR:
8477 /* If this is a fixed-point operation, then we cannot use the code
8478 below because "expand_mult" doesn't support sat/no-sat fixed-point
8479 multiplications.  */
8480 if (ALL_FIXED_POINT_MODE_P (mode))
8481 goto binop;
8483 /* If first operand is constant, swap them.
8484 Thus the following special case checks need only
8485 check the second operand. */
8486 if (TREE_CODE (treeop0) == INTEGER_CST)
8487 {
8488 tree t1 = treeop0;
8489 treeop0 = treeop1;
8490 treeop1 = t1;
8491 }
8493 /* Attempt to return something suitable for generating an
8494 indexed address, for machines that support that. */
8496 if (modifier == EXPAND_SUM && mode == ptr_mode
8497 && host_integerp (treeop1, 0))
8498 {
8499 tree exp1 = treeop1;
8501 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8502 EXPAND_SUM);
8504 if (!REG_P (op0))
8505 op0 = force_operand (op0, NULL_RTX);
8506 if (!REG_P (op0))
8507 op0 = copy_to_mode_reg (mode, op0);
8509 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8510 gen_int_mode (tree_low_cst (exp1, 0),
8511 TYPE_MODE (TREE_TYPE (exp1)))));
8512 }
8514 if (modifier == EXPAND_STACK_PARM)
8515 target = 0;
8517 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8518 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8520 case TRUNC_DIV_EXPR:
8521 case FLOOR_DIV_EXPR:
8522 case CEIL_DIV_EXPR:
8523 case ROUND_DIV_EXPR:
8524 case EXACT_DIV_EXPR:
8525 /* If this is a fixed-point operation, then we cannot use the code
8526 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8527 divisions.  */
8528 if (ALL_FIXED_POINT_MODE_P (mode))
8529 goto binop;
8531 if (modifier == EXPAND_STACK_PARM)
8532 target = 0;
8533 /* Possible optimization: compute the dividend with EXPAND_SUM
8534 then if the divisor is constant can optimize the case
8535 where some terms of the dividend have coeffs divisible by it. */
8536 expand_operands (treeop0, treeop1,
8537 subtarget, &op0, &op1, EXPAND_NORMAL);
8538 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
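/* The variants differ only in how the quotient is rounded; e.g. for
   -7 / 2, TRUNC_DIV gives -3 (toward zero), FLOOR_DIV gives -4,
   CEIL_DIV gives -3 and ROUND_DIV gives -4 (nearest).  expand_divmod
   emits whatever fixup code the rounding implied by CODE requires.  */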
8540 case RDIV_EXPR:
8541 goto binop;
8543 case MULT_HIGHPART_EXPR:
8544 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8545 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8546 gcc_assert (temp);
8547 return temp;
8549 case TRUNC_MOD_EXPR:
8550 case FLOOR_MOD_EXPR:
8551 case CEIL_MOD_EXPR:
8552 case ROUND_MOD_EXPR:
8553 if (modifier == EXPAND_STACK_PARM)
8554 target = 0;
8555 expand_operands (treeop0, treeop1,
8556 subtarget, &op0, &op1, EXPAND_NORMAL);
8557 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
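/* The remainder follows the quotient's rounding so that
   q * divisor + r == dividend always holds; e.g. -7 mod 2 is -1
   under TRUNC_MOD but +1 under FLOOR_MOD.  */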
8559 case FIXED_CONVERT_EXPR:
8560 op0 = expand_normal (treeop0);
8561 if (target == 0 || modifier == EXPAND_STACK_PARM)
8562 target = gen_reg_rtx (mode);
8564 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8565 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8566 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8567 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8568 else
8569 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8570 return target;
8572 case FIX_TRUNC_EXPR:
8573 op0 = expand_normal (treeop0);
8574 if (target == 0 || modifier == EXPAND_STACK_PARM)
8575 target = gen_reg_rtx (mode);
8576 expand_fix (target, op0, unsignedp);
8577 return target;
8579 case FLOAT_EXPR:
8580 op0 = expand_normal (treeop0);
8581 if (target == 0 || modifier == EXPAND_STACK_PARM)
8582 target = gen_reg_rtx (mode);
8583 /* expand_float can't figure out what to do if FROM has VOIDmode.
8584 So give it the correct mode. With -O, cse will optimize this. */
8585 if (GET_MODE (op0) == VOIDmode)
8586 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8587 op0);
8588 expand_float (target, op0,
8589 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8590 return target;
8592 case NEGATE_EXPR:
8593 op0 = expand_expr (treeop0, subtarget,
8594 VOIDmode, EXPAND_NORMAL);
8595 if (modifier == EXPAND_STACK_PARM)
8596 target = 0;
8597 temp = expand_unop (mode,
8598 optab_for_tree_code (NEGATE_EXPR, type,
8599 optab_default),
8600 op0, target, 0);
8601 gcc_assert (temp);
8602 return REDUCE_BIT_FIELD (temp);
8604 case ABS_EXPR:
8605 op0 = expand_expr (treeop0, subtarget,
8606 VOIDmode, EXPAND_NORMAL);
8607 if (modifier == EXPAND_STACK_PARM)
8608 target = 0;
8610 /* ABS_EXPR is not valid for complex arguments. */
8611 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8612 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8614 /* Unsigned abs is simply the operand. Testing here means we don't
8615 risk generating incorrect code below. */
8616 if (TYPE_UNSIGNED (type))
8617 return op0;
8619 return expand_abs (mode, op0, target, unsignedp,
8620 safe_from_p (target, treeop0, 1));
8622 case MAX_EXPR:
8623 case MIN_EXPR:
8624 target = original_target;
8625 if (target == 0
8626 || modifier == EXPAND_STACK_PARM
8627 || (MEM_P (target) && MEM_VOLATILE_P (target))
8628 || GET_MODE (target) != mode
8629 || (REG_P (target)
8630 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8631 target = gen_reg_rtx (mode);
8632 expand_operands (treeop0, treeop1,
8633 target, &op0, &op1, EXPAND_NORMAL);
8635 /* First try to do it with a special MIN or MAX instruction.
8636 If that does not win, use a conditional jump to select the proper
8637 value.  */
8638 this_optab = optab_for_tree_code (code, type, optab_default);
8639 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8640 OPTAB_WIDEN);
8641 if (temp != 0)
8642 return temp;
8644 /* At this point, a MEM target is no longer useful; we will get better
8645 code without it.  */
8647 if (! REG_P (target))
8648 target = gen_reg_rtx (mode);
8650 /* If op1 was placed in target, swap op0 and op1. */
8651 if (target != op0 && target == op1)
8652 {
8653 temp = op0;
8654 op0 = op1;
8655 op1 = temp;
8656 }
8658 /* We generate better code and avoid problems with op1 mentioning
8659 target by forcing op1 into a pseudo if it isn't a constant. */
8660 if (! CONSTANT_P (op1))
8661 op1 = force_reg (mode, op1);
8663 {
8664 enum rtx_code comparison_code;
8665 rtx cmpop1 = op1;
8667 if (code == MAX_EXPR)
8668 comparison_code = unsignedp ? GEU : GE;
8669 else
8670 comparison_code = unsignedp ? LEU : LE;
8672 /* Canonicalize to comparisons against 0. */
8673 if (op1 == const1_rtx)
8674 {
8675 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8676 or (a != 0 ? a : 1) for unsigned.
8677 For MIN we are safe converting (a <= 1 ? a : 1)
8678 into (a <= 0 ? a : 1) */
8679 cmpop1 = const0_rtx;
8680 if (code == MAX_EXPR)
8681 comparison_code = unsignedp ? NE : GT;
8682 }
8683 if (op1 == constm1_rtx && !unsignedp)
8684 {
8685 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8686 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8687 cmpop1 = const0_rtx;
8688 if (code == MIN_EXPR)
8689 comparison_code = LT;
8690 }
8691 #ifdef HAVE_conditional_move
8692 /* Use a conditional move if possible. */
8693 if (can_conditionally_move_p (mode))
8694 {
8695 rtx insn;
8697 /* ??? Same problem as in expmed.c: emit_conditional_move
8698 forces a stack adjustment via compare_from_rtx, and we
8699 lose the stack adjustment if the sequence we are about
8700 to create is discarded. */
8701 do_pending_stack_adjust ();
8703 start_sequence ();
8705 /* Try to emit the conditional move. */
8706 insn = emit_conditional_move (target, comparison_code,
8707 op0, cmpop1, mode,
8708 op0, op1, mode,
8709 unsignedp);
8711 /* If we could do the conditional move, emit the sequence,
8712 and return.  */
8713 if (insn)
8714 {
8715 rtx seq = get_insns ();
8716 end_sequence ();
8717 emit_insn (seq);
8718 return target;
8719 }
8721 /* Otherwise discard the sequence and fall back to code with
8722 branches.  */
8723 end_sequence ();
8724 }
8725 #endif
8726 if (target != op0)
8727 emit_move_insn (target, op0);
8729 temp = gen_label_rtx ();
8730 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8731 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8732 -1);
8733 }
8734 emit_move_insn (target, op1);
8735 emit_label (temp);
8736 return target;
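/* So "x = MAX (a, b)" first tries a max instruction, then a
   conditional move, and only then this branchy fallback:

       x = a; if (! (x >= b)) x = b;

   with the comparison canonicalized against zero when B is the
   constant 1 or -1, as set up above.  */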
8738 case BIT_NOT_EXPR:
8739 op0 = expand_expr (treeop0, subtarget,
8740 VOIDmode, EXPAND_NORMAL);
8741 if (modifier == EXPAND_STACK_PARM)
8742 target = 0;
8743 /* In case we have to reduce the result to bitfield precision
8744 for unsigned bitfields, expand this as XOR with a proper constant
8745 instead.  */
8746 if (reduce_bit_field && TYPE_UNSIGNED (type))
8747 temp = expand_binop (mode, xor_optab, op0,
8748 immed_double_int_const
8749 (double_int::mask (TYPE_PRECISION (type)), mode),
8750 target, 1, OPTAB_LIB_WIDEN);
8751 else
8752 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8753 gcc_assert (temp);
8754 return temp;
8756 /* ??? Can optimize bitwise operations with one arg constant.
8757 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8758 and (a bitwise1 b) bitwise2 b (etc)
8759 but that is probably not worth while.  */
8761 case BIT_AND_EXPR:
8762 case BIT_IOR_EXPR:
8763 case BIT_XOR_EXPR:
8764 goto binop;
8766 case LROTATE_EXPR:
8767 case RROTATE_EXPR:
8768 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8769 || (GET_MODE_PRECISION (TYPE_MODE (type))
8770 == TYPE_PRECISION (type)));
8771 /* fall through */
8773 case LSHIFT_EXPR:
8774 case RSHIFT_EXPR:
8775 /* If this is a fixed-point operation, then we cannot use the code
8776 below because "expand_shift" doesn't support sat/no-sat fixed-point
8777 shifts.  */
8778 if (ALL_FIXED_POINT_MODE_P (mode))
8779 goto binop;
8781 if (! safe_from_p (subtarget, treeop1, 1))
8782 subtarget = 0;
8783 if (modifier == EXPAND_STACK_PARM)
8784 target = 0;
8785 op0 = expand_expr (treeop0, subtarget,
8786 VOIDmode, EXPAND_NORMAL);
8787 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8788 unsignedp);
8789 if (code == LSHIFT_EXPR)
8790 temp = REDUCE_BIT_FIELD (temp);
8791 return temp;
8793 /* Could determine the answer when only additive constants differ. Also,
8794 the addition of one can be handled by changing the condition.  */
8795 case LT_EXPR:
8796 case LE_EXPR:
8797 case GT_EXPR:
8798 case GE_EXPR:
8799 case EQ_EXPR:
8800 case NE_EXPR:
8801 case UNORDERED_EXPR:
8802 case ORDERED_EXPR:
8803 case UNLT_EXPR:
8804 case UNLE_EXPR:
8805 case UNGT_EXPR:
8806 case UNGE_EXPR:
8807 case UNEQ_EXPR:
8808 case LTGT_EXPR:
8809 temp = do_store_flag (ops,
8810 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8811 tmode != VOIDmode ? tmode : mode);
8812 if (temp)
8813 return temp;
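/* do_store_flag materializes the comparison result directly, e.g.
   "r = (a < b)" as a set-on-less-than sequence with no branches; the
   jump-based fallback below is used for BLKmode operands or when no
   store-flag pattern applies.  */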
8815 /* Use a compare and a jump for BLKmode comparisons, or for function
8816 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
8818 if ((target == 0
8819 || modifier == EXPAND_STACK_PARM
8820 || ! safe_from_p (target, treeop0, 1)
8821 || ! safe_from_p (target, treeop1, 1)
8822 /* Make sure we don't have a hard reg (such as function's return
8823 value) live across basic blocks, if not optimizing. */
8824 || (!optimize && REG_P (target)
8825 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8826 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8828 emit_move_insn (target, const0_rtx);
8830 op1 = gen_label_rtx ();
8831 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8833 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8834 emit_move_insn (target, constm1_rtx);
8835 else
8836 emit_move_insn (target, const1_rtx);
8838 emit_label (op1);
8839 return target;
8841 case COMPLEX_EXPR:
8842 /* Get the rtx code of the operands. */
8843 op0 = expand_normal (treeop0);
8844 op1 = expand_normal (treeop1);
8846 if (!target)
8847 target = gen_reg_rtx (TYPE_MODE (type));
8849 /* Move the real (op0) and imaginary (op1) parts to their location. */
8850 write_complex_part (target, op0, false);
8851 write_complex_part (target, op1, true);
8853 return target;
8855 case WIDEN_SUM_EXPR:
8856 {
8857 tree oprnd0 = treeop0;
8858 tree oprnd1 = treeop1;
8860 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8861 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8862 target, unsignedp);
8863 return target;
8864 }
8866 case REDUC_MAX_EXPR:
8867 case REDUC_MIN_EXPR:
8868 case REDUC_PLUS_EXPR:
8870 op0 = expand_normal (treeop0);
8871 this_optab = optab_for_tree_code (code, type, optab_default);
8872 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8873 gcc_assert (temp);
8874 return temp;
8875 }
8877 case VEC_LSHIFT_EXPR:
8878 case VEC_RSHIFT_EXPR:
8879 {
8880 target = expand_vec_shift_expr (ops, target);
8881 return target;
8882 }
8884 case VEC_UNPACK_HI_EXPR:
8885 case VEC_UNPACK_LO_EXPR:
8886 {
8887 op0 = expand_normal (treeop0);
8888 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8889 target, unsignedp);
8890 gcc_assert (temp);
8891 return temp;
8892 }
8894 case VEC_UNPACK_FLOAT_HI_EXPR:
8895 case VEC_UNPACK_FLOAT_LO_EXPR:
8896 {
8897 op0 = expand_normal (treeop0);
8898 /* The signedness is determined from input operand. */
8899 temp = expand_widen_pattern_expr
8900 (ops, op0, NULL_RTX, NULL_RTX,
8901 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8902 gcc_assert (temp);
8903 return temp;
8904 }
8907 case VEC_WIDEN_MULT_HI_EXPR:
8908 case VEC_WIDEN_MULT_LO_EXPR:
8909 case VEC_WIDEN_MULT_EVEN_EXPR:
8910 case VEC_WIDEN_MULT_ODD_EXPR:
8911 case VEC_WIDEN_LSHIFT_HI_EXPR:
8912 case VEC_WIDEN_LSHIFT_LO_EXPR:
8913 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8914 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8915 target, unsignedp);
8916 gcc_assert (target);
8917 return target;
8919 case VEC_PACK_TRUNC_EXPR:
8920 case VEC_PACK_SAT_EXPR:
8921 case VEC_PACK_FIX_TRUNC_EXPR:
8922 mode = TYPE_MODE (TREE_TYPE (treeop0));
8923 goto binop;
8925 case VEC_PERM_EXPR:
8926 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8927 op2 = expand_normal (treeop2);
8929 /* Careful here: if the target doesn't support integral vector modes,
8930 a constant selection vector could wind up smooshed into a normal
8931 integral constant. */
8932 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8934 tree sel_type = TREE_TYPE (treeop2);
8935 enum machine_mode vmode
8936 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8937 TYPE_VECTOR_SUBPARTS (sel_type));
8938 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8939 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8940 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8941 }
8942 else
8943 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8945 temp = expand_vec_perm (mode, op0, op1, op2, target);
8946 gcc_assert (temp);
8947 return temp;
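/* E.g. a constant selector {0, 2, 4, 6} picking the even elements of
   two V4SI inputs must survive as a CONST_VECTOR so expand_vec_perm
   can match it against the target's permute patterns; the subreg
   fixup above guards against it being smooshed into a plain integer
   constant.  */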
8949 case DOT_PROD_EXPR:
8950 {
8951 tree oprnd0 = treeop0;
8952 tree oprnd1 = treeop1;
8953 tree oprnd2 = treeop2;
8956 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8957 op2 = expand_normal (oprnd2);
8958 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8959 target, unsignedp);
8960 return target;
8961 }
8963 case REALIGN_LOAD_EXPR:
8964 {
8965 tree oprnd0 = treeop0;
8966 tree oprnd1 = treeop1;
8967 tree oprnd2 = treeop2;
8970 this_optab = optab_for_tree_code (code, type, optab_default);
8971 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8972 op2 = expand_normal (oprnd2);
8973 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8974 target, unsignedp);
8975 gcc_assert (temp);
8976 return temp;
8977 }
8979 case COND_EXPR:
8980 /* A COND_EXPR with its type being VOID_TYPE represents a
8981 conditional jump and is handled in
8982 expand_gimple_cond_expr. */
8983 gcc_assert (!VOID_TYPE_P (type));
8985 /* Note that COND_EXPRs whose type is a structure or union
8986 are required to be constructed to contain assignments of
8987 a temporary variable, so that we can evaluate them here
8988 for side effect only. If type is void, we must do likewise. */
8990 gcc_assert (!TREE_ADDRESSABLE (type)
8991 && !ignore
8992 && TREE_TYPE (treeop1) != void_type_node
8993 && TREE_TYPE (treeop2) != void_type_node);
8995 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
8996 if (temp)
8997 return temp;
8999 /* If we are not to produce a result, we have no target. Otherwise,
9000 if a target was specified use it; it will not be used as an
9001 intermediate target unless it is safe.  If no target, use a
9002 temporary.  */
9004 if (modifier != EXPAND_STACK_PARM
9005 && original_target
9006 && safe_from_p (original_target, treeop0, 1)
9007 && GET_MODE (original_target) == mode
9008 && !MEM_P (original_target))
9009 temp = original_target;
9010 else
9011 temp = assign_temp (type, 0, 1);
9013 do_pending_stack_adjust ();
9014 NO_DEFER_POP;
9015 op0 = gen_label_rtx ();
9016 op1 = gen_label_rtx ();
9017 jumpifnot (treeop0, op0, -1);
9018 store_expr (treeop1, temp,
9019 modifier == EXPAND_STACK_PARM,
9020 false);
9022 emit_jump_insn (gen_jump (op1));
9023 emit_barrier ();
9024 emit_label (op0);
9025 store_expr (treeop2, temp,
9026 modifier == EXPAND_STACK_PARM,
9027 false);
9029 emit_label (op1);
9030 OK_DEFER_POP;
9031 return temp;
9033 case VEC_COND_EXPR:
9034 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9035 return target;
9037 default:
9038 gcc_unreachable ();
9039 }
9041 /* Here to do an ordinary binary operator.  */
9042 binop:
9043 expand_operands (treeop0, treeop1,
9044 subtarget, &op0, &op1, EXPAND_NORMAL);
9045 binop2:
9046 this_optab = optab_for_tree_code (code, type, optab_default);
9047 binop3:
9048 if (modifier == EXPAND_STACK_PARM)
9049 target = 0;
9050 temp = expand_binop (mode, this_optab, op0, op1, target,
9051 unsignedp, OPTAB_LIB_WIDEN);
9052 gcc_assert (temp);
9053 /* Bitwise operations do not need bitfield reduction as we expect their
9054 operands being properly truncated. */
9055 if (code == BIT_XOR_EXPR
9056 || code == BIT_AND_EXPR
9057 || code == BIT_IOR_EXPR)
9058 return temp;
9059 return REDUCE_BIT_FIELD (temp);
9060 }
9061 #undef REDUCE_BIT_FIELD
9063 rtx
9064 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9065 enum expand_modifier modifier, rtx *alt_rtl)
9066 {
9067 rtx op0, op1, temp, decl_rtl;
9068 tree type;
9069 int unsignedp;
9070 enum machine_mode mode;
9071 enum tree_code code = TREE_CODE (exp);
9072 rtx subtarget, original_target;
9073 int ignore;
9074 tree context;
9075 bool reduce_bit_field;
9076 location_t loc = EXPR_LOCATION (exp);
9077 struct separate_ops ops;
9078 tree treeop0, treeop1, treeop2;
9079 tree ssa_name = NULL_TREE;
9080 gimple g;
9082 type = TREE_TYPE (exp);
9083 mode = TYPE_MODE (type);
9084 unsignedp = TYPE_UNSIGNED (type);
9086 treeop0 = treeop1 = treeop2 = NULL_TREE;
9087 if (!VL_EXP_CLASS_P (exp))
9088 switch (TREE_CODE_LENGTH (code))
9089 {
9090 default:
9091 case 3: treeop2 = TREE_OPERAND (exp, 2);
9092 case 2: treeop1 = TREE_OPERAND (exp, 1);
9093 case 1: treeop0 = TREE_OPERAND (exp, 0);
9094 case 0: break;
9095 }
9103 ignore = (target == const0_rtx
9104 || ((CONVERT_EXPR_CODE_P (code)
9105 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9106 && TREE_CODE (type) == VOID_TYPE));
9108 /* An operation in what may be a bit-field type needs the
9109 result to be reduced to the precision of the bit-field type,
9110 which is narrower than that of the type's mode. */
9111 reduce_bit_field = (!ignore
9112 && INTEGRAL_TYPE_P (type)
9113 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
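/* For instance, arithmetic on a bit-field type with TYPE_PRECISION 3
   is carried out in a full SImode register; REDUCE_BIT_FIELD then
   masks (unsigned) or sign-extends (signed) the result back to 3
   bits so that wrap-around matches the narrow type.  */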
9115 /* If we are going to ignore this result, we need only do something
9116 if there is a side-effect somewhere in the expression. If there
9117 is, short-circuit the most common cases here. Note that we must
9118 not call expand_expr with anything but const0_rtx in case this
9119 is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9121 if (ignore)
9122 {
9123 if (! TREE_SIDE_EFFECTS (exp))
9124 return const0_rtx;
9126 /* Ensure we reference a volatile object even if value is ignored, but
9127 don't do this if all we are doing is taking its address. */
9128 if (TREE_THIS_VOLATILE (exp)
9129 && TREE_CODE (exp) != FUNCTION_DECL
9130 && mode != VOIDmode && mode != BLKmode
9131 && modifier != EXPAND_CONST_ADDRESS)
9133 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9134 if (MEM_P (temp))
9135 copy_to_reg (temp);
9136 return const0_rtx;
9137 }
9139 if (TREE_CODE_CLASS (code) == tcc_unary
9140 || code == BIT_FIELD_REF
9141 || code == COMPONENT_REF
9142 || code == INDIRECT_REF)
9143 return expand_expr (treeop0, const0_rtx, VOIDmode,
9144 modifier);
9146 else if (TREE_CODE_CLASS (code) == tcc_binary
9147 || TREE_CODE_CLASS (code) == tcc_comparison
9148 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9150 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9151 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9152 return const0_rtx;
9153 }
9155 return const0_rtx;
9156 }
9158 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9159 target = 0;
9161 /* Use subtarget as the target for operand 0 of a binary operation. */
9162 subtarget = get_subtarget (target);
9163 original_target = target;
9165 switch (code)
9166 {
9167 case LABEL_DECL:
9168 {
9169 tree function = decl_function_context (exp);
9171 temp = label_rtx (exp);
9172 temp = gen_rtx_LABEL_REF (Pmode, temp);
9174 if (function != current_function_decl
9175 && function != 0)
9176 LABEL_REF_NONLOCAL_P (temp) = 1;
9178 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9179 return temp;
9180 }
9182 case SSA_NAME:
9183 /* ??? ivopts calls expander, without any preparation from
9184 out-of-ssa. So fake instructions as if this was an access to the
9185 base variable. This unnecessarily allocates a pseudo, see how we can
9186 reuse it, if partition base vars have it set already. */
9187 if (!currently_expanding_to_rtl)
9188 {
9189 tree var = SSA_NAME_VAR (exp);
9190 if (var && DECL_RTL_SET_P (var))
9191 return DECL_RTL (var);
9192 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9193 LAST_VIRTUAL_REGISTER + 1);
9194 }
9196 g = get_gimple_for_ssa_name (exp);
9197 /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9198 if (g == NULL
9199 && modifier == EXPAND_INITIALIZER
9200 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9201 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9202 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9203 g = SSA_NAME_DEF_STMT (exp);
9204 if (g)
9205 {
9206 rtx r;
9207 location_t saved_loc = curr_insn_location ();
9209 set_curr_insn_location (gimple_location (g));
9210 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9211 tmode, modifier, NULL);
9212 set_curr_insn_location (saved_loc);
9213 if (REG_P (r) && !REG_EXPR (r))
9214 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9215 return r;
9216 }
9218 ssa_name = exp;
9219 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9220 exp = SSA_NAME_VAR (ssa_name);
9221 goto expand_decl_rtl;
9223 case PARM_DECL:
9224 case VAR_DECL:
9225 /* If a static var's type was incomplete when the decl was written,
9226 but the type is complete now, lay out the decl now. */
9227 if (DECL_SIZE (exp) == 0
9228 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9229 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9230 layout_decl (exp, 0);
9232 /* ... fall through ...  */
9234 case FUNCTION_DECL:
9235 case RESULT_DECL:
9236 decl_rtl = DECL_RTL (exp);
9237 expand_decl_rtl:
9238 gcc_assert (decl_rtl);
9239 decl_rtl = copy_rtx (decl_rtl);
9240 /* Record writes to register variables. */
9241 if (modifier == EXPAND_WRITE
9242 && REG_P (decl_rtl)
9243 && HARD_REGISTER_P (decl_rtl))
9244 add_to_hard_reg_set (&crtl->asm_clobbers,
9245 GET_MODE (decl_rtl), REGNO (decl_rtl));
9247 /* Ensure variable marked as used even if it doesn't go through
9248 a parser.  If it hasn't been used yet, write out an external
9249 definition.  */
9250 TREE_USED (exp) = 1;
9252 /* Show we haven't gotten RTL for this yet.  */
9253 temp = 0;
9255 /* Variables inherited from containing functions should have
9256 been lowered by this point. */
9257 context = decl_function_context (exp);
9258 gcc_assert (!context
9259 || context == current_function_decl
9260 || TREE_STATIC (exp)
9261 || DECL_EXTERNAL (exp)
9262 /* ??? C++ creates functions that are not TREE_STATIC. */
9263 || TREE_CODE (exp) == FUNCTION_DECL);
9265 /* This is the case of an array whose size is to be determined
9266 from its initializer, while the initializer is still being parsed.
9267 ??? We aren't parsing while expanding anymore. */
9269 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9270 temp = validize_mem (decl_rtl);
9272 /* If DECL_RTL is memory, we are in the normal case and the
9273 address is not valid, get the address into a register. */
9275 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9276 {
9277 if (alt_rtl)
9278 *alt_rtl = decl_rtl;
9279 decl_rtl = use_anchored_address (decl_rtl);
9280 if (modifier != EXPAND_CONST_ADDRESS
9281 && modifier != EXPAND_SUM
9282 && !memory_address_addr_space_p (DECL_MODE (exp),
9284 MEM_ADDR_SPACE (decl_rtl)))
9285 temp = replace_equiv_address (decl_rtl,
9286 copy_rtx (XEXP (decl_rtl, 0)));
9287 }
9289 /* If we got something, return it. But first, set the alignment
9290 if the address is a register.  */
9291 if (temp != 0)
9292 {
9293 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9294 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9296 return temp;
9297 }
9299 /* If the mode of DECL_RTL does not match that of the decl,
9300 there are two cases: we are dealing with a BLKmode value
9301 that is returned in a register, or we are dealing with
9302 a promoted value. In the latter case, return a SUBREG
9303 of the wanted mode, but mark it so that we know that it
9304 was already extended. */
9305 if (REG_P (decl_rtl)
9306 && DECL_MODE (exp) != BLKmode
9307 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9309 enum machine_mode pmode;
9311 /* Get the signedness to be used for this variable. Ensure we get
9312 the same mode we got when the variable was declared. */
9313 if (code == SSA_NAME
9314 && (g = SSA_NAME_DEF_STMT (ssa_name))
9315 && gimple_code (g) == GIMPLE_CALL)
9317 gcc_assert (!gimple_call_internal_p (g));
9318 pmode = promote_function_mode (type, mode, &unsignedp,
9319 gimple_call_fntype (g),
9320 2);
9321 }
9322 else
9323 pmode = promote_decl_mode (exp, &unsignedp);
9324 gcc_assert (GET_MODE (decl_rtl) == pmode);
9326 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9327 SUBREG_PROMOTED_VAR_P (temp) = 1;
9328 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9329 return temp;
9330 }
9332 return decl_rtl;
9334 case INTEGER_CST:
9335 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9336 TREE_INT_CST_HIGH (exp), mode);
9337 return temp;
9339 case VECTOR_CST:
9340 {
9342 tree tmp = NULL_TREE;
9343 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9344 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9345 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9346 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9347 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9348 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9349 return const_vector_from_tree (exp);
9350 if (GET_MODE_CLASS (mode) == MODE_INT)
9352 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9354 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9355 }
9356 if (!tmp)
9357 {
9358 vec<constructor_elt, va_gc> *v;
9359 unsigned i;
9360 vec_alloc (v, VECTOR_CST_NELTS (exp));
9361 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9362 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9363 tmp = build_constructor (type, v);
9364 }
9365 return expand_expr (tmp, ignore ? const0_rtx : target,
9366 tmode, modifier);
9367 }
9369 case CONST_DECL:
9370 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9372 case REAL_CST:
9373 /* If optimized, generate immediate CONST_DOUBLE
9374 which will be turned into memory by reload if necessary.
9376 We used to force a register so that loop.c could see it. But
9377 this does not allow gen_* patterns to perform optimizations with
9378 the constants. It also produces two insns in cases like "x = 1.0;".
9379 On most machines, floating-point constants are not permitted in
9380 many insns, so we'd end up copying it to a register in any case.
9382 Now, we do the copying in expand_binop, if appropriate. */
9383 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9384 TYPE_MODE (TREE_TYPE (exp)));
9386 case FIXED_CST:
9387 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9388 TYPE_MODE (TREE_TYPE (exp)));
9390 case COMPLEX_CST:
9391 /* Handle evaluating a complex constant in a CONCAT target.  */
9392 if (original_target && GET_CODE (original_target) == CONCAT)
9394 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9395 rtx rtarg, itarg;
9397 rtarg = XEXP (original_target, 0);
9398 itarg = XEXP (original_target, 1);
9400 /* Move the real and imaginary parts separately. */
9401 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9402 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9404 if (op0 != rtarg)
9405 emit_move_insn (rtarg, op0);
9406 if (op1 != itarg)
9407 emit_move_insn (itarg, op1);
9409 return original_target;
9410 }
9412 /* ... fall through ...  */
9414 case STRING_CST:
9415 temp = expand_expr_constant (exp, 1, modifier);
9417 /* temp contains a constant address.
9418 On RISC machines where a constant address isn't valid,
9419 make some insns to get that address into a register. */
9420 if (modifier != EXPAND_CONST_ADDRESS
9421 && modifier != EXPAND_INITIALIZER
9422 && modifier != EXPAND_SUM
9423 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9424 MEM_ADDR_SPACE (temp)))
9425 return replace_equiv_address (temp,
9426 copy_rtx (XEXP (temp, 0)));
9427 return temp;
9429 case SAVE_EXPR:
9430 {
9431 tree val = treeop0;
9432 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9434 if (!SAVE_EXPR_RESOLVED_P (exp))
9436 /* We can indeed still hit this case, typically via builtin
9437 expanders calling save_expr immediately before expanding
9438 something. Assume this means that we only have to deal
9439 with non-BLKmode values. */
9440 gcc_assert (GET_MODE (ret) != BLKmode);
9442 val = build_decl (curr_insn_location (),
9443 VAR_DECL, NULL, TREE_TYPE (exp));
9444 DECL_ARTIFICIAL (val) = 1;
9445 DECL_IGNORED_P (val) = 1;
9447 TREE_OPERAND (exp, 0) = treeop0;
9448 SAVE_EXPR_RESOLVED_P (exp) = 1;
9450 if (!CONSTANT_P (ret))
9451 ret = copy_to_reg (ret);
9452 SET_DECL_RTL (val, ret);
9460 /* If we don't need the result, just ensure we evaluate any
9464 unsigned HOST_WIDE_INT idx;
9467 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9468 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9473 return expand_constructor (exp, target, modifier, false);
9475 case TARGET_MEM_REF:
9476 {
9477 addr_space_t as
9478 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9479 struct mem_address addr;
9480 enum insn_code icode;
9483 get_address_description (exp, &addr);
9484 op0 = addr_for_mem_ref (&addr, as, true);
9485 op0 = memory_address_addr_space (mode, op0, as);
9486 temp = gen_rtx_MEM (mode, op0);
9487 set_mem_attributes (temp, exp, 0);
9488 set_mem_addr_space (temp, as);
9489 align = get_object_alignment (exp);
9490 if (modifier != EXPAND_WRITE
9491 && modifier != EXPAND_MEMORY
9492 && align < GET_MODE_ALIGNMENT (mode)
9493 /* If the target does not have special handling for unaligned
9494 loads of mode then it can use regular moves for them. */
9495 && ((icode = optab_handler (movmisalign_optab, mode))
9496 != CODE_FOR_nothing))
9498 struct expand_operand ops[2];
9500 /* We've already validated the memory, and we're creating a
9501 new pseudo destination. The predicates really can't fail,
9502 nor can the generator. */
9503 create_output_operand (&ops[0], NULL_RTX, mode);
9504 create_fixed_operand (&ops[1], temp);
9505 expand_insn (icode, 2, ops);
9506 return ops[0].value;
9507 }
9508 return temp;
9509 }
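/* The movmisalign path lets targets with special unaligned-load
   instructions (e.g. vector loads with relaxed alignment) expand a
   misaligned TARGET_MEM_REF in a single insn; targets without such
   patterns simply keep the ordinary MEM built above.  */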
9511 case MEM_REF:
9512 {
9513 addr_space_t as
9514 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9515 enum machine_mode address_mode;
9516 tree base = TREE_OPERAND (exp, 0);
9518 enum insn_code icode;
9520 /* Handle expansion of non-aliased memory with non-BLKmode. That
9521 might end up in a register. */
9522 if (mem_ref_refers_to_non_mem_p (exp))
9524 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9527 base = TREE_OPERAND (base, 0);
9529 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9530 && (GET_MODE_BITSIZE (DECL_MODE (base))
9531 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9532 return expand_expr (build1 (VIEW_CONVERT_EXPR,
9533 TREE_TYPE (exp), base),
9534 target, tmode, modifier);
9535 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9536 bftype = TREE_TYPE (base);
9537 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9538 bftype = TREE_TYPE (exp);
9541 temp = assign_stack_temp (DECL_MODE (base),
9542 GET_MODE_SIZE (DECL_MODE (base)));
9543 store_expr (base, temp, 0, false);
9544 temp = adjust_address (temp, BLKmode, offset);
9545 set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
9548 return expand_expr (build3 (BIT_FIELD_REF, bftype,
9550 TYPE_SIZE (TREE_TYPE (exp)),
9552 target, tmode, modifier);
9554 address_mode = targetm.addr_space.address_mode (as);
9555 base = TREE_OPERAND (exp, 0);
9556 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9558 tree mask = gimple_assign_rhs2 (def_stmt);
9559 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9560 gimple_assign_rhs1 (def_stmt), mask);
9561 TREE_OPERAND (exp, 0) = base;
9563 align = get_object_alignment (exp);
9564 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9565 op0 = memory_address_addr_space (address_mode, op0, as);
9566 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9569 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9570 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9572 op0 = memory_address_addr_space (mode, op0, as);
9573 temp = gen_rtx_MEM (mode, op0);
9574 set_mem_attributes (temp, exp, 0);
9575 set_mem_addr_space (temp, as);
9576 if (TREE_THIS_VOLATILE (exp))
9577 MEM_VOLATILE_P (temp) = 1;
9578 if (modifier != EXPAND_WRITE
9579 && modifier != EXPAND_MEMORY
9580 && align < GET_MODE_ALIGNMENT (mode))
9582 if ((icode = optab_handler (movmisalign_optab, mode))
9583 != CODE_FOR_nothing)
9585 struct expand_operand ops[2];
9587 /* We've already validated the memory, and we're creating a
9588 new pseudo destination. The predicates really can't fail,
9589 nor can the generator. */
9590 create_output_operand (&ops[0], NULL_RTX, mode);
9591 create_fixed_operand (&ops[1], temp);
9592 expand_insn (icode, 2, ops);
9593 return ops[0].value;
9595 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9596 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9597 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9598 true, (modifier == EXPAND_STACK_PARM
9599 ? NULL_RTX : target),
9600 mode, mode);
9601 }
9602 return temp;
9603 }
9605 case ARRAY_REF:
9607 {
9608 tree array = treeop0;
9609 tree index = treeop1;
9611 /* Fold an expression like: "foo"[2].
9612 This is not done in fold so it won't happen inside &.
9613 Don't fold if this is for wide characters since it's too
9614 difficult to do correctly and this is a very rare case. */
9616 if (modifier != EXPAND_CONST_ADDRESS
9617 && modifier != EXPAND_INITIALIZER
9618 && modifier != EXPAND_MEMORY)
9620 tree t = fold_read_from_constant_string (exp);
9623 return expand_expr (t, target, tmode, modifier);
9626 /* If this is a constant index into a constant array,
9627 just get the value from the array. Handle both the cases when
9628 we have an explicit constructor and when our operand is a variable
9629 that was declared const. */
9631 if (modifier != EXPAND_CONST_ADDRESS
9632 && modifier != EXPAND_INITIALIZER
9633 && modifier != EXPAND_MEMORY
9634 && TREE_CODE (array) == CONSTRUCTOR
9635 && ! TREE_SIDE_EFFECTS (array)
9636 && TREE_CODE (index) == INTEGER_CST)
9638 unsigned HOST_WIDE_INT ix;
9641 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9643 if (tree_int_cst_equal (field, index))
9645 if (!TREE_SIDE_EFFECTS (value))
9646 return expand_expr (fold (value), target, tmode, modifier);
9651 else if (optimize >= 1
9652 && modifier != EXPAND_CONST_ADDRESS
9653 && modifier != EXPAND_INITIALIZER
9654 && modifier != EXPAND_MEMORY
9655 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9656 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9657 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9658 && const_value_known_p (array))
9660 if (TREE_CODE (index) == INTEGER_CST)
9662 tree init = DECL_INITIAL (array);
9664 if (TREE_CODE (init) == CONSTRUCTOR)
9666 unsigned HOST_WIDE_INT ix;
9669 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9671 if (tree_int_cst_equal (field, index))
9673 if (TREE_SIDE_EFFECTS (value))
9676 if (TREE_CODE (value) == CONSTRUCTOR)
9678 /* If VALUE is a CONSTRUCTOR, this
9679 optimization is only useful if
9680 this doesn't store the CONSTRUCTOR
9681 into memory. If it does, it is more
9682 efficient to just load the data from
9683 the array directly. */
9684 rtx ret = expand_constructor (value, target,
9686 if (ret == NULL_RTX)
9690 return expand_expr (fold (value), target, tmode,
9694 else if(TREE_CODE (init) == STRING_CST)
9696 tree index1 = index;
9697 tree low_bound = array_ref_low_bound (exp);
9698 index1 = fold_convert_loc (loc, sizetype,
9701 /* Optimize the special-case of a zero lower bound.
9703 We convert the low_bound to sizetype to avoid some problems
9704 with constant folding. (E.g. suppose the lower bound is 1,
9705 and its mode is QI.  Without the conversion, (ARRAY
9706 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9707 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
9709 if (! integer_zerop (low_bound))
9710 index1 = size_diffop_loc (loc, index1,
9711 fold_convert_loc (loc, sizetype,
9714 if (0 > compare_tree_int (index1,
9715 TREE_STRING_LENGTH (init)))
9716 {
9717 tree type = TREE_TYPE (TREE_TYPE (init));
9718 enum machine_mode mode = TYPE_MODE (type);
9720 if (GET_MODE_CLASS (mode) == MODE_INT
9721 && GET_MODE_SIZE (mode) == 1)
9722 return gen_int_mode (TREE_STRING_POINTER (init)
9723 [TREE_INT_CST_LOW (index1)],
9730 goto normal_inner_ref;
9733 /* If the operand is a CONSTRUCTOR, we can just extract the
9734 appropriate field if it is present. */
9735 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9737 unsigned HOST_WIDE_INT idx;
9740 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9742 if (field == treeop1
9743 /* We can normally use the value of the field in the
9744 CONSTRUCTOR. However, if this is a bitfield in
9745 an integral mode that we can fit in a HOST_WIDE_INT,
9746 we must mask only the number of bits in the bitfield,
9747 since this is done implicitly by the constructor. If
9748 the bitfield does not meet either of those conditions,
9749 we can't do this optimization. */
9750 && (! DECL_BIT_FIELD (field)
9751 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9752 && (GET_MODE_PRECISION (DECL_MODE (field))
9753 <= HOST_BITS_PER_WIDE_INT))))
9755 if (DECL_BIT_FIELD (field)
9756 && modifier == EXPAND_STACK_PARM)
9758 op0 = expand_expr (value, target, tmode, modifier);
9759 if (DECL_BIT_FIELD (field))
9761 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9762 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9764 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9766 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9767 op0 = expand_and (imode, op0, op1, target);
9771 int count = GET_MODE_PRECISION (imode) - bitsize;
9773 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9774 target, 0);
9775 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9776 target, 0);
9777 }
9778 }
9780 return op0;
9781 }
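/* The two expansions above are the usual bit-field idioms: for an
   unsigned FIELD of BITSIZE bits, mask with ((1 << bitsize) - 1);
   for a signed one, shift left then arithmetic-shift right by
   "precision - bitsize" so the sign bit is replicated.  */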
9783 goto normal_inner_ref;
9786 case ARRAY_RANGE_REF:
9789 enum machine_mode mode1, mode2;
9790 HOST_WIDE_INT bitsize, bitpos;
9792 int volatilep = 0, must_force_mem;
9793 bool packedp = false;
9794 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9795 &mode1, &unsignedp, &volatilep, true);
9796 rtx orig_op0, memloc;
9797 bool mem_attrs_from_type = false;
9799 /* If we got back the original object, something is wrong. Perhaps
9800 we are evaluating an expression too early. In any event, don't
9801 infinitely recurse. */
9802 gcc_assert (tem != exp);
9804 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9805 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9806 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9809 /* If TEM's type is a union of variable size, pass TARGET to the inner
9810 computation, since it will need a temporary and TARGET is known
9811 to have to do. This occurs in unchecked conversion in Ada. */
9814 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9815 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9816 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9818 && modifier != EXPAND_STACK_PARM
9819 ? target : NULL_RTX),
9821 (modifier == EXPAND_INITIALIZER
9822 || modifier == EXPAND_CONST_ADDRESS
9823 || modifier == EXPAND_STACK_PARM)
9824 ? modifier : EXPAND_NORMAL);
9827 /* If the bitfield is volatile, we want to access it in the
9828 field's mode, not the computed mode.
9829 If a MEM has VOIDmode (external with incomplete type),
9830 use BLKmode for it instead. */
9833 if (volatilep && flag_strict_volatile_bitfields > 0)
9834 op0 = adjust_address (op0, mode1, 0);
9835 else if (GET_MODE (op0) == VOIDmode)
9836 op0 = adjust_address (op0, BLKmode, 0);
9840 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9842 /* If we have either an offset, a BLKmode result, or a reference
9843 outside the underlying object, we must force it to memory.
9844 Such a case can occur in Ada if we have unchecked conversion
9845 of an expression from a scalar type to an aggregate type or
9846 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9847 passed a partially uninitialized object or a view-conversion
9848 to a larger size. */
9849 must_force_mem = (offset
9851 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9853 /* Handle CONCAT first. */
9854 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9857 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9860 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9863 op0 = XEXP (op0, 0);
9864 mode2 = GET_MODE (op0);
9866 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9867 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9871 op0 = XEXP (op0, 1);
9873 mode2 = GET_MODE (op0);
9876 /* Otherwise force into memory. */
9880 /* If this is a constant, put it in a register if it is a legitimate
9881 constant and we don't need a memory reference. */
9882 if (CONSTANT_P (op0)
9884 && targetm.legitimate_constant_p (mode2, op0)
9886 op0 = force_reg (mode2, op0);
9888 /* Otherwise, if this is a constant, try to force it to the constant
9889 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9890 is a legitimate constant. */
9891 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9892 op0 = validize_mem (memloc);
9894 /* Otherwise, if this is a constant or the object is not in memory
9895 and need be, put it there. */
9896 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9898 tree nt = build_qualified_type (TREE_TYPE (tem),
9899 (TYPE_QUALS (TREE_TYPE (tem))
9900 | TYPE_QUAL_CONST));
9901 memloc = assign_temp (nt, 1, 1);
9902 emit_move_insn (memloc, op0);
9904 mem_attrs_from_type = true;
9909 enum machine_mode address_mode;
9910 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9913 gcc_assert (MEM_P (op0));
9915 address_mode = get_address_mode (op0);
9916 if (GET_MODE (offset_rtx) != address_mode)
9917 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9919 if (GET_MODE (op0) == BLKmode
9920 /* A constant address in OP0 can have VOIDmode, we must
9921 not try to call force_reg in that case. */
9922 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9924 && (bitpos % bitsize) == 0
9925 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9926 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9928 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9932 op0 = offset_address (op0, offset_rtx,
9933 highest_pow2_factor (offset));
9936 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9937 record its alignment as BIGGEST_ALIGNMENT. */
9938 if (MEM_P (op0) && bitpos == 0 && offset != 0
9939 && is_aligning_offset (offset, tem))
9940 set_mem_align (op0, BIGGEST_ALIGNMENT);
9942 /* Don't forget about volatility even if this is a bitfield. */
9943 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9945 if (op0 == orig_op0)
9946 op0 = copy_rtx (op0);
9948 MEM_VOLATILE_P (op0) = 1;
9951 /* In cases where an aligned union has an unaligned object
9952 as a field, we might be extracting a BLKmode value from
9953 an integer-mode (e.g., SImode) object. Handle this case
9954 by doing the extract into an object as wide as the field
9955 (which we know to be the width of a basic mode), then
9956 storing into memory, and changing the mode to BLKmode. */
9957 if (mode1 == VOIDmode
9958 || REG_P (op0) || GET_CODE (op0) == SUBREG
9959 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9960 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9961 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9962 && modifier != EXPAND_CONST_ADDRESS
9963 && modifier != EXPAND_INITIALIZER)
9964 /* If the field is volatile, we always want an aligned
9965 access. Do this in following two situations:
9966 1. the access is not already naturally
9967 aligned, otherwise "normal" (non-bitfield) volatile fields
9968 become non-addressable.
9969 2. the bitsize is narrower than the access size. Need
9970 to extract bitfields from the access. */
9971 || (volatilep && flag_strict_volatile_bitfields > 0
9972 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9973 || (mode1 != BLKmode
9974 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
9975 /* If the field isn't aligned enough to fetch as a memref,
9976 fetch it as a bit field. */
9977 || (mode1 != BLKmode
9978 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9979 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9981 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9982 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9983 && ((modifier == EXPAND_CONST_ADDRESS
9984 || modifier == EXPAND_INITIALIZER)
9986 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9987 || (bitpos % BITS_PER_UNIT != 0)))
9988 /* If the type and the field are a constant size and the
9989 size of the type isn't the same size as the bitfield,
9990 we must use bitfield operations. */
9992 && TYPE_SIZE (TREE_TYPE (exp))
9993 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9994 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9997 enum machine_mode ext_mode = mode;
9999 if (ext_mode == BLKmode
10000 && ! (target != 0 && MEM_P (op0)
10002 && bitpos % BITS_PER_UNIT == 0))
10003 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10005 if (ext_mode == BLKmode)
10008 target = assign_temp (type, 1, 1);
10013 /* In this case, BITPOS must start at a byte boundary and
10014 TARGET, if specified, must be a MEM. */
10015 gcc_assert (MEM_P (op0)
10016 && (!target || MEM_P (target))
10017 && !(bitpos % BITS_PER_UNIT));
10019 emit_block_move (target,
10020 adjust_address (op0, VOIDmode,
10021 bitpos / BITS_PER_UNIT),
10022 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10023 / BITS_PER_UNIT),
10024 (modifier == EXPAND_STACK_PARM
10025 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10027 return target;
10028 }
10030 op0 = validize_mem (op0);
10032 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10033 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10035 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
10036 (modifier == EXPAND_STACK_PARM
10037 ? NULL_RTX : target),
10038 ext_mode, ext_mode);
10040 /* If the result is a record type and BITSIZE is narrower than
10041 the mode of OP0, an integral mode, and this is a big endian
10042 machine, we must put the field into the high-order bits. */
10043 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10044 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10045 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10046 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10047 GET_MODE_BITSIZE (GET_MODE (op0))
10048 - bitsize, op0, 1);
10050 /* If the result type is BLKmode, store the data into a temporary
10051 of the appropriate type, but with the mode corresponding to the
10052 mode for the data we have (op0's mode). It's tempting to make
10053 this a constant type, since we know it's only being stored once,
10054 but that can cause problems if we are taking the address of this
10055 COMPONENT_REF because the MEM of any reference via that address
10056 will have flags corresponding to the type, which will not
10057 necessarily be constant. */
10058 if (mode == BLKmode)
10062 new_rtx = assign_stack_temp_for_type (ext_mode,
10063 GET_MODE_BITSIZE (ext_mode),
10065 emit_move_insn (new_rtx, op0);
10066 op0 = copy_rtx (new_rtx);
10067 PUT_MODE (op0, BLKmode);
10073 /* If the result is BLKmode, use that to access the object
10075 if (mode == BLKmode)
10076 return op0;
10078 /* Get a reference to just this component. */
10079 if (modifier == EXPAND_CONST_ADDRESS
10080 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10081 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10083 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10085 if (op0 == orig_op0)
10086 op0 = copy_rtx (op0);
10088 /* If op0 is a temporary because of forcing to memory, pass only the
10089 type to set_mem_attributes so that the original expression is never
10090 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10091 if (mem_attrs_from_type)
10092 set_mem_attributes (op0, type, 0);
10094 set_mem_attributes (op0, exp, 0);
10096 if (REG_P (XEXP (op0, 0)))
10097 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10099 MEM_VOLATILE_P (op0) |= volatilep;
10100 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10101 || modifier == EXPAND_CONST_ADDRESS
10102 || modifier == EXPAND_INITIALIZER)
10103 return op0;
10104 else if (target == 0)
10105 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10107 convert_move (target, op0, unsignedp);
10108 return target;
10109 }
10111 case OBJ_TYPE_REF:
10112 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10114 case CALL_EXPR:
10115 /* All valid uses of __builtin_va_arg_pack () are removed during
10116 inlining.  */
10117 if (CALL_EXPR_VA_ARG_PACK (exp))
10118 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10119 {
10120 tree fndecl = get_callee_fndecl (exp), attr;
10122 if (fndecl
10123 && (attr = lookup_attribute ("error",
10124 DECL_ATTRIBUTES (fndecl))) != NULL)
10125 error ("%Kcall to %qs declared with attribute error: %s",
10126 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10127 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10128 else if (fndecl
10129 && (attr = lookup_attribute ("warning",
10130 DECL_ATTRIBUTES (fndecl))) != NULL)
10131 warning_at (tree_nonartificial_location (exp),
10132 0, "%Kcall to %qs declared with attribute warning: %s",
10133 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10134 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10136 /* Check for a built-in function. */
10137 if (fndecl && DECL_BUILT_IN (fndecl))
10139 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10140 return expand_builtin (exp, target, subtarget, tmode, ignore);
10141 }
10143 return expand_call (exp, target, ignore);
10145 case VIEW_CONVERT_EXPR:
10148 /* If we are converting to BLKmode, try to avoid an intermediate
10149 temporary by fetching an inner memory reference. */
10150 if (mode == BLKmode
10151 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10152 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10153 && handled_component_p (treeop0))
10155 enum machine_mode mode1;
10156 HOST_WIDE_INT bitsize, bitpos;
10161 = get_inner_reference (treeop0, &bitsize, &bitpos,
10162 &offset, &mode1, &unsignedp, &volatilep,
10166 /* ??? We should work harder and deal with non-zero offsets.  */
10167 if (!offset
10168 && (bitpos % BITS_PER_UNIT) == 0
10169 && bitsize >= 0
10170 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
10171 {
10172 /* See the normal_inner_ref case for the rationale. */
10174 = expand_expr (tem,
10175 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10176 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10178 && modifier != EXPAND_STACK_PARM
10179 ? target : NULL_RTX),
10181 (modifier == EXPAND_INITIALIZER
10182 || modifier == EXPAND_CONST_ADDRESS
10183 || modifier == EXPAND_STACK_PARM)
10184 ? modifier : EXPAND_NORMAL);
10186 if (MEM_P (orig_op0))
10190 /* Get a reference to just this component. */
10191 if (modifier == EXPAND_CONST_ADDRESS
10192 || modifier == EXPAND_SUM
10193 || modifier == EXPAND_INITIALIZER)
10194 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10196 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10198 if (op0 == orig_op0)
10199 op0 = copy_rtx (op0);
10201 set_mem_attributes (op0, treeop0, 0);
10202 if (REG_P (XEXP (op0, 0)))
10203 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10205 MEM_VOLATILE_P (op0) |= volatilep;
10211 op0 = expand_expr (treeop0,
10212 NULL_RTX, VOIDmode, modifier);
10214 /* If the input and output modes are both the same, we are done. */
10215 if (mode == GET_MODE (op0))
10216 ;
10217 /* If neither mode is BLKmode, and both modes are the same size
10218 then we can use gen_lowpart. */
10219 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10220 && (GET_MODE_PRECISION (mode)
10221 == GET_MODE_PRECISION (GET_MODE (op0)))
10222 && !COMPLEX_MODE_P (GET_MODE (op0)))
10224 if (GET_CODE (op0) == SUBREG)
10225 op0 = force_reg (GET_MODE (op0), op0);
10226 temp = gen_lowpart_common (mode, op0);
10227 if (temp != 0)
10228 op0 = temp;
10229 else
10230 {
10231 if (!REG_P (op0) && !MEM_P (op0))
10232 op0 = force_reg (GET_MODE (op0), op0);
10233 op0 = gen_lowpart (mode, op0);
10234 }
10235 }
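/* E.g. a VIEW_CONVERT_EXPR between a 32-bit float and a 32-bit int
   only relabels the bits: when both sides have the same non-BLK mode
   size, gen_lowpart yields a SUBREG (or the register itself) and no
   data movement is emitted.  */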
10236 /* If both types are integral, convert from one mode to the other. */
10237 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10238 op0 = convert_modes (mode, GET_MODE (op0), op0,
10239 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10240 /* As a last resort, spill op0 to memory, and reload it in a
10241 wider mode.  */
10242 else if (!MEM_P (op0))
10244 /* If the operand is not a MEM, force it into memory. Since we
10245 are going to be changing the mode of the MEM, don't call
10246 force_const_mem for constants because we don't allow pool
10247 constants to change mode. */
10248 tree inner_type = TREE_TYPE (treeop0);
10250 gcc_assert (!TREE_ADDRESSABLE (exp));
10252 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10254 = assign_stack_temp_for_type
10255 (TYPE_MODE (inner_type),
10256 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10258 emit_move_insn (target, op0);
10259 op0 = target;
10260 }
10262 /* At this point, OP0 is in the correct mode. If the output type is
10263 such that the operand is known to be aligned, indicate that it is.
10264 Otherwise, we need only be concerned about alignment for non-BLKmode
10265 results.  */
10266 if (MEM_P (op0))
10267 {
10268 enum insn_code icode;
10270 if (TYPE_ALIGN_OK (type))
10272 /* ??? Copying the MEM without substantially changing it might
10273 run afoul of the code handling volatile memory references in
10274 store_expr, which assumes that TARGET is returned unmodified
10275 if it has been used. */
10276 op0 = copy_rtx (op0);
10277 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
		   /* If the target does have special handling for unaligned
		      loads of mode then use them.  */
		   && ((icode = optab_handler (movmisalign_optab, mode))
		       != CODE_FOR_nothing))
	    {
	      rtx reg, insn;

	      op0 = adjust_address (op0, mode, 0);
	      /* We've already validated the memory, and we're creating a
		 new pseudo destination.  The predicates really can't
		 fail.  */
	      reg = gen_reg_rtx (mode);

	      /* Nor can the insn generator.  */
	      insn = GEN_FCN (icode) (reg, op0);
	      emit_insn (insn);
	      return reg;
	    }
	  else if (STRICT_ALIGNMENT
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      tree inner_type = TREE_TYPE (treeop0);
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
	      rtx new_rtx
		= assign_stack_temp_for_type (mode, temp_size, type);
	      rtx new_with_op0_mode
		= adjust_address (new_rtx, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (mode)),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new_rtx;
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
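	/* As an illustrative sketch (not from the original sources): for
	   one-bit bitfields the special case below turns

	     s.b |= t.c;  into  if (t.c) s.b = 1;
	     s.b &= t.c;  into  if (!t.c) s.b = 0;

	   so the destination bitfield is only written when its value
	   actually changes.  */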
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);

    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
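/* For example (illustrative only): with 32-bit SImode, reducing to a
   signed 3-bit type emits "x << 29 >> 29" (the arithmetic right shift
   restores the sign bit), while an unsigned 3-bit type is simply
   "x & 7".  */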
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask = immed_double_int_const (double_int::mask (prec),
					 GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
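/* Schematically (an illustrative sketch, not taken from the original
   sources), the recognized offset has the shape

     offset = (-(sizetype) &exp) & (align - 1)

   i.e. a BIT_AND_EXPR whose first operand is a NEGATE_EXPR of the
   address of EXP and whose second operand is one less than a power-of-2
   alignment larger than BIGGEST_ALIGNMENT.  */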
static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
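/* For example (illustrative): for ARG == &"hello"[2] this returns the
   STRING_CST "hello" and sets *PTR_OFFSET to 2; a VAR_DECL whose
   DECL_INITIAL is a string literal is handled similarly, returning the
   literal itself.  */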
tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;
  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (!const_value_known_p (array)
	  || !DECL_INITIAL (array)
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must be
	 constant and inside the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
/* Generate code to calculate the expression described by OPS, an
   exploded comparison, using a store-flag instruction, and return an
   rtx for the result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
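/* For instance (illustrative), on an x86-style cstore target
   "r = (a < b)" can expand to a cmp/setl/movzbl sequence rather than a
   compare, a conditional jump and two moves.  */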
static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
    }
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
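  /* For example, a signed "x < 1" is rewritten as "x <= 0" and a signed
     "x <= -1" as "x < 0" by the LT_EXPR and LE_EXPR cases below.  */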
  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
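  /* E.g. (illustrative) "(x & 8) != 0" becomes "(x >> 3) & 1", and the
     EQ form gets a final "^ 1".  */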
  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);

      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
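/* For example (illustrative): for "switch (x) { case 3: ... case 9: ... }"
   MINVAL is 3 and RANGE is 6, and the casesi pattern receives the index,
   the lower bound, the range, the table label and the default label as
   its five operands.  */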
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    int default_probability)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
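  /* That is, the single unsigned test "(index - min) > (max - min)"
     catches both index < min (the subtraction wraps to a large unsigned
     value) and index > max.  */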
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
						   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
/* Build a decl for a personality function given a language prefix.  */
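/* For example, LANG "gxx" with DWARF-2 unwinding yields the familiar
   C++ personality routine name "__gxx_personality_v0"; SJLJ and SEH
   unwinding select the "_sj0" and "_seh0" suffixes instead.  */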
tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"