/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "tree-chkp.h"
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
  void *constfundata;
  int reverse;
};
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                     unsigned HOST_WIDE_INT, machine_mode,
                                     tree, int, alias_set_type, bool);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        machine_mode, tree, alias_set_type, bool, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static rtx const_scalar_mask_from_tree (tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx pat;
  machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  rtx insn;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            set_mode_and_regno (reg, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
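
/* Illustrative note (not part of the original file): the tables filled in
   above are what the conversion routines below consult.  A test such as

     if (MEM_P (x) && ! MEM_VOLATILE_P (x) && direct_load[(int) mode])
       ...

   asks whether a MEM of MODE can be loaded straight into a register, which
   is what lets convert_modes and convert_move apply gen_lowpart directly
   to memory operands.  */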
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;
    else if (unsignedp)
      ctab = zext_optab;
    else
      ctab = sext_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
        != CODE_FOR_nothing)
      {
        emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
                        to, from, UNKNOWN);
        return;
      }
  }

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_clobber (to);
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         the source does not overlap the target, so force it into an
         isolated register when it might.  Likewise for any MEM input,
         since the conversion sequence might require several references
         to it and we must ensure we're getting the same value every
         time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          machine_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
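
/* Illustrative sketch (not part of the original file): a typical caller
   allocates the destination itself and lets convert_move pick the insns,

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   which zero-extends SRC into DST because UNSIGNEDP is nonzero; passing 0
   would request sign-extension instead.  */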
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
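
/* Illustrative note (not part of the original file): unlike convert_move,
   which stores into a destination the caller provides, convert_to_mode
   hands back an rtx, so

     rtx wide = convert_to_mode (SImode, narrow, 0);

   may return NARROW itself reinterpreted in SImode when that is safe, or a
   fresh pseudo holding the sign-extended value otherwise.  */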
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* If the caller did not tell us the old mode, then there is not
         much to do with respect to canonicalization.  We have to
         assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
        oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (std::make_pair (x, oldmode),
                                   GET_MODE_PRECISION (mode),
                                   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_CLASS (oldmode) == MODE_INT
      && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
          || (REG_P (x)
              && (!HARD_REGISTER_P (x)
                  || HARD_REGNO_MODE_OK (REGNO (x), mode))
              && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
    return gen_lowpart (mode, x);

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
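
/* Worked example (not part of the original file): converting the constant
   300 from SImode to QImode goes through the wide_int path above,

     convert_modes (QImode, SImode, GEN_INT (300), 1)  ==>  (const_int 44)

   since 300 mod 256 is 44; no insns are emitted for constants.  */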
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode that is strictly narrower than SIZE
   bytes (callers pass their byte bound plus one).  If no such mode can
   be found, return VOIDmode.  */

static machine_mode
widest_int_mode_for_size (unsigned int size)
{
  machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
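
/* Worked example (not part of the original file): with the usual
   QI/HI/SI/DI progression, widest_int_mode_for_size (5) accepts QImode
   (1 byte), HImode (2) and SImode (4), all narrower than 5 bytes, rejects
   DImode (8), and therefore returns SImode.  */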
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
                                                 optimize_insn_for_speed_p ());
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end a la
   mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  machine_mode to_addr_mode;
  machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
      if (STACK_GROWS_DOWNWARD)
        data.reverse = 1;
      else
        data.reverse = 0;
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr_mode,
                                                            from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr_mode,
                                                          to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
    {
      machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
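
/* Worked example (not part of the original file): on a target whose
   MOVE_MAX_PIECES is 8, a well-aligned 7-byte copy is emitted by the loop
   above as one SImode move (4 bytes), then one HImode move (2), then one
   QImode move (1), with DATA.OFFSET advancing 0 -> 4 -> 6 as each call to
   move_by_pieces_1 consumes its share of LEN.  */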
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
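
/* Worked example (not part of the original file): for L = 7 with ample
   alignment, the loop counts 7/4 = 1 SImode move (3 bytes left), then
   3/2 = 1 HImode move (1 byte left), then 1 QImode move, and returns 3.  */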
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
                  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->from_addr))));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->from_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move.
   MAX_SIZE is the maximal size of block to move; if it cannot be represented
   in unsigned HOST_WIDE_INT, then it is the mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
                                min, max, max);
}
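
/* Illustrative sketch (not part of the original file): expanding a plain
   aggregate assignment ends up here as something like

     emit_block_move (dst_mem, src_mem, GEN_INT (size_in_bytes),
                      BLOCK_OP_NORMAL);

   where the constant size lets the by-pieces path above be taken.  */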
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;
  machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
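
/* Illustrative note (not part of the original file): the operand counts
   accepted above correspond to the successive revisions of the movmem
   interface: 4 operands (dst, src, size, align), 6 (+ expected_align,
   expected_size), 8 (+ min_size, max_size) and 9 (+ probable_max_size).  */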
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label, REG_BR_PROB_BASE * 90 / 100);
}
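
/* Illustrative note (not part of the original file): the RTL emitted above
   behaves like

     iter = 0;
     goto cmp;
   top:
     *(x + iter) = *(y + iter);    -- one QImode move per iteration
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;

   with the backward branch predicted taken 90% of the time.  */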
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
                                                     GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
                                                      GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
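
/* Illustrative note (not part of the original file): such a group looks
   like

     (parallel [(expr_list (reg:DI 10) (const_int 0))
                (expr_list (reg:DI 11) (const_int 8))])

   i.e. each element pairs a register with the byte offset it covers in
   the whole value; a NULL register in the first slot means part of the
   value lives on the stack as well.  */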
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, false);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode, false);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, false);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i], false);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
               && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i], false);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
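
/* Illustrative sketch (not part of the original file): a caller holding a
   function result that may or may not be a register group can write

     rtx val = maybe_emit_group_store (retval, TREE_TYPE (exp));

   and afterwards treat VAL as an ordinary rtx of TYPE_MODE (type).  */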
2082 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2084 This is used on targets that return BLKmode values in registers. */
2087 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2089 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2090 rtx src = NULL, dst = NULL;
2091 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2092 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2093 machine_mode mode = GET_MODE (srcreg);
2094 machine_mode tmode = GET_MODE (target);
2095 machine_mode copy_mode;
2097 /* BLKmode registers created in the back-end shouldn't have survived. */
2098 gcc_assert (mode != BLKmode);
2100 /* If the structure doesn't take up a whole number of words, see whether
2101 SRCREG is padded on the left or on the right. If it's on the left,
2102 set PADDING_CORRECTION to the number of bits to skip.
2104 In most ABIs, the structure will be returned at the least end of
2105 the register, which translates to right padding on little-endian
2106 targets and left padding on big-endian targets. The opposite
2107 holds if the structure is returned at the most significant
2108 end of the register. */
2109 if (bytes % UNITS_PER_WORD != 0
2110 && (targetm.calls.return_in_msb (type)
2112 : BYTES_BIG_ENDIAN))
2114 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2116 /* We can use a single move if we have an exact mode for the size. */
2117 else if (MEM_P (target)
2118 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2119 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2120 && bytes == GET_MODE_SIZE (mode))
2122 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2126 /* And if we additionally have the same mode for a register. */
2127 else if (REG_P (target)
2128 && GET_MODE (target) == mode
2129 && bytes == GET_MODE_SIZE (mode))
2131 emit_move_insn (target, srcreg);
2135 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2136 into a new pseudo which is a full word. */
2137 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2139 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2143 /* Copy the structure BITSIZE bits at a time. If the target lives in
2144 memory, take care of not reading/writing past its end by selecting
2145 a copy mode suited to BITSIZE. This should always be possible given the natural alignment of the structure.
2148 If the target lives in a register, make sure not to select a copy mode
2149 larger than the mode of the register.
2151 We could probably emit more efficient code for machines which do not use
2152 strict alignment, but it doesn't seem worth the effort at the current
2155 copy_mode = word_mode;
2158 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2159 if (mem_mode != BLKmode)
2160 copy_mode = mem_mode;
2162 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2165 for (bitpos = 0, xbitpos = padding_correction;
2166 bitpos < bytes * BITS_PER_UNIT;
2167 bitpos += bitsize, xbitpos += bitsize)
2169 /* We need a new source operand each time xbitpos is on a
2170 word boundary and when xbitpos == padding_correction
2171 (the first time through). */
2172 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2173 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2175 /* We need a new destination operand each time bitpos is on a word boundary. */
2177 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2179 else if (bitpos % BITS_PER_WORD == 0)
2180 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2182 /* Use xbitpos for the source extraction (right justified) and
2183 bitpos for the destination store (left justified). */
2184 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2185 extract_bit_field (src, bitsize,
2186 xbitpos % BITS_PER_WORD, 1,
2187 NULL_RTX, copy_mode, copy_mode,
2193 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2194 register if it contains any data, otherwise return null.
2196 This is used on targets that return BLKmode values in registers. */
2199 copy_blkmode_to_reg (machine_mode mode, tree src)
2202 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2203 unsigned int bitsize;
2204 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2205 machine_mode dst_mode;
2207 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2209 x = expand_normal (src);
2211 bytes = int_size_in_bytes (TREE_TYPE (src));
2215 /* If the structure doesn't take up a whole number of words, see
2216 whether the register value should be padded on the left or on
2217 the right. Set PADDING_CORRECTION to the number of padding
2218 bits needed on the left side.
2220 In most ABIs, the structure will be returned at the least significant end of
2221 the register, which translates to right padding on little-endian
2222 targets and left padding on big-endian targets. The opposite
2223 holds if the structure is returned at the most significant
2224 end of the register. */
2225 if (bytes % UNITS_PER_WORD != 0
2226 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2228 : BYTES_BIG_ENDIAN))
2229 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2232 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2233 dst_words = XALLOCAVEC (rtx, n_regs);
2234 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2236 /* Copy the structure BITSIZE bits at a time. */
2237 for (bitpos = 0, xbitpos = padding_correction;
2238 bitpos < bytes * BITS_PER_UNIT;
2239 bitpos += bitsize, xbitpos += bitsize)
2241 /* We need a new destination pseudo each time xbitpos is
2242 on a word boundary and when xbitpos == padding_correction
2243 (the first time through). */
2244 if (xbitpos % BITS_PER_WORD == 0
2245 || xbitpos == padding_correction)
2247 /* Generate an appropriate register. */
2248 dst_word = gen_reg_rtx (word_mode);
2249 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2251 /* Clear the destination before we move anything into it. */
2252 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2255 /* We need a new source operand each time bitpos is on a word boundary. */
2257 if (bitpos % BITS_PER_WORD == 0)
2258 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2260 /* Use bitpos for the source extraction (left justified) and
2261 xbitpos for the destination store (right justified). */
2262 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2264 extract_bit_field (src_word, bitsize,
2265 bitpos % BITS_PER_WORD, 1,
2266 NULL_RTX, word_mode, word_mode,
2271 if (mode == BLKmode)
2273 /* Find the smallest integer mode large enough to hold the
2274 entire structure. */
2275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2277 mode = GET_MODE_WIDER_MODE (mode))
2278 /* Have we found a large enough mode? */
2279 if (GET_MODE_SIZE (mode) >= bytes)
2282 /* A suitable mode should have been found. */
2283 gcc_assert (mode != VOIDmode);
2286 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2287 dst_mode = word_mode;
2290 dst = gen_reg_rtx (dst_mode);
2292 for (i = 0; i < n_regs; i++)
2293 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2295 if (mode != dst_mode)
2296 dst = gen_lowpart (mode, dst);
2301 /* Add a USE expression for REG to the (possibly empty) list pointed
2302 to by CALL_FUSAGE. REG must denote a hard register. */
2305 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2307 gcc_assert (REG_P (reg));
2309 if (!HARD_REGISTER_P (reg))
2313 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2316 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2317 to by CALL_FUSAGE. REG must denote a hard register. */
2320 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2322 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2325 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2328 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2329 starting at REGNO. All of these registers must be hard registers. */
2332 use_regs (rtx *call_fusage, int regno, int nregs)
2336 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2338 for (i = 0; i < nregs; i++)
2339 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2342 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2343 PARALLEL REGS. This is for calls that pass values in multiple
2344 non-contiguous locations. The Irix 6 ABI has examples of this. */
2347 use_group_regs (rtx *call_fusage, rtx regs)
2351 for (i = 0; i < XVECLEN (regs, 0); i++)
2353 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2355 /* A NULL entry means the parameter goes both on the stack and in
2356 registers. This can also be a MEM for targets that pass values
2357 partially on the stack and partially in registers. */
2358 if (reg != 0 && REG_P (reg))
2359 use_reg (call_fusage, reg);
2363 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2364 assignment and the code of the expression on the RHS is CODE. Return NULL otherwise. */
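/* Usage sketch (illustrative; OP0 is a hypothetical operand being
   expanded): expansion code uses this to look through an SSA name for
   fusable patterns, e.g.

     gimple *def = get_def_for_expr (op0, MULT_EXPR);

   DEF is NULL unless OP0 is an SSA_NAME whose defining statement is a
   MULT_EXPR assignment; otherwise DEF's operands can be expanded
   directly, say to form a multiply-accumulate. */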
2368 get_def_for_expr (tree name, enum tree_code code)
2372 if (TREE_CODE (name) != SSA_NAME)
2375 def_stmt = get_gimple_for_ssa_name (name);
2377 || gimple_assign_rhs_code (def_stmt) != code)
2383 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2384 assignment and the class of the expression on the RHS is CLASS. Return NULL otherwise. */
2388 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2392 if (TREE_CODE (name) != SSA_NAME)
2395 def_stmt = get_gimple_for_ssa_name (name);
2397 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2404 /* Determine whether the LEN bytes generated by CONSTFUN can be
2405 stored to memory using several move instructions. CONSTFUNDATA is
2406 a pointer which will be passed as argument in every CONSTFUN call.
2407 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2408 a memset operation and false if it's a copy of a constant string.
2409 Return nonzero if a call to store_by_pieces should succeed. */
2412 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2413 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2414 void *constfundata, unsigned int align, bool memsetp)
2416 unsigned HOST_WIDE_INT l;
2417 unsigned int max_size;
2418 HOST_WIDE_INT offset = 0;
2420 enum insn_code icode;
2422 /* cst is set but not used if targetm.legitimate_constant_p doesn't use it. */
2423 rtx cst ATTRIBUTE_UNUSED;
2428 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2432 optimize_insn_for_speed_p ()))
2435 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2437 /* We would first store what we can in the largest integer mode, then go to
2438 successively smaller modes. */
2441 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2445 max_size = STORE_MAX_PIECES + 1;
2446 while (max_size > 1 && l > 0)
2448 mode = widest_int_mode_for_size (max_size);
2450 if (mode == VOIDmode)
2453 icode = optab_handler (mov_optab, mode);
2454 if (icode != CODE_FOR_nothing
2455 && align >= GET_MODE_ALIGNMENT (mode))
2457 unsigned int size = GET_MODE_SIZE (mode);
2464 cst = (*constfun) (constfundata, offset, mode);
2465 if (!targetm.legitimate_constant_p (mode, cst))
2475 max_size = GET_MODE_SIZE (mode);
2478 /* The code above should have handled everything. */
2485 /* Generate several move instructions to store LEN bytes generated by
2486 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2487 pointer which will be passed as argument in every CONSTFUN call.
2488 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2489 a memset operation and false if it's a copy of a constant string.
2490 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2491 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
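/* Usage sketch (illustrative): expanding something like
   mempcpy (p, "abc", 3) with a constant source might do

     result = store_by_pieces (to_mem, 3, builtin_memcpy_read_str,
                               (void *) "abc", align, false, 1);

   where builtin_memcpy_read_str stands for the usual constant-reading
   callback (named here only for illustration); with ENDP == 1, RESULT
   addresses the byte just past the three stored. */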
2495 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2496 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2497 void *constfundata, unsigned int align, bool memsetp, int endp)
2499 machine_mode to_addr_mode = get_address_mode (to);
2500 struct store_by_pieces_d data;
2504 gcc_assert (endp != 2);
2508 gcc_assert (targetm.use_by_pieces_infrastructure_p
2513 optimize_insn_for_speed_p ()));
2515 data.constfun = constfun;
2516 data.constfundata = constfundata;
2519 store_by_pieces_1 (&data, align);
2524 gcc_assert (!data.reverse);
2529 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2530 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2532 data.to_addr = copy_to_mode_reg (to_addr_mode,
2533 plus_constant (to_addr_mode,
2537 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2544 to1 = adjust_address (data.to, QImode, data.offset);
2552 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2553 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2556 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2558 struct store_by_pieces_d data;
2563 data.constfun = clear_by_pieces_1;
2564 data.constfundata = NULL;
2567 store_by_pieces_1 (&data, align);
2570 /* Callback routine for clear_by_pieces.
2571 Return const0_rtx unconditionally. */
2574 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2575 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2576 machine_mode mode ATTRIBUTE_UNUSED)
2581 /* Subroutine of clear_by_pieces and store_by_pieces.
2582 Generate several move instructions to store LEN bytes of block TO. (A MEM
2583 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2586 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2587 unsigned int align ATTRIBUTE_UNUSED)
2589 machine_mode to_addr_mode = get_address_mode (data->to);
2590 rtx to_addr = XEXP (data->to, 0);
2591 unsigned int max_size = STORE_MAX_PIECES + 1;
2592 enum insn_code icode;
2595 data->to_addr = to_addr;
2597 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2598 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2600 data->explicit_inc_to = 0;
2602 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2604 data->offset = data->len;
2606 /* If storing requires more than two move insns,
2607 copy addresses to registers (to make displacements shorter)
2608 and use post-increment if available. */
2609 if (!data->autinc_to
2610 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2612 /* Determine the main mode we'll be using.
2613 MODE might not be used depending on the definitions of the
2614 USE_* macros below. */
2615 machine_mode mode ATTRIBUTE_UNUSED
2616 = widest_int_mode_for_size (max_size);
2618 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2620 data->to_addr = copy_to_mode_reg (to_addr_mode,
2621 plus_constant (to_addr_mode,
2624 data->autinc_to = 1;
2625 data->explicit_inc_to = -1;
2628 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2629 && ! data->autinc_to)
2631 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2632 data->autinc_to = 1;
2633 data->explicit_inc_to = 1;
2636 if (!data->autinc_to && CONSTANT_P (to_addr))
2637 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2640 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2642 /* First store what we can in the largest integer mode, then go to
2643 successively smaller modes. */
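/* Worked example (hypothetical 32-bit target with STORE_MAX_PIECES == 4):
   for LEN == 7 the loop below emits one SImode store (4 bytes), then one
   HImode store (2 bytes), then one QImode store (1 byte), shrinking
   MAX_SIZE after each mode so strictly narrower modes are tried next. */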
2645 while (max_size > 1 && data->len > 0)
2647 machine_mode mode = widest_int_mode_for_size (max_size);
2649 if (mode == VOIDmode)
2652 icode = optab_handler (mov_optab, mode);
2653 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2654 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2656 max_size = GET_MODE_SIZE (mode);
2659 /* The code above should have handled everything. */
2660 gcc_assert (!data->len);
2663 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2664 with move instructions for mode MODE. GENFUN is the gen_... function
2665 to make a move insn for that mode. DATA has all the other info. */
2668 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2669 struct store_by_pieces_d *data)
2671 unsigned int size = GET_MODE_SIZE (mode);
2674 while (data->len >= size)
2677 data->offset -= size;
2679 if (data->autinc_to)
2680 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2683 to1 = adjust_address (data->to, mode, data->offset);
2685 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2686 emit_insn (gen_add2_insn (data->to_addr,
2687 gen_int_mode (-(HOST_WIDE_INT) size,
2688 GET_MODE (data->to_addr))));
2690 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2691 emit_insn ((*genfun) (to1, cst));
2693 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2694 emit_insn (gen_add2_insn (data->to_addr,
2696 GET_MODE (data->to_addr))));
2698 if (! data->reverse)
2699 data->offset += size;
2705 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2706 its length in bytes. */
2709 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2710 unsigned int expected_align, HOST_WIDE_INT expected_size,
2711 unsigned HOST_WIDE_INT min_size,
2712 unsigned HOST_WIDE_INT max_size,
2713 unsigned HOST_WIDE_INT probable_max_size)
2715 machine_mode mode = GET_MODE (object);
2718 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2720 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2721 just move a zero. Otherwise, do this a piece at a time. */
2723 && CONST_INT_P (size)
2724 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2726 rtx zero = CONST0_RTX (mode);
2729 emit_move_insn (object, zero);
2733 if (COMPLEX_MODE_P (mode))
2735 zero = CONST0_RTX (GET_MODE_INNER (mode));
2738 write_complex_part (object, zero, 0);
2739 write_complex_part (object, zero, 1);
2745 if (size == const0_rtx)
2748 align = MEM_ALIGN (object);
2750 if (CONST_INT_P (size)
2751 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2753 optimize_insn_for_speed_p ()))
2754 clear_by_pieces (object, INTVAL (size), align);
2755 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2756 expected_align, expected_size,
2757 min_size, max_size, probable_max_size))
2759 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2760 return set_storage_via_libcall (object, size, const0_rtx,
2761 method == BLOCK_OP_TAILCALL);
2769 clear_storage (rtx object, rtx size, enum block_op_methods method)
2771 unsigned HOST_WIDE_INT max, min = 0;
2772 if (GET_CODE (size) == CONST_INT)
2773 min = max = UINTVAL (size);
2775 max = GET_MODE_MASK (GET_MODE (size));
2776 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
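/* Usage sketch (illustrative): zeroing a 32-byte BLKmode object,
   assuming MEM is a BLKmode MEM rtx for it:

     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);

   depending on size, alignment and the target this becomes piecewise
   stores, a setmem pattern, or a memset libcall via the paths above. */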
2780 /* A subroutine of clear_storage. Expand a call to memset.
2781 Return the return value of memset, 0 otherwise. */
2784 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2786 tree call_expr, fn, object_tree, size_tree, val_tree;
2787 machine_mode size_mode;
2790 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2791 place those new pseudos into a VAR_DECL and use them later. */
2793 object = copy_addr_to_reg (XEXP (object, 0));
2795 size_mode = TYPE_MODE (sizetype);
2796 size = convert_to_mode (size_mode, size, 1);
2797 size = copy_to_mode_reg (size_mode, size);
2799 /* It is incorrect to use the libcall calling conventions to call
2800 memset in this context. This could be a user call to memset and
2801 the user may wish to examine the return value from memset. For
2802 targets where libcalls and normal calls have different conventions
2803 for returning pointers, we could end up generating incorrect code. */
2805 object_tree = make_tree (ptr_type_node, object);
2806 if (!CONST_INT_P (val))
2807 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2808 size_tree = make_tree (sizetype, size);
2809 val_tree = make_tree (integer_type_node, val);
2811 fn = clear_storage_libcall_fn (true);
2812 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2813 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2815 retval = expand_normal (call_expr);
2820 /* A subroutine of set_storage_via_libcall. Create the tree node
2821 for the function we use for block clears. */
2823 tree block_clear_fn;
2826 init_block_clear_fn (const char *asmspec)
2828 if (!block_clear_fn)
2832 fn = get_identifier ("memset");
2833 args = build_function_type_list (ptr_type_node, ptr_type_node,
2834 integer_type_node, sizetype,
2837 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2838 DECL_EXTERNAL (fn) = 1;
2839 TREE_PUBLIC (fn) = 1;
2840 DECL_ARTIFICIAL (fn) = 1;
2841 TREE_NOTHROW (fn) = 1;
2842 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2843 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2845 block_clear_fn = fn;
2849 set_user_assembler_name (block_clear_fn, asmspec);
2853 clear_storage_libcall_fn (int for_call)
2855 static bool emitted_extern;
2857 if (!block_clear_fn)
2858 init_block_clear_fn (NULL);
2860 if (for_call && !emitted_extern)
2862 emitted_extern = true;
2863 make_decl_rtl (block_clear_fn);
2866 return block_clear_fn;
2869 /* Expand a setmem pattern; return true if successful. */
2872 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2873 unsigned int expected_align, HOST_WIDE_INT expected_size,
2874 unsigned HOST_WIDE_INT min_size,
2875 unsigned HOST_WIDE_INT max_size,
2876 unsigned HOST_WIDE_INT probable_max_size)
2878 /* Try the most limited insn first, because there's no point
2879 including more than one in the machine description unless
2880 the more limited one has some advantage. */
2884 if (expected_align < align)
2885 expected_align = align;
2886 if (expected_size != -1)
2888 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2889 expected_size = max_size;
2890 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2891 expected_size = min_size;
2894 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2895 mode = GET_MODE_WIDER_MODE (mode))
2897 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2899 if (code != CODE_FOR_nothing
2900 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2901 here because if SIZE is less than the mode mask, as it is
2902 returned by the macro, it will definitely be less than the
2903 actual mode mask. Since SIZE is within the Pmode address
2904 space, we limit MODE to Pmode. */
2905 && ((CONST_INT_P (size)
2906 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2907 <= (GET_MODE_MASK (mode) >> 1)))
2908 || max_size <= (GET_MODE_MASK (mode) >> 1)
2909 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2911 struct expand_operand ops[9];
2914 nops = insn_data[(int) code].n_generator_args;
2915 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2917 create_fixed_operand (&ops[0], object);
2918 /* The check above guarantees that this size conversion is valid. */
2919 create_convert_operand_to (&ops[1], size, mode, true);
2920 create_convert_operand_from (&ops[2], val, byte_mode, true);
2921 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2924 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2925 create_integer_operand (&ops[5], expected_size);
2929 create_integer_operand (&ops[6], min_size);
2930 /* If we cannot represent the maximal size,
2931 make the parameter NULL. */
2932 if ((HOST_WIDE_INT) max_size != -1)
2933 create_integer_operand (&ops[7], max_size);
2935 create_fixed_operand (&ops[7], NULL);
2939 /* If we cannot represent the maximal size,
2940 make the parameter NULL. */
2941 if ((HOST_WIDE_INT) probable_max_size != -1)
2942 create_integer_operand (&ops[8], probable_max_size);
2944 create_fixed_operand (&ops[8], NULL);
2946 if (maybe_expand_insn (code, nops, ops))
2955 /* Write to one of the components of the complex value CPLX. Write VAL to
2956 the real part if IMAG_P is false, and the imaginary part if it's true. */
2959 write_complex_part (rtx cplx, rtx val, bool imag_p)
2965 if (GET_CODE (cplx) == CONCAT)
2967 emit_move_insn (XEXP (cplx, imag_p), val);
2971 cmode = GET_MODE (cplx);
2972 imode = GET_MODE_INNER (cmode);
2973 ibitsize = GET_MODE_BITSIZE (imode);
2975 /* For MEMs simplify_gen_subreg may generate an invalid new address
2976 because, e.g., the original address is considered mode-dependent
2977 by the target, which restricts simplify_subreg from invoking
2978 adjust_address_nv. Instead of preparing fallback support for an
2979 invalid address, we call adjust_address_nv directly. */
2982 emit_move_insn (adjust_address_nv (cplx, imode,
2983 imag_p ? GET_MODE_SIZE (imode) : 0),
2988 /* If the sub-object is at least word sized, then we know that subregging
2989 will work. This special case is important, since store_bit_field
2990 wants to operate on integer modes, and there's rarely an OImode to
2991 correspond to TCmode. */
2992 if (ibitsize >= BITS_PER_WORD
2993 /* For hard regs we have exact predicates. Assume we can split
2994 the original object if it spans an even number of hard regs.
2995 This special case is important for SCmode on 64-bit platforms
2996 where the natural size of floating-point regs is 32-bit. */
2998 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2999 && REG_NREGS (cplx) % 2 == 0))
3001 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3002 imag_p ? GET_MODE_SIZE (imode) : 0);
3005 emit_move_insn (part, val);
3009 /* simplify_gen_subreg may fail for sub-word MEMs. */
3010 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3013 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val, false);
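/* Usage sketch (illustrative): building a complex value from two parts,
   assuming TARGET has mode SCmode and RE and IM are SFmode rtxes:

     write_complex_part (target, re, false);
     write_complex_part (target, im, true);

   for a CONCAT target each call is a plain move into the matching half;
   otherwise the paths above pick a subreg, an address adjustment, or a
   bit-field store as appropriate. */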
3017 /* Extract one of the components of the complex value CPLX. Extract the
3018 real part if IMAG_P is false, and the imaginary part if it's true. */
3021 read_complex_part (rtx cplx, bool imag_p)
3023 machine_mode cmode, imode;
3026 if (GET_CODE (cplx) == CONCAT)
3027 return XEXP (cplx, imag_p);
3029 cmode = GET_MODE (cplx);
3030 imode = GET_MODE_INNER (cmode);
3031 ibitsize = GET_MODE_BITSIZE (imode);
3033 /* Special case reads from complex constants that got spilled to memory. */
3034 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3036 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3037 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3039 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3040 if (CONSTANT_CLASS_P (part))
3041 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3045 /* For MEMs simplify_gen_subreg may generate an invalid new address
3046 because, e.g., the original address is considered mode-dependent
3047 by the target, which restricts simplify_subreg from invoking
3048 adjust_address_nv. Instead of preparing fallback support for an
3049 invalid address, we call adjust_address_nv directly. */
3051 return adjust_address_nv (cplx, imode,
3052 imag_p ? GET_MODE_SIZE (imode) : 0);
3054 /* If the sub-object is at least word sized, then we know that subregging
3055 will work. This special case is important, since extract_bit_field
3056 wants to operate on integer modes, and there's rarely an OImode to
3057 correspond to TCmode. */
3058 if (ibitsize >= BITS_PER_WORD
3059 /* For hard regs we have exact predicates. Assume we can split
3060 the original object if it spans an even number of hard regs.
3061 This special case is important for SCmode on 64-bit platforms
3062 where the natural size of floating-point regs is 32-bit. */
3064 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3065 && REG_NREGS (cplx) % 2 == 0))
3067 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3068 imag_p ? GET_MODE_SIZE (imode) : 0);
3072 /* simplify_gen_subreg may fail for sub-word MEMs. */
3073 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3076 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3077 true, NULL_RTX, imode, imode, false);
3080 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3081 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3082 represented in NEW_MODE. If FORCE is true, this will never happen, as
3083 we'll force-create a SUBREG if needed. */
3086 emit_move_change_mode (machine_mode new_mode,
3087 machine_mode old_mode, rtx x, bool force)
3091 if (push_operand (x, GET_MODE (x)))
3093 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3094 MEM_COPY_ATTRIBUTES (ret, x);
3098 /* We don't have to worry about changing the address since the
3099 size in bytes is supposed to be the same. */
3100 if (reload_in_progress)
3102 /* Copy the MEM to change the mode and move any
3103 substitutions from the old MEM to the new one. */
3104 ret = adjust_address_nv (x, new_mode, 0);
3105 copy_replacements (x, ret);
3108 ret = adjust_address (x, new_mode, 0);
3112 /* Note that we do want simplify_subreg's behavior of validating
3113 that the new mode is ok for a hard register. If we were to use
3114 simplify_gen_subreg, we would create the subreg, but would
3115 probably run into the target not being able to implement it. */
3116 /* Except, of course, when FORCE is true, when this is exactly what
3117 we want. Which is needed for CCmodes on some targets. */
3119 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3121 ret = simplify_subreg (new_mode, x, old_mode, 0);
3127 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3128 an integer mode of the same size as MODE. Returns the instruction
3129 emitted, or NULL if such a move could not be generated. */
3132 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3135 enum insn_code code;
3137 /* There must exist a mode of the exact size we require. */
3138 imode = int_mode_for_mode (mode);
3139 if (imode == BLKmode)
3142 /* The target must support moves in this mode. */
3143 code = optab_handler (mov_optab, imode);
3144 if (code == CODE_FOR_nothing)
3147 x = emit_move_change_mode (imode, mode, x, force);
3150 y = emit_move_change_mode (imode, mode, y, force);
3153 return emit_insn (GEN_FCN (code) (x, y));
3156 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3157 Return an equivalent MEM that does not use an auto-increment. */
3160 emit_move_resolve_push (machine_mode mode, rtx x)
3162 enum rtx_code code = GET_CODE (XEXP (x, 0));
3163 HOST_WIDE_INT adjust;
3166 adjust = GET_MODE_SIZE (mode);
3167 #ifdef PUSH_ROUNDING
3168 adjust = PUSH_ROUNDING (adjust);
3170 if (code == PRE_DEC || code == POST_DEC)
3172 else if (code == PRE_MODIFY || code == POST_MODIFY)
3174 rtx expr = XEXP (XEXP (x, 0), 1);
3177 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3178 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3179 val = INTVAL (XEXP (expr, 1));
3180 if (GET_CODE (expr) == MINUS)
3182 gcc_assert (adjust == val || adjust == -val);
3186 /* Do not use anti_adjust_stack, since we don't want to update
3187 stack_pointer_delta. */
3188 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3189 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3190 0, OPTAB_LIB_WIDEN);
3191 if (temp != stack_pointer_rtx)
3192 emit_move_insn (stack_pointer_rtx, temp);
3199 temp = stack_pointer_rtx;
3204 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3210 return replace_equiv_address (x, temp);
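/* Worked example (hypothetical target with a downward-growing stack and
   PUSH_ROUNDING rounding 2 up to 4): for X == (mem:HI (pre_dec (reg sp)))
   ADJUST becomes 4 and is then negated, the code above emits an explicit
   sp = sp - 4, and the returned MEM is (mem:HI (reg sp)), with no
   auto-increment left in its address. */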
3213 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3214 X is known to satisfy push_operand, and MODE is known to be complex.
3215 Returns the last instruction emitted. */
3218 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3220 machine_mode submode = GET_MODE_INNER (mode);
3223 #ifdef PUSH_ROUNDING
3224 unsigned int submodesize = GET_MODE_SIZE (submode);
3226 /* In case we output to the stack, but the size is smaller than the
3227 machine can push exactly, we need to use move instructions. */
3228 if (PUSH_ROUNDING (submodesize) != submodesize)
3230 x = emit_move_resolve_push (mode, x);
3231 return emit_move_insn (x, y);
3235 /* Note that the real part always precedes the imag part in memory
3236 regardless of the machine's endianness. */
3237 switch (GET_CODE (XEXP (x, 0)))
3251 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3252 read_complex_part (y, imag_first));
3253 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3254 read_complex_part (y, !imag_first));
3257 /* A subroutine of emit_move_complex. Perform the move from Y to X
3258 via two moves of the parts. Returns the last instruction emitted. */
3261 emit_move_complex_parts (rtx x, rtx y)
3263 /* Show the output dies here. This is necessary for SUBREGs
3264 of pseudos since we cannot track their lifetimes correctly;
3265 hard regs shouldn't appear here except as return values. */
3266 if (!reload_completed && !reload_in_progress
3267 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3270 write_complex_part (x, read_complex_part (y, false), false);
3271 write_complex_part (x, read_complex_part (y, true), true);
3273 return get_last_insn ();
3276 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3277 MODE is known to be complex. Returns the last instruction emitted. */
3280 emit_move_complex (machine_mode mode, rtx x, rtx y)
3284 /* Need to take special care for pushes, to maintain proper ordering
3285 of the data, and possibly extra padding. */
3286 if (push_operand (x, mode))
3287 return emit_move_complex_push (mode, x, y);
3289 /* See if we can coerce the target into moving both values at once, except
3290 for floating point where we favor moving as parts if this is easy. */
3291 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3292 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3294 && HARD_REGISTER_P (x)
3295 && REG_NREGS (x) == 1)
3297 && HARD_REGISTER_P (y)
3298 && REG_NREGS (y) == 1))
3300 /* Not possible if the values are inherently not adjacent. */
3301 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3303 /* Is possible if both are registers (or subregs of registers). */
3304 else if (register_operand (x, mode) && register_operand (y, mode))
3306 /* If one of the operands is a memory, and alignment constraints
3307 are friendly enough, we may be able to do combined memory operations.
3308 We do not attempt this if Y is a constant because that combination is
3309 usually better with the by-parts thing below. */
3310 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3311 && (!STRICT_ALIGNMENT
3312 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3321 /* For memory to memory moves, optimal behavior can be had with the
3322 existing block move logic. */
3323 if (MEM_P (x) && MEM_P (y))
3325 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3326 BLOCK_OP_NO_LIBCALL);
3327 return get_last_insn ();
3330 ret = emit_move_via_integer (mode, x, y, true);
3335 return emit_move_complex_parts (x, y);
3338 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3339 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3342 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3346 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3349 enum insn_code code = optab_handler (mov_optab, CCmode);
3350 if (code != CODE_FOR_nothing)
3352 x = emit_move_change_mode (CCmode, mode, x, true);
3353 y = emit_move_change_mode (CCmode, mode, y, true);
3354 return emit_insn (GEN_FCN (code) (x, y));
3358 /* Otherwise, find the MODE_INT mode of the same width. */
3359 ret = emit_move_via_integer (mode, x, y, false);
3360 gcc_assert (ret != NULL);
3364 /* Return true if word I of OP lies entirely in the
3365 undefined bits of a paradoxical subreg. */
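/* Example (hypothetical 32-bit little-endian target): for the
   paradoxical subreg (subreg:DI (reg:SI x) 0), word 0 overlaps the
   SImode value but word 1 lies wholly in the undefined upper half, so
   this predicate returns true for I == 1 and emit_move_multi_word can
   skip that word's move entirely. */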
3368 undefined_operand_subword_p (const_rtx op, int i)
3370 machine_mode innermode, innermostmode;
3372 if (GET_CODE (op) != SUBREG)
3374 innermode = GET_MODE (op);
3375 innermostmode = GET_MODE (SUBREG_REG (op));
3376 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3377 /* The SUBREG_BYTE represents offset, as if the value were stored in
3378 memory, except for a paradoxical subreg where we define
3379 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3381 if (SUBREG_BYTE (op) == 0
3382 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3384 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3385 if (WORDS_BIG_ENDIAN)
3386 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3387 if (BYTES_BIG_ENDIAN)
3388 offset += difference % UNITS_PER_WORD;
3390 if (offset >= GET_MODE_SIZE (innermostmode)
3391 || offset <= -GET_MODE_SIZE (word_mode))
3396 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3397 MODE is any multi-word or full-word mode that lacks a move_insn
3398 pattern. Note that you will get better code if you define such
3399 patterns, even if they must turn into multiple assembler instructions. */
3402 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3404 rtx_insn *last_insn = 0;
3410 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3412 /* If X is a push on the stack, do the push now and replace
3413 X with a reference to the stack pointer. */
3414 if (push_operand (x, mode))
3415 x = emit_move_resolve_push (mode, x);
3417 /* If we are in reload, see if either operand is a MEM whose address
3418 is scheduled for replacement. */
3419 if (reload_in_progress && MEM_P (x)
3420 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3421 x = replace_equiv_address_nv (x, inner);
3422 if (reload_in_progress && MEM_P (y)
3423 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3424 y = replace_equiv_address_nv (y, inner);
3428 need_clobber = false;
3430 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3433 rtx xpart = operand_subword (x, i, 1, mode);
3436 /* Do not generate code for a move if it would come entirely
3437 from the undefined bits of a paradoxical subreg. */
3438 if (undefined_operand_subword_p (y, i))
3441 ypart = operand_subword (y, i, 1, mode);
3443 /* If we can't get a part of Y, put Y into memory if it is a
3444 constant. Otherwise, force it into a register. Then we must
3445 be able to get a part of Y. */
3446 if (ypart == 0 && CONSTANT_P (y))
3448 y = use_anchored_address (force_const_mem (mode, y));
3449 ypart = operand_subword (y, i, 1, mode);
3451 else if (ypart == 0)
3452 ypart = operand_subword_force (y, i, mode);
3454 gcc_assert (xpart && ypart);
3456 need_clobber |= (GET_CODE (xpart) == SUBREG);
3458 last_insn = emit_move_insn (xpart, ypart);
3464 /* Show the output dies here. This is necessary for SUBREGs
3465 of pseudos since we cannot track their lifetimes correctly;
3466 hard regs shouldn't appear here except as return values.
3467 We never want to emit such a clobber after reload. */
3469 && ! (reload_in_progress || reload_completed)
3470 && need_clobber != 0)
3478 /* Low level part of emit_move_insn.
3479 Called just like emit_move_insn, but assumes X and Y
3480 are basically valid. */
3483 emit_move_insn_1 (rtx x, rtx y)
3485 machine_mode mode = GET_MODE (x);
3486 enum insn_code code;
3488 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3490 code = optab_handler (mov_optab, mode);
3491 if (code != CODE_FOR_nothing)
3492 return emit_insn (GEN_FCN (code) (x, y));
3494 /* Expand complex moves by moving real part and imag part. */
3495 if (COMPLEX_MODE_P (mode))
3496 return emit_move_complex (mode, x, y);
3498 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3499 || ALL_FIXED_POINT_MODE_P (mode))
3501 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3503 /* If we can't find an integer mode, use multi words. */
3507 return emit_move_multi_word (mode, x, y);
3510 if (GET_MODE_CLASS (mode) == MODE_CC)
3511 return emit_move_ccmode (mode, x, y);
3513 /* Try using a move pattern for the corresponding integer mode. This is
3514 only safe when simplify_subreg can convert MODE constants into integer
3515 constants. At present, it can only do this reliably if the value
3516 fits within a HOST_WIDE_INT. */
3517 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3519 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3523 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3528 return emit_move_multi_word (mode, x, y);
3531 /* Generate code to copy Y into X.
3532 Both Y and X must have the same mode, except that
3533 Y can be a constant with VOIDmode.
3534 This mode cannot be BLKmode; use emit_block_move for that.
3536 Return the last instruction emitted. */
3539 emit_move_insn (rtx x, rtx y)
3541 machine_mode mode = GET_MODE (x);
3542 rtx y_cst = NULL_RTX;
3543 rtx_insn *last_insn;
3546 gcc_assert (mode != BLKmode
3547 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3552 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3553 && (last_insn = compress_float_constant (x, y)))
3558 if (!targetm.legitimate_constant_p (mode, y))
3560 y = force_const_mem (mode, y);
3562 /* If the target's cannot_force_const_mem prevented the spill,
3563 assume that the target's move expanders will also take care
3564 of the non-legitimate constant. */
3568 y = use_anchored_address (y);
3572 /* If X or Y are memory references, verify that their addresses are valid
3575 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3577 && ! push_operand (x, GET_MODE (x))))
3578 x = validize_mem (x);
3581 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3582 MEM_ADDR_SPACE (y)))
3583 y = validize_mem (y);
3585 gcc_assert (mode != BLKmode);
3587 last_insn = emit_move_insn_1 (x, y);
3589 if (y_cst && REG_P (x)
3590 && (set = single_set (last_insn)) != NULL_RTX
3591 && SET_DEST (set) == x
3592 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3593 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3598 /* Generate the body of an instruction to copy Y into X.
3599 It may be a list of insns, if one insn isn't enough. */
3602 gen_move_insn (rtx x, rtx y)
3607 emit_move_insn_1 (x, y);
3613 /* If Y is representable exactly in a narrower mode, and the target can
3614 perform the extension directly from constant or memory, then emit the
3615 move as an extension. */
3618 compress_float_constant (rtx x, rtx y)
3620 machine_mode dstmode = GET_MODE (x);
3621 machine_mode orig_srcmode = GET_MODE (y);
3622 machine_mode srcmode;
3623 const REAL_VALUE_TYPE *r;
3624 int oldcost, newcost;
3625 bool speed = optimize_insn_for_speed_p ();
3627 r = CONST_DOUBLE_REAL_VALUE (y);
3629 if (targetm.legitimate_constant_p (dstmode, y))
3630 oldcost = set_src_cost (y, orig_srcmode, speed);
3632 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3634 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3635 srcmode != orig_srcmode;
3636 srcmode = GET_MODE_WIDER_MODE (srcmode))
3640 rtx_insn *last_insn;
3642 /* Skip if the target can't extend this way. */
3643 ic = can_extend_p (dstmode, srcmode, 0);
3644 if (ic == CODE_FOR_nothing)
3647 /* Skip if the narrowed value isn't exact. */
3648 if (! exact_real_truncate (srcmode, r))
3651 trunc_y = const_double_from_real_value (*r, srcmode);
3653 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3655 /* Skip if the target needs extra instructions to perform the extension. */
3657 if (!insn_operand_matches (ic, 1, trunc_y))
3659 /* This is valid, but may not be cheaper than the original. */
3660 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3662 if (oldcost < newcost)
3665 else if (float_extend_from_mem[dstmode][srcmode])
3667 trunc_y = force_const_mem (srcmode, trunc_y);
3668 /* This is valid, but may not be cheaper than the original. */
3669 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3671 if (oldcost < newcost)
3673 trunc_y = validize_mem (trunc_y);
3678 /* For CSE's benefit, force the compressed constant pool entry
3679 into a new pseudo. This constant may be used in different modes,
3680 and if not, combine will put things back together for us. */
3681 trunc_y = force_reg (srcmode, trunc_y);
3683 /* If x is a hard register, perform the extension into a pseudo,
3684 so that e.g. stack realignment code is aware of it. */
3686 if (REG_P (x) && HARD_REGISTER_P (x))
3687 target = gen_reg_rtx (dstmode);
3689 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3690 last_insn = get_last_insn ();
3693 set_unique_reg_note (last_insn, REG_EQUAL, y);
3696 return emit_move_insn (x, target);
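/* Example (illustrative): on a target with a cheap SFmode-to-DFmode
   extension, moving the DFmode constant 1.0 can be emitted as an
   extension of the exactly representable SFmode 1.0, possibly loaded
   from the constant pool, which is cheaper than materializing the full
   DFmode image; the REG_EQUAL note attached above preserves the
   original constant for later passes. */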
3703 /* Pushing data onto the stack. */
3705 /* Push a block of length SIZE (perhaps variable)
3706 and return an rtx to address the beginning of the block.
3707 The value may be virtual_outgoing_args_rtx.
3709 EXTRA is the number of bytes of padding to push in addition to SIZE.
3710 BELOW nonzero means this padding comes at low addresses;
3711 otherwise, the padding comes at high addresses. */
3714 push_block (rtx size, int extra, int below)
3718 size = convert_modes (Pmode, ptr_mode, size, 1);
3719 if (CONSTANT_P (size))
3720 anti_adjust_stack (plus_constant (Pmode, size, extra));
3721 else if (REG_P (size) && extra == 0)
3722 anti_adjust_stack (size);
3725 temp = copy_to_mode_reg (Pmode, size);
3727 temp = expand_binop (Pmode, add_optab, temp,
3728 gen_int_mode (extra, Pmode),
3729 temp, 0, OPTAB_LIB_WIDEN);
3730 anti_adjust_stack (temp);
3733 if (STACK_GROWS_DOWNWARD)
3735 temp = virtual_outgoing_args_rtx;
3736 if (extra != 0 && below)
3737 temp = plus_constant (Pmode, temp, extra);
3741 if (CONST_INT_P (size))
3742 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3743 -INTVAL (size) - (below ? 0 : extra));
3744 else if (extra != 0 && !below)
3745 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3746 negate_rtx (Pmode, plus_constant (Pmode, size,
3749 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3750 negate_rtx (Pmode, size));
3753 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3756 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3759 mem_autoinc_base (rtx mem)
3763 rtx addr = XEXP (mem, 0);
3764 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3765 return XEXP (addr, 0);
3770 /* A utility routine used here, in reload, and in try_split. The insns
3771 after PREV up to and including LAST are known to adjust the stack,
3772 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3773 placing notes as appropriate. PREV may be NULL, indicating the
3774 entire insn sequence prior to LAST should be scanned.
3776 The set of allowed stack pointer modifications is small:
3777 (1) One or more auto-inc style memory references (aka pushes),
3778 (2) One or more addition/subtraction with the SP as destination,
3779 (3) A single move insn with the SP as destination,
3780 (4) A call_pop insn,
3781 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3783 Insns in the sequence that do not modify the SP are ignored,
3784 except for noreturn calls.
3786 The return value is the amount of adjustment that can be trivially
3787 verified, via immediate operand or auto-inc. If the adjustment
3788 cannot be trivially extracted, the return value is INT_MIN. */
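/* Examples (illustrative, 32-bit target): a push such as
     (set (mem:SI (pre_dec (reg sp))) (reg a))
   yields -4 through the auto-inc case below, while
     (set (reg sp) (plus (reg sp) (const_int -16)))
   yields -16 through the direct-modification case; any other
   modification of the stack pointer makes the function give up and
   return HOST_WIDE_INT_MIN. */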
3791 find_args_size_adjust (rtx_insn *insn)
3796 pat = PATTERN (insn);
3799 /* Look for a call_pop pattern. */
3802 /* We have to allow non-call_pop patterns for the case
3803 of emit_single_push_insn of a TLS address. */
3804 if (GET_CODE (pat) != PARALLEL)
3807 /* All call_pop have a stack pointer adjust in the parallel.
3808 The call itself is always first, and the stack adjust is
3809 usually last, so search from the end. */
3810 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3812 set = XVECEXP (pat, 0, i);
3813 if (GET_CODE (set) != SET)
3815 dest = SET_DEST (set);
3816 if (dest == stack_pointer_rtx)
3819 /* We'd better have found the stack pointer adjust. */
3822 /* Fall through to process the extracted SET and DEST
3823 as if it were a standalone insn. */
3825 else if (GET_CODE (pat) == SET)
3827 else if ((set = single_set (insn)) != NULL)
3829 else if (GET_CODE (pat) == PARALLEL)
3831 /* ??? Some older ports use a parallel with a stack adjust
3832 and a store for a PUSH_ROUNDING pattern, rather than a
3833 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3834 /* ??? See h8300 and m68k, pushqi1. */
3835 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3837 set = XVECEXP (pat, 0, i);
3838 if (GET_CODE (set) != SET)
3840 dest = SET_DEST (set);
3841 if (dest == stack_pointer_rtx)
3844 /* We do not expect an auto-inc of the sp in the parallel. */
3845 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3846 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3847 != stack_pointer_rtx);
3855 dest = SET_DEST (set);
3857 /* Look for direct modifications of the stack pointer. */
3858 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3860 /* Look for a trivial adjustment, otherwise assume nothing. */
3861 /* Note that the SPU restore_stack_block pattern refers to
3862 the stack pointer in V4SImode. Consider that non-trivial. */
3863 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3864 && GET_CODE (SET_SRC (set)) == PLUS
3865 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3866 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3867 return INTVAL (XEXP (SET_SRC (set), 1));
3868 /* ??? Reload can generate no-op moves, which will be cleaned
3869 up later. Recognize it and continue searching. */
3870 else if (rtx_equal_p (dest, SET_SRC (set)))
3873 return HOST_WIDE_INT_MIN;
3879 /* Otherwise only think about autoinc patterns. */
3880 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3883 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3884 != stack_pointer_rtx);
3886 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3887 mem = SET_SRC (set);
3891 addr = XEXP (mem, 0);
3892 switch (GET_CODE (addr))
3896 return GET_MODE_SIZE (GET_MODE (mem));
3899 return -GET_MODE_SIZE (GET_MODE (mem));
3902 addr = XEXP (addr, 1);
3903 gcc_assert (GET_CODE (addr) == PLUS);
3904 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3905 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3906 return INTVAL (XEXP (addr, 1));
3914 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3916 int args_size = end_args_size;
3917 bool saw_unknown = false;
3920 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3922 HOST_WIDE_INT this_delta;
3924 if (!NONDEBUG_INSN_P (insn))
3927 this_delta = find_args_size_adjust (insn);
3928 if (this_delta == 0)
3931 || ACCUMULATE_OUTGOING_ARGS
3932 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3936 gcc_assert (!saw_unknown);
3937 if (this_delta == HOST_WIDE_INT_MIN)
3940 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3941 if (STACK_GROWS_DOWNWARD)
3942 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3944 args_size -= this_delta;
3947 return saw_unknown ? INT_MIN : args_size;
3950 #ifdef PUSH_ROUNDING
3951 /* Emit single push insn. */
3954 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3957 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3959 enum insn_code icode;
3961 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3962 /* If there is a push pattern, use it. Otherwise try the old way of throwing a
3963 MEM representing the push operation at the move expander. */
3964 icode = optab_handler (push_optab, mode);
3965 if (icode != CODE_FOR_nothing)
3967 struct expand_operand ops[1];
3969 create_input_operand (&ops[0], x, mode);
3970 if (maybe_expand_insn (icode, 1, ops))
3973 if (GET_MODE_SIZE (mode) == rounded_size)
3974 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3975 /* If we are to pad downward, adjust the stack pointer first and
3976 then store X into the stack location using an offset. This is
3977 because emit_move_insn does not know how to pad; it does not have access to type. */
3979 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3981 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3982 HOST_WIDE_INT offset;
3984 emit_move_insn (stack_pointer_rtx,
3985 expand_binop (Pmode,
3986 STACK_GROWS_DOWNWARD ? sub_optab
3989 gen_int_mode (rounded_size, Pmode),
3990 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3992 offset = (HOST_WIDE_INT) padding_size;
3993 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
3994 /* We have already decremented the stack pointer, so get the previous value. */
3996 offset += (HOST_WIDE_INT) rounded_size;
3998 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
3999 /* We have already incremented the stack pointer, so get the previous value. */
4001 offset -= (HOST_WIDE_INT) rounded_size;
4003 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4004 gen_int_mode (offset, Pmode));
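/* Worked example (hypothetical downward-growing stack with PRE_DEC
   pushes and PUSH_ROUNDING rounding 1 up to 4): pushing a QImode value
   that must be padded downward gives padding_size == 3; the stack
   pointer is decremented by 4 above and the byte is stored at sp + 3,
   leaving the three padding bytes at the lower addresses. */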
4008 if (STACK_GROWS_DOWNWARD)
4009 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4010 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4011 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4014 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4015 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4016 gen_int_mode (rounded_size, Pmode));
4018 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4021 dest = gen_rtx_MEM (mode, dest_addr);
4025 set_mem_attributes (dest, type, 1);
4027 if (cfun->tail_call_marked)
4028 /* Function incoming arguments may overlap with sibling call
4029 outgoing arguments and we cannot allow reordering of reads
4030 from function arguments with stores to outgoing arguments
4031 of sibling calls. */
4032 set_mem_alias_set (dest, 0);
4034 emit_move_insn (dest, x);
4037 /* Emit and annotate a single push insn. */
4040 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4042 int delta, old_delta = stack_pointer_delta;
4043 rtx_insn *prev = get_last_insn ();
4046 emit_single_push_insn_1 (mode, x, type);
4048 last = get_last_insn ();
4050 /* Notice the common case where we emitted exactly one insn. */
4051 if (PREV_INSN (last) == prev)
4053 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4057 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4058 gcc_assert (delta == INT_MIN || delta == old_delta);
4062 /* If reading SIZE bytes from X will end up reading from
4063 Y, return the number of bytes that overlap. Return -1
4064 if there is no overlap or -2 if we can't determine
4065 (for example when X and Y have different base registers). */
4068 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4070 rtx tmp = plus_constant (Pmode, x, size);
4071 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4073 if (!CONST_INT_P (sub))
4076 HOST_WIDE_INT val = INTVAL (sub);
4078 return IN_RANGE (val, 1, size) ? val : -1;
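/* Worked example (illustrative): with X == sp + 8, Y == sp + 12 and
   SIZE == 8, SUB folds to (sp + 8 + 8) - (sp + 12) == 4, which lies in
   [1, 8], so the last 4 bytes of the load overlap Y; had X and Y used
   different base registers, SUB would not fold to a CONST_INT and the
   conservative -2 is returned. */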
4081 /* Generate code to push X onto the stack, assuming it has mode MODE and
4083 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
4085 SIZE is an rtx for the size of data to be copied (in bytes),
4086 needed only if X is BLKmode.
4087 Return true if successful. May return false if asked to push a
4088 partial argument during a sibcall optimization (as specified by
4089 SIBCALL_P) and the incoming and outgoing pointers cannot be shown not to overlap.
4092 ALIGN (in bits) is maximum alignment we can assume.
4094 If PARTIAL and REG are both nonzero, then copy that many of the first
4095 bytes of X into registers starting with REG, and push the rest of X.
4096 The amount of space pushed is decreased by PARTIAL bytes.
4097 REG must be a hard register in this case.
4098 If REG is zero but PARTIAL is not, take all other actions for an
4099 argument partially in registers, but do not actually load any
4102 EXTRA is the amount in bytes of extra space to leave next to this arg.
4103 This is ignored if an argument block has already been allocated.
4105 On a machine that lacks real push insns, ARGS_ADDR is the address of
4106 the bottom of the argument block for this call. We use indexing off there
4107 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4108 argument block has not been preallocated.
4110 ARGS_SO_FAR is the size of args previously pushed for this call.
4112 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4113 for arguments passed in registers. If nonzero, it will be the number
4114 of bytes required. */
4117 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4118 unsigned int align, int partial, rtx reg, int extra,
4119 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4120 rtx alignment_pad, bool sibcall_p)
4123 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4125 /* Decide where to pad the argument: `downward' for below,
4126 `upward' for above, or `none' for don't pad it.
4127 Default is below for small data on big-endian machines; else above. */
4128 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4130 /* Invert direction if stack is post-decrement.
4132 if (STACK_PUSH_CODE == POST_DEC)
4133 if (where_pad != none)
4134 where_pad = (where_pad == downward ? upward : downward);
4138 int nregs = partial / UNITS_PER_WORD;
4139 rtx *tmp_regs = NULL;
4140 int overlapping = 0;
4143 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4145 /* Copy a block into the stack, entirely or partially. */
4152 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4153 used = partial - offset;
4155 if (mode != BLKmode)
4157 /* A value is to be stored in an insufficiently aligned
4158 stack slot; copy via a suitably aligned slot if necessary. */
4160 size = GEN_INT (GET_MODE_SIZE (mode));
4161 if (!MEM_P (xinner))
4163 temp = assign_temp (type, 1, 1);
4164 emit_move_insn (temp, xinner);
4171 /* USED is now the # of bytes we need not copy to the stack
4172 because registers will take care of them. */
4175 xinner = adjust_address (xinner, BLKmode, used);
4177 /* If the partial register-part of the arg counts in its stack size,
4178 skip the part of stack space corresponding to the registers.
4179 Otherwise, start copying to the beginning of the stack space,
4180 by setting SKIP to 0. */
4181 skip = (reg_parm_stack_space == 0) ? 0 : used;
4183 #ifdef PUSH_ROUNDING
4184 /* Do it with several push insns if that doesn't take lots of insns
4185 and if there is no difficulty with push insns that skip bytes
4186 on the stack for alignment purposes. */
4189 && CONST_INT_P (size)
4191 && MEM_ALIGN (xinner) >= align
4192 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4193 /* Here we avoid the case of a structure whose weak alignment
4194 forces many pushes of a small amount of data,
4195 and such small pushes do rounding that causes trouble. */
4196 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4197 || align >= BIGGEST_ALIGNMENT
4198 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4199 == (align / BITS_PER_UNIT)))
4200 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4202 /* Push padding now if padding above and stack grows down,
4203 or if padding below and stack grows up.
4204 But if space already allocated, this has already been done. */
4205 if (extra && args_addr == 0
4206 && where_pad != none && where_pad != stack_direction)
4207 anti_adjust_stack (GEN_INT (extra));
4209 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4212 #endif /* PUSH_ROUNDING */
4216 /* Otherwise make space on the stack and copy the data
4217 to the address of that space. */
4219 /* Deduct words put into registers from the size we must copy. */
4222 if (CONST_INT_P (size))
4223 size = GEN_INT (INTVAL (size) - used);
4225 size = expand_binop (GET_MODE (size), sub_optab, size,
4226 gen_int_mode (used, GET_MODE (size)),
4227 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4230 /* Get the address of the stack space.
4231 In this case, we do not deal with EXTRA separately.
4232 A single stack adjust will do. */
4235 temp = push_block (size, extra, where_pad == downward);
4238 else if (CONST_INT_P (args_so_far))
4239 temp = memory_address (BLKmode,
4240 plus_constant (Pmode, args_addr,
4241 skip + INTVAL (args_so_far)));
4243 temp = memory_address (BLKmode,
4244 plus_constant (Pmode,
4245 gen_rtx_PLUS (Pmode,
4246 args_addr,
4247 args_so_far),
4248 skip));
4250 if (!ACCUMULATE_OUTGOING_ARGS)
4252 /* If the source is referenced relative to the stack pointer,
4253 copy it to another register to stabilize it. We do not need
4254 to do this if we know that we won't be changing sp. */
4256 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4257 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4258 temp = copy_to_reg (temp);
4261 target = gen_rtx_MEM (BLKmode, temp);
4263 /* We do *not* set_mem_attributes here, because incoming arguments
4264 may overlap with sibling call outgoing arguments and we cannot
4265 allow reordering of reads from function arguments with stores
4266 to outgoing arguments of sibling calls. We do, however, want
4267 to record the alignment of the stack slot. */
4268 /* ALIGN may well be better aligned than TYPE, e.g. due to
4269 PARM_BOUNDARY. Assume the caller isn't lying. */
4270 set_mem_align (target, align);
4272 /* If part should go in registers and pushing to that part would
4273 overwrite some of the values that need to go into regs, load the
4274 overlapping values into temporary pseudos to be moved into the hard
4275 regs at the end after the stack pushing has completed.
4276 We cannot load them directly into the hard regs here because
4277 they can be clobbered by the block move expansions.  */
4280 if (partial > 0 && reg != 0 && mode == BLKmode
4281 && GET_CODE (reg) != PARALLEL)
4283 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4284 if (overlapping > 0)
4286 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4287 overlapping /= UNITS_PER_WORD;
4289 tmp_regs = XALLOCAVEC (rtx, overlapping);
4291 for (int i = 0; i < overlapping; i++)
4292 tmp_regs[i] = gen_reg_rtx (word_mode);
4294 for (int i = 0; i < overlapping; i++)
4295 emit_move_insn (tmp_regs[i],
4296 operand_subword_force (target, i, mode));
4298 else if (overlapping == -1)
4300 /* Could not determine whether there is overlap.
4301 Fail the sibcall. */
4309 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4312 else if (partial > 0)
4314 /* Scalar partly in registers. */
4316 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4319 /* # bytes of start of argument
4320 that we must make space for but need not store. */
4321 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4322 int args_offset = INTVAL (args_so_far);
4325 /* Push padding now if padding above and stack grows down,
4326 or if padding below and stack grows up.
4327 But if space already allocated, this has already been done. */
4328 if (extra && args_addr == 0
4329 && where_pad != none && where_pad != stack_direction)
4330 anti_adjust_stack (GEN_INT (extra));
4332 /* If we make space by pushing it, we might as well push
4333 the real data. Otherwise, we can leave OFFSET nonzero
4334 and leave the space uninitialized. */
4338 /* Now NOT_STACK gets the number of words that we don't need to
4339 allocate on the stack. Convert OFFSET to words too. */
4340 not_stack = (partial - offset) / UNITS_PER_WORD;
4341 offset /= UNITS_PER_WORD;
4343 /* If the partial register-part of the arg counts in its stack size,
4344 skip the part of stack space corresponding to the registers.
4345 Otherwise, start copying to the beginning of the stack space,
4346 by setting SKIP to 0. */
4347 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4349 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4350 x = validize_mem (force_const_mem (mode, x));
4352 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4353 SUBREGs of such registers are not allowed. */
4354 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4355 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4356 x = copy_to_reg (x);
4358 /* Loop over all the words allocated on the stack for this arg. */
4359 /* We can do it by words, because any scalar bigger than a word
4360 has a size a multiple of a word. */
4361 for (i = size - 1; i >= not_stack; i--)
4362 if (i >= not_stack + offset)
4363 if (!emit_push_insn (operand_subword_force (x, i, mode),
4364 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4365 0, args_addr,
4366 GEN_INT (args_offset + ((i - not_stack + skip)
4367 * UNITS_PER_WORD)),
4368 reg_parm_stack_space, alignment_pad, sibcall_p))
4369 return false;
4376 /* Push padding now if padding above and stack grows down,
4377 or if padding below and stack grows up.
4378 But if space already allocated, this has already been done. */
4379 if (extra && args_addr == 0
4380 && where_pad != none && where_pad != stack_direction)
4381 anti_adjust_stack (GEN_INT (extra));
4383 #ifdef PUSH_ROUNDING
4384 if (args_addr == 0 && PUSH_ARGS)
4385 emit_single_push_insn (mode, x, type);
4389 if (CONST_INT_P (args_so_far))
4390 addr
4391 = memory_address (mode,
4392 plus_constant (Pmode, args_addr,
4393 INTVAL (args_so_far)));
4394 else
4395 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4396 args_so_far));
4397 dest = gen_rtx_MEM (mode, addr);
4399 /* We do *not* set_mem_attributes here, because incoming arguments
4400 may overlap with sibling call outgoing arguments and we cannot
4401 allow reordering of reads from function arguments with stores
4402 to outgoing arguments of sibling calls. We do, however, want
4403 to record the alignment of the stack slot. */
4404 /* ALIGN may well be better aligned than TYPE, e.g. due to
4405 PARM_BOUNDARY. Assume the caller isn't lying. */
4406 set_mem_align (dest, align);
4408 emit_move_insn (dest, x);
4412 /* Move the partial arguments into the registers and any overlapping
4413 values that we moved into the pseudos in tmp_regs. */
4414 if (partial > 0 && reg != 0)
4416 /* Handle calls that pass values in multiple non-contiguous locations.
4417 The Irix 6 ABI has examples of this. */
4418 if (GET_CODE (reg) == PARALLEL)
4419 emit_group_load (reg, x, type, -1);
4422 gcc_assert (partial % UNITS_PER_WORD == 0);
4423 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4425 for (int i = 0; i < overlapping; i++)
4426 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4427 + nregs - overlapping + i),
4428 tmp_regs[i]);
4433 if (extra && args_addr == 0 && where_pad == stack_direction)
4434 anti_adjust_stack (GEN_INT (extra));
4436 if (alignment_pad && args_addr == 0)
4437 anti_adjust_stack (alignment_pad);
4442 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4443 operations.  */
4445 static rtx
4446 get_subtarget (rtx x)
4450 /* Only registers can be subtargets. */
4452 /* Don't use hard regs to avoid extending their life. */
4453 || REGNO (x) < FIRST_PSEUDO_REGISTER
4457 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4458 FIELD is a bitfield. Returns true if the optimization was successful,
4459 and there's nothing else to do. */
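/* A made-up example of the shapes handled here (editorial note):

     struct S { unsigned int f : 1; } s;
     s.f ^= 1;

   can be done with one XOR on the word containing the bit, and likewise
   "s.f |= 1" with one IOR, instead of an extract/modify/insert
   sequence.  */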
4461 static bool
4462 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4463 unsigned HOST_WIDE_INT bitpos,
4464 unsigned HOST_WIDE_INT bitregion_start,
4465 unsigned HOST_WIDE_INT bitregion_end,
4466 machine_mode mode1, rtx str_rtx,
4467 tree to, tree src, bool reverse)
4469 machine_mode str_mode = GET_MODE (str_rtx);
4470 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4475 enum tree_code code;
4477 if (mode1 != VOIDmode
4478 || bitsize >= BITS_PER_WORD
4479 || str_bitsize > BITS_PER_WORD
4480 || TREE_SIDE_EFFECTS (to)
4481 || TREE_THIS_VOLATILE (to))
4485 if (TREE_CODE (src) != SSA_NAME)
4487 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4490 srcstmt = get_gimple_for_ssa_name (src);
4491 if (!srcstmt
4492 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4493 return false;
4495 code = gimple_assign_rhs_code (srcstmt);
4497 op0 = gimple_assign_rhs1 (srcstmt);
4499 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4500 to find its initialization. Hopefully the initialization will
4501 be from a bitfield load. */
4502 if (TREE_CODE (op0) == SSA_NAME)
4504 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4506 /* We want to eventually have OP0 be the same as TO, which
4507 should be a bitfield. */
4508 if (!op0stmt
4509 || !is_gimple_assign (op0stmt)
4510 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4511 return false;
4512 op0 = gimple_assign_rhs1 (op0stmt);
4515 op1 = gimple_assign_rhs2 (srcstmt);
4517 if (!operand_equal_p (to, op0, 0))
4520 if (MEM_P (str_rtx))
4522 unsigned HOST_WIDE_INT offset1;
4524 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4525 str_mode = word_mode;
4526 str_mode = get_best_mode (bitsize, bitpos,
4527 bitregion_start, bitregion_end,
4528 MEM_ALIGN (str_rtx), str_mode, 0);
4529 if (str_mode == VOIDmode)
4531 str_bitsize = GET_MODE_BITSIZE (str_mode);
4533 offset1 = bitpos;
4534 bitpos %= str_bitsize;
4535 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4536 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4538 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4541 gcc_assert (!reverse);
4543 /* If the bit field covers the whole REG/MEM, store_field
4544 will likely generate better code. */
4545 if (bitsize >= str_bitsize)
4548 /* We can't handle fields split across multiple entities. */
4549 if (bitpos + bitsize > str_bitsize)
4552 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4553 bitpos = str_bitsize - bitpos - bitsize;
4559 /* For now, just optimize the case of the topmost bitfield
4560 where we don't need to do any masking and also
4561 1 bit bitfields where xor can be used.
4562 We might win by one instruction for the other bitfields
4563 too if insv/extv instructions aren't used, so that
4564 can be added later. */
4565 if ((reverse || bitpos + bitsize != str_bitsize)
4566 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4569 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4570 value = convert_modes (str_mode,
4571 TYPE_MODE (TREE_TYPE (op1)), value,
4572 TYPE_UNSIGNED (TREE_TYPE (op1)));
4574 /* We may be accessing data outside the field, which means
4575 we can alias adjacent data. */
4576 if (MEM_P (str_rtx))
4578 str_rtx = shallow_copy_rtx (str_rtx);
4579 set_mem_alias_set (str_rtx, 0);
4580 set_mem_expr (str_rtx, 0);
4583 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4585 value = expand_and (str_mode, value, const1_rtx, NULL);
4586 binop = xor_optab;
4588 else
4589 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4591 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4593 value = flip_storage_order (str_mode, value);
4594 result = expand_binop (str_mode, binop, str_rtx,
4595 value, str_rtx, 1, OPTAB_WIDEN);
4596 if (result != str_rtx)
4597 emit_move_insn (str_rtx, result);
4602 if (TREE_CODE (op1) != INTEGER_CST)
4604 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4605 value = convert_modes (str_mode,
4606 TYPE_MODE (TREE_TYPE (op1)), value,
4607 TYPE_UNSIGNED (TREE_TYPE (op1)));
4609 /* We may be accessing data outside the field, which means
4610 we can alias adjacent data. */
4611 if (MEM_P (str_rtx))
4613 str_rtx = shallow_copy_rtx (str_rtx);
4614 set_mem_alias_set (str_rtx, 0);
4615 set_mem_expr (str_rtx, 0);
4618 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4619 if (bitpos + bitsize != str_bitsize)
4621 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4622 str_mode);
4623 value = expand_and (str_mode, value, mask, NULL_RTX);
4625 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4627 value = flip_storage_order (str_mode, value);
4628 result = expand_binop (str_mode, binop, str_rtx,
4629 value, str_rtx, 1, OPTAB_WIDEN);
4630 if (result != str_rtx)
4631 emit_move_insn (str_rtx, result);
4641 /* In the C++ memory model, consecutive bit fields in a structure are
4642 considered one memory location.
4644 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4645 returns the bit range of consecutive bits in which this COMPONENT_REF
4646 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4647 and *OFFSET may be adjusted in the process.
4649 If the access does not need to be restricted, 0 is returned in both
4650 *BITSTART and *BITEND. */
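/* A made-up example (editorial note, hypothetical layout): in

     struct S { char c; int a : 3; int b : 5; } s;

   the adjacent bitfields A and B typically share one representative and
   thus one memory location, so a store to s.a may read and rewrite the
   bits of s.b but must never touch s.c.  */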
4652 static void
4653 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4654 unsigned HOST_WIDE_INT *bitend,
4655 tree exp,
4656 HOST_WIDE_INT *bitpos,
4657 tree *offset)
4659 HOST_WIDE_INT bitoffset;
4662 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4664 field = TREE_OPERAND (exp, 1);
4665 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4666 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4667 need to limit the range we can access. */
4670 *bitstart = *bitend = 0;
4674 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4675 part of a larger bit field, then the representative does not serve any
4676 useful purpose. This can occur in Ada. */
4677 if (handled_component_p (TREE_OPERAND (exp, 0)))
4680 HOST_WIDE_INT rbitsize, rbitpos;
4682 int unsignedp, reversep, volatilep = 0;
4683 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4684 &roffset, &rmode, &unsignedp, &reversep,
4686 if ((rbitpos % BITS_PER_UNIT) != 0)
4688 *bitstart = *bitend = 0;
4693 /* Compute the adjustment to bitpos from the offset of the field
4694 relative to the representative. DECL_FIELD_OFFSET of field and
4695 repr are the same by construction if they are not constants,
4696 see finish_bitfield_layout. */
4697 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4698 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4699 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4700 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4703 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4704 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4706 /* If the adjustment is larger than bitpos, we would have a negative bit
4707 position for the lower bound and this may wreak havoc later. Adjust
4708 offset and bitpos to make the lower bound non-negative in that case. */
4709 if (bitoffset > *bitpos)
4711 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4712 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4715 if (*offset == NULL_TREE)
4716 *offset = size_int (-adjust / BITS_PER_UNIT);
4719 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4723 *bitstart = *bitpos - bitoffset;
4725 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4728 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4729 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4730 DECL_RTL was not set yet, return NORTL. */
4732 static bool
4733 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4735 if (TREE_CODE (addr) != ADDR_EXPR)
4738 tree base = TREE_OPERAND (addr, 0);
4741 || TREE_ADDRESSABLE (base)
4742 || DECL_MODE (base) == BLKmode)
4745 if (!DECL_RTL_SET_P (base))
4748 return (!MEM_P (DECL_RTL (base)));
4751 /* Returns true if the MEM_REF REF refers to an object that does not
4752 reside in memory and has non-BLKmode. */
4754 static bool
4755 mem_ref_refers_to_non_mem_p (tree ref)
4757 tree base = TREE_OPERAND (ref, 0);
4758 return addr_expr_of_non_mem_decl_p_1 (base, false);
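/* E.g. (editorial illustration) for a local "int x" that the compiler
   keeps in a pseudo register, a dereference of &x is such a MEM_REF:
   it has to be expanded as a register access, not as a load or a
   store.  */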
4761 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4762 is true, try generating a nontemporal store. */
4764 void
4765 expand_assignment (tree to, tree from, bool nontemporal)
4771 enum insn_code icode;
4773 /* Don't crash if the lhs of the assignment was erroneous. */
4774 if (TREE_CODE (to) == ERROR_MARK)
4776 expand_normal (from);
4780 /* Optimize away no-op moves without side-effects. */
4781 if (operand_equal_p (to, from, 0))
4784 /* Handle misaligned stores. */
4785 mode = TYPE_MODE (TREE_TYPE (to));
4786 if ((TREE_CODE (to) == MEM_REF
4787 || TREE_CODE (to) == TARGET_MEM_REF)
4789 && !mem_ref_refers_to_non_mem_p (to)
4790 && ((align = get_object_alignment (to))
4791 < GET_MODE_ALIGNMENT (mode))
4792 && (((icode = optab_handler (movmisalign_optab, mode))
4793 != CODE_FOR_nothing)
4794 || SLOW_UNALIGNED_ACCESS (mode, align)))
4798 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4799 reg = force_not_mem (reg);
4800 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4801 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4802 reg = flip_storage_order (mode, reg);
4804 if (icode != CODE_FOR_nothing)
4806 struct expand_operand ops[2];
4808 create_fixed_operand (&ops[0], mem);
4809 create_input_operand (&ops[1], reg, mode);
4810 /* The movmisalign<mode> pattern cannot fail, else the assignment
4811 would silently be omitted. */
4812 expand_insn (icode, 2, ops);
4815 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4820 /* Assignment of a structure component needs special treatment
4821 if the structure component's rtx is not simply a MEM.
4822 Assignment of an array element at a constant index, and assignment of
4823 an array element in an unaligned packed structure field, has the same
4824 problem. Same for (partially) storing into a non-memory object. */
4825 if (handled_component_p (to)
4826 || (TREE_CODE (to) == MEM_REF
4827 && (REF_REVERSE_STORAGE_ORDER (to)
4828 || mem_ref_refers_to_non_mem_p (to)))
4829 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4832 HOST_WIDE_INT bitsize, bitpos;
4833 unsigned HOST_WIDE_INT bitregion_start = 0;
4834 unsigned HOST_WIDE_INT bitregion_end = 0;
4836 int unsignedp, reversep, volatilep = 0;
4840 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4841 &unsignedp, &reversep, &volatilep, true);
4843 /* Make sure bitpos is not negative, it can wreak havoc later. */
4846 gcc_assert (offset == NULL_TREE);
4847 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4848 ? 3 : exact_log2 (BITS_PER_UNIT)));
4849 bitpos &= BITS_PER_UNIT - 1;
4852 if (TREE_CODE (to) == COMPONENT_REF
4853 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4854 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4855 /* The C++ memory model naturally applies to byte-aligned fields.
4856 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4857 BITSIZE are not byte-aligned, there is no need to limit the range
4858 we can access. This can occur with packed structures in Ada. */
4859 else if (bitsize > 0
4860 && bitsize % BITS_PER_UNIT == 0
4861 && bitpos % BITS_PER_UNIT == 0)
4863 bitregion_start = bitpos;
4864 bitregion_end = bitpos + bitsize - 1;
4867 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4869 /* If the field has a mode, we want to access it in the
4870 field's mode, not the computed mode.
4871 If a MEM has VOIDmode (external with incomplete type),
4872 use BLKmode for it instead. */
4875 if (mode1 != VOIDmode)
4876 to_rtx = adjust_address (to_rtx, mode1, 0);
4877 else if (GET_MODE (to_rtx) == VOIDmode)
4878 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4883 machine_mode address_mode;
4886 if (!MEM_P (to_rtx))
4888 /* We can get constant negative offsets into arrays with broken
4889 user code. Translate this to a trap instead of ICEing. */
4890 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4891 expand_builtin_trap ();
4892 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4895 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4896 address_mode = get_address_mode (to_rtx);
4897 if (GET_MODE (offset_rtx) != address_mode)
4899 /* We cannot be sure that the RTL in offset_rtx is valid outside
4900 of a memory address context, so force it into a register
4901 before attempting to convert it to the desired mode. */
4902 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4903 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4906 /* If we have an expression in OFFSET_RTX and a non-zero
4907 byte offset in BITPOS, adding the byte offset before the
4908 OFFSET_RTX results in better intermediate code, which makes
4909 later rtl optimization passes perform better.
4911 We prefer intermediate code like this:
4913 r124:DI=r123:DI+0x18
4914 [r124:DI]=r121:DI
4916 ... instead of ...
4918 r124:DI=r123:DI+0x10
4919 [r124:DI+0x8]=r121:DI
4921 This is only done for aligned data values, as these can
4922 be expected to result in single move instructions. */
4923 if (mode1 != VOIDmode
4926 && (bitpos % bitsize) == 0
4927 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4928 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4930 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4931 bitregion_start = 0;
4932 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4933 bitregion_end -= bitpos;
4937 to_rtx = offset_address (to_rtx, offset_rtx,
4938 highest_pow2_factor_for_target (to,
4942 /* No action is needed if the target is not a memory and the field
4943 lies completely outside that target. This can occur if the source
4944 code contains an out-of-bounds access to a small array. */
4946 && GET_MODE (to_rtx) != BLKmode
4947 && (unsigned HOST_WIDE_INT) bitpos
4948 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4950 expand_normal (from);
4953 /* Handle expand_expr of a complex value returning a CONCAT. */
4954 else if (GET_CODE (to_rtx) == CONCAT)
4956 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4957 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4959 && bitsize == mode_bitsize)
4960 result = store_expr (from, to_rtx, false, nontemporal, reversep);
4961 else if (bitsize == mode_bitsize / 2
4962 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4963 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4964 nontemporal, reversep);
4965 else if (bitpos + bitsize <= mode_bitsize / 2)
4966 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4967 bitregion_start, bitregion_end,
4968 mode1, from, get_alias_set (to),
4969 nontemporal, reversep);
4970 else if (bitpos >= mode_bitsize / 2)
4971 result = store_field (XEXP (to_rtx, 1), bitsize,
4972 bitpos - mode_bitsize / 2,
4973 bitregion_start, bitregion_end,
4974 mode1, from, get_alias_set (to),
4975 nontemporal, reversep);
4976 else if (bitpos == 0 && bitsize == mode_bitsize)
4979 result = expand_normal (from);
4980 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4981 TYPE_MODE (TREE_TYPE (from)), 0);
4982 emit_move_insn (XEXP (to_rtx, 0),
4983 read_complex_part (from_rtx, false));
4984 emit_move_insn (XEXP (to_rtx, 1),
4985 read_complex_part (from_rtx, true));
4989 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4990 GET_MODE_SIZE (GET_MODE (to_rtx)));
4991 write_complex_part (temp, XEXP (to_rtx, 0), false);
4992 write_complex_part (temp, XEXP (to_rtx, 1), true);
4993 result = store_field (temp, bitsize, bitpos,
4994 bitregion_start, bitregion_end,
4995 mode1, from, get_alias_set (to),
4996 nontemporal, reversep);
4997 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4998 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5005 /* If the field is at offset zero, we could have been given the
5006 DECL_RTX of the parent struct. Don't munge it. */
5007 to_rtx = shallow_copy_rtx (to_rtx);
5008 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5010 MEM_VOLATILE_P (to_rtx) = 1;
5013 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5014 bitregion_start, bitregion_end,
5015 mode1, to_rtx, to, from,
5019 result = store_field (to_rtx, bitsize, bitpos,
5020 bitregion_start, bitregion_end,
5021 mode1, from, get_alias_set (to),
5022 nontemporal, reversep);
5026 preserve_temp_slots (result);
5031 /* If the rhs is a function call and its value is not an aggregate,
5032 call the function before we start to compute the lhs.
5033 This is needed for correct code for cases such as
5034 val = setjmp (buf) on machines where reference to val
5035 requires loading up part of an address in a separate insn.
5037 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5038 since it might be a promoted variable where the zero- or sign- extension
5039 needs to be done. Handling this in the normal way is safe because no
5040 computation is done before the call. The same is true for SSA names. */
5041 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5042 && COMPLETE_TYPE_P (TREE_TYPE (from))
5043 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5044 && ! (((TREE_CODE (to) == VAR_DECL
5045 || TREE_CODE (to) == PARM_DECL
5046 || TREE_CODE (to) == RESULT_DECL)
5047 && REG_P (DECL_RTL (to)))
5048 || TREE_CODE (to) == SSA_NAME))
5054 value = expand_normal (from);
5056 /* Split value and bounds to store them separately. */
5057 chkp_split_slot (value, &value, &bounds);
5060 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5062 /* Handle calls that return values in multiple non-contiguous locations.
5063 The Irix 6 ABI has examples of this. */
5064 if (GET_CODE (to_rtx) == PARALLEL)
5066 if (GET_CODE (value) == PARALLEL)
5067 emit_group_move (to_rtx, value);
5069 emit_group_load (to_rtx, value, TREE_TYPE (from),
5070 int_size_in_bytes (TREE_TYPE (from)));
5072 else if (GET_CODE (value) == PARALLEL)
5073 emit_group_store (to_rtx, value, TREE_TYPE (from),
5074 int_size_in_bytes (TREE_TYPE (from)));
5075 else if (GET_MODE (to_rtx) == BLKmode)
5077 /* Handle calls that return BLKmode values in registers. */
5079 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5081 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5085 if (POINTER_TYPE_P (TREE_TYPE (to)))
5086 value = convert_memory_address_addr_space
5087 (GET_MODE (to_rtx), value,
5088 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5090 emit_move_insn (to_rtx, value);
5093 /* Store bounds if required. */
5094 if (bounds
5095 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5097 gcc_assert (MEM_P (to_rtx));
5098 chkp_emit_bounds_store (bounds, value, to_rtx);
5101 preserve_temp_slots (to_rtx);
5106 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5107 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5109 /* Don't move directly into a return register. */
5110 if (TREE_CODE (to) == RESULT_DECL
5111 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5117 /* If the source is itself a return value, it still is in a pseudo at
5118 this point so we can move it back to the return register directly. */
5120 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5121 && TREE_CODE (from) != CALL_EXPR)
5122 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5124 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5126 /* Handle calls that return values in multiple non-contiguous locations.
5127 The Irix 6 ABI has examples of this. */
5128 if (GET_CODE (to_rtx) == PARALLEL)
5130 if (GET_CODE (temp) == PARALLEL)
5131 emit_group_move (to_rtx, temp);
5133 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5134 int_size_in_bytes (TREE_TYPE (from)));
5137 emit_move_insn (to_rtx, temp);
5139 preserve_temp_slots (to_rtx);
5144 /* In case we are returning the contents of an object which overlaps
5145 the place the value is being stored, use a safe function when copying
5146 a value through a pointer into a structure value return block. */
5147 if (TREE_CODE (to) == RESULT_DECL
5148 && TREE_CODE (from) == INDIRECT_REF
5149 && ADDR_SPACE_GENERIC_P
5150 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5151 && refs_may_alias_p (to, from)
5152 && cfun->returns_struct
5153 && !cfun->returns_pcc_struct)
5158 size = expr_size (from);
5159 from_rtx = expand_normal (from);
5161 emit_library_call (memmove_libfunc, LCT_NORMAL,
5162 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5163 XEXP (from_rtx, 0), Pmode,
5164 convert_to_mode (TYPE_MODE (sizetype),
5165 size, TYPE_UNSIGNED (sizetype)),
5166 TYPE_MODE (sizetype));
5168 preserve_temp_slots (to_rtx);
5173 /* Compute FROM and store the value in the rtx we got. */
5176 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5177 preserve_temp_slots (result);
5182 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5183 succeeded, false otherwise. */
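/* Whether this succeeds depends on the storent<mode> optab; on some
   targets it expands to a streaming store (e.g. movnti on x86), on most
   others it is absent and the caller falls back to a normal move
   (editorial note).  */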
5185 bool
5186 emit_storent_insn (rtx to, rtx from)
5188 struct expand_operand ops[2];
5189 machine_mode mode = GET_MODE (to);
5190 enum insn_code code = optab_handler (storent_optab, mode);
5192 if (code == CODE_FOR_nothing)
5195 create_fixed_operand (&ops[0], to);
5196 create_input_operand (&ops[1], from, mode);
5197 return maybe_expand_insn (code, 2, ops);
5200 /* Generate code for computing expression EXP,
5201 and storing the value into TARGET.
5203 If the mode is BLKmode then we may return TARGET itself.
5204 It turns out that in BLKmode it doesn't cause a problem,
5205 because C has no operators that could combine two different
5206 assignments into the same BLKmode object with different values
5207 with no sequence point.  Will other languages need this to
5208 be more thorough?
5210 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5211 stack, and block moves may need to be treated specially.
5213 If NONTEMPORAL is true, try using a nontemporal store instruction.
5215 If REVERSE is true, the store is to be done in reverse order.
5217 If BTARGET is not NULL then computed bounds of EXP are
5218 associated with BTARGET. */
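/* A typical call, as in expand_assignment above (editorial sketch):

     to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
     store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);

   i.e. not a call parameter, normal storage order, with TO supplying
   the bounds target for pointer-bounds (CHKP) instrumentation.  */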
5220 rtx
5221 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5222 bool nontemporal, bool reverse, tree btarget)
5225 rtx alt_rtl = NULL_RTX;
5226 location_t loc = curr_insn_location ();
5228 if (VOID_TYPE_P (TREE_TYPE (exp)))
5230 /* C++ can generate ?: expressions with a throw expression in one
5231 branch and an rvalue in the other. Here, we resolve attempts to
5232 store the throw expression's nonexistent result. */
5233 gcc_assert (!call_param_p);
5234 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5237 if (TREE_CODE (exp) == COMPOUND_EXPR)
5239 /* Perform first part of compound expression, then assign from second
5240 part.  */
5241 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5242 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5243 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5244 call_param_p, nontemporal, reverse,
5247 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5249 /* For conditional expression, get safe form of the target. Then
5250 test the condition, doing the appropriate assignment on either
5251 side. This avoids the creation of unnecessary temporaries.
5252 For non-BLKmode, it is more efficient not to do this. */
5254 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5256 do_pending_stack_adjust ();
5258 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5259 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5260 nontemporal, reverse, btarget);
5261 emit_jump_insn (targetm.gen_jump (lab2));
5264 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5265 nontemporal, reverse, btarget);
5271 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5272 /* If this is a scalar in a register that is stored in a wider mode
5273 than the declared mode, compute the result into its declared mode
5274 and then convert to the wider mode.  Our value is the computed
5275 expression.  */
5277 rtx inner_target = 0;
5279 /* We can do the conversion inside EXP, which will often result
5280 in some optimizations. Do the conversion in two steps: first
5281 change the signedness, if needed, then the extend. But don't
5282 do this if the type of EXP is a subtype of something else
5283 since then the conversion might involve more than just
5284 converting modes. */
5285 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5286 && TREE_TYPE (TREE_TYPE (exp)) == 0
5287 && GET_MODE_PRECISION (GET_MODE (target))
5288 == TYPE_PRECISION (TREE_TYPE (exp)))
5290 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5291 TYPE_UNSIGNED (TREE_TYPE (exp))))
5293 /* Some types, e.g. Fortran's logical*4, won't have a signed
5294 version, so use the mode instead. */
5296 = (signed_or_unsigned_type_for
5297 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5299 ntype = lang_hooks.types.type_for_mode
5300 (TYPE_MODE (TREE_TYPE (exp)),
5301 SUBREG_PROMOTED_SIGN (target));
5303 exp = fold_convert_loc (loc, ntype, exp);
5306 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5307 (GET_MODE (SUBREG_REG (target)),
5308 SUBREG_PROMOTED_SIGN (target)),
5311 inner_target = SUBREG_REG (target);
5314 temp = expand_expr (exp, inner_target, VOIDmode,
5315 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5317 /* Handle bounds returned by call. */
5318 if (TREE_CODE (exp) == CALL_EXPR)
5321 chkp_split_slot (temp, &temp, &bounds);
5322 if (bounds && btarget)
5324 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5325 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5326 chkp_set_rtl_bounds (btarget, tmp);
5330 /* If TEMP is a VOIDmode constant, use convert_modes to make
5331 sure that we properly convert it. */
5332 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5334 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5335 temp, SUBREG_PROMOTED_SIGN (target));
5336 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5337 GET_MODE (target), temp,
5338 SUBREG_PROMOTED_SIGN (target));
5341 convert_move (SUBREG_REG (target), temp,
5342 SUBREG_PROMOTED_SIGN (target));
5346 else if ((TREE_CODE (exp) == STRING_CST
5347 || (TREE_CODE (exp) == MEM_REF
5348 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5349 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5350 == STRING_CST
5351 && integer_zerop (TREE_OPERAND (exp, 1))))
5352 && !nontemporal && !call_param_p
5353 && MEM_P (target))
5355 /* Optimize initialization of an array with a STRING_CST. */
5356 HOST_WIDE_INT exp_len, str_copy_len;
5358 tree str = TREE_CODE (exp) == STRING_CST
5359 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5361 exp_len = int_expr_size (exp);
5365 if (TREE_STRING_LENGTH (str) <= 0)
5368 str_copy_len = strlen (TREE_STRING_POINTER (str));
5369 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5372 str_copy_len = TREE_STRING_LENGTH (str);
5373 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5374 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5376 str_copy_len += STORE_MAX_PIECES - 1;
5377 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5379 str_copy_len = MIN (str_copy_len, exp_len);
5380 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5381 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5382 MEM_ALIGN (target), false))
5387 dest_mem = store_by_pieces (dest_mem,
5388 str_copy_len, builtin_strncpy_read_str,
5390 TREE_STRING_POINTER (str)),
5391 MEM_ALIGN (target), false,
5392 exp_len > str_copy_len ? 1 : 0);
5393 if (exp_len > str_copy_len)
5394 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5395 GEN_INT (exp_len - str_copy_len),
5404 /* If we want to use a nontemporal or a reverse order store, force the
5405 value into a register first. */
5406 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5407 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5409 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5412 /* Handle bounds returned by call. */
5413 if (TREE_CODE (exp) == CALL_EXPR)
5416 chkp_split_slot (temp, &temp, &bounds);
5417 if (bounds && btarget)
5419 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5420 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5421 chkp_set_rtl_bounds (btarget, tmp);
5426 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5427 the same as that of TARGET, adjust the constant. This is needed, for
5428 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5429 only a word-sized value. */
5430 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5431 && TREE_CODE (exp) != ERROR_MARK
5432 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5433 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5434 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5436 /* If value was not generated in the target, store it there.
5437 Convert the value to TARGET's type first if necessary and emit the
5438 pending incrementations that have been queued when expanding EXP.
5439 Note that we cannot emit the whole queue blindly because this will
5440 effectively disable the POST_INC optimization later.
5442 If TEMP and TARGET compare equal according to rtx_equal_p, but
5443 one or both of them are volatile memory refs, we have to distinguish
5444 two cases:
5445 - expand_expr has used TARGET.  In this case, we must not generate
5446 another copy.  This can be detected by TARGET being equal according
5447 to == .
5448 - expand_expr has not used TARGET - that means that the source just
5449 happens to have the same RTX form. Since temp will have been created
5450 by expand_expr, it will compare unequal according to == .
5451 We must generate a copy in this case, to reach the correct number
5452 of volatile memory references. */
5454 if ((! rtx_equal_p (temp, target)
5455 || (temp != target && (side_effects_p (temp)
5456 || side_effects_p (target))))
5457 && TREE_CODE (exp) != ERROR_MARK
5458 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5459 but TARGET is not valid memory reference, TEMP will differ
5460 from TARGET although it is really the same location. */
5462 && rtx_equal_p (alt_rtl, target)
5463 && !side_effects_p (alt_rtl)
5464 && !side_effects_p (target))
5465 /* If there's nothing to copy, don't bother. Don't call
5466 expr_size unless necessary, because some front-ends (C++)
5467 expr_size-hook must not be given objects that are not
5468 supposed to be bit-copied or bit-initialized. */
5469 && expr_size (exp) != const0_rtx)
5471 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5473 if (GET_MODE (target) == BLKmode)
5475 /* Handle calls that return BLKmode values in registers. */
5476 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5477 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5479 store_bit_field (target,
5480 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5481 0, 0, 0, GET_MODE (temp), temp, reverse);
5484 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5487 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5489 /* Handle copying a string constant into an array. The string
5490 constant may be shorter than the array. So copy just the string's
5491 actual length, and clear the rest. First get the size of the data
5492 type of the string, which is actually the size of the target. */
5493 rtx size = expr_size (exp);
5495 if (CONST_INT_P (size)
5496 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5497 emit_block_move (target, temp, size,
5499 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5502 machine_mode pointer_mode
5503 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5504 machine_mode address_mode = get_address_mode (target);
5506 /* Compute the size of the data to copy from the string. */
5508 = size_binop_loc (loc, MIN_EXPR,
5509 make_tree (sizetype, size),
5510 size_int (TREE_STRING_LENGTH (exp)));
5512 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5514 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5515 rtx_code_label *label = 0;
5517 /* Copy that much. */
5518 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5519 TYPE_UNSIGNED (sizetype));
5520 emit_block_move (target, temp, copy_size_rtx,
5522 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5524 /* Figure out how much is left in TARGET that we have to clear.
5525 Do all calculations in pointer_mode. */
5526 if (CONST_INT_P (copy_size_rtx))
5528 size = plus_constant (address_mode, size,
5529 -INTVAL (copy_size_rtx));
5530 target = adjust_address (target, BLKmode,
5531 INTVAL (copy_size_rtx));
5535 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5536 copy_size_rtx, NULL_RTX, 0,
5539 if (GET_MODE (copy_size_rtx) != address_mode)
5540 copy_size_rtx = convert_to_mode (address_mode,
5542 TYPE_UNSIGNED (sizetype));
5544 target = offset_address (target, copy_size_rtx,
5545 highest_pow2_factor (copy_size));
5546 label = gen_label_rtx ();
5547 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5548 GET_MODE (size), 0, label);
5551 if (size != const0_rtx)
5552 clear_storage (target, size, BLOCK_OP_NORMAL);
5558 /* Handle calls that return values in multiple non-contiguous locations.
5559 The Irix 6 ABI has examples of this. */
5560 else if (GET_CODE (target) == PARALLEL)
5562 if (GET_CODE (temp) == PARALLEL)
5563 emit_group_move (target, temp);
5565 emit_group_load (target, temp, TREE_TYPE (exp),
5566 int_size_in_bytes (TREE_TYPE (exp)));
5568 else if (GET_CODE (temp) == PARALLEL)
5569 emit_group_store (target, temp, TREE_TYPE (exp),
5570 int_size_in_bytes (TREE_TYPE (exp)));
5571 else if (GET_MODE (temp) == BLKmode)
5572 emit_block_move (target, temp, expr_size (exp),
5574 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5575 /* If we emit a nontemporal store, there is nothing else to do. */
5576 else if (nontemporal && emit_storent_insn (target, temp))
5581 temp = flip_storage_order (GET_MODE (target), temp);
5582 temp = force_operand (temp, target);
5584 emit_move_insn (target, temp);
5591 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5592 rtx
5593 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5594 bool reverse)
5596 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5597 reverse, NULL);
5600 /* Return true if field F of structure TYPE is a flexible array. */
5602 static bool
5603 flexible_array_member_p (const_tree f, const_tree type)
5605 tree tf;
5607 tf = TREE_TYPE (f);
5608 return (DECL_CHAIN (f) == NULL
5609 && TREE_CODE (tf) == ARRAY_TYPE
5610 && TYPE_DOMAIN (tf)
5611 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5612 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5613 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5614 && int_size_in_bytes (type) >= 0);
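/* E.g. (editorial illustration) in

     struct msg { int len; char data[]; };

   DATA is a flexible array member: it is the last field, its domain has
   a zero lower bound and no upper bound, and the enclosing struct still
   has a known size.  */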
5617 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5618 must have in order for it to completely initialize a value of type TYPE.
5619 Return -1 if the number isn't known.
5621 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
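/* E.g. (editorial illustration) for "struct { int a; int b[3]; }": with
   !FOR_CTOR_P this counts 4 scalars; with FOR_CTOR_P it counts 2, since
   a complete constructor needs one top-level element per field.  */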
5623 static HOST_WIDE_INT
5624 count_type_elements (const_tree type, bool for_ctor_p)
5626 switch (TREE_CODE (type))
5632 nelts = array_type_nelts (type);
5633 if (nelts && tree_fits_uhwi_p (nelts))
5635 unsigned HOST_WIDE_INT n;
5637 n = tree_to_uhwi (nelts) + 1;
5638 if (n == 0 || for_ctor_p)
5641 return n * count_type_elements (TREE_TYPE (type), false);
5643 return for_ctor_p ? -1 : 1;
5648 unsigned HOST_WIDE_INT n;
5652 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5653 if (TREE_CODE (f) == FIELD_DECL)
5656 n += count_type_elements (TREE_TYPE (f), false);
5657 else if (!flexible_array_member_p (f, type))
5658 /* Don't count flexible arrays, which are not supposed
5659 to be initialized. */
5667 case QUAL_UNION_TYPE:
5672 gcc_assert (!for_ctor_p);
5673 /* Estimate the number of scalars in each field and pick the
5674 maximum. Other estimates would do instead; the idea is simply
5675 to make sure that the estimate is not sensitive to the ordering
5676 of the fields.  */
5678 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5679 if (TREE_CODE (f) == FIELD_DECL)
5681 m = count_type_elements (TREE_TYPE (f), false);
5682 /* If the field doesn't span the whole union, add an extra
5683 scalar for the rest. */
5684 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5685 TYPE_SIZE (type)) != 1)
5697 return TYPE_VECTOR_SUBPARTS (type);
5701 case FIXED_POINT_TYPE:
5706 case REFERENCE_TYPE:
5722 /* Helper for categorize_ctor_elements. Identical interface. */
5724 static bool
5725 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5726 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5728 unsigned HOST_WIDE_INT idx;
5729 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5730 tree value, purpose, elt_type;
5732 /* Whether CTOR is a valid constant initializer, in accordance with what
5733 initializer_constant_valid_p does. If inferred from the constructor
5734 elements, true until proven otherwise. */
5735 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5736 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5741 elt_type = NULL_TREE;
5743 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5745 HOST_WIDE_INT mult = 1;
5747 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5749 tree lo_index = TREE_OPERAND (purpose, 0);
5750 tree hi_index = TREE_OPERAND (purpose, 1);
5752 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5753 mult = (tree_to_uhwi (hi_index)
5754 - tree_to_uhwi (lo_index) + 1);
5757 elt_type = TREE_TYPE (value);
5759 switch (TREE_CODE (value))
5763 HOST_WIDE_INT nz = 0, ic = 0;
5765 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5768 nz_elts += mult * nz;
5769 init_elts += mult * ic;
5771 if (const_from_elts_p && const_p)
5772 const_p = const_elt_p;
5779 if (!initializer_zerop (value))
5785 nz_elts += mult * TREE_STRING_LENGTH (value);
5786 init_elts += mult * TREE_STRING_LENGTH (value);
5790 if (!initializer_zerop (TREE_REALPART (value)))
5792 if (!initializer_zerop (TREE_IMAGPART (value)))
5800 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5802 tree v = VECTOR_CST_ELT (value, i);
5803 if (!initializer_zerop (v))
5812 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5813 nz_elts += mult * tc;
5814 init_elts += mult * tc;
5816 if (const_from_elts_p && const_p)
5818 = initializer_constant_valid_p (value,
5820 TYPE_REVERSE_STORAGE_ORDER
5828 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5829 num_fields, elt_type))
5830 *p_complete = false;
5832 *p_nz_elts += nz_elts;
5833 *p_init_elts += init_elts;
5838 /* Examine CTOR to discover:
5839 * how many scalar fields are set to nonzero values,
5840 and place it in *P_NZ_ELTS;
5841 * how many scalar fields in total are in CTOR,
5842 and place it in *P_INIT_ELTS.
5843 * whether the constructor is complete -- in the sense that every
5844 meaningful byte is explicitly given a value --
5845 and place it in *P_COMPLETE.
5847 Return whether or not CTOR is a valid static constant initializer, the same
5848 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5850 bool
5851 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5852 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5858 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5861 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5862 of which had type LAST_TYPE. Each element was itself a complete
5863 initializer, in the sense that every meaningful byte was explicitly
5864 given a value.  Return true if the same is true for the constructor
5865 as a whole.  */
5867 bool
5868 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5869 const_tree last_type)
5871 if (TREE_CODE (type) == UNION_TYPE
5872 || TREE_CODE (type) == QUAL_UNION_TYPE)
5874 if (num_elts == 0)
5875 return false;
5877 gcc_assert (num_elts == 1 && last_type);
5879 /* ??? We could look at each element of the union, and find the
5880 largest element. Which would avoid comparing the size of the
5881 initialized element against any tail padding in the union.
5882 Doesn't seem worth the effort... */
5883 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5886 return count_type_elements (type, true) == num_elts;
5889 /* Return 1 if EXP contains mostly (3/4) zeros. */
5891 static int
5892 mostly_zeros_p (const_tree exp)
5894 if (TREE_CODE (exp) == CONSTRUCTOR)
5896 HOST_WIDE_INT nz_elts, init_elts;
5899 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5900 return !complete_p || nz_elts < init_elts / 4;
5903 return initializer_zerop (exp);
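/* E.g. (editorial illustration) "int v[8] = { 1 }" is mostly zeros
   because its constructor is incomplete, and an explicit
   { 0, 0, 0, 0, 0, 0, 0, 9 } also is, since nz_elts == 1 is below
   init_elts / 4 == 2.  */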
5906 /* Return 1 if EXP contains all zeros. */
5908 static int
5909 all_zeros_p (const_tree exp)
5911 if (TREE_CODE (exp) == CONSTRUCTOR)
5913 HOST_WIDE_INT nz_elts, init_elts;
5916 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5917 return nz_elts == 0;
5920 return initializer_zerop (exp);
5923 /* Helper function for store_constructor.
5924 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5925 CLEARED is as for store_constructor.
5926 ALIAS_SET is the alias set to use for any stores.
5927 If REVERSE is true, the store is to be done in reverse order.
5929 This provides a recursive shortcut back to store_constructor when it isn't
5930 necessary to go through store_field. This is so that we can pass through
5931 the cleared field to let store_constructor know that we may not have to
5932 clear a substructure if the outer structure has already been cleared. */
5934 static void
5935 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5936 HOST_WIDE_INT bitpos,
5937 unsigned HOST_WIDE_INT bitregion_start,
5938 unsigned HOST_WIDE_INT bitregion_end,
5940 tree exp, int cleared,
5941 alias_set_type alias_set, bool reverse)
5943 if (TREE_CODE (exp) == CONSTRUCTOR
5944 /* We can only call store_constructor recursively if the size and
5945 bit position are on a byte boundary. */
5946 && bitpos % BITS_PER_UNIT == 0
5947 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5948 /* If we have a nonzero bitpos for a register target, then we just
5949 let store_field do the bitfield handling. This is unlikely to
5950 generate unnecessary clear instructions anyways. */
5951 && (bitpos == 0 || MEM_P (target)))
5955 = adjust_address (target,
5956 GET_MODE (target) == BLKmode
5958 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5959 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5962 /* Update the alias set, if required. */
5963 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5964 && MEM_ALIAS_SET (target) != 0)
5966 target = copy_rtx (target);
5967 set_mem_alias_set (target, alias_set);
5970 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
5974 store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
5975 exp, alias_set, false, reverse);
5979 /* Returns the number of FIELD_DECLs in TYPE. */
5981 static int
5982 fields_length (const_tree type)
5984 tree t = TYPE_FIELDS (type);
5987 for (; t; t = DECL_CHAIN (t))
5988 if (TREE_CODE (t) == FIELD_DECL)
5995 /* Store the value of constructor EXP into the rtx TARGET.
5996 TARGET is either a REG or a MEM; we know it cannot conflict, since
5997 safe_from_p has been called.
5998 CLEARED is true if TARGET is known to have been zero'd.
5999 SIZE is the number of bytes of TARGET we are allowed to modify: this
6000 may not be the same as the size of EXP if we are assigning to a field
6001 which has been packed to exclude padding bits.
6002 If REVERSE is true, the store is to be done in reverse order. */
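/* E.g. (editorial illustration) for "struct S { int a, b, c; } s = { 1 };"
   the constructor has fewer elements than the type has fields, so the
   RECORD_TYPE case below clears the whole object once and then stores
   only A, leaving the cleared B and C alone.  */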
6004 static void
6005 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
6006 bool reverse)
6008 tree type = TREE_TYPE (exp);
6009 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6010 HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0;
6012 switch (TREE_CODE (type))
6016 case QUAL_UNION_TYPE:
6018 unsigned HOST_WIDE_INT idx;
6021 /* The storage order is specified for every aggregate type. */
6022 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6024 /* If size is zero or the target is already cleared, do nothing. */
6025 if (size == 0 || cleared)
6027 /* We either clear the aggregate or indicate the value is dead. */
6028 else if ((TREE_CODE (type) == UNION_TYPE
6029 || TREE_CODE (type) == QUAL_UNION_TYPE)
6030 && ! CONSTRUCTOR_ELTS (exp))
6031 /* If the constructor is empty, clear the union. */
6033 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6037 /* If we are building a static constructor into a register,
6038 set the initial value as zero so we can fold the value into
6039 a constant. But if more than one register is involved,
6040 this probably loses. */
6041 else if (REG_P (target) && TREE_STATIC (exp)
6042 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6044 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6048 /* If the constructor has fewer fields than the structure or
6049 if we are initializing the structure to mostly zeros, clear
6050 the whole structure first. Don't do this if TARGET is a
6051 register whose mode size isn't equal to SIZE since
6052 clear_storage can't handle this case. */
6054 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6055 != fields_length (type))
6056 || mostly_zeros_p (exp))
6058 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6061 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6065 if (REG_P (target) && !cleared)
6066 emit_clobber (target);
6068 /* Store each element of the constructor into the
6069 corresponding field of TARGET. */
6070 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6073 HOST_WIDE_INT bitsize;
6074 HOST_WIDE_INT bitpos = 0;
6076 rtx to_rtx = target;
6078 /* Just ignore missing fields. We cleared the whole
6079 structure, above, if any fields are missing. */
6083 if (cleared && initializer_zerop (value))
6086 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6087 bitsize = tree_to_uhwi (DECL_SIZE (field));
6091 mode = DECL_MODE (field);
6092 if (DECL_BIT_FIELD (field))
6095 offset = DECL_FIELD_OFFSET (field);
6096 if (tree_fits_shwi_p (offset)
6097 && tree_fits_shwi_p (bit_position (field)))
6099 bitpos = int_bit_position (field);
6105 /* If this initializes a field that is smaller than a
6106 word, at the start of a word, try to widen it to a full
6107 word. This special case allows us to output C++ member
6108 function initializations in a form that the optimizers
6109 can understand.  */
6110 if (WORD_REGISTER_OPERATIONS
6112 && bitsize < BITS_PER_WORD
6113 && bitpos % BITS_PER_WORD == 0
6114 && GET_MODE_CLASS (mode) == MODE_INT
6115 && TREE_CODE (value) == INTEGER_CST
6117 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6119 tree type = TREE_TYPE (value);
6121 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6123 type = lang_hooks.types.type_for_mode
6124 (word_mode, TYPE_UNSIGNED (type));
6125 value = fold_convert (type, value);
6126 /* Make sure the bits beyond the original bitsize are zero
6127 so that we can correctly avoid extra zeroing stores in
6128 later constructor elements. */
6130 = wide_int_to_tree (type, wi::mask (bitsize, false,
6132 value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6135 if (BYTES_BIG_ENDIAN)
6137 = fold_build2 (LSHIFT_EXPR, type, value,
6138 build_int_cst (type,
6139 BITS_PER_WORD - bitsize));
6140 bitsize = BITS_PER_WORD;
6144 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6145 && DECL_NONADDRESSABLE_P (field))
6147 to_rtx = copy_rtx (to_rtx);
6148 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6151 store_constructor_field (to_rtx, bitsize, bitpos,
6152 0, bitregion_end, mode,
6154 get_alias_set (TREE_TYPE (field)),
6162 unsigned HOST_WIDE_INT i;
6165 tree elttype = TREE_TYPE (type);
6167 HOST_WIDE_INT minelt = 0;
6168 HOST_WIDE_INT maxelt = 0;
6170 /* The storage order is specified for every aggregate type. */
6171 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6173 domain = TYPE_DOMAIN (type);
6174 const_bounds_p = (TYPE_MIN_VALUE (domain)
6175 && TYPE_MAX_VALUE (domain)
6176 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6177 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6179 /* If we have constant bounds for the range of the type, get them. */
6182 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6183 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6186 /* If the constructor has fewer elements than the array, clear
6187 the whole array first.  Similarly if this is a static
6188 constructor of a non-BLKmode object. */
6189 if (cleared)
6190 need_to_clear = 0;
6191 else if (REG_P (target) && TREE_STATIC (exp))
6192 need_to_clear = 1;
6193 else
6195 unsigned HOST_WIDE_INT idx;
6197 HOST_WIDE_INT count = 0, zero_count = 0;
6198 need_to_clear = ! const_bounds_p;
6200 /* This loop is a more accurate version of the loop in
6201 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6202 is also needed to check for missing elements. */
6203 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6205 HOST_WIDE_INT this_node_count;
6210 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6212 tree lo_index = TREE_OPERAND (index, 0);
6213 tree hi_index = TREE_OPERAND (index, 1);
6215 if (! tree_fits_uhwi_p (lo_index)
6216 || ! tree_fits_uhwi_p (hi_index))
6222 this_node_count = (tree_to_uhwi (hi_index)
6223 - tree_to_uhwi (lo_index) + 1);
6226 this_node_count = 1;
6228 count += this_node_count;
6229 if (mostly_zeros_p (value))
6230 zero_count += this_node_count;
6233 /* Clear the entire array first if there are any missing
6234 elements, or if the incidence of zero elements is >=
6235 75%.  */
6236 if (! need_to_clear
6237 && (count < maxelt - minelt + 1
6238 || 4 * zero_count >= 3 * count))
6239 need_to_clear = 1;
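/* I.e. clear when the fraction of (mostly) zero elements reaches 75%:
   "4 * zero_count >= 3 * count" is zero_count / count >= 3/4 computed
   without division (editorial note).  */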
6242 if (need_to_clear && size > 0)
6245 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6247 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6251 if (!cleared && REG_P (target))
6252 /* Inform later passes that the old value is dead. */
6253 emit_clobber (target);
6255 /* Store each element of the constructor into the
6256 corresponding element of TARGET, determined by counting the
6257 elements.  */
6258 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6259 {
6260 machine_mode mode;
6261 HOST_WIDE_INT bitsize;
6262 HOST_WIDE_INT bitpos;
6263 rtx xtarget = target;
6265 if (cleared && initializer_zerop (value))
6266 continue;
6268 mode = TYPE_MODE (elttype);
6269 if (mode == BLKmode)
6270 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6271 ? tree_to_uhwi (TYPE_SIZE (elttype))
6272 : -1);
6273 else
6274 bitsize = GET_MODE_BITSIZE (mode);
6276 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6277 {
6278 tree lo_index = TREE_OPERAND (index, 0);
6279 tree hi_index = TREE_OPERAND (index, 1);
6280 rtx index_r, pos_rtx;
6281 HOST_WIDE_INT lo, hi, count;
6282 tree position;
6284 /* If the range is constant and "small", unroll the loop. */
6285 if (const_bounds_p
6286 && tree_fits_shwi_p (lo_index)
6287 && tree_fits_shwi_p (hi_index)
6288 && (lo = tree_to_shwi (lo_index),
6289 hi = tree_to_shwi (hi_index),
6290 count = hi - lo + 1,
6291 (!MEM_P (target)
6292 || count <= 2
6293 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6294 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6295 <= 40 * 8)))))
6296 {
6297 lo -= minelt; hi -= minelt;
6298 for (; lo <= hi; lo++)
6299 {
6300 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6302 if (MEM_P (target)
6303 && !MEM_KEEP_ALIAS_SET_P (target)
6304 && TREE_CODE (type) == ARRAY_TYPE
6305 && TYPE_NONALIASED_COMPONENT (type))
6306 {
6307 target = copy_rtx (target);
6308 MEM_KEEP_ALIAS_SET_P (target) = 1;
6309 }
6311 store_constructor_field
6312 (target, bitsize, bitpos, 0, bitregion_end,
6313 mode, value, cleared,
6314 get_alias_set (elttype), reverse);
6315 }
6316 }
6317 else
6318 {
6319 rtx_code_label *loop_start = gen_label_rtx ();
6320 rtx_code_label *loop_end = gen_label_rtx ();
6321 tree exit_cond;
6323 expand_normal (hi_index);
6325 index = build_decl (EXPR_LOCATION (exp),
6326 VAR_DECL, NULL_TREE, domain);
6327 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6328 SET_DECL_RTL (index, index_r);
6329 store_expr (lo_index, index_r, 0, false, reverse);
6331 /* Build the head of the loop. */
6332 do_pending_stack_adjust ();
6333 emit_label (loop_start);
6335 /* Assign value to element index. */
6336 position =
6337 fold_convert (ssizetype,
6338 fold_build2 (MINUS_EXPR,
6339 TREE_TYPE (index),
6340 index,
6341 TYPE_MIN_VALUE (domain)));
6343 position =
6344 size_binop (MULT_EXPR, position,
6345 fold_convert (ssizetype,
6346 TYPE_SIZE_UNIT (elttype)));
6348 pos_rtx = expand_normal (position);
6349 xtarget = offset_address (target, pos_rtx,
6350 highest_pow2_factor (position));
6351 xtarget = adjust_address (xtarget, mode, 0);
6352 if (TREE_CODE (value) == CONSTRUCTOR)
6353 store_constructor (value, xtarget, cleared,
6354 bitsize / BITS_PER_UNIT, reverse);
6355 else
6356 store_expr (value, xtarget, 0, false, reverse);
6358 /* Generate a conditional jump to exit the loop. */
6359 exit_cond = build2 (LT_EXPR, integer_type_node,
6360 index, hi_index);
6361 jumpif (exit_cond, loop_end, -1);
6363 /* Update the loop counter, and jump to the head of
6364 the loop. */
6365 expand_assignment (index,
6366 build2 (PLUS_EXPR, TREE_TYPE (index),
6367 index, integer_one_node),
6368 false);
6370 emit_jump (loop_start);
6372 /* Build the end of the loop. */
6373 emit_label (loop_end);
6374 }
6375 }
6376 else if ((index != 0 && ! tree_fits_shwi_p (index))
6377 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6378 {
6379 tree position;
6381 if (index == 0)
6382 index = ssize_int (1);
6384 if (minelt)
6385 index = fold_convert (ssizetype,
6386 fold_build2 (MINUS_EXPR,
6387 TREE_TYPE (index),
6388 index,
6389 TYPE_MIN_VALUE (domain)));
6391 position =
6392 size_binop (MULT_EXPR, index,
6393 fold_convert (ssizetype,
6394 TYPE_SIZE_UNIT (elttype)));
6395 xtarget = offset_address (target,
6396 expand_normal (position),
6397 highest_pow2_factor (position));
6398 xtarget = adjust_address (xtarget, mode, 0);
6399 store_expr (value, xtarget, 0, false, reverse);
6400 }
6401 else
6402 {
6403 if (index != 0)
6404 bitpos = ((tree_to_shwi (index) - minelt)
6405 * tree_to_uhwi (TYPE_SIZE (elttype)));
6406 else
6407 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6409 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6410 && TREE_CODE (type) == ARRAY_TYPE
6411 && TYPE_NONALIASED_COMPONENT (type))
6412 {
6413 target = copy_rtx (target);
6414 MEM_KEEP_ALIAS_SET_P (target) = 1;
6415 }
6416 store_constructor_field (target, bitsize, bitpos, 0,
6417 bitregion_end, mode, value,
6418 cleared, get_alias_set (elttype),
6419 reverse);
6420 }
6421 }
6422 break;
6423 }
6425 case VECTOR_TYPE:
6426 {
6427 unsigned HOST_WIDE_INT idx;
6428 constructor_elt *ce;
6429 int i;
6430 int need_to_clear;
6431 int icode = CODE_FOR_nothing;
6432 tree elttype = TREE_TYPE (type);
6433 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6434 machine_mode eltmode = TYPE_MODE (elttype);
6435 HOST_WIDE_INT bitsize;
6436 HOST_WIDE_INT bitpos;
6437 rtvec vector = NULL;
6438 unsigned n_elts;
6439 alias_set_type alias;
6441 gcc_assert (eltmode != BLKmode);
6443 n_elts = TYPE_VECTOR_SUBPARTS (type);
6444 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6445 {
6446 machine_mode mode = GET_MODE (target);
6448 icode = (int) optab_handler (vec_init_optab, mode);
6449 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6450 if (icode != CODE_FOR_nothing)
6451 {
6452 tree value;
6454 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6455 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6456 {
6457 icode = CODE_FOR_nothing;
6458 break;
6459 }
6460 }
6461 if (icode != CODE_FOR_nothing)
6462 {
6463 unsigned int i;
6465 vector = rtvec_alloc (n_elts);
6466 for (i = 0; i < n_elts; i++)
6467 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6468 }
6469 }
6471 /* If the constructor has fewer elements than the vector,
6472 clear the whole array first. Similarly if this is static
6473 constructor of a non-BLKmode object. */
6474 if (cleared)
6475 need_to_clear = 0;
6476 else if (REG_P (target) && TREE_STATIC (exp))
6477 need_to_clear = 1;
6478 else
6479 {
6480 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6481 tree value;
6483 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6484 {
6485 int n_elts_here = tree_to_uhwi
6486 (int_const_binop (TRUNC_DIV_EXPR,
6487 TYPE_SIZE (TREE_TYPE (value)),
6488 TYPE_SIZE (elttype)));
6490 count += n_elts_here;
6491 if (mostly_zeros_p (value))
6492 zero_count += n_elts_here;
6493 }
6495 /* Clear the entire vector first if there are any missing elements,
6496 or if the incidence of zero elements is >= 75%. */
6497 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6498 }
6500 if (need_to_clear && size > 0 && !vector)
6501 {
6502 if (REG_P (target))
6503 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6504 else
6505 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6506 cleared = 1;
6507 }
6509 /* Inform later passes that the old value is dead. */
6510 if (!cleared && !vector && REG_P (target))
6511 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6513 if (MEM_P (target))
6514 alias = MEM_ALIAS_SET (target);
6515 else
6516 alias = get_alias_set (elttype);
6518 /* Store each element of the constructor into the corresponding
6519 element of TARGET, determined by counting the elements. */
6520 for (idx = 0, i = 0;
6521 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6522 idx++, i += bitsize / elt_size)
6523 {
6524 HOST_WIDE_INT eltpos;
6525 tree value = ce->value;
6527 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6528 if (cleared && initializer_zerop (value))
6529 continue;
6531 if (ce->index)
6532 eltpos = tree_to_uhwi (ce->index);
6533 else
6534 eltpos = i;
6536 if (vector)
6537 {
6538 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6539 elements. */
6540 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6541 RTVEC_ELT (vector, eltpos)
6542 = expand_normal (value);
6543 }
6544 else
6545 {
6546 machine_mode value_mode =
6547 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6548 ? TYPE_MODE (TREE_TYPE (value))
6549 : eltmode;
6550 bitpos = eltpos * elt_size;
6551 store_constructor_field (target, bitsize, bitpos, 0,
6552 bitregion_end, value_mode,
6553 value, cleared, alias, reverse);
6554 }
6555 }
6557 if (vector)
6558 emit_insn (GEN_FCN (icode)
6559 (target,
6560 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6561 break;
6562 }
6564 default:
6565 gcc_unreachable ();
6566 }
6567 }
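/* Summary (added for clarity): store_constructor picks one of three
   strategies per aggregate kind: bulk-clear the target and store only the
   nonzero elements, store every element field by field, or, for vectors
   with a usable vec_init pattern, collect the element rtxes in a PARALLEL
   and emit a single vector-initialization insn.  */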
6569 /* Store the value of EXP (an expression tree)
6570 into a subfield of TARGET which has mode MODE and occupies
6571 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6572 If MODE is VOIDmode, it means that we are storing into a bit-field.
6574 BITREGION_START is bitpos of the first bitfield in this region.
6575 BITREGION_END is the bitpos of the ending bitfield in this region.
6576 These two fields are 0, if the C++ memory model does not apply,
6577 or we are not interested in keeping track of bitfield regions.
6579 Always return const0_rtx unless we have something particular to
6580 return.
6582 ALIAS_SET is the alias set for the destination. This value will
6583 (in general) be different from that for TARGET, since TARGET is a
6584 reference to the containing structure.
6586 If NONTEMPORAL is true, try generating a nontemporal store.
6588 If REVERSE is true, the store is to be done in reverse order. */
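/* Example (added, illustrative): for a C assignment "s.f = v" where f is
   declared "int f : 3" at bit offset 2, this function is reached with
   BITSIZE == 3, BITPOS == 2 and MODE == VOIDmode, so the store is routed
   to store_bit_field rather than to an ordinary memory reference.  */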
6590 static rtx
6591 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6592 unsigned HOST_WIDE_INT bitregion_start,
6593 unsigned HOST_WIDE_INT bitregion_end,
6594 machine_mode mode, tree exp,
6595 alias_set_type alias_set, bool nontemporal, bool reverse)
6596 {
6597 if (TREE_CODE (exp) == ERROR_MARK)
6598 return const0_rtx;
6600 /* If we have nothing to store, do nothing unless the expression has
6601 side-effects. */
6602 if (bitsize == 0)
6603 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6605 if (GET_CODE (target) == CONCAT)
6606 {
6607 /* We're storing into a struct containing a single __complex. */
6609 gcc_assert (!bitpos);
6610 return store_expr (exp, target, 0, nontemporal, reverse);
6611 }
6613 /* If the structure is in a register or if the component
6614 is a bit field, we cannot use addressing to access it.
6615 Use bit-field techniques or SUBREG to store in it. */
6617 if (mode == VOIDmode
6618 || (mode != BLKmode && ! direct_store[(int) mode]
6619 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6620 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6621 || REG_P (target)
6622 || GET_CODE (target) == SUBREG
6623 /* If the field isn't aligned enough to store as an ordinary memref,
6624 store it as a bit field. */
6625 || (mode != BLKmode
6626 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6627 || bitpos % GET_MODE_ALIGNMENT (mode))
6628 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6629 || (bitpos % BITS_PER_UNIT != 0)))
6630 || (bitsize >= 0 && mode != BLKmode
6631 && GET_MODE_BITSIZE (mode) > bitsize)
6632 /* If the RHS and field are a constant size and the size of the
6633 RHS isn't the same size as the bitfield, we must use bitfield
6634 operations. */
6635 || (bitsize >= 0
6636 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6637 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
6638 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6639 we will handle specially below. */
6640 && !(TREE_CODE (exp) == CONSTRUCTOR
6641 && bitsize % BITS_PER_UNIT == 0)
6642 /* And except for bitwise copying of TREE_ADDRESSABLE types,
6643 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6644 includes some extra padding. store_expr / expand_expr will in
6645 that case call get_inner_reference that will have the bitsize
6646 we check here and thus the block move will not clobber the
6647 padding that shouldn't be clobbered. In the future we could
6648 replace the TREE_ADDRESSABLE check with a check that
6649 get_base_address needs to live in memory. */
6650 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6651 || TREE_CODE (exp) != COMPONENT_REF
6652 || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST
6653 || (bitsize % BITS_PER_UNIT != 0)
6654 || (bitpos % BITS_PER_UNIT != 0)
6655 || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize)
6656 != 0))
6657 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6658 decl we must use bitfield operations. */
6659 || (bitsize >= 0
6660 && TREE_CODE (exp) == MEM_REF
6661 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6662 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6663 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6664 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6665 {
6666 rtx temp;
6667 gimple *nop_def;
6669 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6670 implies a mask operation. If the precision is the same size as
6671 the field we're storing into, that mask is redundant. This is
6672 particularly common with bit field assignments generated by the
6673 C++ front end. */
6674 nop_def = get_def_for_expr (exp, NOP_EXPR);
6675 if (nop_def)
6676 {
6677 tree type = TREE_TYPE (exp);
6678 if (INTEGRAL_TYPE_P (type)
6679 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6680 && bitsize == TYPE_PRECISION (type))
6681 {
6682 tree op = gimple_assign_rhs1 (nop_def);
6683 type = TREE_TYPE (op);
6684 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6685 exp = op;
6686 }
6687 }
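/* Illustrative case (added): if EXP converts to a 3-bit bit-field type
   that is laid out in QImode, the NOP_EXPR implies masking with 0x7; when
   the destination BITSIZE is also 3, the store itself discards the upper
   bits anyway, so the unconverted operand can be expanded directly.  */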
6689 temp = expand_normal (exp);
6691 /* Handle calls that return values in multiple non-contiguous locations.
6692 The Irix 6 ABI has examples of this. */
6693 if (GET_CODE (temp) == PARALLEL)
6694 {
6695 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6696 machine_mode temp_mode
6697 = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6698 rtx temp_target = gen_reg_rtx (temp_mode);
6699 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6700 temp = temp_target;
6701 }
6703 /* Handle calls that return BLKmode values in registers. */
6704 else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6705 {
6706 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6707 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6708 temp = temp_target;
6709 }
6711 /* If the value has aggregate type and an integral mode then, if BITSIZE
6712 is narrower than this mode and this is for big-endian data, we first
6713 need to put the value into the low-order bits for store_bit_field,
6714 except when MODE is BLKmode and BITSIZE larger than the word size
6715 (see the handling of fields larger than a word in store_bit_field).
6716 Moreover, the field may be not aligned on a byte boundary; in this
6717 case, if it has reverse storage order, it needs to be accessed as a
6718 scalar field with reverse storage order and we must first put the
6719 value into target order. */
6720 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6721 && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT)
6722 {
6723 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (temp));
6725 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6727 if (reverse)
6728 temp = flip_storage_order (GET_MODE (temp), temp);
6730 if (bitsize < size
6731 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
6732 && !(mode == BLKmode && bitsize > BITS_PER_WORD))
6733 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6734 size - bitsize, NULL_RTX, 1);
6735 }
6737 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6738 if (mode != VOIDmode && mode != BLKmode
6739 && mode != TYPE_MODE (TREE_TYPE (exp)))
6740 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6742 /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
6743 and BITPOS must be aligned on a byte boundary. If so, we simply do
6744 a block copy. Likewise for a BLKmode-like TARGET. */
6745 if (GET_MODE (temp) == BLKmode
6746 && (GET_MODE (target) == BLKmode
6747 || (MEM_P (target)
6748 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6749 && (bitpos % BITS_PER_UNIT) == 0
6750 && (bitsize % BITS_PER_UNIT) == 0)))
6751 {
6752 gcc_assert (MEM_P (target) && MEM_P (temp)
6753 && (bitpos % BITS_PER_UNIT) == 0);
6755 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6756 emit_block_move (target, temp,
6757 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6758 / BITS_PER_UNIT),
6759 BLOCK_OP_NORMAL);
6761 return const0_rtx;
6762 }
6764 /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
6765 word size, we need to load the value (see again store_bit_field). */
6766 if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD)
6767 {
6768 machine_mode temp_mode = smallest_mode_for_size (bitsize, MODE_INT);
6769 temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
6770 temp_mode, false);
6771 }
6773 /* Store the value in the bitfield. */
6774 store_bit_field (target, bitsize, bitpos,
6775 bitregion_start, bitregion_end,
6776 mode, temp, reverse);
6778 return const0_rtx;
6779 }
6780 else
6781 {
6782 /* Now build a reference to just the desired component. */
6783 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6785 if (to_rtx == target)
6786 to_rtx = copy_rtx (to_rtx);
6788 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6789 set_mem_alias_set (to_rtx, alias_set);
6791 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6792 into a target smaller than its type; handle that case now. */
6793 if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
6794 {
6795 gcc_assert (bitsize % BITS_PER_UNIT == 0);
6796 store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
6797 return to_rtx;
6798 }
6800 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
6801 }
6802 }
6804 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6805 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6806 codes and find the ultimate containing object, which we return.
6808 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6809 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
6810 storage order of the field.
6811 If the position of the field is variable, we store a tree
6812 giving the variable offset (in units) in *POFFSET.
6813 This offset is in addition to the bit position.
6814 If the position is not variable, we store 0 in *POFFSET.
6816 If any of the extraction expressions is volatile,
6817 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6819 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6820 Otherwise, it is a mode that can be used to access the field.
6822 If the field describes a variable-sized object, *PMODE is set to
6823 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6824 this case, but the address of the object can be found.
6826 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6827 look through nodes that serve as markers of a greater alignment than
6828 the one that can be deduced from the expression. These nodes make it
6829 possible for front-ends to prevent temporaries from being created by
6830 the middle-end on alignment considerations. For that purpose, the
6831 normal operating mode at high-level is to always pass FALSE so that
6832 the ultimate containing object is really returned; moreover, the
6833 associated predicate handled_component_p will always return TRUE
6834 on these nodes, thus indicating that they are essentially handled
6835 by get_inner_reference. TRUE should only be passed when the caller
6836 is scanning the expression in order to build another representation
6837 and specifically knows how to handle these nodes; as such, this is
6838 the normal operating mode in the RTL expanders. */
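/* Example (added, illustrative): for EXP == a.b[i].c the loop below peels
   the COMPONENT_REF and ARRAY_REF nodes one by one; the constant parts of
   the position accumulate in *PBITPOS, the variable part (i scaled by the
   element size) ends up in *POFFSET, and the tree returned is the
   innermost object "a".  */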
6840 tree
6841 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6842 HOST_WIDE_INT *pbitpos, tree *poffset,
6843 machine_mode *pmode, int *punsignedp,
6844 int *preversep, int *pvolatilep, bool keep_aligning)
6845 {
6846 tree size_tree = 0;
6847 machine_mode mode = VOIDmode;
6848 bool blkmode_bitfield = false;
6849 tree offset = size_zero_node;
6850 offset_int bit_offset = 0;
6852 /* First get the mode, signedness, storage order and size. We do this from
6853 just the outermost expression. */
6855 if (TREE_CODE (exp) == COMPONENT_REF)
6856 {
6857 tree field = TREE_OPERAND (exp, 1);
6858 size_tree = DECL_SIZE (field);
6859 if (flag_strict_volatile_bitfields > 0
6860 && TREE_THIS_VOLATILE (exp)
6861 && DECL_BIT_FIELD_TYPE (field)
6862 && DECL_MODE (field) != BLKmode)
6863 /* Volatile bitfields should be accessed in the mode of the
6864 field's type, not the mode computed based on the bit
6865 size. */
6866 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6867 else if (!DECL_BIT_FIELD (field))
6868 mode = DECL_MODE (field);
6869 else if (DECL_MODE (field) == BLKmode)
6870 blkmode_bitfield = true;
6872 *punsignedp = DECL_UNSIGNED (field);
6873 }
6874 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6875 {
6876 size_tree = TREE_OPERAND (exp, 1);
6877 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6878 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6880 /* For vector types, with the correct size of access, use the mode of
6881 inner type. */
6882 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6883 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6884 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6885 mode = TYPE_MODE (TREE_TYPE (exp));
6886 }
6887 else
6888 {
6889 mode = TYPE_MODE (TREE_TYPE (exp));
6890 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6892 if (mode == BLKmode)
6893 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6894 else
6895 *pbitsize = GET_MODE_BITSIZE (mode);
6896 }
6898 if (size_tree != 0)
6899 {
6900 if (! tree_fits_uhwi_p (size_tree))
6901 mode = BLKmode, *pbitsize = -1;
6902 else
6903 *pbitsize = tree_to_uhwi (size_tree);
6904 }
6906 *preversep = reverse_storage_order_for_component_p (exp);
6908 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6909 and find the ultimate containing object. */
6910 while (1)
6911 {
6912 switch (TREE_CODE (exp))
6913 {
6914 case BIT_FIELD_REF:
6915 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6916 break;
6918 case COMPONENT_REF:
6919 {
6920 tree field = TREE_OPERAND (exp, 1);
6921 tree this_offset = component_ref_field_offset (exp);
6923 /* If this field hasn't been filled in yet, don't go past it.
6924 This should only happen when folding expressions made during
6925 type construction. */
6926 if (this_offset == 0)
6927 break;
6929 offset = size_binop (PLUS_EXPR, offset, this_offset);
6930 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6932 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6933 }
6934 break;
6936 case ARRAY_REF:
6937 case ARRAY_RANGE_REF:
6938 {
6939 tree index = TREE_OPERAND (exp, 1);
6940 tree low_bound = array_ref_low_bound (exp);
6941 tree unit_size = array_ref_element_size (exp);
6943 /* We assume all arrays have sizes that are a multiple of a byte.
6944 First subtract the lower bound, if any, in the type of the
6945 index, then convert to sizetype and multiply by the size of
6946 the array element. */
6947 if (! integer_zerop (low_bound))
6948 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6949 index, low_bound);
6951 offset = size_binop (PLUS_EXPR, offset,
6952 size_binop (MULT_EXPR,
6953 fold_convert (sizetype, index),
6954 unit_size));
6955 }
6956 break;
6958 case REALPART_EXPR:
6959 break;
6961 case IMAGPART_EXPR:
6962 bit_offset += *pbitsize;
6963 break;
6965 case VIEW_CONVERT_EXPR:
6966 if (keep_aligning && STRICT_ALIGNMENT
6967 && (TYPE_ALIGN (TREE_TYPE (exp))
6968 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6969 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6970 < BIGGEST_ALIGNMENT)
6971 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6972 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6973 goto done;
6974 break;
6976 case MEM_REF:
6977 /* Hand back the decl for MEM[&decl, off]. */
6978 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6979 {
6980 tree off = TREE_OPERAND (exp, 1);
6981 if (!integer_zerop (off))
6982 {
6983 offset_int boff, coff = mem_ref_offset (exp);
6984 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6985 bit_offset += boff;
6986 }
6987 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6988 }
6989 goto done;
6991 default:
6992 goto done;
6993 }
6995 /* If any reference in the chain is volatile, the effect is volatile. */
6996 if (TREE_THIS_VOLATILE (exp))
6997 *pvolatilep = 1;
6999 exp = TREE_OPERAND (exp, 0);
7000 }
7001 done:
7003 /* If OFFSET is constant, see if we can return the whole thing as a
7004 constant bit position. Make sure to handle overflow during
7005 this conversion. */
7006 if (TREE_CODE (offset) == INTEGER_CST)
7007 {
7008 offset_int tem = wi::sext (wi::to_offset (offset),
7009 TYPE_PRECISION (sizetype));
7010 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
7011 tem += bit_offset;
7012 if (wi::fits_shwi_p (tem))
7013 {
7014 *pbitpos = tem.to_shwi ();
7015 *poffset = offset = NULL_TREE;
7016 }
7017 }
7019 /* Otherwise, split it up. */
7020 if (offset)
7021 {
7022 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7023 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
7024 {
7025 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
7026 offset_int tem = bit_offset.and_not (mask);
7027 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7028 Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */
7029 bit_offset -= tem;
7030 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
7031 offset = size_binop (PLUS_EXPR, offset,
7032 wide_int_to_tree (sizetype, tem));
7033 }
7035 *pbitpos = bit_offset.to_shwi ();
7036 *poffset = offset;
7037 }
7039 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7040 if (mode == VOIDmode
7041 && blkmode_bitfield
7042 && (*pbitpos % BITS_PER_UNIT) == 0
7043 && (*pbitsize % BITS_PER_UNIT) == 0)
7044 *pmode = BLKmode;
7045 else
7046 *pmode = mode;
7048 return exp;
7049 }
7051 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7053 static unsigned HOST_WIDE_INT
7054 target_align (const_tree target)
7055 {
7056 /* We might have a chain of nested references with intermediate misaligning
7057 bitfield components, so we need to recurse to find out. */
7059 unsigned HOST_WIDE_INT this_align, outer_align;
7061 switch (TREE_CODE (target))
7062 {
7063 case BIT_FIELD_REF:
7064 return 1;
7066 case COMPONENT_REF:
7067 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7068 outer_align = target_align (TREE_OPERAND (target, 0));
7069 return MIN (this_align, outer_align);
7071 case ARRAY_REF:
7072 case ARRAY_RANGE_REF:
7073 this_align = TYPE_ALIGN (TREE_TYPE (target));
7074 outer_align = target_align (TREE_OPERAND (target, 0));
7075 return MIN (this_align, outer_align);
7077 CASE_CONVERT:
7078 case NON_LVALUE_EXPR:
7079 case VIEW_CONVERT_EXPR:
7080 this_align = TYPE_ALIGN (TREE_TYPE (target));
7081 outer_align = target_align (TREE_OPERAND (target, 0));
7082 return MAX (this_align, outer_align);
7084 default:
7085 return TYPE_ALIGN (TREE_TYPE (target));
7086 }
7087 }
7090 /* Given an rtx VALUE that may contain additions and multiplications, return
7091 an equivalent value that just refers to a register, memory, or constant.
7092 This is done by generating instructions to perform the arithmetic and
7093 returning a pseudo-register containing the value.
7095 The returned value may be a REG, SUBREG, MEM or constant. */
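/* Example (added, illustrative): applied to (plus:SI (reg:SI 100)
   (const_int 4)), force_operand emits the addition via expand_simple_binop
   and returns a pseudo register holding the sum; applied to a plain REG,
   MEM or constant, it returns the value unchanged.  */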
7097 rtx
7098 force_operand (rtx value, rtx target)
7099 {
7100 rtx op1, op2;
7101 /* Use subtarget as the target for operand 0 of a binary operation. */
7102 rtx subtarget = get_subtarget (target);
7103 enum rtx_code code = GET_CODE (value);
7105 /* Check for subreg applied to an expression produced by loop optimizer. */
7106 if (code == SUBREG
7107 && !REG_P (SUBREG_REG (value))
7108 && !MEM_P (SUBREG_REG (value)))
7109 {
7110 value
7111 = simplify_gen_subreg (GET_MODE (value),
7112 force_reg (GET_MODE (SUBREG_REG (value)),
7113 force_operand (SUBREG_REG (value),
7114 NULL_RTX)),
7115 GET_MODE (SUBREG_REG (value)),
7116 SUBREG_BYTE (value));
7117 code = GET_CODE (value);
7118 }
7120 /* Check for a PIC address load. */
7121 if ((code == PLUS || code == MINUS)
7122 && XEXP (value, 0) == pic_offset_table_rtx
7123 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7124 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7125 || GET_CODE (XEXP (value, 1)) == CONST))
7126 {
7127 if (!subtarget)
7128 subtarget = gen_reg_rtx (GET_MODE (value));
7129 emit_move_insn (subtarget, value);
7130 return subtarget;
7131 }
7133 if (ARITHMETIC_P (value))
7134 {
7135 op2 = XEXP (value, 1);
7136 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7137 subtarget = 0;
7138 if (code == MINUS && CONST_INT_P (op2))
7139 {
7140 code = PLUS;
7141 op2 = negate_rtx (GET_MODE (value), op2);
7142 }
7144 /* Check for an addition with OP2 a constant integer and our first
7145 operand a PLUS of a virtual register and something else. In that
7146 case, we want to emit the sum of the virtual register and the
7147 constant first and then add the other value. This allows virtual
7148 register instantiation to simply modify the constant rather than
7149 creating another one around this addition. */
7150 if (code == PLUS && CONST_INT_P (op2)
7151 && GET_CODE (XEXP (value, 0)) == PLUS
7152 && REG_P (XEXP (XEXP (value, 0), 0))
7153 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7154 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7155 {
7156 rtx temp = expand_simple_binop (GET_MODE (value), code,
7157 XEXP (XEXP (value, 0), 0), op2,
7158 subtarget, 0, OPTAB_LIB_WIDEN);
7159 return expand_simple_binop (GET_MODE (value), code, temp,
7160 force_operand (XEXP (XEXP (value,
7161 0), 1), NULL_RTX),
7162 target, 0, OPTAB_LIB_WIDEN);
7163 }
7165 op1 = force_operand (XEXP (value, 0), subtarget);
7166 op2 = force_operand (op2, NULL_RTX);
7167 switch (code)
7168 {
7169 case MULT:
7170 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7171 case DIV:
7172 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7173 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7174 target, 1, OPTAB_LIB_WIDEN);
7175 else
7176 return expand_divmod (0,
7177 FLOAT_MODE_P (GET_MODE (value))
7178 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7179 GET_MODE (value), op1, op2, target, 0);
7180 case MOD:
7181 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7182 target, 0);
7183 case UDIV:
7184 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7185 target, 1);
7186 case UMOD:
7187 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7188 target, 1);
7189 case ASHIFTRT:
7190 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7191 target, 0, OPTAB_LIB_WIDEN);
7192 default:
7193 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7194 target, 1, OPTAB_LIB_WIDEN);
7195 }
7196 }
7197 if (UNARY_P (value))
7198 {
7199 if (!target)
7200 target = gen_reg_rtx (GET_MODE (value));
7201 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7202 switch (code)
7203 {
7204 case ZERO_EXTEND:
7205 case SIGN_EXTEND:
7206 case TRUNCATE:
7207 case FLOAT_EXTEND:
7208 case FLOAT_TRUNCATE:
7209 convert_move (target, op1, code == ZERO_EXTEND);
7210 return target;
7212 case FIX:
7213 case UNSIGNED_FIX:
7214 expand_fix (target, op1, code == UNSIGNED_FIX);
7215 return target;
7217 case FLOAT:
7218 case UNSIGNED_FLOAT:
7219 expand_float (target, op1, code == UNSIGNED_FLOAT);
7220 return target;
7222 default:
7223 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7224 }
7225 }
7227 #ifdef INSN_SCHEDULING
7228 /* On machines that have insn scheduling, we want all memory reference to be
7229 explicit, so we need to deal with such paradoxical SUBREGs. */
7230 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7231 value
7232 = simplify_gen_subreg (GET_MODE (value),
7233 force_reg (GET_MODE (SUBREG_REG (value)),
7234 force_operand (SUBREG_REG (value),
7235 NULL_RTX)),
7236 GET_MODE (SUBREG_REG (value)),
7237 SUBREG_BYTE (value));
7238 #endif
7240 return value;
7241 }
7243 /* Subroutine of expand_expr: return nonzero iff there is no way that
7244 EXP can reference X, which is being modified. TOP_P is nonzero if this
7245 call is going to be used to determine whether we need a temporary
7246 for EXP, as opposed to a recursive call to this function.
7248 It is always safe for this routine to return zero since it merely
7249 searches for optimization opportunities. */
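/* Example (added): if X is a hard register and EXP contains a CALL_EXPR,
   we return 0 below because the call is assumed to clobber all hard
   registers and all of memory; a zero answer merely makes the caller use
   a separate temporary, so being conservative is always correct.  */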
7251 static int
7252 safe_from_p (const_rtx x, tree exp, int top_p)
7253 {
7254 rtx exp_rtl = 0;
7255 int i, nops;
7257 if (x == 0
7258 /* If EXP has varying size, we MUST use a target since we currently
7259 have no way of allocating temporaries of variable size
7260 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7261 So we assume here that something at a higher level has prevented a
7262 clash. This is somewhat bogus, but the best we can do. Only
7263 do this when X is BLKmode and when we are at the top level. */
7264 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7265 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7266 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7267 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7268 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7269 != INTEGER_CST)
7270 && GET_MODE (x) == BLKmode)
7271 /* If X is in the outgoing argument area, it is always safe. */
7272 || (MEM_P (x)
7273 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7274 || (GET_CODE (XEXP (x, 0)) == PLUS
7275 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7276 return 1;
7278 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7279 find the underlying pseudo. */
7280 if (GET_CODE (x) == SUBREG)
7281 {
7282 x = SUBREG_REG (x);
7283 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7284 return 0;
7285 }
7287 /* Now look at our tree code and possibly recurse. */
7288 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7289 {
7290 case tcc_declaration:
7291 exp_rtl = DECL_RTL_IF_SET (exp);
7292 break;
7294 case tcc_constant:
7295 return 1;
7297 case tcc_exceptional:
7298 if (TREE_CODE (exp) == TREE_LIST)
7299 {
7300 while (1)
7301 {
7302 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7303 return 0;
7304 exp = TREE_CHAIN (exp);
7305 if (!exp)
7306 return 1;
7307 if (TREE_CODE (exp) != TREE_LIST)
7308 return safe_from_p (x, exp, 0);
7309 }
7310 }
7311 else if (TREE_CODE (exp) == CONSTRUCTOR)
7312 {
7313 constructor_elt *ce;
7314 unsigned HOST_WIDE_INT idx;
7316 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7317 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7318 || !safe_from_p (x, ce->value, 0))
7319 return 0;
7320 return 1;
7321 }
7322 else if (TREE_CODE (exp) == ERROR_MARK)
7323 return 1; /* An already-visited SAVE_EXPR? */
7324 else
7325 return 0;
7327 case tcc_statement:
7328 /* The only case we look at here is the DECL_INITIAL inside a
7329 DECL_EXPR. */
7330 return (TREE_CODE (exp) != DECL_EXPR
7331 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7332 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7333 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7335 case tcc_binary:
7336 case tcc_comparison:
7337 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7338 return 0;
7340 /* Fall through. */
7341 case tcc_unary:
7342 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7344 case tcc_expression:
7345 case tcc_reference:
7346 case tcc_vl_exp:
7347 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7348 the expression. If it is set, we conflict iff we are that rtx or
7349 both are in memory. Otherwise, we check all operands of the
7350 expression recursively. */
7352 switch (TREE_CODE (exp))
7353 {
7354 case ADDR_EXPR:
7355 /* If the operand is static or we are static, we can't conflict.
7356 Likewise if we don't conflict with the operand at all. */
7357 if (staticp (TREE_OPERAND (exp, 0))
7358 || TREE_STATIC (exp)
7359 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7360 return 1;
7362 /* Otherwise, the only way this can conflict is if we are taking
7363 the address of a DECL whose address is part of X, which is
7364 very rare. */
7365 exp = TREE_OPERAND (exp, 0);
7366 if (DECL_P (exp))
7367 {
7368 if (!DECL_RTL_SET_P (exp)
7369 || !MEM_P (DECL_RTL (exp)))
7370 return 0;
7371 else
7372 exp_rtl = XEXP (DECL_RTL (exp), 0);
7373 }
7374 break;
7376 case MEM_REF:
7377 if (MEM_P (x)
7378 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7379 get_alias_set (exp)))
7380 return 0;
7381 break;
7383 case CALL_EXPR:
7384 /* Assume that the call will clobber all hard registers and
7385 all of memory. */
7386 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7387 || MEM_P (x))
7388 return 0;
7389 break;
7391 case WITH_CLEANUP_EXPR:
7392 case CLEANUP_POINT_EXPR:
7393 /* Lowered by gimplify.c. */
7394 gcc_unreachable ();
7396 case SAVE_EXPR:
7397 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7399 default:
7400 break;
7401 }
7403 /* If we have an rtx, we do not need to scan our operands. */
7404 if (exp_rtl)
7405 break;
7407 nops = TREE_OPERAND_LENGTH (exp);
7408 for (i = 0; i < nops; i++)
7409 if (TREE_OPERAND (exp, i) != 0
7410 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7411 return 0;
7413 break;
7415 case tcc_type:
7416 /* Should never get a type here. */
7417 gcc_unreachable ();
7418 }
7420 /* If we have an rtl, find any enclosed object. Then see if we conflict
7421 with it. */
7422 if (exp_rtl)
7423 {
7424 if (GET_CODE (exp_rtl) == SUBREG)
7425 {
7426 exp_rtl = SUBREG_REG (exp_rtl);
7427 if (REG_P (exp_rtl)
7428 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7429 return 0;
7430 }
7432 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7433 are memory and they conflict. */
7434 return ! (rtx_equal_p (x, exp_rtl)
7435 || (MEM_P (x) && MEM_P (exp_rtl)
7436 && true_dependence (exp_rtl, VOIDmode, x)));
7437 }
7439 /* If we reach here, it is safe. */
7440 return 1;
7441 }
7444 /* Return the highest power of two that EXP is known to be a multiple of.
7445 This is used in updating alignment of MEMs in array references. */
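/* Worked example (added): for EXP == 24 (binary 11000), tree_ctz returns 3,
   so the result is 1 << 3 == 8; values with at least
   HOST_BITS_PER_WIDE_INT trailing zero bits, and results beyond
   BIGGEST_ALIGNMENT, are capped at BIGGEST_ALIGNMENT.  */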
7447 unsigned HOST_WIDE_INT
7448 highest_pow2_factor (const_tree exp)
7449 {
7450 unsigned HOST_WIDE_INT ret;
7451 int trailing_zeros = tree_ctz (exp);
7452 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7453 return BIGGEST_ALIGNMENT;
7454 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7455 if (ret > BIGGEST_ALIGNMENT)
7456 return BIGGEST_ALIGNMENT;
7457 return ret;
7458 }
7460 /* Similar, except that the alignment requirements of TARGET are
7461 taken into account. Assume it is at least as aligned as its
7462 type, unless it is a COMPONENT_REF in which case the layout of
7463 the structure gives the alignment. */
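/* Illustrative (added): if EXP is only known to be a multiple of 4 but
   TARGET is a decl with 16-byte alignment, the result is
   MAX (4, 16) == 16, since the alignment of the assignment target itself
   may be assumed as well.  */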
7465 static unsigned HOST_WIDE_INT
7466 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7467 {
7468 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7469 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7471 return MAX (factor, talign);
7472 }
7474 /* Convert the tree comparison code TCODE to the rtl one where the
7475 signedness is UNSIGNEDP. */
7477 static enum rtx_code
7478 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7479 {
7480 enum rtx_code code;
7481 switch (tcode)
7482 {
7483 case EQ_EXPR:
7484 code = EQ;
7485 break;
7486 case NE_EXPR:
7487 code = NE;
7488 break;
7489 case LT_EXPR:
7490 code = unsignedp ? LTU : LT;
7491 break;
7492 case LE_EXPR:
7493 code = unsignedp ? LEU : LE;
7494 break;
7495 case GT_EXPR:
7496 code = unsignedp ? GTU : GT;
7497 break;
7498 case GE_EXPR:
7499 code = unsignedp ? GEU : GE;
7500 break;
7501 case UNORDERED_EXPR:
7502 code = UNORDERED;
7503 break;
7504 case ORDERED_EXPR:
7505 code = ORDERED;
7506 break;
7507 case UNLT_EXPR:
7508 code = UNLT;
7509 break;
7510 case UNLE_EXPR:
7511 code = UNLE;
7512 break;
7513 case UNGT_EXPR:
7514 code = UNGT;
7515 break;
7516 case UNGE_EXPR:
7517 code = UNGE;
7518 break;
7519 case UNEQ_EXPR:
7520 code = UNEQ;
7521 break;
7522 case LTGT_EXPR:
7523 code = LTGT;
7524 break;
7525 default:
7526 gcc_unreachable ();
7527 }
7528 return code;
7529 }
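/* Example (added): LT_EXPR maps to LTU when UNSIGNEDP is set and to LT
   otherwise, while the IEEE comparison codes such as UNLT_EXPR translate
   directly to their rtl counterparts regardless of signedness.  */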
7532 /* Subroutine of expand_expr. Expand the two operands of a binary
7533 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7534 The value may be stored in TARGET if TARGET is nonzero. The
7535 MODIFIER argument is as documented by expand_expr. */
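/* Illustrative note (added): when EXP0 and EXP1 are equal trees, the
   operand is expanded only once and *OP1 becomes a copy_rtx of *OP0,
   which avoids redundant insns and keeps any side effects from being
   evaluated twice.  */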
7537 static void
7538 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7539 enum expand_modifier modifier)
7540 {
7541 if (! safe_from_p (target, exp1, 1))
7542 target = 0;
7543 if (operand_equal_p (exp0, exp1, 0))
7544 {
7545 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7546 *op1 = copy_rtx (*op0);
7547 }
7548 else
7549 {
7550 /* If we need to preserve evaluation order, copy exp0 into its own
7551 temporary variable so that it can't be clobbered by exp1. */
7552 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7553 exp0 = save_expr (exp0);
7554 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7555 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7556 }
7557 }
7560 /* Return a MEM that contains constant EXP. DEFER is as for
7561 output_constant_def and MODIFIER is as for expand_expr. */
7563 static rtx
7564 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7565 {
7566 rtx mem;
7568 mem = output_constant_def (exp, defer);
7569 if (modifier != EXPAND_INITIALIZER)
7570 mem = use_anchored_address (mem);
7572 return mem;
7573 }
7574 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7575 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7577 static rtx
7578 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7579 enum expand_modifier modifier, addr_space_t as)
7580 {
7581 rtx result, subtarget;
7582 tree inner, offset;
7583 HOST_WIDE_INT bitsize, bitpos;
7584 int unsignedp, reversep, volatilep = 0;
7585 machine_mode mode1;
7587 /* If we are taking the address of a constant and are at the top level,
7588 we have to use output_constant_def since we can't call force_const_mem
7589 yet. */
7590 /* ??? This should be considered a front-end bug. We should not be
7591 generating ADDR_EXPR of something that isn't an LVALUE. The only
7592 exception here is STRING_CST. */
7593 if (CONSTANT_CLASS_P (exp))
7594 {
7595 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7596 if (modifier < EXPAND_SUM)
7597 result = force_operand (result, target);
7598 return result;
7599 }
7601 /* Everything must be something allowed by is_gimple_addressable. */
7602 switch (TREE_CODE (exp))
7603 {
7604 case INDIRECT_REF:
7605 /* This case will happen via recursion for &a->b. */
7606 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7608 case MEM_REF:
7609 {
7610 tree tem = TREE_OPERAND (exp, 0);
7611 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7612 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7613 return expand_expr (tem, target, tmode, modifier);
7614 }
7616 case CONST_DECL:
7617 /* Expand the initializer like constants above. */
7618 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7619 0, modifier), 0);
7620 if (modifier < EXPAND_SUM)
7621 result = force_operand (result, target);
7622 return result;
7624 case REALPART_EXPR:
7625 /* The real part of the complex number is always first, therefore
7626 the address is the same as the address of the parent object. */
7627 bitpos = 0;
7629 inner = TREE_OPERAND (exp, 0);
7630 break;
7632 case IMAGPART_EXPR:
7633 /* The imaginary part of the complex number is always second.
7634 The expression is therefore always offset by the size of the
7635 scalar type. */
7637 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7638 inner = TREE_OPERAND (exp, 0);
7639 break;
7641 case COMPOUND_LITERAL_EXPR:
7642 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7643 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7644 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7645 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7646 the initializers aren't gimplified. */
7647 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7648 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7649 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7650 target, tmode, modifier, as);
7651 /* FALLTHRU */
7652 default:
7653 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7654 expand_expr, as that can have various side effects; LABEL_DECLs for
7655 example, may not have their DECL_RTL set yet. Expand the rtl of
7656 CONSTRUCTORs too, which should yield a memory reference for the
7657 constructor's contents. Assume language specific tree nodes can
7658 be expanded in some interesting way. */
7659 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7660 if (DECL_P (exp)
7661 || TREE_CODE (exp) == CONSTRUCTOR
7662 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7663 {
7664 result = expand_expr (exp, target, tmode,
7665 modifier == EXPAND_INITIALIZER
7666 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7668 /* If the DECL isn't in memory, then the DECL wasn't properly
7669 marked TREE_ADDRESSABLE, which will be either a front-end
7670 or a tree optimizer bug. */
7672 gcc_assert (MEM_P (result));
7673 result = XEXP (result, 0);
7675 /* ??? Is this needed anymore? */
7676 if (DECL_P (exp))
7677 TREE_USED (exp) = 1;
7679 if (modifier != EXPAND_INITIALIZER
7680 && modifier != EXPAND_CONST_ADDRESS
7681 && modifier != EXPAND_SUM)
7682 result = force_operand (result, target);
7683 return result;
7684 }
7686 /* Pass FALSE as the last argument to get_inner_reference although
7687 we are expanding to RTL. The rationale is that we know how to
7688 handle "aligning nodes" here: we can just bypass them because
7689 they won't change the final object whose address will be returned
7690 (they actually exist only for that purpose). */
7691 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7692 &unsignedp, &reversep, &volatilep, false);
7693 break;
7694 }
7696 /* We must have made progress. */
7697 gcc_assert (inner != exp);
7699 subtarget = offset || bitpos ? NULL_RTX : target;
7700 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7701 inner alignment, force the inner to be sufficiently aligned. */
7702 if (CONSTANT_CLASS_P (inner)
7703 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7704 {
7705 inner = copy_node (inner);
7706 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7707 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7708 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7710 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7716 if (modifier != EXPAND_NORMAL)
7717 result = force_operand (result, NULL);
7718 tmp = expand_expr (offset, NULL_RTX, tmode,
7719 modifier == EXPAND_INITIALIZER
7720 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7722 /* expand_expr is allowed to return an object in a mode other
7723 than TMODE. If it did, we need to convert. */
7724 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7725 tmp = convert_modes (tmode, GET_MODE (tmp),
7726 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7727 result = convert_memory_address_addr_space (tmode, result, as);
7728 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7730 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7731 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7732 else
7733 {
7734 subtarget = bitpos ? NULL_RTX : target;
7735 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7736 1, OPTAB_LIB_WIDEN);
7737 }
7738 }
7740 if (bitpos)
7741 {
7742 /* Someone beforehand should have rejected taking the address
7743 of such an object. */
7744 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7746 result = convert_memory_address_addr_space (tmode, result, as);
7747 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7748 if (modifier < EXPAND_SUM)
7749 result = force_operand (result, target);
7750 }
7752 return result;
7753 }
7755 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7756 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
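/* Illustrative note (added): for pointers into a named address space,
   targetm.addr_space.address_mode may differ from Pmode, so the address
   of the object is computed in that space's address mode rather than in
   the generic one.  */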
7758 static rtx
7759 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7760 enum expand_modifier modifier)
7761 {
7762 addr_space_t as = ADDR_SPACE_GENERIC;
7763 machine_mode address_mode = Pmode;
7764 machine_mode pointer_mode = ptr_mode;
7765 machine_mode rmode;
7766 rtx result;
7768 /* Target mode of VOIDmode says "whatever's natural". */
7769 if (tmode == VOIDmode)
7770 tmode = TYPE_MODE (TREE_TYPE (exp));
7772 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7773 {
7774 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7775 address_mode = targetm.addr_space.address_mode (as);
7776 pointer_mode = targetm.addr_space.pointer_mode (as);
7777 }
7779 /* We can get called with some Weird Things if the user does silliness
7780 like "(short) &a". In that case, convert_memory_address won't do
7781 the right thing, so ignore the given target mode. */
7782 if (tmode != address_mode && tmode != pointer_mode)
7783 tmode = address_mode;
7785 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7786 tmode, modifier, as);
7788 /* Despite expand_expr claims concerning ignoring TMODE when not
7789 strictly convenient, stuff breaks if we don't honor it. Note
7790 that combined with the above, we only do this for pointer modes. */
7791 rmode = GET_MODE (result);
7792 if (rmode == VOIDmode)
7793 rmode = tmode;
7794 if (rmode != tmode)
7795 result = convert_memory_address_addr_space (tmode, result, as);
7797 return result;
7798 }
7800 /* Generate code for computing CONSTRUCTOR EXP.
7801 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7802 is TRUE, instead of creating a temporary variable in memory
7803 NULL is returned and the caller needs to handle it differently. */
7805 static rtx
7806 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7807 bool avoid_temp_mem)
7808 {
7809 tree type = TREE_TYPE (exp);
7810 machine_mode mode = TYPE_MODE (type);
7812 /* Try to avoid creating a temporary at all. This is possible
7813 if all of the initializer is zero.
7814 FIXME: try to handle all [0..255] initializers we can handle
7815 with memset. */
7816 if (TREE_STATIC (exp)
7817 && !TREE_ADDRESSABLE (exp)
7818 && target != 0 && mode == BLKmode
7819 && all_zeros_p (exp))
7820 {
7821 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7822 return target;
7823 }
7825 /* All elts simple constants => refer to a constant in memory. But
7826 if this is a non-BLKmode mode, let it store a field at a time
7827 since that should make a CONST_INT, CONST_WIDE_INT or
7828 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7829 use, it is best to store directly into the target unless the type
7830 is large enough that memcpy will be used. If we are making an
7831 initializer and all operands are constant, put it in memory as
7832 well.
7834 FIXME: Avoid trying to fill vector constructors piece-meal.
7835 Output them with output_constant_def below unless we're sure
7836 they're zeros. This should go away when vector initializers
7837 are treated like VECTOR_CST instead of arrays. */
7838 if ((TREE_STATIC (exp)
7839 && ((mode == BLKmode
7840 && ! (target != 0 && safe_from_p (target, exp, 1)))
7841 || TREE_ADDRESSABLE (exp)
7842 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7843 && (! can_move_by_pieces
7844 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7845 TYPE_ALIGN (type)))
7846 && ! mostly_zeros_p (exp))))
7847 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7848 && TREE_CONSTANT (exp)))
7849 {
7850 rtx constructor;
7852 if (avoid_temp_mem)
7853 return NULL_RTX;
7855 constructor = expand_expr_constant (exp, 1, modifier);
7857 if (modifier != EXPAND_CONST_ADDRESS
7858 && modifier != EXPAND_INITIALIZER
7859 && modifier != EXPAND_SUM)
7860 constructor = validize_mem (constructor);
7862 return constructor;
7863 }
7865 /* Handle calls that pass values in multiple non-contiguous
7866 locations. The Irix 6 ABI has examples of this. */
7867 if (target == 0 || ! safe_from_p (target, exp, 1)
7868 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7873 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7876 store_constructor (exp, target, 0, int_expr_size (exp), false);
7877 return target;
7878 }
7881 /* expand_expr: generate code for computing expression EXP.
7882 An rtx for the computed value is returned. The value is never null.
7883 In the case of a void EXP, const0_rtx is returned.
7885 The value may be stored in TARGET if TARGET is nonzero.
7886 TARGET is just a suggestion; callers must assume that
7887 the rtx returned may not be the same as TARGET.
7889 If TARGET is CONST0_RTX, it means that the value will be ignored.
7891 If TMODE is not VOIDmode, it suggests generating the
7892 result in mode TMODE. But this is done only when convenient.
7893 Otherwise, TMODE is ignored and the value generated in its natural mode.
7894 TMODE is just a suggestion; callers must assume that
7895 the rtx returned may not have mode TMODE.
7897 Note that TARGET may have neither TMODE nor MODE. In that case, it
7898 probably will not be used.
7900 If MODIFIER is EXPAND_SUM then when EXP is an addition
7901 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7902 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7903 products as above, or REG or MEM, or constant.
7904 Ordinarily in such cases we would output mul or add instructions
7905 and then return a pseudo reg containing the sum.
7907 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7908 it also marks a label as absolutely required (it can't be dead).
7909 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7910 This is used for outputting expressions used in initializers.
7912 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7913 with a constant address even if that address is not normally legitimate.
7914 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7916 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7917 a call parameter. Such targets require special care as we haven't yet
7918 marked TARGET so that it's safe from being trashed by libcalls. We
7919 don't want to use TARGET for anything but the final result;
7920 Intermediate values must go elsewhere. Additionally, calls to
7921 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7923 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7924 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7925 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7926 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7927 recursively.
7929 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7930 In this case, we don't adjust a returned MEM rtx that wouldn't be
7931 sufficiently aligned for its mode; instead, it's up to the caller
7932 to deal with it afterwards. This is used to make sure that unaligned
7933 base objects for which out-of-bounds accesses are supported, for
7934 example record types with trailing arrays, aren't realigned behind
7935 the back of the caller.
7936 The normal operating mode is to pass FALSE for this parameter. */
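/* Example (added, illustrative): with MODIFIER == EXPAND_SUM, expanding
   "p + 4" may simply return (plus:P (reg) (const_int 4)) without emitting
   an addition, leaving it to the caller, typically address generation, to
   fold the terms into an addressing mode.  */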
7938 rtx
7939 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7940 enum expand_modifier modifier, rtx *alt_rtl,
7941 bool inner_reference_p)
7942 {
7943 rtx ret;
7945 /* Handle ERROR_MARK before anybody tries to access its type. */
7946 if (TREE_CODE (exp) == ERROR_MARK
7947 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7948 {
7949 ret = CONST0_RTX (tmode);
7950 return ret ? ret : const0_rtx;
7951 }
7953 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7954 inner_reference_p);
7955 return ret;
7956 }
7958 /* Try to expand the conditional expression which is represented by
7959 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds
7960 return the rtl reg which represents the result. Otherwise return
7961 NULL_RTX. */
7963 static rtx
7964 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7965 tree treeop1 ATTRIBUTE_UNUSED,
7966 tree treeop2 ATTRIBUTE_UNUSED)
7967 {
7968 rtx insn;
7969 rtx op00, op01, op1, op2;
7970 enum rtx_code comparison_code;
7971 machine_mode comparison_mode;
7972 gimple *srcstmt;
7973 rtx temp;
7974 tree type = TREE_TYPE (treeop1);
7975 int unsignedp = TYPE_UNSIGNED (type);
7976 machine_mode mode = TYPE_MODE (type);
7977 machine_mode orig_mode = mode;
7979 /* If we cannot do a conditional move on the mode, try doing it
7980 with the promoted mode. */
7981 if (!can_conditionally_move_p (mode))
7982 {
7983 mode = promote_mode (type, mode, &unsignedp);
7984 if (!can_conditionally_move_p (mode))
7985 return NULL_RTX;
7986 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7987 }
7988 else
7989 temp = assign_temp (type, 0, 1);
7991 start_sequence ();
7992 expand_operands (treeop1, treeop2,
7993 temp, &op1, &op2, EXPAND_NORMAL);
7995 if (TREE_CODE (treeop0) == SSA_NAME
7996 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7997 {
7998 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7999 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8000 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8001 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8002 comparison_mode = TYPE_MODE (type);
8003 unsignedp = TYPE_UNSIGNED (type);
8004 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8005 }
8006 else if (COMPARISON_CLASS_P (treeop0))
8007 {
8008 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8009 enum tree_code cmpcode = TREE_CODE (treeop0);
8010 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8011 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8012 unsignedp = TYPE_UNSIGNED (type);
8013 comparison_mode = TYPE_MODE (type);
8014 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8015 }
8016 else
8017 {
8018 op00 = expand_normal (treeop0);
8019 op01 = const0_rtx;
8020 comparison_code = NE;
8021 comparison_mode = GET_MODE (op00);
8022 if (comparison_mode == VOIDmode)
8023 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8024 }
8026 if (GET_MODE (op1) != mode)
8027 op1 = gen_lowpart (mode, op1);
8029 if (GET_MODE (op2) != mode)
8030 op2 = gen_lowpart (mode, op2);
8032 /* Try to emit the conditional move. */
8033 insn = emit_conditional_move (temp, comparison_code,
8034 op00, op01, comparison_mode,
8035 op1, op2, mode,
8036 unsignedp);
8038 /* If we could do the conditional move, emit the sequence,
8039 and return. */
8040 if (insn)
8041 {
8042 rtx_insn *seq = get_insns ();
8043 end_sequence ();
8044 emit_insn (seq);
8045 return convert_modes (orig_mode, mode, temp, 0);
8046 }
8048 /* Otherwise discard the sequence and fall back to code with
8049 branches. */
8050 end_sequence ();
8051 return NULL_RTX;
8052 }
8054 rtx
8055 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8056 enum expand_modifier modifier)
8057 {
8058 rtx op0, op1, op2, temp;
8059 rtx_code_label *lab;
8060 tree type;
8061 int unsignedp;
8062 machine_mode mode;
8063 enum tree_code code = ops->code;
8064 optab this_optab;
8065 rtx subtarget, original_target;
8066 int ignore;
8067 bool reduce_bit_field;
8068 location_t loc = ops->location;
8069 tree treeop0, treeop1, treeop2;
8070 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8071 ? reduce_to_bit_field_precision ((expr), \
8072 target, \
8073 type) \
8074 : (expr))
8076 type = ops->type;
8077 mode = TYPE_MODE (type);
8078 unsignedp = TYPE_UNSIGNED (type);
8080 treeop0 = ops->op0;
8081 treeop1 = ops->op1;
8082 treeop2 = ops->op2;
8084 /* We should be called only on simple (binary or unary) expressions,
8085 exactly those that are valid in gimple expressions that aren't
8086 GIMPLE_SINGLE_RHS (or invalid). */
8087 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8088 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8089 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8091 ignore = (target == const0_rtx
8092 || ((CONVERT_EXPR_CODE_P (code)
8093 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8094 && TREE_CODE (type) == VOID_TYPE));
8096 /* We should be called only if we need the result. */
8097 gcc_assert (!ignore);
8099 /* An operation in what may be a bit-field type needs the
8100 result to be reduced to the precision of the bit-field type,
8101 which is narrower than that of the type's mode. */
8102 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8103 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8105 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8106 target = 0;
8108 /* Use subtarget as the target for operand 0 of a binary operation. */
8109 subtarget = get_subtarget (target);
8110 original_target = target;
8112 switch (code)
8113 {
8114 case NON_LVALUE_EXPR:
8115 case PAREN_EXPR:
8116 CASE_CONVERT:
8117 if (treeop0 == error_mark_node)
8118 return const0_rtx;
8120 if (TREE_CODE (type) == UNION_TYPE)
8121 {
8122 tree valtype = TREE_TYPE (treeop0);
8124 /* If both input and output are BLKmode, this conversion isn't doing
8125 anything except possibly changing memory attribute. */
8126 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8127 {
8128 rtx result = expand_expr (treeop0, target, tmode,
8129 modifier);
8131 result = copy_rtx (result);
8132 set_mem_attributes (result, type, 0);
8133 return result;
8134 }
8136 if (target == 0)
8137 {
8138 if (TYPE_MODE (type) != BLKmode)
8139 target = gen_reg_rtx (TYPE_MODE (type));
8140 else
8141 target = assign_temp (type, 1, 1);
8142 }
8144 if (MEM_P (target))
8145 /* Store data into beginning of memory target. */
8146 store_expr (treeop0,
8147 adjust_address (target, TYPE_MODE (valtype), 0),
8148 modifier == EXPAND_STACK_PARM,
8149 false, TYPE_REVERSE_STORAGE_ORDER (type));
8151 else
8152 {
8153 gcc_assert (REG_P (target)
8154 && !TYPE_REVERSE_STORAGE_ORDER (type));
8156 /* Store this field into a union of the proper type. */
8157 store_field (target,
8158 MIN ((int_size_in_bytes (TREE_TYPE
8159 (treeop0))
8160 * BITS_PER_UNIT),
8161 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8162 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8163 false, false);
8164 }
8166 /* Return the entire union. */
8167 return target;
8168 }
8170 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8171 {
8172 op0 = expand_expr (treeop0, target, VOIDmode,
8173 modifier);
8175 /* If the signedness of the conversion differs and OP0 is
8176 a promoted SUBREG, clear that indication since we now
8177 have to do the proper extension. */
8178 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8179 && GET_CODE (op0) == SUBREG)
8180 SUBREG_PROMOTED_VAR_P (op0) = 0;
8182 return REDUCE_BIT_FIELD (op0);
8183 }
8185 op0 = expand_expr (treeop0, NULL_RTX, mode,
8186 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8187 if (GET_MODE (op0) == mode)
8188 return op0;
8190 /* If OP0 is a constant, just convert it into the proper mode. */
8191 else if (CONSTANT_P (op0))
8192 {
8193 tree inner_type = TREE_TYPE (treeop0);
8194 machine_mode inner_mode = GET_MODE (op0);
8196 if (inner_mode == VOIDmode)
8197 inner_mode = TYPE_MODE (inner_type);
8199 if (modifier == EXPAND_INITIALIZER)
8200 op0 = lowpart_subreg (mode, op0, inner_mode);
8201 else
8202 op0 = convert_modes (mode, inner_mode, op0,
8203 TYPE_UNSIGNED (inner_type));
8204 }
8206 else if (modifier == EXPAND_INITIALIZER)
8207 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8209 else if (target == 0)
8210 op0 = convert_to_mode (mode, op0,
8211 TYPE_UNSIGNED (TREE_TYPE
8212 (treeop0)));
8213 else
8214 {
8215 convert_move (target, op0,
8216 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8217 op0 = target;
8218 }
8220 return REDUCE_BIT_FIELD (op0);
8222 case ADDR_SPACE_CONVERT_EXPR:
8223 {
8224 tree treeop0_type = TREE_TYPE (treeop0);
8226 gcc_assert (POINTER_TYPE_P (type));
8227 gcc_assert (POINTER_TYPE_P (treeop0_type));
8229 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8230 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8232 /* Conversions between pointers to the same address space should
8233 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8234 gcc_assert (as_to != as_from);
8236 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8238 /* Ask target code to handle conversion between pointers
8239 to overlapping address spaces. */
8240 if (targetm.addr_space.subset_p (as_to, as_from)
8241 || targetm.addr_space.subset_p (as_from, as_to))
8242 {
8243 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8244 }
8245 else
8246 {
8247 /* For disjoint address spaces, converting anything but a null
8248 pointer invokes undefined behavior. We truncate or extend the
8249 value as if we'd converted via integers, which handles 0 as
8250 required, and all others as the programmer likely expects. */
8251 #ifndef POINTERS_EXTEND_UNSIGNED
8252 const int POINTERS_EXTEND_UNSIGNED = 1;
8253 #endif
8254 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8255 op0, POINTERS_EXTEND_UNSIGNED);
8256 }
8258 gcc_assert (op0);
8259 return op0;
8260 }
8261 case POINTER_PLUS_EXPR:
8262 /* Even though the sizetype mode and the pointer's mode can be different
8263 expand is able to handle this correctly and get the correct result out
8264 of the PLUS_EXPR code. */
8265 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8266 if sizetype precision is smaller than pointer precision. */
8267 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8268 treeop1 = fold_convert_loc (loc, type,
8269 fold_convert_loc (loc, ssizetype,
8270 treeop1));
8271 /* If sizetype precision is larger than pointer precision, truncate the
8272 offset to have matching modes. */
8273 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8274 treeop1 = fold_convert_loc (loc, type, treeop1);
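/* Illustration (hypothetical target with 24-bit pointers and a
   16-bit sizetype): for "p + n" the offset N is first converted
   through ssizetype so a negative N sign-extends correctly to the
   pointer's precision; conversely, with 16-bit pointers and a 32-bit
   sizetype, N is truncated so both PLUS operands share one mode.  */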
8277 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8278 something else, make sure we add the register to the constant and
8279 then to the other thing. This case can occur during strength
8280 reduction and doing it this way will produce better code if the
8281 frame pointer or argument pointer is eliminated.
8283 fold-const.c will ensure that the constant is always in the inner
8284 PLUS_EXPR, so the only case we need to do anything about is if
8285 sp, ap, or fp is our second argument, in which case we must swap
8286 the innermost first argument and our second argument. */
8288 if (TREE_CODE (treeop0) == PLUS_EXPR
8289 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8290 && TREE_CODE (treeop1) == VAR_DECL
8291 && (DECL_RTL (treeop1) == frame_pointer_rtx
8292 || DECL_RTL (treeop1) == stack_pointer_rtx
8293 || DECL_RTL (treeop1) == arg_pointer_rtx))
8298 /* If the result is to be ptr_mode and we are adding an integer to
8299 something, we might be forming a constant. So try to use
8300 plus_constant. If it produces a sum and we can't accept it,
8301 use force_operand. This allows P = &ARR[const] to generate
8302 efficient code on machines where a SYMBOL_REF is not a valid
8305 If this is an EXPAND_SUM call, always return the sum. */
8306 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8307 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8309 if (modifier == EXPAND_STACK_PARM)
8310 target = 0;
8311 if (TREE_CODE (treeop0) == INTEGER_CST
8312 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8313 && TREE_CONSTANT (treeop1))
8317 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8319 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8320 EXPAND_SUM);
8321 /* Use wi::shwi to ensure that the constant is
8322 truncated according to the mode of OP1, then sign extended
8323 to a HOST_WIDE_INT. Using the constant directly can result
8324 in non-canonical RTL in a 64x32 cross compile. */
8325 wc = TREE_INT_CST_LOW (treeop0);
8326 constant_part =
8327 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8328 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8329 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8330 op1 = force_operand (op1, target);
8331 return REDUCE_BIT_FIELD (op1);
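/* A worked example of the wi::shwi step above: in a 64x32 cross
   compile (64-bit HOST_WIDE_INT, 32-bit WMODE), an unsigned tree
   constant 0xfffffffc reads back from TREE_INT_CST_LOW as
   0x00000000fffffffc; truncating to WMODE and sign-extending yields
   the canonical (const_int -4) rather than a non-canonical positive
   64-bit CONST_INT.  */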
8334 else if (TREE_CODE (treeop1) == INTEGER_CST
8335 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8336 && TREE_CONSTANT (treeop0))
8340 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8342 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8343 (modifier == EXPAND_INITIALIZER
8344 ? EXPAND_INITIALIZER : EXPAND_SUM));
8345 if (! CONSTANT_P (op0))
8347 op1 = expand_expr (treeop1, NULL_RTX,
8348 VOIDmode, modifier);
8349 /* Return a PLUS if modifier says it's OK. */
8350 if (modifier == EXPAND_SUM
8351 || modifier == EXPAND_INITIALIZER)
8352 return simplify_gen_binary (PLUS, mode, op0, op1);
8355 /* Use wi::shwi to ensure that the constant is
8356 truncated according to the mode of OP1, then sign extended
8357 to a HOST_WIDE_INT. Using the constant directly can result
8358 in non-canonical RTL in a 64x32 cross compile. */
8359 wc = TREE_INT_CST_LOW (treeop1);
8360 constant_part
8361 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8362 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8363 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8364 op0 = force_operand (op0, target);
8365 return REDUCE_BIT_FIELD (op0);
8369 /* Use TER to expand pointer addition of a negated value
8370 as pointer subtraction. */
8371 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8372 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8373 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8374 && TREE_CODE (treeop1) == SSA_NAME
8375 && TYPE_MODE (TREE_TYPE (treeop0))
8376 == TYPE_MODE (TREE_TYPE (treeop1)))
8378 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8381 treeop1 = gimple_assign_rhs1 (def);
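/* e.g. given GIMPLE such as (the SSA names here are illustrative)

       n_2 = -i_1;
       p_3 = q_1 + n_2;

   TREEOP1 is replaced by i_1, so the sum is expanded as the pointer
   subtraction "q_1 - i_1", which maps onto subtract-based address
   arithmetic instead of a separate negate followed by an add.  */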
8387 /* No sense saving up arithmetic to be done
8388 if it's all in the wrong mode to form part of an address.
8389 And force_operand won't know whether to sign-extend or
8390 zero-extend. */
8391 if (modifier != EXPAND_INITIALIZER
8392 && (modifier != EXPAND_SUM || mode != ptr_mode))
8394 expand_operands (treeop0, treeop1,
8395 subtarget, &op0, &op1, modifier);
8396 if (op0 == const0_rtx)
8398 if (op1 == const0_rtx)
8403 expand_operands (treeop0, treeop1,
8404 subtarget, &op0, &op1, modifier);
8405 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8409 /* For initializers, we are allowed to return a MINUS of two
8410 symbolic constants. Here we handle all cases when both operands
8411 are constant. */
8412 /* Handle difference of two symbolic constants,
8413 for the sake of an initializer. */
8414 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8415 && really_constant_p (treeop0)
8416 && really_constant_p (treeop1))
8418 expand_operands (treeop0, treeop1,
8419 NULL_RTX, &op0, &op1, modifier);
8421 /* If the last operand is a CONST_INT, use plus_constant of
8422 the negated constant. Else make the MINUS. */
8423 if (CONST_INT_P (op1))
8424 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8425 -INTVAL (op1)));
8426 else
8427 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8430 /* No sense saving up arithmetic to be done
8431 if it's all in the wrong mode to form part of an address.
8432 And force_operand won't know whether to sign-extend or
8433 zero-extend. */
8434 if (modifier != EXPAND_INITIALIZER
8435 && (modifier != EXPAND_SUM || mode != ptr_mode))
8438 expand_operands (treeop0, treeop1,
8439 subtarget, &op0, &op1, modifier);
8441 /* Convert A - const to A + (-const). */
8442 if (CONST_INT_P (op1))
8444 op1 = negate_rtx (mode, op1);
8445 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
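/* e.g. "a - 8" is emitted as (plus:SI (reg) (const_int -8)):
   canonical RTL avoids a MINUS with a constant second operand.  */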
8450 case WIDEN_MULT_PLUS_EXPR:
8451 case WIDEN_MULT_MINUS_EXPR:
8452 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8453 op2 = expand_normal (treeop2);
8454 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8458 case WIDEN_MULT_EXPR:
8459 /* If first operand is constant, swap them.
8460 Thus the following special case checks need only
8461 check the second operand. */
8462 if (TREE_CODE (treeop0) == INTEGER_CST)
8463 std::swap (treeop0, treeop1);
8465 /* First, check if we have a multiplication of one signed and one
8466 unsigned operand. */
8467 if (TREE_CODE (treeop1) != INTEGER_CST
8468 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8469 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8471 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8472 this_optab = usmul_widen_optab;
8473 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8474 != CODE_FOR_nothing)
8476 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8477 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8480 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8482 /* op0 and op1 might still be constant, despite the above
8483 != INTEGER_CST check. Handle it. */
8484 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8486 op0 = convert_modes (innermode, mode, op0, true);
8487 op1 = convert_modes (innermode, mode, op1, false);
8488 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8489 target, unsignedp));
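/* A minimal sketch of this mixed-signedness case, assuming 32-bit
   int and a target providing a usmulsidi3-style pattern:

       long long f (int s, unsigned u) { return (long long) s * u; }

   The operand swap above guarantees OP0 holds the unsigned operand,
   matching the operand order expected for usmul_widen_optab.  */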
8494 /* Check for a multiplication with matching signedness. */
8495 else if ((TREE_CODE (treeop1) == INTEGER_CST
8496 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8497 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8498 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8500 tree op0type = TREE_TYPE (treeop0);
8501 machine_mode innermode = TYPE_MODE (op0type);
8502 bool zextend_p = TYPE_UNSIGNED (op0type);
8503 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8504 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8506 if (TREE_CODE (treeop0) != INTEGER_CST)
8508 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8509 != CODE_FOR_nothing)
8511 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8513 /* op0 and op1 might still be constant, despite the above
8514 != INTEGER_CST check. Handle it. */
8515 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8518 op0 = convert_modes (innermode, mode, op0, zextend_p);
8519 op1
8520 = convert_modes (innermode, mode, op1,
8521 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8522 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8526 temp = expand_widening_mult (mode, op0, op1, target,
8527 unsignedp, this_optab);
8528 return REDUCE_BIT_FIELD (temp);
8530 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8531 != CODE_FOR_nothing
8532 && innermode == word_mode)
8535 op0 = expand_normal (treeop0);
8536 if (TREE_CODE (treeop1) == INTEGER_CST)
8537 op1 = convert_modes (innermode, mode,
8538 expand_normal (treeop1),
8539 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8541 op1 = expand_normal (treeop1);
8542 /* op0 and op1 might still be constant, despite the above
8543 != INTEGER_CST check. Handle it. */
8544 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8545 goto widen_mult_const;
8546 temp = expand_binop (mode, other_optab, op0, op1, target,
8547 unsignedp, OPTAB_LIB_WIDEN);
8548 hipart = gen_highpart (innermode, temp);
8549 htem = expand_mult_highpart_adjust (innermode, hipart,
8553 emit_move_insn (hipart, htem);
8554 return REDUCE_BIT_FIELD (temp);
8558 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8559 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8560 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8561 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8565 optab opt = fma_optab;
8566 gimple *def0, *def2;
8568 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8569 calls. */
8570 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8572 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8575 gcc_assert (fn != NULL_TREE);
8576 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8577 return expand_builtin (call_expr, target, subtarget, mode, false);
8580 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8581 /* The multiplication is commutative; look at its 2nd operand
8582 if the first isn't fed by a negate. */
8583 if (!def0)
8585 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8586 /* Swap operands if the 2nd operand is fed by a negate. */
8587 if (def0)
8588 std::swap (treeop0, treeop1);
8590 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8592 op0 = op2 = NULL;
8594 if (def0 && def2
8595 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8598 op0 = expand_normal (gimple_assign_rhs1 (def0));
8599 op2 = expand_normal (gimple_assign_rhs1 (def2));
8601 else if (def0
8602 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8605 op0 = expand_normal (gimple_assign_rhs1 (def0));
8607 else if (def2
8608 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8611 op2 = expand_normal (gimple_assign_rhs1 (def2));
8614 if (op0 == NULL)
8615 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8616 if (op2 == NULL)
8617 op2 = expand_normal (treeop2);
8618 op1 = expand_normal (treeop1);
8620 return expand_ternary_op (TYPE_MODE (type), opt,
8621 op0, op1, op2, target, 0);
8625 /* If this is a fixed-point operation, then we cannot use the code
8626 below because "expand_mult" doesn't support sat/no-sat fixed-point
8628 if (ALL_FIXED_POINT_MODE_P (mode))
8631 /* If first operand is constant, swap them.
8632 Thus the following special case checks need only
8633 check the second operand. */
8634 if (TREE_CODE (treeop0) == INTEGER_CST)
8635 std::swap (treeop0, treeop1);
8637 /* Attempt to return something suitable for generating an
8638 indexed address, for machines that support that. */
8640 if (modifier == EXPAND_SUM && mode == ptr_mode
8641 && tree_fits_shwi_p (treeop1))
8643 tree exp1 = treeop1;
8645 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8646 EXPAND_SUM);
8648 if (!REG_P (op0))
8649 op0 = force_operand (op0, NULL_RTX);
8650 if (!REG_P (op0))
8651 op0 = copy_to_mode_reg (mode, op0);
8653 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8654 gen_int_mode (tree_to_shwi (exp1),
8655 TYPE_MODE (TREE_TYPE (exp1)))));
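/* e.g. under EXPAND_SUM with ptr_mode, "i * 4" can be returned as a
   bare (mult:SI (reg) (const_int 4)) so the caller may fold it into
   an indexed address such as (plus (reg) (mult (reg) (const_int 4)))
   on targets with scaled-index addressing.  */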
8658 if (modifier == EXPAND_STACK_PARM)
8659 target = 0;
8661 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8662 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8664 case TRUNC_DIV_EXPR:
8665 case FLOOR_DIV_EXPR:
8666 case CEIL_DIV_EXPR:
8667 case ROUND_DIV_EXPR:
8668 case EXACT_DIV_EXPR:
8669 /* If this is a fixed-point operation, then we cannot use the code
8670 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8672 if (ALL_FIXED_POINT_MODE_P (mode))
8675 if (modifier == EXPAND_STACK_PARM)
8676 target = 0;
8677 /* Possible optimization: compute the dividend with EXPAND_SUM;
8678 then, if the divisor is constant, we can optimize the case
8679 where some terms of the dividend have coefficients divisible by it. */
8680 expand_operands (treeop0, treeop1,
8681 subtarget, &op0, &op1, EXPAND_NORMAL);
8682 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8687 case MULT_HIGHPART_EXPR:
8688 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8689 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8693 case TRUNC_MOD_EXPR:
8694 case FLOOR_MOD_EXPR:
8695 case CEIL_MOD_EXPR:
8696 case ROUND_MOD_EXPR:
8697 if (modifier == EXPAND_STACK_PARM)
8698 target = 0;
8699 expand_operands (treeop0, treeop1,
8700 subtarget, &op0, &op1, EXPAND_NORMAL);
8701 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8703 case FIXED_CONVERT_EXPR:
8704 op0 = expand_normal (treeop0);
8705 if (target == 0 || modifier == EXPAND_STACK_PARM)
8706 target = gen_reg_rtx (mode);
8708 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8709 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8710 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8711 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8713 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8714 return target;
8716 case FIX_TRUNC_EXPR:
8717 op0 = expand_normal (treeop0);
8718 if (target == 0 || modifier == EXPAND_STACK_PARM)
8719 target = gen_reg_rtx (mode);
8720 expand_fix (target, op0, unsignedp);
8721 return target;
8723 case FLOAT_EXPR:
8724 op0 = expand_normal (treeop0);
8725 if (target == 0 || modifier == EXPAND_STACK_PARM)
8726 target = gen_reg_rtx (mode);
8727 /* expand_float can't figure out what to do if FROM has VOIDmode.
8728 So give it the correct mode. With -O, cse will optimize this. */
8729 if (GET_MODE (op0) == VOIDmode)
8730 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8732 expand_float (target, op0,
8733 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8734 return target;
8736 case NEGATE_EXPR:
8737 op0 = expand_expr (treeop0, subtarget,
8738 VOIDmode, EXPAND_NORMAL);
8739 if (modifier == EXPAND_STACK_PARM)
8740 target = 0;
8741 temp = expand_unop (mode,
8742 optab_for_tree_code (NEGATE_EXPR, type,
8746 return REDUCE_BIT_FIELD (temp);
8748 case ABS_EXPR:
8749 op0 = expand_expr (treeop0, subtarget,
8750 VOIDmode, EXPAND_NORMAL);
8751 if (modifier == EXPAND_STACK_PARM)
8752 target = 0;
8754 /* ABS_EXPR is not valid for complex arguments. */
8755 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8756 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8758 /* Unsigned abs is simply the operand. Testing here means we don't
8759 risk generating incorrect code below. */
8760 if (TYPE_UNSIGNED (type))
8761 return op0;
8763 return expand_abs (mode, op0, target, unsignedp,
8764 safe_from_p (target, treeop0, 1));
8766 case MAX_EXPR:
8767 case MIN_EXPR:
8768 target = original_target;
8769 if (target == 0
8770 || modifier == EXPAND_STACK_PARM
8771 || (MEM_P (target) && MEM_VOLATILE_P (target))
8772 || GET_MODE (target) != mode
8773 || (REG_P (target)
8774 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8775 target = gen_reg_rtx (mode);
8776 expand_operands (treeop0, treeop1,
8777 target, &op0, &op1, EXPAND_NORMAL);
8779 /* First try to do it with a special MIN or MAX instruction.
8780 If that does not win, use a conditional jump to select the proper
8781 value. */
8782 this_optab = optab_for_tree_code (code, type, optab_default);
8783 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8788 /* At this point, a MEM target is no longer useful; we will get better
8791 if (! REG_P (target))
8792 target = gen_reg_rtx (mode);
8794 /* If op1 was placed in target, swap op0 and op1. */
8795 if (target != op0 && target == op1)
8796 std::swap (op0, op1);
8798 /* We generate better code and avoid problems with op1 mentioning
8799 target by forcing op1 into a pseudo if it isn't a constant. */
8800 if (! CONSTANT_P (op1))
8801 op1 = force_reg (mode, op1);
8804 enum rtx_code comparison_code;
8805 rtx cmpop1 = op1;
8807 if (code == MAX_EXPR)
8808 comparison_code = unsignedp ? GEU : GE;
8809 else
8810 comparison_code = unsignedp ? LEU : LE;
8812 /* Canonicalize to comparisons against 0. */
8813 if (op1 == const1_rtx)
8815 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8816 or (a != 0 ? a : 1) for unsigned.
8817 For MIN we are safe converting (a <= 1 ? a : 1)
8818 into (a <= 0 ? a : 1) */
8819 cmpop1 = const0_rtx;
8820 if (code == MAX_EXPR)
8821 comparison_code = unsignedp ? NE : GT;
8823 if (op1 == constm1_rtx && !unsignedp)
8825 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8826 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8827 cmpop1 = const0_rtx;
8828 if (code == MIN_EXPR)
8829 comparison_code = LT;
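/* Illustration of the canonicalizations above: unsigned
   MAX_EXPR (a, 1) tests NE a, 0 instead of GEU a, 1, and signed
   MIN_EXPR (a, -1) tests LT a, 0 instead of LE a, -1; comparisons
   against zero are typically the cheapest form on most targets.  */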
8832 /* Use a conditional move if possible. */
8833 if (can_conditionally_move_p (mode))
8839 /* Try to emit the conditional move. */
8840 insn = emit_conditional_move (target, comparison_code,
8845 /* If we could do the conditional move, emit the sequence,
8849 rtx_insn *seq = get_insns ();
8855 /* Otherwise discard the sequence and fall back to code with
8861 emit_move_insn (target, op0);
8863 lab = gen_label_rtx ();
8864 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8865 unsignedp, mode, NULL_RTX, NULL, lab,
8866 -1);
8868 emit_move_insn (target, op1);
8873 op0 = expand_expr (treeop0, subtarget,
8874 VOIDmode, EXPAND_NORMAL);
8875 if (modifier == EXPAND_STACK_PARM)
8876 target = 0;
8877 /* In case we have to reduce the result to bitfield precision
8878 for an unsigned bitfield, expand this as XOR with a proper constant
8879 instead. */
8880 if (reduce_bit_field && TYPE_UNSIGNED (type))
8882 wide_int mask = wi::mask (TYPE_PRECISION (type),
8883 false, GET_MODE_PRECISION (mode));
8885 temp = expand_binop (mode, xor_optab, op0,
8886 immed_wide_int_const (mask, mode),
8887 target, 1, OPTAB_LIB_WIDEN);
8889 else
8890 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
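/* A worked example of the XOR path above: for an unsigned 3-bit
   bit-field type carried in SImode, MASK is 7, so ~X is computed as
   X ^ 7; the XOR both complements the low bits and leaves the result
   already reduced to 3-bit precision, saving a separate reduction
   step.  */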
8894 /* ??? Can optimize bitwise operations with one arg constant.
8895 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8896 and (a bitwise1 b) bitwise2 b (etc)
8897 but that is probably not worthwhile. */
8906 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8907 || (GET_MODE_PRECISION (TYPE_MODE (type))
8908 == TYPE_PRECISION (type)));
8914 /* If this is a fixed-point operation, then we cannot use the code
8915 below because "expand_shift" doesn't support sat/no-sat fixed-point
8917 if (ALL_FIXED_POINT_MODE_P (mode))
8920 if (! safe_from_p (subtarget, treeop1, 1))
8921 subtarget = 0;
8922 if (modifier == EXPAND_STACK_PARM)
8923 target = 0;
8924 op0 = expand_expr (treeop0, subtarget,
8925 VOIDmode, EXPAND_NORMAL);
8927 /* Left shift optimization when shifting across word_size boundary.
8929 If mode == GET_MODE_WIDER_MODE (word_mode), then normally
8930 there isn't a native instruction to support this wide-mode
8931 left shift. Consider the scenario below:
8933 Type A = (Type) B << C
8936 | dest_high | dest_low |
8940 If the shift amount C causes B to be shifted across the word
8941 size boundary, i.e. part of B is shifted into the high half of the
8942 destination register while part of B remains in the low
8943 half, then GCC will use the following left shift expansion
8944 logic:
8946 1. Initialize dest_low to B.
8947 2. Initialize every bit of dest_high to the sign bit of B.
8948 3. Logically left shift dest_low by C bits to finalize dest_low.
8949 The value of dest_low before this shift is kept in a temp D.
8950 4. Logically left shift dest_high by C bits.
8951 5. Logically right shift D by (word_size - C) bits.
8952 6. Or the results of 4 and 5 to finalize dest_high.
8954 However, by checking the gimple statements, if operand B is
8955 known to come from a sign extension, then we can simplify the
8956 above expansion logic into:
8958 1. dest_high = src_low >> (word_size - C).
8959 2. dest_low = src_low << C.
8961 A single arithmetic right shift accomplishes the work of
8962 steps 2, 4, 5 and 6 above, thus reducing the steps
8963 needed from 6 to 2.
8965 The case is similar for zero extension, except that we
8966 initialize dest_high to zero rather than copies of the sign
8967 bit from B, and we need to use a logical right shift in that case.
8970 The choice of sign-extension versus zero-extension is
8971 determined entirely by whether or not B is signed and is
8972 independent of the current setting of unsignedp. */
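/* A minimal source-level sketch of the simplified path (assuming a
   32-bit target, so word_mode == SImode and mode == DImode):

       long long
       shift_across_words (int b)
       {
         return (long long) b << 8;   // C = 8 and 8 + 32 >= 32.
       }

   expands to just

       dest_high = b >> 24;           // arithmetic right shift
       dest_low = (unsigned int) b << 8;

   because the arithmetic right shift also supplies the sign copies
   that steps 2, 4, 5 and 6 of the general sequence would compute.  */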
8975 if (code == LSHIFT_EXPR
8978 && mode == GET_MODE_WIDER_MODE (word_mode)
8979 && GET_MODE_SIZE (mode) == 2 * GET_MODE_SIZE (word_mode)
8980 && TREE_CONSTANT (treeop1)
8981 && TREE_CODE (treeop0) == SSA_NAME)
8983 gimple *def = SSA_NAME_DEF_STMT (treeop0);
8984 if (is_gimple_assign (def)
8985 && gimple_assign_rhs_code (def) == NOP_EXPR)
8987 machine_mode rmode = TYPE_MODE
8988 (TREE_TYPE (gimple_assign_rhs1 (def)));
8990 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (mode)
8991 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
8992 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
8993 >= GET_MODE_BITSIZE (word_mode)))
8995 rtx_insn *seq, *seq_old;
8996 unsigned int high_off = subreg_highpart_offset (word_mode,
8997 mode);
8998 bool extend_unsigned
8999 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9000 rtx low = lowpart_subreg (word_mode, op0, mode);
9001 rtx dest_low = lowpart_subreg (word_mode, target, mode);
9002 rtx dest_high = simplify_gen_subreg (word_mode, target,
9003 mode, high_off);
9004 HOST_WIDE_INT ramount = (BITS_PER_WORD
9005 - TREE_INT_CST_LOW (treeop1));
9006 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9009 /* dest_high = src_low >> (word_size - C). */
9010 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9013 if (temp != dest_high)
9014 emit_move_insn (dest_high, temp);
9016 /* dest_low = src_low << C. */
9017 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9018 treeop1, dest_low, unsignedp);
9019 if (temp != dest_low)
9020 emit_move_insn (dest_low, temp);
9026 if (have_insn_for (ASHIFT, mode))
9028 bool speed_p = optimize_insn_for_speed_p ();
9030 rtx ret_old = expand_variable_shift (code, mode, op0,
9034 seq_old = get_insns ();
9036 if (seq_cost (seq, speed_p)
9037 >= seq_cost (seq_old, speed_p))
9048 if (temp == NULL_RTX)
9049 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9051 if (code == LSHIFT_EXPR)
9052 temp = REDUCE_BIT_FIELD (temp);
9056 /* Could determine the answer when only additive constants differ. Also,
9057 the addition of one can be handled by changing the condition. */
9064 case UNORDERED_EXPR:
9073 temp = do_store_flag (ops,
9074 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9075 tmode != VOIDmode ? tmode : mode);
9076 if (temp)
9077 return temp;
9079 /* Use a compare and a jump for BLKmode comparisons, or for function
9080 type comparisons if have_canonicalize_funcptr_for_compare. */
9083 || modifier == EXPAND_STACK_PARM
9084 || ! safe_from_p (target, treeop0, 1)
9085 || ! safe_from_p (target, treeop1, 1)
9086 /* Make sure we don't have a hard reg (such as function's return
9087 value) live across basic blocks, if not optimizing. */
9088 || (!optimize && REG_P (target)
9089 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9090 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9092 emit_move_insn (target, const0_rtx);
9094 rtx_code_label *lab1 = gen_label_rtx ();
9095 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
9097 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9098 emit_move_insn (target, constm1_rtx);
9099 else
9100 emit_move_insn (target, const1_rtx);
9101 emit_label (lab1);
9102 return target;
9104 case COMPLEX_EXPR:
9106 /* Get the rtx code of the operands. */
9107 op0 = expand_normal (treeop0);
9108 op1 = expand_normal (treeop1);
9110 if (!target)
9111 target = gen_reg_rtx (TYPE_MODE (type));
9113 /* If target overlaps with op1, then either we need to force
9114 op1 into a pseudo (if target also overlaps with op0),
9115 or write the complex parts in reverse order. */
9116 switch (GET_CODE (target))
9119 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9121 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9123 complex_expr_force_op1:
9124 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9125 emit_move_insn (temp, op1);
9129 complex_expr_swap_order:
9130 /* Move the imaginary (op1) and real (op0) parts to their
9131 location. */
9132 write_complex_part (target, op1, true);
9133 write_complex_part (target, op0, false);
9139 temp = adjust_address_nv (target,
9140 GET_MODE_INNER (GET_MODE (target)), 0);
9141 if (reg_overlap_mentioned_p (temp, op1))
9143 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9144 temp = adjust_address_nv (target, imode,
9145 GET_MODE_SIZE (imode));
9146 if (reg_overlap_mentioned_p (temp, op0))
9147 goto complex_expr_force_op1;
9148 goto complex_expr_swap_order;
9152 if (reg_overlap_mentioned_p (target, op1))
9154 if (reg_overlap_mentioned_p (target, op0))
9155 goto complex_expr_force_op1;
9156 goto complex_expr_swap_order;
9161 /* Move the real (op0) and imaginary (op1) parts to their location. */
9162 write_complex_part (target, op0, false);
9163 write_complex_part (target, op1, true);
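/* e.g. if TARGET is (concat r1 r2) and OP1's value mentions r1,
   writing the real part first would clobber OP1 before it is read;
   the checks above then either write the imaginary part first or
   copy OP1 into a fresh pseudo.  */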
9167 case WIDEN_SUM_EXPR:
9169 tree oprnd0 = treeop0;
9170 tree oprnd1 = treeop1;
9172 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9173 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9178 case REDUC_MAX_EXPR:
9179 case REDUC_MIN_EXPR:
9180 case REDUC_PLUS_EXPR:
9182 op0 = expand_normal (treeop0);
9183 this_optab = optab_for_tree_code (code, type, optab_default);
9184 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9186 struct expand_operand ops[2];
9187 enum insn_code icode = optab_handler (this_optab, vec_mode);
9189 create_output_operand (&ops[0], target, mode);
9190 create_input_operand (&ops[1], op0, vec_mode);
9191 expand_insn (icode, 2, ops);
9192 target = ops[0].value;
9193 if (GET_MODE (target) != mode)
9194 return gen_lowpart (tmode, target);
9198 case VEC_UNPACK_HI_EXPR:
9199 case VEC_UNPACK_LO_EXPR:
9201 op0 = expand_normal (treeop0);
9202 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9208 case VEC_UNPACK_FLOAT_HI_EXPR:
9209 case VEC_UNPACK_FLOAT_LO_EXPR:
9211 op0 = expand_normal (treeop0);
9212 /* The signedness is determined from the input operand. */
9213 temp = expand_widen_pattern_expr
9214 (ops, op0, NULL_RTX, NULL_RTX,
9215 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9221 case VEC_WIDEN_MULT_HI_EXPR:
9222 case VEC_WIDEN_MULT_LO_EXPR:
9223 case VEC_WIDEN_MULT_EVEN_EXPR:
9224 case VEC_WIDEN_MULT_ODD_EXPR:
9225 case VEC_WIDEN_LSHIFT_HI_EXPR:
9226 case VEC_WIDEN_LSHIFT_LO_EXPR:
9227 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9228 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9230 gcc_assert (target);
9233 case VEC_PACK_TRUNC_EXPR:
9234 case VEC_PACK_SAT_EXPR:
9235 case VEC_PACK_FIX_TRUNC_EXPR:
9236 mode = TYPE_MODE (TREE_TYPE (treeop0));
9237 goto binop;
9239 case VEC_PERM_EXPR:
9240 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9241 op2 = expand_normal (treeop2);
9243 /* Careful here: if the target doesn't support integral vector modes,
9244 a constant selection vector could wind up smooshed into a normal
9245 integral constant. */
9246 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9248 tree sel_type = TREE_TYPE (treeop2);
9249 machine_mode vmode
9250 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9251 TYPE_VECTOR_SUBPARTS (sel_type));
9252 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9253 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9254 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9257 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9259 temp = expand_vec_perm (mode, op0, op1, op2, target);
9265 tree oprnd0 = treeop0;
9266 tree oprnd1 = treeop1;
9267 tree oprnd2 = treeop2;
9270 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9271 op2 = expand_normal (oprnd2);
9272 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9279 tree oprnd0 = treeop0;
9280 tree oprnd1 = treeop1;
9281 tree oprnd2 = treeop2;
9284 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9285 op2 = expand_normal (oprnd2);
9286 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9291 case REALIGN_LOAD_EXPR:
9293 tree oprnd0 = treeop0;
9294 tree oprnd1 = treeop1;
9295 tree oprnd2 = treeop2;
9298 this_optab = optab_for_tree_code (code, type, optab_default);
9299 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9300 op2 = expand_normal (oprnd2);
9301 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9309 /* A COND_EXPR with its type being VOID_TYPE represents a
9310 conditional jump and is handled in
9311 expand_gimple_cond_expr. */
9312 gcc_assert (!VOID_TYPE_P (type));
9314 /* Note that COND_EXPRs whose type is a structure or union
9315 are required to be constructed to contain assignments of
9316 a temporary variable, so that we can evaluate them here
9317 for side effect only. If type is void, we must do likewise. */
9319 gcc_assert (!TREE_ADDRESSABLE (type)
9320 && !ignore
9321 && TREE_TYPE (treeop1) != void_type_node
9322 && TREE_TYPE (treeop2) != void_type_node);
9324 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9325 if (temp)
9326 return temp;
9328 /* If we are not to produce a result, we have no target. Otherwise,
9329 if a target was specified use it; it will not be used as an
9330 intermediate target unless it is safe. If no target, use a
9331 temporary. */
9333 if (modifier != EXPAND_STACK_PARM
9334 && original_target
9335 && safe_from_p (original_target, treeop0, 1)
9336 && GET_MODE (original_target) == mode
9337 && !MEM_P (original_target))
9338 temp = original_target;
9339 else
9340 temp = assign_temp (type, 0, 1);
9342 do_pending_stack_adjust ();
9344 rtx_code_label *lab0 = gen_label_rtx ();
9345 rtx_code_label *lab1 = gen_label_rtx ();
9346 jumpifnot (treeop0, lab0, -1);
9347 store_expr (treeop1, temp,
9348 modifier == EXPAND_STACK_PARM,
9351 emit_jump_insn (targetm.gen_jump (lab1));
9352 emit_barrier ();
9353 emit_label (lab0);
9354 store_expr (treeop2, temp,
9355 modifier == EXPAND_STACK_PARM,
9364 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9371 /* Here to do an ordinary binary operator. */
9373 expand_operands (treeop0, treeop1,
9374 subtarget, &op0, &op1, EXPAND_NORMAL);
9376 this_optab = optab_for_tree_code (code, type, optab_default);
9378 if (modifier == EXPAND_STACK_PARM)
9379 target = 0;
9380 temp = expand_binop (mode, this_optab, op0, op1, target,
9381 unsignedp, OPTAB_LIB_WIDEN);
9382 gcc_assert (temp);
9383 /* Bitwise operations do not need bitfield reduction as we expect their
9384 operands to be properly truncated. */
9385 if (code == BIT_XOR_EXPR
9386 || code == BIT_AND_EXPR
9387 || code == BIT_IOR_EXPR)
9388 return temp;
9389 return REDUCE_BIT_FIELD (temp);
9391 #undef REDUCE_BIT_FIELD
9394 /* Return TRUE if expression STMT is suitable for replacement.
9395 Never consider memory loads as replaceable, because those don't ever lead
9396 to constant expressions. */
9398 static bool
9399 stmt_is_replaceable_p (gimple *stmt)
9401 if (ssa_is_replaceable_p (stmt))
9403 /* Don't move around loads. */
9404 if (!gimple_assign_single_p (stmt)
9405 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9412 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9413 enum expand_modifier modifier, rtx *alt_rtl,
9414 bool inner_reference_p)
9416 rtx op0, op1, temp, decl_rtl;
9417 tree type;
9418 int unsignedp;
9419 machine_mode mode, dmode;
9420 enum tree_code code = TREE_CODE (exp);
9421 rtx subtarget, original_target;
9422 int ignore;
9423 tree context;
9424 bool reduce_bit_field;
9425 location_t loc = EXPR_LOCATION (exp);
9426 struct separate_ops ops;
9427 tree treeop0, treeop1, treeop2;
9428 tree ssa_name = NULL_TREE;
9431 type = TREE_TYPE (exp);
9432 mode = TYPE_MODE (type);
9433 unsignedp = TYPE_UNSIGNED (type);
9435 treeop0 = treeop1 = treeop2 = NULL_TREE;
9436 if (!VL_EXP_CLASS_P (exp))
9437 switch (TREE_CODE_LENGTH (code))
9440 case 3: treeop2 = TREE_OPERAND (exp, 2);
9441 case 2: treeop1 = TREE_OPERAND (exp, 1);
9442 case 1: treeop0 = TREE_OPERAND (exp, 0);
9452 ignore = (target == const0_rtx
9453 || ((CONVERT_EXPR_CODE_P (code)
9454 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9455 && TREE_CODE (type) == VOID_TYPE));
9457 /* An operation in what may be a bit-field type needs the
9458 result to be reduced to the precision of the bit-field type,
9459 which is narrower than that of the type's mode. */
9460 reduce_bit_field = (!ignore
9461 && INTEGRAL_TYPE_P (type)
9462 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
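/* Example: a 3-bit unsigned bit-field type is carried in a mode of
   precision 8 or more, so "x + 1" with x == 7 leaves 8 in the mode;
   the later REDUCE_BIT_FIELD step masks this back to 3 bits so the
   observed result is 0, matching the declared precision.  */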
9464 /* If we are going to ignore this result, we need only do something
9465 if there is a side-effect somewhere in the expression. If there
9466 is, short-circuit the most common cases here. Note that we must
9467 not call expand_expr with anything but const0_rtx in case this
9468 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9470 if (ignore)
9471 {
9472 if (! TREE_SIDE_EFFECTS (exp))
9473 return const0_rtx;
9475 /* Ensure we reference a volatile object even if value is ignored, but
9476 don't do this if all we are doing is taking its address. */
9477 if (TREE_THIS_VOLATILE (exp)
9478 && TREE_CODE (exp) != FUNCTION_DECL
9479 && mode != VOIDmode && mode != BLKmode
9480 && modifier != EXPAND_CONST_ADDRESS)
9482 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9483 if (MEM_P (temp))
9484 copy_to_reg (temp);
9485 return const0_rtx;
9488 if (TREE_CODE_CLASS (code) == tcc_unary
9489 || code == BIT_FIELD_REF
9490 || code == COMPONENT_REF
9491 || code == INDIRECT_REF)
9492 return expand_expr (treeop0, const0_rtx, VOIDmode,
9493 modifier);
9495 else if (TREE_CODE_CLASS (code) == tcc_binary
9496 || TREE_CODE_CLASS (code) == tcc_comparison
9497 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9499 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9500 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9501 return const0_rtx;
9507 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9508 target = 0;
9510 /* Use subtarget as the target for operand 0 of a binary operation. */
9511 subtarget = get_subtarget (target);
9512 original_target = target;
9514 switch (code)
9516 case LABEL_DECL:
9518 tree function = decl_function_context (exp);
9520 temp = label_rtx (exp);
9521 temp = gen_rtx_LABEL_REF (Pmode, temp);
9523 if (function != current_function_decl
9524 && function != 0)
9525 LABEL_REF_NONLOCAL_P (temp) = 1;
9527 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9528 return temp;
9531 case SSA_NAME:
9532 /* ??? ivopts calls expander, without any preparation from
9533 out-of-ssa. So fake instructions as if this was an access to the
9534 base variable. This unnecessarily allocates a pseudo, see how we can
9535 reuse it, if partition base vars have it set already. */
9536 if (!currently_expanding_to_rtl)
9538 tree var = SSA_NAME_VAR (exp);
9539 if (var && DECL_RTL_SET_P (var))
9540 return DECL_RTL (var);
9541 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9542 LAST_VIRTUAL_REGISTER + 1);
9545 g = get_gimple_for_ssa_name (exp);
9546 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9547 if (g == NULL
9548 && modifier == EXPAND_INITIALIZER
9549 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9550 && (optimize || !SSA_NAME_VAR (exp)
9551 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9552 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9553 g = SSA_NAME_DEF_STMT (exp);
9555 if (g)
9557 location_t saved_loc = curr_insn_location ();
9558 location_t loc = gimple_location (g);
9559 if (loc != UNKNOWN_LOCATION)
9560 set_curr_insn_location (loc);
9561 ops.code = gimple_assign_rhs_code (g);
9562 switch (get_gimple_rhs_class (ops.code))
9564 case GIMPLE_TERNARY_RHS:
9565 ops.op2 = gimple_assign_rhs3 (g);
9567 case GIMPLE_BINARY_RHS:
9568 ops.op1 = gimple_assign_rhs2 (g);
9570 /* Try to expand conditional compare. */
9571 if (targetm.gen_ccmp_first)
9573 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9574 r = expand_ccmp_expr (g);
9575 if (r)
9576 break;
9579 case GIMPLE_UNARY_RHS:
9580 ops.op0 = gimple_assign_rhs1 (g);
9581 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9583 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9585 case GIMPLE_SINGLE_RHS:
9587 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9588 tmode, modifier, NULL, inner_reference_p);
9594 set_curr_insn_location (saved_loc);
9595 if (REG_P (r) && !REG_EXPR (r))
9596 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9597 return r;
9600 ssa_name = exp;
9601 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9602 exp = SSA_NAME_VAR (ssa_name);
9603 goto expand_decl_rtl;
9607 /* If a static var's type was incomplete when the decl was written,
9608 but the type is complete now, lay out the decl now. */
9609 if (DECL_SIZE (exp) == 0
9610 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9611 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9612 layout_decl (exp, 0);
9614 /* ... fall through ... */
9616 case FUNCTION_DECL:
9617 case RESULT_DECL:
9618 decl_rtl = DECL_RTL (exp);
9619 expand_decl_rtl:
9620 gcc_assert (decl_rtl);
9622 /* DECL_MODE might change when TYPE_MODE depends on attribute target
9623 settings for VECTOR_TYPE_P that might switch for the function. */
9624 if (currently_expanding_to_rtl
9625 && code == VAR_DECL && MEM_P (decl_rtl)
9626 && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
9627 decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
9628 else
9629 decl_rtl = copy_rtx (decl_rtl);
9631 /* Record writes to register variables. */
9632 if (modifier == EXPAND_WRITE
9633 && REG_P (decl_rtl)
9634 && HARD_REGISTER_P (decl_rtl))
9635 add_to_hard_reg_set (&crtl->asm_clobbers,
9636 GET_MODE (decl_rtl), REGNO (decl_rtl));
9638 /* Ensure variable marked as used even if it doesn't go through
9639 a parser. If it hasn't been used yet, write out an external
9640 definition. */
9642 TREE_USED (exp) = 1;
9644 /* Show we haven't gotten RTL for this yet. */
9645 temp = 0;
9647 /* Variables inherited from containing functions should have
9648 been lowered by this point. */
9650 context = decl_function_context (exp);
9652 || SCOPE_FILE_SCOPE_P (context)
9653 || context == current_function_decl
9654 || TREE_STATIC (exp)
9655 || DECL_EXTERNAL (exp)
9656 /* ??? C++ creates functions that are not TREE_STATIC. */
9657 || TREE_CODE (exp) == FUNCTION_DECL);
9659 /* This is the case of an array whose size is to be determined
9660 from its initializer, while the initializer is still being parsed.
9661 ??? We aren't parsing while expanding anymore. */
9663 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9664 temp = validize_mem (decl_rtl);
9666 /* If DECL_RTL is memory, we are in the normal case and the
9667 address is not valid, get the address into a register. */
9669 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9671 if (alt_rtl)
9672 *alt_rtl = decl_rtl;
9673 decl_rtl = use_anchored_address (decl_rtl);
9674 if (modifier != EXPAND_CONST_ADDRESS
9675 && modifier != EXPAND_SUM
9676 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
9677 : GET_MODE (decl_rtl),
9679 MEM_ADDR_SPACE (decl_rtl)))
9680 temp = replace_equiv_address (decl_rtl,
9681 copy_rtx (XEXP (decl_rtl, 0)));
9684 /* If we got something, return it. But first, set the alignment
9685 if the address is a register. */
9688 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
9689 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9695 dmode = DECL_MODE (exp);
9696 else
9697 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
9699 /* If the mode of DECL_RTL does not match that of the decl,
9700 there are two cases: we are dealing with a BLKmode value
9701 that is returned in a register, or we are dealing with
9702 a promoted value. In the latter case, return a SUBREG
9703 of the wanted mode, but mark it so that we know that it
9704 was already extended. */
9705 if (REG_P (decl_rtl)
9706 && dmode != BLKmode
9707 && GET_MODE (decl_rtl) != dmode)
9709 machine_mode pmode;
9711 /* Get the signedness to be used for this variable. Ensure we get
9712 the same mode we got when the variable was declared. */
9713 if (code != SSA_NAME)
9714 pmode = promote_decl_mode (exp, &unsignedp);
9715 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
9716 && gimple_code (g) == GIMPLE_CALL
9717 && !gimple_call_internal_p (g))
9718 pmode = promote_function_mode (type, mode, &unsignedp,
9719 gimple_call_fntype (g),
9722 pmode = promote_ssa_mode (ssa_name, &unsignedp);
9723 gcc_assert (GET_MODE (decl_rtl) == pmode);
9725 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9726 SUBREG_PROMOTED_VAR_P (temp) = 1;
9727 SUBREG_PROMOTED_SET (temp, unsignedp);
9728 return temp;
9731 return decl_rtl;
9733 case INTEGER_CST:
9734 /* Given that TYPE_PRECISION (type) is not always equal to
9735 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9736 the former to the latter according to the signedness of the
9737 type. */
9738 temp = immed_wide_int_const (wide_int::from
9739 (exp,
9740 GET_MODE_PRECISION (TYPE_MODE (type)),
9741 TYPE_SIGN (type)),
9742 TYPE_MODE (type));
9743 return temp;
9745 case VECTOR_CST:
9747 tree tmp = NULL_TREE;
9748 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9749 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9750 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9751 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9752 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9753 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9754 return const_vector_from_tree (exp);
9755 if (GET_MODE_CLASS (mode) == MODE_INT)
9757 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
9758 return const_scalar_mask_from_tree (exp);
9761 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9762 if (type_for_mode)
9763 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
9764 type_for_mode, exp);
9767 if (!tmp)
9769 vec<constructor_elt, va_gc> *v;
9770 unsigned i;
9771 vec_alloc (v, VECTOR_CST_NELTS (exp));
9772 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9773 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9774 tmp = build_constructor (type, v);
9776 return expand_expr (tmp, ignore ? const0_rtx : target,
9781 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9784 /* If optimized, generate immediate CONST_DOUBLE
9785 which will be turned into memory by reload if necessary.
9787 We used to force a register so that loop.c could see it. But
9788 this does not allow gen_* patterns to perform optimizations with
9789 the constants. It also produces two insns in cases like "x = 1.0;".
9790 On most machines, floating-point constants are not permitted in
9791 many insns, so we'd end up copying it to a register in any case.
9793 Now, we do the copying in expand_binop, if appropriate. */
9794 return const_double_from_real_value (TREE_REAL_CST (exp),
9795 TYPE_MODE (TREE_TYPE (exp)));
9798 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9799 TYPE_MODE (TREE_TYPE (exp)));
9802 /* Handle evaluating a complex constant in a CONCAT target. */
9803 if (original_target && GET_CODE (original_target) == CONCAT)
9805 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9808 rtarg = XEXP (original_target, 0);
9809 itarg = XEXP (original_target, 1);
9811 /* Move the real and imaginary parts separately. */
9812 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9813 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9816 emit_move_insn (rtarg, op0);
9818 emit_move_insn (itarg, op1);
9820 return original_target;
9823 /* ... fall through ... */
9826 temp = expand_expr_constant (exp, 1, modifier);
9828 /* temp contains a constant address.
9829 On RISC machines where a constant address isn't valid,
9830 make some insns to get that address into a register. */
9831 if (modifier != EXPAND_CONST_ADDRESS
9832 && modifier != EXPAND_INITIALIZER
9833 && modifier != EXPAND_SUM
9834 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9835 MEM_ADDR_SPACE (temp)))
9836 return replace_equiv_address (temp,
9837 copy_rtx (XEXP (temp, 0)));
9843 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9846 if (!SAVE_EXPR_RESOLVED_P (exp))
9848 /* We can indeed still hit this case, typically via builtin
9849 expanders calling save_expr immediately before expanding
9850 something. Assume this means that we only have to deal
9851 with non-BLKmode values. */
9852 gcc_assert (GET_MODE (ret) != BLKmode);
9854 val = build_decl (curr_insn_location (),
9855 VAR_DECL, NULL, TREE_TYPE (exp));
9856 DECL_ARTIFICIAL (val) = 1;
9857 DECL_IGNORED_P (val) = 1;
9859 TREE_OPERAND (exp, 0) = treeop0;
9860 SAVE_EXPR_RESOLVED_P (exp) = 1;
9862 if (!CONSTANT_P (ret))
9863 ret = copy_to_reg (ret);
9864 SET_DECL_RTL (val, ret);
9872 /* If we don't need the result, just ensure we evaluate any
9873 subexpressions. */
9874 if (ignore)
9875 {
9876 unsigned HOST_WIDE_INT idx;
9879 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9880 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9885 return expand_constructor (exp, target, modifier, false);
9887 case TARGET_MEM_REF:
9889 addr_space_t as
9890 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9891 enum insn_code icode;
9892 unsigned int align;
9894 op0 = addr_for_mem_ref (exp, as, true);
9895 op0 = memory_address_addr_space (mode, op0, as);
9896 temp = gen_rtx_MEM (mode, op0);
9897 set_mem_attributes (temp, exp, 0);
9898 set_mem_addr_space (temp, as);
9899 align = get_object_alignment (exp);
9900 if (modifier != EXPAND_WRITE
9901 && modifier != EXPAND_MEMORY
9902 && mode != BLKmode
9903 && align < GET_MODE_ALIGNMENT (mode)
9904 /* If the target does not have special handling for unaligned
9905 loads of this mode, then it can use regular moves for them. */
9906 && ((icode = optab_handler (movmisalign_optab, mode))
9907 != CODE_FOR_nothing))
9909 struct expand_operand ops[2];
9911 /* We've already validated the memory, and we're creating a
9912 new pseudo destination. The predicates really can't fail,
9913 nor can the generator. */
9914 create_output_operand (&ops[0], NULL_RTX, mode);
9915 create_fixed_operand (&ops[1], temp);
9916 expand_insn (icode, 2, ops);
9917 temp = ops[0].value;
9919 return temp;
9922 case MEM_REF:
9924 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
9925 addr_space_t as
9926 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9927 machine_mode address_mode;
9928 tree base = TREE_OPERAND (exp, 0);
9929 gimple *def_stmt;
9930 enum insn_code icode;
9932 /* Handle expansion of non-aliased memory with non-BLKmode. That
9933 might end up in a register. */
9934 if (mem_ref_refers_to_non_mem_p (exp))
9936 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9937 base = TREE_OPERAND (base, 0);
9938 if (offset == 0
9939 && !reverse
9940 && tree_fits_uhwi_p (TYPE_SIZE (type))
9941 && (GET_MODE_BITSIZE (DECL_MODE (base))
9942 == tree_to_uhwi (TYPE_SIZE (type))))
9943 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9944 target, tmode, modifier);
9945 if (TYPE_MODE (type) == BLKmode)
9947 temp = assign_stack_temp (DECL_MODE (base),
9948 GET_MODE_SIZE (DECL_MODE (base)));
9949 store_expr (base, temp, 0, false, false);
9950 temp = adjust_address (temp, BLKmode, offset);
9951 set_mem_size (temp, int_size_in_bytes (type));
9952 return temp;
9954 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9955 bitsize_int (offset * BITS_PER_UNIT));
9956 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
9957 return expand_expr (exp, target, tmode, modifier);
9959 address_mode = targetm.addr_space.address_mode (as);
9960 base = TREE_OPERAND (exp, 0);
9961 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9963 tree mask = gimple_assign_rhs2 (def_stmt);
9964 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9965 gimple_assign_rhs1 (def_stmt), mask);
9966 TREE_OPERAND (exp, 0) = base;
9968 align = get_object_alignment (exp);
9969 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9970 op0 = memory_address_addr_space (mode, op0, as);
9971 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9973 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9974 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9975 op0 = memory_address_addr_space (mode, op0, as);
9977 temp = gen_rtx_MEM (mode, op0);
9978 set_mem_attributes (temp, exp, 0);
9979 set_mem_addr_space (temp, as);
9980 if (TREE_THIS_VOLATILE (exp))
9981 MEM_VOLATILE_P (temp) = 1;
9982 if (modifier != EXPAND_WRITE
9983 && modifier != EXPAND_MEMORY
9984 && !inner_reference_p
9985 && mode != BLKmode
9986 && align < GET_MODE_ALIGNMENT (mode))
9988 if ((icode = optab_handler (movmisalign_optab, mode))
9989 != CODE_FOR_nothing)
9991 struct expand_operand ops[2];
9993 /* We've already validated the memory, and we're creating a
9994 new pseudo destination. The predicates really can't fail,
9995 nor can the generator. */
9996 create_output_operand (&ops[0], NULL_RTX, mode);
9997 create_fixed_operand (&ops[1], temp);
9998 expand_insn (icode, 2, ops);
9999 temp = ops[0].value;
10001 else if (SLOW_UNALIGNED_ACCESS (mode, align))
10002 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
10003 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
10004 (modifier == EXPAND_STACK_PARM
10005 ? NULL_RTX : target),
10006 mode, mode, false);
10008 if (reverse
10009 && modifier != EXPAND_MEMORY
10010 && modifier != EXPAND_WRITE)
10011 temp = flip_storage_order (mode, temp);
10012 return temp;
10015 case ARRAY_REF:
10018 tree array = treeop0;
10019 tree index = treeop1;
10022 /* Fold an expression like: "foo"[2].
10023 This is not done in fold so it won't happen inside &.
10024 Don't fold if this is for wide characters since it's too
10025 difficult to do correctly and this is a very rare case. */
10027 if (modifier != EXPAND_CONST_ADDRESS
10028 && modifier != EXPAND_INITIALIZER
10029 && modifier != EXPAND_MEMORY)
10031 tree t = fold_read_from_constant_string (exp);
10033 if (t)
10034 return expand_expr (t, target, tmode, modifier);
10037 /* If this is a constant index into a constant array,
10038 just get the value from the array. Handle both the cases when
10039 we have an explicit constructor and when our operand is a variable
10040 that was declared const. */
10042 if (modifier != EXPAND_CONST_ADDRESS
10043 && modifier != EXPAND_INITIALIZER
10044 && modifier != EXPAND_MEMORY
10045 && TREE_CODE (array) == CONSTRUCTOR
10046 && ! TREE_SIDE_EFFECTS (array)
10047 && TREE_CODE (index) == INTEGER_CST)
10049 unsigned HOST_WIDE_INT ix;
10050 tree field, value;
10052 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10053 field, value)
10054 if (tree_int_cst_equal (field, index))
10056 if (!TREE_SIDE_EFFECTS (value))
10057 return expand_expr (fold (value), target, tmode, modifier);
10062 else if (optimize >= 1
10063 && modifier != EXPAND_CONST_ADDRESS
10064 && modifier != EXPAND_INITIALIZER
10065 && modifier != EXPAND_MEMORY
10066 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10067 && TREE_CODE (index) == INTEGER_CST
10068 && (TREE_CODE (array) == VAR_DECL
10069 || TREE_CODE (array) == CONST_DECL)
10070 && (init = ctor_for_folding (array)) != error_mark_node)
10072 if (init == NULL_TREE)
10074 tree value = build_zero_cst (type);
10075 if (TREE_CODE (value) == CONSTRUCTOR)
10077 /* If VALUE is a CONSTRUCTOR, this optimization is only
10078 useful if this doesn't store the CONSTRUCTOR into
10079 memory. If it does, it is more efficient to just
10080 load the data from the array directly. */
10081 rtx ret = expand_constructor (value, target,
10083 if (ret == NULL_RTX)
10088 return expand_expr (value, target, tmode, modifier);
10090 else if (TREE_CODE (init) == CONSTRUCTOR)
10092 unsigned HOST_WIDE_INT ix;
10093 tree field, value;
10095 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10096 field, value)
10097 if (tree_int_cst_equal (field, index))
10099 if (TREE_SIDE_EFFECTS (value))
10100 break;
10102 if (TREE_CODE (value) == CONSTRUCTOR)
10104 /* If VALUE is a CONSTRUCTOR, this
10105 optimization is only useful if
10106 this doesn't store the CONSTRUCTOR
10107 into memory. If it does, it is more
10108 efficient to just load the data from
10109 the array directly. */
10110 rtx ret = expand_constructor (value, target,
10112 if (ret == NULL_RTX)
10113 break;
10116 return
10117 expand_expr (fold (value), target, tmode, modifier);
10120 else if (TREE_CODE (init) == STRING_CST)
10122 tree low_bound = array_ref_low_bound (exp);
10123 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10125 /* Optimize the special case of a zero lower bound.
10127 We convert the lower bound to sizetype to avoid problems
10128 with constant folding. E.g. suppose the lower bound is
10129 1 and its mode is QI. Without the conversion
10130 (ARRAY + (INDEX - (unsigned char)1))
10131 becomes
10132 (ARRAY + (-(unsigned char)1) + INDEX)
10133 which becomes
10134 (ARRAY + 255 + INDEX). Oops! */
10135 if (!integer_zerop (low_bound))
10136 index1 = size_diffop_loc (loc, index1,
10137 fold_convert_loc (loc, sizetype,
10138 low_bound));
10140 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10142 tree type = TREE_TYPE (TREE_TYPE (init));
10143 machine_mode mode = TYPE_MODE (type);
10145 if (GET_MODE_CLASS (mode) == MODE_INT
10146 && GET_MODE_SIZE (mode) == 1)
10147 return gen_int_mode (TREE_STRING_POINTER (init)
10148 [TREE_INT_CST_LOW (index1)],
10149 mode);
10154 goto normal_inner_ref;
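/* e.g. for

       static const char s[] = "foo";

   a read of s[2] takes the STRING_CST path above and expands
   directly to (const_int 111), i.e. the character 'o', with no
   memory reference.  */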
10156 case COMPONENT_REF:
10157 /* If the operand is a CONSTRUCTOR, we can just extract the
10158 appropriate field if it is present. */
10159 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10161 unsigned HOST_WIDE_INT idx;
10164 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10165 idx, field, value)
10166 if (field == treeop1
10167 /* We can normally use the value of the field in the
10168 CONSTRUCTOR. However, if this is a bitfield in
10169 an integral mode that we can fit in a HOST_WIDE_INT,
10170 we must mask only the number of bits in the bitfield,
10171 since this is done implicitly by the constructor. If
10172 the bitfield does not meet either of those conditions,
10173 we can't do this optimization. */
10174 && (! DECL_BIT_FIELD (field)
10175 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10176 && (GET_MODE_PRECISION (DECL_MODE (field))
10177 <= HOST_BITS_PER_WIDE_INT))))
10179 if (DECL_BIT_FIELD (field)
10180 && modifier == EXPAND_STACK_PARM)
10181 target = 0;
10182 op0 = expand_expr (value, target, tmode, modifier);
10183 if (DECL_BIT_FIELD (field))
10185 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10186 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10188 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10190 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10192 op0 = expand_and (imode, op0, op1, target);
10194 else
10196 int count = GET_MODE_PRECISION (imode) - bitsize;
10198 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10199 target, 0);
10200 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10201 target, 0);
10208 goto normal_inner_ref;
10210 case BIT_FIELD_REF:
10211 case ARRAY_RANGE_REF:
10214 machine_mode mode1, mode2;
10215 HOST_WIDE_INT bitsize, bitpos;
10216 tree offset;
10217 int reversep, volatilep = 0, must_force_mem;
10218 tree tem
10219 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10220 &unsignedp, &reversep, &volatilep, true);
10221 rtx orig_op0, memloc;
10222 bool clear_mem_expr = false;
10224 /* If we got back the original object, something is wrong. Perhaps
10225 we are evaluating an expression too early. In any event, don't
10226 infinitely recurse. */
10227 gcc_assert (tem != exp);
10229 /* If TEM's type is a union of variable size, pass TARGET to the inner
10230 computation, since it will need a temporary and TARGET is known
10231 to be suitable. This occurs in unchecked conversion in Ada. */
10232 orig_op0 = op0
10233 = expand_expr_real (tem,
10234 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10235 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10236 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10237 != INTEGER_CST)
10238 && modifier != EXPAND_STACK_PARM
10239 ? target : NULL_RTX),
10241 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10244 /* If the field has a mode, we want to access it in the
10245 field's mode, not the computed mode.
10246 If a MEM has VOIDmode (external with incomplete type),
10247 use BLKmode for it instead. */
10248 if (MEM_P (op0))
10250 if (mode1 != VOIDmode)
10251 op0 = adjust_address (op0, mode1, 0);
10252 else if (GET_MODE (op0) == VOIDmode)
10253 op0 = adjust_address (op0, BLKmode, 0);
10257 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10259 /* If we have either an offset, a BLKmode result, or a reference
10260 outside the underlying object, we must force it to memory.
10261 Such a case can occur in Ada if we have unchecked conversion
10262 of an expression from a scalar type to an aggregate type or
10263 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10264 passed a partially uninitialized object or a view-conversion
10265 to a larger size. */
10266 must_force_mem = (offset
10267 || mode1 == BLKmode
10268 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10270 /* Handle CONCAT first. */
10271 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10273 if (bitpos == 0
10274 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0))
10275 && COMPLEX_MODE_P (mode1)
10276 && COMPLEX_MODE_P (GET_MODE (op0))
10277 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10278 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10281 op0 = flip_storage_order (GET_MODE (op0), op0);
10282 if (mode1 != GET_MODE (op0))
10284 rtx parts[2];
10285 for (int i = 0; i < 2; i++)
10287 rtx op = read_complex_part (op0, i != 0);
10288 if (GET_CODE (op) == SUBREG)
10289 op = force_reg (GET_MODE (op), op);
10290 rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
10296 if (!REG_P (op) && !MEM_P (op))
10297 op = force_reg (GET_MODE (op), op);
10298 op = gen_lowpart (GET_MODE_INNER (mode1), op);
10302 op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10307 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10310 op0 = XEXP (op0, 0);
10311 mode2 = GET_MODE (op0);
10313 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10314 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10318 op0 = XEXP (op0, 1);
10320 mode2 = GET_MODE (op0);
10323 /* Otherwise force into memory. */
10324 must_force_mem = 1;
	/* If this is a constant, put it in a register if it is a legitimate
	   constant and we don't need a memory reference.  */
	if (CONSTANT_P (op0)
	    && mode2 != BLKmode
	    && targetm.legitimate_constant_p (mode2, op0)
	    && !must_force_mem)
	  op0 = force_reg (mode2, op0);

	/* Otherwise, if this is a constant, try to force it to the constant
	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
	   is a legitimate constant.  */
	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
	  op0 = validize_mem (memloc);

	/* Otherwise, if this is a constant or the object is not in memory
	   and needs to be, put it there.  */
	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
	  {
	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	    clear_mem_expr = true;
	  }
	if (offset)
	  {
	    machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode = get_address_mode (op0);
	    if (GET_MODE (offset_rtx) != address_mode)
	      {
		/* We cannot be sure that the RTL in offset_rtx is valid outside
		   of a memory address context, so force it into a register
		   before attempting to convert it to the desired mode.  */
		offset_rtx = force_operand (offset_rtx, NULL_RTX);
		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
	      }

	    /* See the comment in expand_assignment for the rationale.  */
	    if (mode1 != VOIDmode
		&& bitpos != 0
		&& bitsize > 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& modifier != EXPAND_MEMORY)
	    /* If the bitfield is volatile and the bitsize
	       is narrower than the access size of the bitfield,
	       we need to extract bitfields from the access.  */
	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
		&& mode1 != BLKmode
		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && modifier != EXPAND_MEMORY
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 1, 1);

		/* ??? Unlike the similar test a few lines below, this one is
		   very likely obsolete.  */
		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }
	    /* If we have nothing to extract, the result will be 0 for targets
	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
	       return 0 for the sake of consistency, as reading a zero-sized
	       bitfield is valid in Ada and the value is fully specified.  */
	    if (bitsize == 0)
	      return const0_rtx;

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
	    /* If the result has a record type and the extraction is done in
	       an integral mode, then the field may not be aligned on a byte
	       boundary; in this case, if it has reverse storage order, it
	       needs to be extracted as a scalar field with reverse storage
	       order and put back into memory order afterwards.  */
	    if (TREE_CODE (type) == RECORD_TYPE
		&& GET_MODE_CLASS (ext_mode) == MODE_INT)
	      reversep = TYPE_REVERSE_STORAGE_ORDER (type);

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode, reversep);
	    /* If the result has a record type and the mode of OP0 is an
	       integral mode then, if BITSIZE is narrower than this mode
	       and this is for big-endian data, we must put the field
	       into the high-order bits.  And we must also put it back
	       into memory order if it has been previously reversed.  */
	    if (TREE_CODE (type) == RECORD_TYPE
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
	      {
		HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (op0));

		if (bitsize < size
		    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
		  op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				      size - bitsize, op0, 1);

		if (reversep)
		  op0 = flip_storage_order (GET_MODE (op0), op0);
	      }
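	    /* For instance (an illustrative sketch, not target-specific):
	       on a 32-bit big-endian target, a 24-bit field extracted into
	       an SImode register must occupy the high-order bits, so the
	       shift above moves it left by 32 - 24 = 8.  */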
	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  */
	    if (mode == BLKmode)
	      {
		rtx new_rtx
		  = assign_stack_temp_for_type (ext_mode,
						GET_MODE_BITSIZE (ext_mode),
						type);
		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	/* Don't set memory attributes if the base expression is
	   SSA_NAME that got expanded as a MEM.  In that case, we should
	   just honor its original memory attributes.  */
	if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
	  set_mem_attributes (op0, exp, 0);

	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	/* If op0 is a temporary because the original expression was forced
	   to memory, clear MEM_EXPR so that the original expression cannot
	   be marked as addressable through MEM_EXPR of the temporary.  */
	if (clear_mem_expr)
	  set_mem_expr (op0, NULL_TREE);

	MEM_VOLATILE_P (op0) |= volatilep;

	if (reversep
	    && modifier != EXPAND_MEMORY
	    && modifier != EXPAND_WRITE)
	  op0 = flip_storage_order (mode1, op0);

	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    if (CALL_WITH_BOUNDS_P (exp))
	      return expand_builtin_with_bounds (exp, target, subtarget,
						 tmode, ignore);
	    else
	      return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp, reversep, volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
				   &unsignedp, &reversep, &volatilep, true);

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && !reversep
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      rtx orig_op0
		= expand_expr_real (tem,
				    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
					 != INTEGER_CST)
				     && modifier != EXPAND_STACK_PARM
				     ? target : NULL_RTX),
				    VOIDmode,
				    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
				    NULL, true);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
				NULL, inner_reference_p);
      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
				  TYPE_UNSIGNED (type), NULL_RTX,
				  mode, mode, false);
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}
      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
	 output type is such that the operand is known to be aligned, indicate
	 that it is.  Otherwise, we need only be concerned about alignment for
	 non-BLKmode results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (TYPE_ALIGN_OK (type))
	    {
	      /* ??? Copying the MEM without substantially changing it might
		 run afoul of the code handling volatile memory references in
		 store_expr, which assumes that TARGET is returned unmodified
		 if it has been used.  */
	      op0 = copy_rtx (op0);
	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	    }
	  else if (modifier != EXPAND_WRITE
		   && modifier != EXPAND_MEMORY
		   && !inner_reference_p
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      /* If the target does have special handling for unaligned
		 loads of mode then use them.  */
	      if ((icode = optab_handler (movmisalign_optab, mode))
		  != CODE_FOR_nothing)
		{
		  rtx reg;

		  op0 = adjust_address (op0, mode, 0);
		  /* We've already validated the memory, and we're creating a
		     new pseudo destination.  The predicates really can't
		     fail.  */
		  reg = gen_reg_rtx (mode);

		  /* Nor can the insn generator.  */
		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
		  emit_insn (insn);
		  return reg;
		}
	      else if (STRICT_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (treeop0);
		  HOST_WIDE_INT temp_size
		    = MAX (int_size_in_bytes (inner_type),
			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
		  rtx new_rtx
		    = assign_stack_temp_for_type (mode, temp_size, type);
		  rtx new_with_op0_mode
		    = adjust_address (new_rtx, GET_MODE (op0), 0);

		  gcc_assert (!TREE_ADDRESSABLE (exp));

		  if (GET_MODE (op0) == BLKmode)
		    emit_block_move (new_with_op0_mode, op0,
				     GEN_INT (GET_MODE_SIZE (mode)),
				     (modifier == EXPAND_STACK_PARM
				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
		  else
		    emit_move_insn (new_with_op0_mode, op0);

		  op0 = new_rtx;
		}
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
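	/* A minimal sketch of the transformation, using hypothetical
	   one-bit fields f1 and f2:

	     x.f1 |= y.f2;

	   is expanded, in effect, as

	     if (y.f2) x.f1 = 1;

	   and likewise "&=" stores 0, jumping around the store when the
	   tested bit already decides the result.  */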
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx_code_label *label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }
	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();
    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);

      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
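/* A worked example of the above (illustrative only): reducing an SImode
   value to a 6-bit type gives count = 32 - 6 = 26.  For an unsigned type
   the value is simply masked with 0x3f; for a signed type it is shifted
   left by 26 and then arithmetic-shifted right by 26, which propagates
   bit 5 through the upper bits and so sign-extends the field.  */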
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
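/* The pattern recognized above is the usual source idiom for over-aligning
   a buffer, e.g. (a sketch; 64 is hypothetical and must exceed
   BIGGEST_ALIGNMENT in bytes on the target):

     char buf[N + 63];
     char *p = buf + (-(uintptr_t) buf & 63);

   Here the BIT_AND_EXPR of the negated address with 63 is the OFFSET
   that gets checked against EXP (buf).  */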
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! tree_fits_uhwi_p (offset)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return init;
    }

  return 0;
}
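/* For example (illustrative): given ARG == &"hello"[2], the STRING_CST
   "hello" is returned and *PTR_OFFSET is set to 2; the same holds for
   ARG == "hello" + 2.  A VAR_DECL such as

     static const char greeting[6] = "hello";

   is handled through its initializer, provided the literal fills the
   object as checked above.  */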
/* Generate code to calculate OPS, an exploded comparison expression,
   using a store-flag instruction, and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      if (VECTOR_BOOLEAN_TYPE_P (ops->type)
	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type))
	return expand_vec_cmp_expr (ops->type, ifexp, target);
      else
	{
	  tree if_true = constant_boolean_node (true, ops->type);
	  tree if_false = constant_boolean_node (false, ops->type);
	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
				       if_false, target);
	}
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
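  /* For example, a signed "x < 1" becomes "x <= 0" and a signed
     "x > -1" becomes "x >= 0", so only comparisons against zero
     reach the tests below.  */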
  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);

      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }
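  /* For instance (illustrative), "(x & 8) != 0" is rewritten by
     fold_single_bit_test into roughly "(x >> 3) & 1", and for
     "(x & 8) == 0" the result is additionally XORed with 1; no scc
     instruction is needed in either case.  */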
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    int default_probability)
{
  struct expand_operand ops[5];
  machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
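  /* Concretely (an illustrative sketch): for case values LOW..HIGH the
     caller has computed INDEX = i - LOW and RANGE = HIGH - LOW, so the
     single unsigned test

       (unsigned) (i - LOW) > (unsigned) (HIGH - LOW)

     is true exactly when i < LOW or i > HIGH, because values of i below
     LOW wrap around to large unsigned numbers.  */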
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_vector_mask_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));
  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
	RTVEC_ELT (v, i) = CONST0_RTX (inner);
      else if (integer_onep (elt)
	       || integer_minus_onep (elt))
	RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
      else
	gcc_unreachable ();
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
/* Return a CONST_INT rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_scalar_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;
  unsigned i;

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
	res = wi::set_bit (res, i);
      else
	gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
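/* For example (illustrative): for an 8-element boolean vector whose
   first and fourth elements are true, bits 0 and 3 are set and the
   result is a CONST_INT with value 0x9, matching the layout of an
   AVX-512-style mask register where bit I controls element I.  */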
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
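/* For example, build_personality_function ("gxx") yields a declaration
   of "__gxx_personality_v0" when DWARF-2 unwinding is in use, or
   "__gxx_personality_sj0" for setjmp/longjmp unwinding, following the
   name construction above.  */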
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = tree_expr_size (exp);

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}

#include "gt-expr.h"