/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
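/* For example (an illustrative sketch, not target documentation): on a
   hypothetical machine where STACK_GROWS_DOWNWARD is defined but
   ARGS_GROW_DOWNWARD is not, PUSH_ARGS_REVERSED is defined above, so a
   call such as f (a, b, c) pushes its arguments last to first:

	push c
	push b
	push a
	call f

   The exact insns emitted depend on the target's push patterns.  */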
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
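/* As a worked example (illustrative numbers only): on a 32-bit target
   with no movstr patterns, MOVE_RATIO defaults to 15 (3 with -Os).  A
   16-byte, word-aligned copy costs four SImode moves, and since 4 < 15,

	MOVE_BY_PIECES_P (16, 32)	-- true; expand inline

   whereas with -Os the same copy fails the test (4 >= 3) and goes
   through a movstr pattern or a library call instead.  */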
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
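/* For example (an illustrative sketch): on a strict-alignment target
   this defaults to STRICT_ALIGNMENT == 1, so the by-pieces code below
   clamps the working alignment rather than risk unaligned word access:

	if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
	    || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
	  align = MOVE_MAX * BITS_PER_UNIT;

   Only targets that tolerate unaligned accesses get to pretend the
   operands are fully aligned.  */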
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;

      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
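/* For example (an illustrative sketch, not code from this file):

	rtx q = enqueue_insn (var, gen_move_insn (var, tmp));
	...
	rtx old = protect_from_queue (q, 0);
	emit_insn (gen_move_insn (dest, old));
	emit_queue ();

   OLD must be consumed by an insn before the queue is flushed; after
   emit_queue it still denotes the pre-increment value only because
   protect_from_queue arranged for a copy.  */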
rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);

	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));

  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);

  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;
      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
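/* For example (an illustrative sketch): widening a pseudo with explicit
   zero-extension, as a caller might do it:

	rtx narrow = gen_reg_rtx (SImode);
	rtx wide = convert_to_mode (DImode, narrow, 1);

   WIDE is either a new pseudo holding the zero-extended value or, when
   the conversion is a no-op, a reference to part of NARROW in place.  */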
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }
  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
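/* For example (illustrative arithmetic only): on a host with a 64-bit
   HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) is 16, so a target whose
   MOVE_MAX_PIECES is 8 gets

	STORE_MAX_PIECES == MIN (8, 16) == 8

   i.e. the move limit, not the immediate-constant limit, is what caps
   store_by_pieces there.  */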
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
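/* A caller would typically use this as a guard (an illustrative sketch,
   assuming DST and SRC are BLKmode MEMs):

	if (can_move_by_pieces (len, align))
	  move_by_pieces (dst, src, len, align, 0);
	else
	  ... fall back to a movstr pattern or a library call ...
 */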
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }
  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
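/* Worked example (illustrative numbers): with MOVE_MAX == 4 and a fully
   aligned block of L == 15 bytes, the loop above counts

	SImode: 15 / 4 = 3 insns, 3 bytes left
	HImode:  3 / 2 = 1 insn,  1 byte left
	QImode:  1 / 1 = 1 insn,  0 left

   for a total of 5 insns, which MOVE_BY_PIECES_P then compares against
   MOVE_RATIO.  */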
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* Check to see whether memcpy takes all register arguments.  */
  {
    static enum {
      takes_regs_uninit, takes_regs_no, takes_regs_yes
    } takes_regs = takes_regs_uninit;

    switch (takes_regs)
      {
      case takes_regs_uninit:
	{
	  CUMULATIVE_ARGS args_so_far;
	  tree fn, arg;

	  fn = emit_block_move_libcall_fn (false);
	  INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	  arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	  for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	    {
	      enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	      rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	      if (!tmp || !REG_P (tmp))
		goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	      if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					      NULL_TREE, 1))
		goto fail_takes_regs;
#endif
	      FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	    }
	}
	takes_regs = takes_regs_yes;
	/* FALLTHRU */

      case takes_regs_yes:
	return true;

      fail_takes_regs:
	takes_regs = takes_regs_no;
	/* FALLTHRU */
      case takes_regs_no:
	return false;

      default:
	abort ();
      }
  }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);
#endif

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
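/* The loop emitted above has this shape (an illustrative sketch):

	iter = 0;
	goto cmp;
     top:
	*(x_addr + iter) = *(y_addr + iter);	-- one QImode byte
	iter = iter + 1;
     cmp:
	if (iter < size) goto top;

   One byte per iteration; see the ??? note above about copying in
   larger hunks.  */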
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
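/* A register group of this kind looks like, e.g. (an illustrative
   sketch of a two-register group at byte offsets 0 and 8):

	(parallel [(expr_list (reg:DI 1) (const_int 0))
		   (expr_list (reg:DI 2) (const_int 8))])

   gen_group_rtx above returns the same shape with fresh pseudos in
   place of the hard registers.  */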
2250 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
2251 where DST is non-consecutive registers represented by a PARALLEL.
2252 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2256 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
2261 if (GET_CODE (dst) != PARALLEL)
2264 /* Check for a NULL entry, used to indicate that the parameter goes
2265 both on the stack and in registers. */
2266 if (XEXP (XVECEXP (dst, 0, 0), 0))
2271 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
2273 /* Process the pieces. */
2274 for (i = start; i < XVECLEN (dst, 0); i++)
2276 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2277 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2278 unsigned int bytelen = GET_MODE_SIZE (mode);
2281 /* Handle trailing fragments that run over the size of the struct. */
2282 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2284 /* Arrange to shift the fragment to where it belongs.
2285 extract_bit_field loads to the lsb of the reg. */
2287 #ifdef BLOCK_REG_PADDING
2288 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2289 == (BYTES_BIG_ENDIAN ? upward : downward)
2294 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2295 bytelen = ssize - bytepos;
2300 /* If we won't be loading directly from memory, protect the real source
2301 from strange tricks we might play; but make sure that the source can
2302 be loaded directly into the destination. */
2304 if (GET_CODE (orig_src) != MEM
2305 && (!CONSTANT_P (orig_src)
2306 || (GET_MODE (orig_src) != mode
2307 && GET_MODE (orig_src) != VOIDmode)))
2309 if (GET_MODE (orig_src) == VOIDmode)
2310 src = gen_reg_rtx (mode);
2312 src = gen_reg_rtx (GET_MODE (orig_src));
2314 emit_move_insn (src, orig_src);
2317 /* Optimize the access just a bit. */
2318 if (GET_CODE (src) == MEM
2319 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
2320 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2321 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2322 && bytelen == GET_MODE_SIZE (mode))
2324 tmps[i] = gen_reg_rtx (mode);
2325 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2327 else if (GET_CODE (src) == CONCAT)
2329 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2330 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2332 if ((bytepos == 0 && bytelen == slen0)
2333 || (bytepos != 0 && bytepos + bytelen <= slen))
2335 /* The following assumes that the concatenated objects all
2336 have the same size. In this case, a simple calculation
2337 can be used to determine the object and the bit field to be extracted. */
2339 tmps[i] = XEXP (src, bytepos / slen0);
2340 if (! CONSTANT_P (tmps[i])
2341 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2342 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2343 (bytepos % slen0) * BITS_PER_UNIT,
2344 1, NULL_RTX, mode, mode, ssize);
2346 else if (bytepos == 0)
2348 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2349 emit_move_insn (mem, src);
2350 tmps[i] = adjust_address (mem, mode, 0);
2355 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2356 SIMD register, which is currently broken. Until we get GCC
2357 to emit proper RTL for these cases, let's dump to memory. */
2358 else if (VECTOR_MODE_P (GET_MODE (dst))
2359 && GET_CODE (src) == REG)
2361 int slen = GET_MODE_SIZE (GET_MODE (src));
2364 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2365 emit_move_insn (mem, src);
2366 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2368 else if (CONSTANT_P (src)
2369 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2372 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2373 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2377 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2378 tmps[i], 0, OPTAB_WIDEN);
2383 /* Copy the extracted pieces into the proper (probable) hard regs. */
2384 for (i = start; i < XVECLEN (dst, 0); i++)
2385 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
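/* Illustrative sketch (assumed usage, mirroring the calls made later
   in this file): a caller typically loads a value returned in such a
   group with

       if (GET_CODE (to_rtx) == PARALLEL)
         emit_group_load (to_rtx, value, TREE_TYPE (from),
                          int_size_in_bytes (TREE_TYPE (from)));

   where TO_RTX came from the target's argument or return-value
   machinery.  */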
2388 /* Emit code to move a block SRC to block DST, where SRC and DST are
2389 non-consecutive groups of registers, each represented by a PARALLEL. */
2392 emit_group_move (rtx dst, rtx src)
2396 if (GET_CODE (src) != PARALLEL
2397 || GET_CODE (dst) != PARALLEL
2398 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2401 /* Skip first entry if NULL. */
2402 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2403 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2404 XEXP (XVECEXP (src, 0, i), 0));
2407 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2408 where SRC is non-consecutive registers represented by a PARALLEL.
2409 SSIZE represents the total size of block ORIG_DST, or -1 if not known. */
2413 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2418 if (GET_CODE (src) != PARALLEL)
2421 /* Check for a NULL entry, used to indicate that the parameter goes
2422 both on the stack and in registers. */
2423 if (XEXP (XVECEXP (src, 0, 0), 0))
2428 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2430 /* Copy the (probable) hard regs into pseudos. */
2431 for (i = start; i < XVECLEN (src, 0); i++)
2433 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2434 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2435 emit_move_insn (tmps[i], reg);
2439 /* If we won't be storing directly into memory, protect the real destination
2440 from strange tricks we might play. */
2442 if (GET_CODE (dst) == PARALLEL)
2446 /* We can get a PARALLEL dst if there is a conditional expression in
2447 a return statement. In that case, the dst and src are the same,
2448 so no action is necessary. */
2449 if (rtx_equal_p (dst, src))
2452 /* It is unclear if we can ever reach here, but we may as well handle
2453 it. Allocate a temporary, and split this into a store/load to/from the temporary. */
2456 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2457 emit_group_store (temp, src, type, ssize);
2458 emit_group_load (dst, temp, type, ssize);
2461 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2463 dst = gen_reg_rtx (GET_MODE (orig_dst));
2464 /* Make life a bit easier for combine. */
2465 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2468 /* Process the pieces. */
2469 for (i = start; i < XVECLEN (src, 0); i++)
2471 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2472 enum machine_mode mode = GET_MODE (tmps[i]);
2473 unsigned int bytelen = GET_MODE_SIZE (mode);
2476 /* Handle trailing fragments that run over the size of the struct. */
2477 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2479 /* store_bit_field always takes its value from the lsb.
2480 Move the fragment to the lsb if it's not already there. */
2482 #ifdef BLOCK_REG_PADDING
2483 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2484 == (BYTES_BIG_ENDIAN ? upward : downward)
2490 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2491 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2492 tmps[i], 0, OPTAB_WIDEN);
2494 bytelen = ssize - bytepos;
2497 if (GET_CODE (dst) == CONCAT)
2499 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2500 dest = XEXP (dst, 0);
2501 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2503 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2504 dest = XEXP (dst, 1);
2506 else if (bytepos == 0 && XVECLEN (src, 0))
2508 dest = assign_stack_temp (GET_MODE (dest),
2509 GET_MODE_SIZE (GET_MODE (dest)), 0);
2510 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2519 /* Optimize the access just a bit. */
2520 if (GET_CODE (dest) == MEM
2521 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2522 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2523 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2524 && bytelen == GET_MODE_SIZE (mode))
2525 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2527 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2528 mode, tmps[i], ssize);
2533 /* Copy from the pseudo into the (probable) hard reg. */
2534 if (orig_dst != dst)
2535 emit_move_insn (orig_dst, dst);
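/* Illustrative sketch (assumed usage): emit_group_store is the
   inverse of emit_group_load above, as the PARALLEL-destination case
   in this very function demonstrates:

       temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
       emit_group_store (temp, src, type, ssize);
       emit_group_load (dst, temp, type, ssize);  */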
2538 /* Generate code to copy a BLKmode object of TYPE out of a
2539 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2540 is null, a stack temporary is created. TGTBLK is returned.
2542 The primary purpose of this routine is to handle functions
2543 that return BLKmode structures in registers. Some machines
2544 (the PA for example) want to return all small structures
2545 in registers regardless of the structure's alignment. */
2548 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2550 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2551 rtx src = NULL, dst = NULL;
2552 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2553 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2557 tgtblk = assign_temp (build_qualified_type (type,
2559 | TYPE_QUAL_CONST)),
2561 preserve_temp_slots (tgtblk);
2564 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2565 into a new pseudo which is a full word. */
2567 if (GET_MODE (srcreg) != BLKmode
2568 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2569 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2571 /* Structures whose size is not a multiple of a word are aligned
2572 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2573 machine, this means we must skip the empty high order bytes when
2574 calculating the bit offset. */
2575 if (BYTES_BIG_ENDIAN
2576 && bytes % UNITS_PER_WORD)
2577 big_endian_correction
2578 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2580 /* Copy the structure BITSIZE bits at a time.
2582 We could probably emit more efficient code for machines which do not use
2583 strict alignment, but it doesn't seem worth the effort at the current time. */
2585 for (bitpos = 0, xbitpos = big_endian_correction;
2586 bitpos < bytes * BITS_PER_UNIT;
2587 bitpos += bitsize, xbitpos += bitsize)
2589 /* We need a new source operand each time xbitpos is on a
2590 word boundary and when xbitpos == big_endian_correction
2591 (the first time through). */
2592 if (xbitpos % BITS_PER_WORD == 0
2593 || xbitpos == big_endian_correction)
2594 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2597 /* We need a new destination operand each time bitpos is on a word boundary. */
2599 if (bitpos % BITS_PER_WORD == 0)
2600 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2602 /* Use xbitpos for the source extraction (right justified) and
2603 bitpos for the destination store (left justified). */
2604 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2605 extract_bit_field (src, bitsize,
2606 xbitpos % BITS_PER_WORD, 1,
2607 NULL_RTX, word_mode, word_mode,
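/* Worked example (not from the original source): on a 32-bit
   BYTES_BIG_ENDIAN target, a 6-byte struct returned in two registers
   occupies the least significant 6 bytes, so bytes % UNITS_PER_WORD
   == 2 and big_endian_correction == 32 - 2 * 8 == 16.  The loop then
   starts extracting at xbitpos == 16, skipping the 16 empty
   high-order bits of the first word, while storing into the target
   block starting at bitpos == 0.  */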
2615 /* Add a USE expression for REG to the (possibly empty) list pointed
2616 to by CALL_FUSAGE. REG must denote a hard register. */
2619 use_reg (rtx *call_fusage, rtx reg)
2621 if (GET_CODE (reg) != REG
2622 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2626 = gen_rtx_EXPR_LIST (VOIDmode,
2627 gen_rtx_USE (VOIDmode, reg), *call_fusage);
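/* Illustrative note (register numbers and modes invented): the list
   built above has the shape

       (expr_list (use (reg:SI 4)) (expr_list (use (reg:SI 5)) ...))

   and is ultimately attached to a CALL_INSN as its
   CALL_INSN_FUNCTION_USAGE, telling flow analysis which hard
   registers the call reads.  */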
2630 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2631 starting at REGNO. All of these registers must be hard registers. */
2634 use_regs (rtx *call_fusage, int regno, int nregs)
2638 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2641 for (i = 0; i < nregs; i++)
2642 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2645 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2646 PARALLEL REGS. This is for calls that pass values in multiple
2647 non-contiguous locations. The Irix 6 ABI has examples of this. */
2650 use_group_regs (rtx *call_fusage, rtx regs)
2654 for (i = 0; i < XVECLEN (regs, 0); i++)
2656 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2658 /* A NULL entry means the parameter goes both on the stack and in
2659 registers. This can also be a MEM for targets that pass values
2660 partially on the stack and partially in registers. */
2661 if (reg != 0 && GET_CODE (reg) == REG)
2662 use_reg (call_fusage, reg);
2667 /* Determine whether the LEN bytes generated by CONSTFUN can be
2668 stored to memory using several move instructions. CONSTFUNDATA is
2669 a pointer which will be passed as argument in every CONSTFUN call.
2670 ALIGN is maximum alignment we can assume. Return nonzero if a
2671 call to store_by_pieces should succeed. */
2674 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2675 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2676 void *constfundata, unsigned int align)
2678 unsigned HOST_WIDE_INT max_size, l;
2679 HOST_WIDE_INT offset = 0;
2680 enum machine_mode mode, tmode;
2681 enum insn_code icode;
2688 if (! STORE_BY_PIECES_P (len, align))
2691 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2692 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2693 align = MOVE_MAX * BITS_PER_UNIT;
2695 /* We would first store what we can in the largest integer mode, then go to
2696 successively smaller modes. */
2699 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2704 max_size = STORE_MAX_PIECES + 1;
2705 while (max_size > 1)
2707 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2708 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2709 if (GET_MODE_SIZE (tmode) < max_size)
2712 if (mode == VOIDmode)
2715 icode = mov_optab->handlers[(int) mode].insn_code;
2716 if (icode != CODE_FOR_nothing
2717 && align >= GET_MODE_ALIGNMENT (mode))
2719 unsigned int size = GET_MODE_SIZE (mode);
2726 cst = (*constfun) (constfundata, offset, mode);
2727 if (!LEGITIMATE_CONSTANT_P (cst))
2737 max_size = GET_MODE_SIZE (mode);
2740 /* The code above should have handled everything. */
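/* A minimal sketch of a CONSTFUN (not part of the original file;
   the name example_read_str is hypothetical, and c_readstr from
   builtins.c is assumed to be visible here).  It has the shape
   can_store_by_pieces and store_by_pieces expect, reading constant
   bytes from a host string passed as CONSTFUNDATA; it is modeled on
   the callbacks used for memcpy/memset expansion.  */

static rtx
example_read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  const char *str = (const char *) data;

  /* Build a constant rtx of MODE from GET_MODE_SIZE (mode) bytes of
     STR starting at OFFSET.  */
  return c_readstr (str + offset, mode);
}

/* A caller would then test and expand, e.g.:

       if (can_store_by_pieces (len, example_read_str, (void *) str, align))
         store_by_pieces (dest, len, example_read_str, (void *) str,
                          align, 0);  */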
2748 /* Generate several move instructions to store LEN bytes generated by
2749 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2750 pointer which will be passed as argument in every CONSTFUN call.
2751 ALIGN is maximum alignment we can assume.
2752 If ENDP is 0 return TO; if ENDP is 1 return memory at the end ala
2753 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2757 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2758 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2759 void *constfundata, unsigned int align, int endp)
2761 struct store_by_pieces data;
2770 if (! STORE_BY_PIECES_P (len, align))
2772 to = protect_from_queue (to, 1);
2773 data.constfun = constfun;
2774 data.constfundata = constfundata;
2777 store_by_pieces_1 (&data, align);
2788 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2789 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2791 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2794 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2801 to1 = adjust_address (data.to, QImode, data.offset);
2809 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2810 rtx with BLKmode). The caller must pass TO through protect_from_queue
2811 before calling. ALIGN is maximum alignment we can assume. */
2814 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2816 struct store_by_pieces data;
2821 data.constfun = clear_by_pieces_1;
2822 data.constfundata = NULL;
2825 store_by_pieces_1 (&data, align);
2828 /* Callback routine for clear_by_pieces.
2829 Return const0_rtx unconditionally. */
2832 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2833 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2834 enum machine_mode mode ATTRIBUTE_UNUSED)
2839 /* Subroutine of clear_by_pieces and store_by_pieces.
2840 Generate several move instructions to store LEN bytes of block TO. (A MEM
2841 rtx with BLKmode). The caller must pass TO through protect_from_queue
2842 before calling. ALIGN is maximum alignment we can assume. */
2845 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2846 unsigned int align ATTRIBUTE_UNUSED)
2848 rtx to_addr = XEXP (data->to, 0);
2849 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2850 enum machine_mode mode = VOIDmode, tmode;
2851 enum insn_code icode;
2854 data->to_addr = to_addr;
2856 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2857 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2859 data->explicit_inc_to = 0;
2861 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2863 data->offset = data->len;
2865 /* If storing requires more than two move insns,
2866 copy addresses to registers (to make displacements shorter)
2867 and use post-increment if available. */
2868 if (!data->autinc_to
2869 && move_by_pieces_ninsns (data->len, align) > 2)
2871 /* Determine the main mode we'll be using. */
2872 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2873 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2874 if (GET_MODE_SIZE (tmode) < max_size)
2877 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2879 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2880 data->autinc_to = 1;
2881 data->explicit_inc_to = -1;
2884 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2885 && ! data->autinc_to)
2887 data->to_addr = copy_addr_to_reg (to_addr);
2888 data->autinc_to = 1;
2889 data->explicit_inc_to = 1;
2892 if (!data->autinc_to && CONSTANT_P (to_addr))
2893 data->to_addr = copy_addr_to_reg (to_addr);
2896 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2897 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2898 align = MOVE_MAX * BITS_PER_UNIT;
2900 /* First store what we can in the largest integer mode, then go to
2901 successively smaller modes. */
2903 while (max_size > 1)
2905 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2906 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2907 if (GET_MODE_SIZE (tmode) < max_size)
2910 if (mode == VOIDmode)
2913 icode = mov_optab->handlers[(int) mode].insn_code;
2914 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2915 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2917 max_size = GET_MODE_SIZE (mode);
2920 /* The code above should have handled everything. */
2925 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2926 with move instructions for mode MODE. GENFUN is the gen_... function
2927 to make a move insn for that mode. DATA has all the other info. */
2930 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2931 struct store_by_pieces *data)
2933 unsigned int size = GET_MODE_SIZE (mode);
2936 while (data->len >= size)
2939 data->offset -= size;
2941 if (data->autinc_to)
2942 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2945 to1 = adjust_address (data->to, mode, data->offset);
2947 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2948 emit_insn (gen_add2_insn (data->to_addr,
2949 GEN_INT (-(HOST_WIDE_INT) size)));
2951 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2952 emit_insn ((*genfun) (to1, cst));
2954 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2955 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2957 if (! data->reverse)
2958 data->offset += size;
2964 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2965 its length in bytes. */
2968 clear_storage (rtx object, rtx size)
2971 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2972 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2974 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2975 just move a zero. Otherwise, do this a piece at a time. */
2976 if (GET_MODE (object) != BLKmode
2977 && GET_CODE (size) == CONST_INT
2978 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2979 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2982 object = protect_from_queue (object, 1);
2983 size = protect_from_queue (size, 0);
2985 if (size == const0_rtx)
2987 else if (GET_CODE (size) == CONST_INT
2988 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2989 clear_by_pieces (object, INTVAL (size), align);
2990 else if (clear_storage_via_clrstr (object, size, align))
2993 retval = clear_storage_via_libcall (object, size);
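/* Illustrative sketch (assumed usage): a typical caller clears a
   BLKmode MEM of known size, e.g.

       clear_storage (target, expr_size (exp));

   constant sizes accepted by CLEAR_BY_PIECES_P are expanded inline
   above; the rest fall back to a clrstr pattern or to the
   memset/bzero libcall below.  */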
2999 /* A subroutine of clear_storage. Expand a clrstr pattern;
3000 return true if successful. */
3003 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
3005 /* Try the most limited insn first, because there's no point
3006 including more than one in the machine description unless
3007 the more limited one has some advantage. */
3009 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3010 enum machine_mode mode;
3012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3013 mode = GET_MODE_WIDER_MODE (mode))
3015 enum insn_code code = clrstr_optab[(int) mode];
3016 insn_operand_predicate_fn pred;
3018 if (code != CODE_FOR_nothing
3019 /* We don't need MODE to be narrower than
3020 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3021 the mode mask, as it is returned by the macro, it will
3022 definitely be less than the actual mode mask. */
3023 && ((GET_CODE (size) == CONST_INT
3024 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3025 <= (GET_MODE_MASK (mode) >> 1)))
3026 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3027 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3028 || (*pred) (object, BLKmode))
3029 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3030 || (*pred) (opalign, VOIDmode)))
3033 rtx last = get_last_insn ();
3036 op1 = convert_to_mode (mode, size, 1);
3037 pred = insn_data[(int) code].operand[1].predicate;
3038 if (pred != 0 && ! (*pred) (op1, mode))
3039 op1 = copy_to_mode_reg (mode, op1);
3041 pat = GEN_FCN ((int) code) (object, op1, opalign);
3048 delete_insns_since (last);
3055 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3056 Return the return value of memset, 0 otherwise. */
3059 clear_storage_via_libcall (rtx object, rtx size)
3061 tree call_expr, arg_list, fn, object_tree, size_tree;
3062 enum machine_mode size_mode;
3065 /* OBJECT or SIZE may have been passed through protect_from_queue.
3067 It is unsafe to save the value generated by protect_from_queue
3068 and reuse it later. Consider what happens if emit_queue is
3069 called before the return value from protect_from_queue is used.
3071 Expansion of the CALL_EXPR below will call emit_queue before
3072 we are finished emitting RTL for argument setup. So if we are
3073 not careful we could get the wrong value for an argument.
3075 To avoid this problem we go ahead and emit code to copy OBJECT
3076 and SIZE into new pseudos. We can then place those new pseudos
3077 into an RTL_EXPR and use them later, even after a call to emit_queue.
3080 Note this is not strictly needed for library calls since they
3081 do not call emit_queue before loading their arguments. However,
3082 we may need to have library calls call emit_queue in the future
3083 since failing to do so could cause problems for targets which
3084 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3086 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3088 if (TARGET_MEM_FUNCTIONS)
3089 size_mode = TYPE_MODE (sizetype);
3091 size_mode = TYPE_MODE (unsigned_type_node);
3092 size = convert_to_mode (size_mode, size, 1);
3093 size = copy_to_mode_reg (size_mode, size);
3095 /* It is incorrect to use the libcall calling conventions to call
3096 memset in this context. This could be a user call to memset and
3097 the user may wish to examine the return value from memset. For
3098 targets where libcalls and normal calls have different conventions
3099 for returning pointers, we could end up generating incorrect code.
3101 For convenience, we generate the call to bzero this way as well. */
3103 object_tree = make_tree (ptr_type_node, object);
3104 if (TARGET_MEM_FUNCTIONS)
3105 size_tree = make_tree (sizetype, size);
3107 size_tree = make_tree (unsigned_type_node, size);
3109 fn = clear_storage_libcall_fn (true);
3110 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3111 if (TARGET_MEM_FUNCTIONS)
3112 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3113 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3115 /* Now we have to build up the CALL_EXPR itself. */
3116 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3117 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3118 call_expr, arg_list, NULL_TREE);
3120 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3122 /* If we are initializing a readonly value, show the above call
3123 clobbered it. Otherwise, a load from it may erroneously be
3124 hoisted from a loop. */
3125 if (RTX_UNCHANGING_P (object))
3126 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3128 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3131 /* A subroutine of clear_storage_via_libcall. Create the tree node
3132 for the function we use for block clears. The first time FOR_CALL
3133 is true, we call assemble_external. */
3135 static GTY(()) tree block_clear_fn;
3138 init_block_clear_fn (const char *asmspec)
3140 if (!block_clear_fn)
3144 if (TARGET_MEM_FUNCTIONS)
3146 fn = get_identifier ("memset");
3147 args = build_function_type_list (ptr_type_node, ptr_type_node,
3148 integer_type_node, sizetype,
3153 fn = get_identifier ("bzero");
3154 args = build_function_type_list (void_type_node, ptr_type_node,
3155 unsigned_type_node, NULL_TREE);
3158 fn = build_decl (FUNCTION_DECL, fn, args);
3159 DECL_EXTERNAL (fn) = 1;
3160 TREE_PUBLIC (fn) = 1;
3161 DECL_ARTIFICIAL (fn) = 1;
3162 TREE_NOTHROW (fn) = 1;
3164 block_clear_fn = fn;
3169 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3170 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3175 clear_storage_libcall_fn (int for_call)
3177 static bool emitted_extern;
3179 if (!block_clear_fn)
3180 init_block_clear_fn (NULL);
3182 if (for_call && !emitted_extern)
3184 emitted_extern = true;
3185 make_decl_rtl (block_clear_fn, NULL);
3186 assemble_external (block_clear_fn);
3189 return block_clear_fn;
3192 /* Generate code to copy Y into X.
3193 Both Y and X must have the same mode, except that
3194 Y can be a constant with VOIDmode.
3195 This mode cannot be BLKmode; use emit_block_move for that.
3197 Return the last instruction emitted. */
3200 emit_move_insn (rtx x, rtx y)
3202 enum machine_mode mode = GET_MODE (x);
3203 rtx y_cst = NULL_RTX;
3206 x = protect_from_queue (x, 1);
3207 y = protect_from_queue (y, 0);
3209 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3212 /* Never force constant_p_rtx to memory. */
3213 if (GET_CODE (y) == CONSTANT_P_RTX)
3215 else if (CONSTANT_P (y))
3218 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3219 && (last_insn = compress_float_constant (x, y)))
3224 if (!LEGITIMATE_CONSTANT_P (y))
3226 y = force_const_mem (mode, y);
3228 /* If the target's cannot_force_const_mem prevented the spill,
3229 assume that the target's move expanders will also take care
3230 of the non-legitimate constant. */
3236 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3238 if (GET_CODE (x) == MEM
3239 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3240 && ! push_operand (x, GET_MODE (x)))
3242 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3243 x = validize_mem (x);
3245 if (GET_CODE (y) == MEM
3246 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3248 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3249 y = validize_mem (y);
3251 if (mode == BLKmode)
3254 last_insn = emit_move_insn_1 (x, y);
3256 if (y_cst && GET_CODE (x) == REG
3257 && (set = single_set (last_insn)) != NULL_RTX
3258 && SET_DEST (set) == x
3259 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3260 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
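/* Illustrative sketch (invented operands): the canonical use is a
   simple register or constant copy, e.g.

       emit_move_insn (gen_reg_rtx (SImode), const0_rtx);

   constants rejected by LEGITIMATE_CONSTANT_P are spilled to the
   constant pool above before the move itself is emitted, and the
   REG_EQUAL note preserves the original constant for later passes.  */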
3265 /* Low level part of emit_move_insn.
3266 Called just like emit_move_insn, but assumes X and Y
3267 are basically valid. */
3270 emit_move_insn_1 (rtx x, rtx y)
3272 enum machine_mode mode = GET_MODE (x);
3273 enum machine_mode submode;
3274 enum mode_class class = GET_MODE_CLASS (mode);
3276 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3279 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3281 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3283 /* Expand complex moves by moving real part and imag part, if possible. */
3284 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3285 && BLKmode != (submode = GET_MODE_INNER (mode))
3286 && (mov_optab->handlers[(int) submode].insn_code
3287 != CODE_FOR_nothing))
3289 /* Don't split destination if it is a stack push. */
3290 int stack = push_operand (x, GET_MODE (x));
3292 #ifdef PUSH_ROUNDING
3293 /* In case we output to the stack, but the size is smaller than the
3294 machine can push exactly, we need to use move instructions. */
3296 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3297 != GET_MODE_SIZE (submode)))
3300 HOST_WIDE_INT offset1, offset2;
3302 /* Do not use anti_adjust_stack, since we don't want to update
3303 stack_pointer_delta. */
3304 temp = expand_binop (Pmode,
3305 #ifdef STACK_GROWS_DOWNWARD
3313 (GET_MODE_SIZE (GET_MODE (x)))),
3314 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3316 if (temp != stack_pointer_rtx)
3317 emit_move_insn (stack_pointer_rtx, temp);
3319 #ifdef STACK_GROWS_DOWNWARD
3321 offset2 = GET_MODE_SIZE (submode);
3323 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3324 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3325 + GET_MODE_SIZE (submode));
3328 emit_move_insn (change_address (x, submode,
3329 gen_rtx_PLUS (Pmode,
3331 GEN_INT (offset1))),
3332 gen_realpart (submode, y));
3333 emit_move_insn (change_address (x, submode,
3334 gen_rtx_PLUS (Pmode,
3336 GEN_INT (offset2))),
3337 gen_imagpart (submode, y));
3341 /* If this is a stack, push the highpart first, so it
3342 will be in the argument order.
3344 In that case, change_address is used only to convert
3345 the mode, not to change the address. */
3348 /* Note that the real part always precedes the imag part in memory
3349 regardless of machine's endianness. */
3350 #ifdef STACK_GROWS_DOWNWARD
3351 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3352 gen_imagpart (submode, y));
3353 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3354 gen_realpart (submode, y));
3356 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3357 gen_realpart (submode, y));
3358 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3359 gen_imagpart (submode, y));
3364 rtx realpart_x, realpart_y;
3365 rtx imagpart_x, imagpart_y;
3367 /* If this is a complex value with each part being smaller than a
3368 word, the usual calling sequence will likely pack the pieces into
3369 a single register. Unfortunately, SUBREG of hard registers only
3370 deals in terms of words, so we have a problem converting input
3371 arguments to the CONCAT of two registers that is used elsewhere
3372 for complex values. If this is before reload, we can copy it into
3373 memory and reload. FIXME, we should see about using extract and
3374 insert on integer registers, but complex short and complex char
3375 variables should be rarely used. */
3376 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3377 && (reload_in_progress | reload_completed) == 0)
3380 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3382 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3384 if (packed_dest_p || packed_src_p)
3386 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3387 ? MODE_FLOAT : MODE_INT);
3389 enum machine_mode reg_mode
3390 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3392 if (reg_mode != BLKmode)
3394 rtx mem = assign_stack_temp (reg_mode,
3395 GET_MODE_SIZE (mode), 0);
3396 rtx cmem = adjust_address (mem, mode, 0);
3399 = N_("function using short complex types cannot be inline");
3403 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3405 emit_move_insn_1 (cmem, y);
3406 return emit_move_insn_1 (sreg, mem);
3410 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3412 emit_move_insn_1 (mem, sreg);
3413 return emit_move_insn_1 (x, cmem);
3419 realpart_x = gen_realpart (submode, x);
3420 realpart_y = gen_realpart (submode, y);
3421 imagpart_x = gen_imagpart (submode, x);
3422 imagpart_y = gen_imagpart (submode, y);
3424 /* Show the output dies here. This is necessary for SUBREGs
3425 of pseudos since we cannot track their lifetimes correctly;
3426 hard regs shouldn't appear here except as return values.
3427 We never want to emit such a clobber after reload. */
3429 && ! (reload_in_progress || reload_completed)
3430 && (GET_CODE (realpart_x) == SUBREG
3431 || GET_CODE (imagpart_x) == SUBREG))
3432 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3434 emit_move_insn (realpart_x, realpart_y);
3435 emit_move_insn (imagpart_x, imagpart_y);
3438 return get_last_insn ();
3441 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3442 find a mode to do it in. If we have a movcc, use it. Otherwise,
3443 find the MODE_INT mode of the same width. */
3444 else if (GET_MODE_CLASS (mode) == MODE_CC
3445 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3447 enum insn_code insn_code;
3448 enum machine_mode tmode = VOIDmode;
3452 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3455 for (tmode = QImode; tmode != VOIDmode;
3456 tmode = GET_MODE_WIDER_MODE (tmode))
3457 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3460 if (tmode == VOIDmode)
3463 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3464 may call change_address which is not appropriate if we were
3465 called when a reload was in progress. We don't have to worry
3466 about changing the address since the size in bytes is supposed to
3467 be the same. Copy the MEM to change the mode and move any
3468 substitutions from the old MEM to the new one. */
3470 if (reload_in_progress)
3472 x = gen_lowpart_common (tmode, x1);
3473 if (x == 0 && GET_CODE (x1) == MEM)
3475 x = adjust_address_nv (x1, tmode, 0);
3476 copy_replacements (x1, x);
3479 y = gen_lowpart_common (tmode, y1);
3480 if (y == 0 && GET_CODE (y1) == MEM)
3482 y = adjust_address_nv (y1, tmode, 0);
3483 copy_replacements (y1, y);
3488 x = gen_lowpart (tmode, x);
3489 y = gen_lowpart (tmode, y);
3492 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3493 return emit_insn (GEN_FCN (insn_code) (x, y));
3496 /* This will handle any multi-word or full-word mode that lacks a move_insn
3497 pattern. However, you will get better code if you define such patterns,
3498 even if they must turn into multiple assembler instructions. */
3499 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3506 #ifdef PUSH_ROUNDING
3508 /* If X is a push on the stack, do the push now and replace
3509 X with a reference to the stack pointer. */
3510 if (push_operand (x, GET_MODE (x)))
3515 /* Do not use anti_adjust_stack, since we don't want to update
3516 stack_pointer_delta. */
3517 temp = expand_binop (Pmode,
3518 #ifdef STACK_GROWS_DOWNWARD
3526 (GET_MODE_SIZE (GET_MODE (x)))),
3527 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3529 if (temp != stack_pointer_rtx)
3530 emit_move_insn (stack_pointer_rtx, temp);
3532 code = GET_CODE (XEXP (x, 0));
3534 /* Just hope that small offsets off SP are OK. */
3535 if (code == POST_INC)
3536 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3537 GEN_INT (-((HOST_WIDE_INT)
3538 GET_MODE_SIZE (GET_MODE (x)))));
3539 else if (code == POST_DEC)
3540 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3541 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3543 temp = stack_pointer_rtx;
3545 x = change_address (x, VOIDmode, temp);
3549 /* If we are in reload, see if either operand is a MEM whose address
3550 is scheduled for replacement. */
3551 if (reload_in_progress && GET_CODE (x) == MEM
3552 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3553 x = replace_equiv_address_nv (x, inner);
3554 if (reload_in_progress && GET_CODE (y) == MEM
3555 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3556 y = replace_equiv_address_nv (y, inner);
3562 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3565 rtx xpart = operand_subword (x, i, 1, mode);
3566 rtx ypart = operand_subword (y, i, 1, mode);
3568 /* If we can't get a part of Y, put Y into memory if it is a
3569 constant. Otherwise, force it into a register. If we still
3570 can't get a part of Y, abort. */
3571 if (ypart == 0 && CONSTANT_P (y))
3573 y = force_const_mem (mode, y);
3574 ypart = operand_subword (y, i, 1, mode);
3576 else if (ypart == 0)
3577 ypart = operand_subword_force (y, i, mode);
3579 if (xpart == 0 || ypart == 0)
3582 need_clobber |= (GET_CODE (xpart) == SUBREG);
3584 last_insn = emit_move_insn (xpart, ypart);
3590 /* Show the output dies here. This is necessary for SUBREGs
3591 of pseudos since we cannot track their lifetimes correctly;
3592 hard regs shouldn't appear here except as return values.
3593 We never want to emit such a clobber after reload. */
3595 && ! (reload_in_progress || reload_completed)
3596 && need_clobber != 0)
3597 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3607 /* If Y is representable exactly in a narrower mode, and the target can
3608 perform the extension directly from constant or memory, then emit the
3609 move as an extension. */
3612 compress_float_constant (rtx x, rtx y)
3614 enum machine_mode dstmode = GET_MODE (x);
3615 enum machine_mode orig_srcmode = GET_MODE (y);
3616 enum machine_mode srcmode;
3619 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3621 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3622 srcmode != orig_srcmode;
3623 srcmode = GET_MODE_WIDER_MODE (srcmode))
3626 rtx trunc_y, last_insn;
3628 /* Skip if the target can't extend this way. */
3629 ic = can_extend_p (dstmode, srcmode, 0);
3630 if (ic == CODE_FOR_nothing)
3633 /* Skip if the narrowed value isn't exact. */
3634 if (! exact_real_truncate (srcmode, &r))
3637 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3639 if (LEGITIMATE_CONSTANT_P (trunc_y))
3641 /* Skip if the target needs extra instructions to perform the extension. */
3643 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3646 else if (float_extend_from_mem[dstmode][srcmode])
3647 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3651 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3652 last_insn = get_last_insn ();
3654 if (GET_CODE (x) == REG)
3655 set_unique_reg_note (last_insn, REG_EQUAL, y);
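/* Worked example (not from the original source): if X has DFmode and
   Y is the CONST_DOUBLE 1.0, the value truncates exactly to SFmode,
   so on a target providing extendsfdf2 the move is emitted as a
   single SFmode->DFmode extension from the narrower constant (or its
   constant-pool copy), which is usually cheaper than loading a full
   DFmode constant from memory.  */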
3663 /* Pushing data onto the stack. */
3665 /* Push a block of length SIZE (perhaps variable)
3666 and return an rtx to address the beginning of the block.
3667 Note that it is not possible for the value returned to be a QUEUED.
3668 The value may be virtual_outgoing_args_rtx.
3670 EXTRA is the number of bytes of padding to push in addition to SIZE.
3671 BELOW nonzero means this padding comes at low addresses;
3672 otherwise, the padding comes at high addresses. */
3675 push_block (rtx size, int extra, int below)
3679 size = convert_modes (Pmode, ptr_mode, size, 1);
3680 if (CONSTANT_P (size))
3681 anti_adjust_stack (plus_constant (size, extra));
3682 else if (GET_CODE (size) == REG && extra == 0)
3683 anti_adjust_stack (size);
3686 temp = copy_to_mode_reg (Pmode, size);
3688 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3689 temp, 0, OPTAB_LIB_WIDEN);
3690 anti_adjust_stack (temp);
3693 #ifndef STACK_GROWS_DOWNWARD
3699 temp = virtual_outgoing_args_rtx;
3700 if (extra != 0 && below)
3701 temp = plus_constant (temp, extra);
3705 if (GET_CODE (size) == CONST_INT)
3706 temp = plus_constant (virtual_outgoing_args_rtx,
3707 -INTVAL (size) - (below ? 0 : extra));
3708 else if (extra != 0 && !below)
3709 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3710 negate_rtx (Pmode, plus_constant (size, extra)));
3712 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3713 negate_rtx (Pmode, size));
3716 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3719 #ifdef PUSH_ROUNDING
3721 /* Emit single push insn. */
3724 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3727 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3729 enum insn_code icode;
3730 insn_operand_predicate_fn pred;
3732 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3733 /* If there is a push pattern, use it. Otherwise try the old way of
3734 throwing a MEM representing the push operation to the move expander. */
3735 icode = push_optab->handlers[(int) mode].insn_code;
3736 if (icode != CODE_FOR_nothing)
3738 if (((pred = insn_data[(int) icode].operand[0].predicate)
3739 && !((*pred) (x, mode))))
3740 x = force_reg (mode, x);
3741 emit_insn (GEN_FCN (icode) (x));
3744 if (GET_MODE_SIZE (mode) == rounded_size)
3745 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3746 /* If we are to pad downward, adjust the stack pointer first and
3747 then store X into the stack location using an offset. This is
3748 because emit_move_insn does not know how to pad; it does not have access to the type. */
3750 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3752 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3753 HOST_WIDE_INT offset;
3755 emit_move_insn (stack_pointer_rtx,
3756 expand_binop (Pmode,
3757 #ifdef STACK_GROWS_DOWNWARD
3763 GEN_INT (rounded_size),
3764 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3766 offset = (HOST_WIDE_INT) padding_size;
3767 #ifdef STACK_GROWS_DOWNWARD
3768 if (STACK_PUSH_CODE == POST_DEC)
3769 /* We have already decremented the stack pointer, so get the previous value. */
3771 offset += (HOST_WIDE_INT) rounded_size;
3773 if (STACK_PUSH_CODE == POST_INC)
3774 /* We have already incremented the stack pointer, so get the previous value. */
3776 offset -= (HOST_WIDE_INT) rounded_size;
3778 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3782 #ifdef STACK_GROWS_DOWNWARD
3783 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3784 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3785 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3787 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3788 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3789 GEN_INT (rounded_size));
3791 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3794 dest = gen_rtx_MEM (mode, dest_addr);
3798 set_mem_attributes (dest, type, 1);
3800 if (flag_optimize_sibling_calls)
3801 /* Function incoming arguments may overlap with sibling call
3802 outgoing arguments and we cannot allow reordering of reads
3803 from function arguments with stores to outgoing arguments
3804 of sibling calls. */
3805 set_mem_alias_set (dest, 0);
3807 emit_move_insn (dest, x);
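/* Illustrative note (modes and register numbers invented): with a
   mode the target can push exactly and no special padding, the
   fallback above yields a MEM whose address uses STACK_PUSH_CODE,
   e.g. on a STACK_GROWS_DOWNWARD target

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0))

   which the move expander matches against the target's push
   instruction.  */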
3811 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3813 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3815 SIZE is an rtx for the size of data to be copied (in bytes),
3816 needed only if X is BLKmode.
3818 ALIGN (in bits) is maximum alignment we can assume.
3820 If PARTIAL and REG are both nonzero, then copy that many of the first
3821 words of X into registers starting with REG, and push the rest of X.
3822 The amount of space pushed is decreased by PARTIAL words,
3823 rounded *down* to a multiple of PARM_BOUNDARY.
3824 REG must be a hard register in this case.
3825 If REG is zero but PARTIAL is not, take all other actions for an
3826 argument partially in registers, but do not actually load any registers.
3829 EXTRA is the amount in bytes of extra space to leave next to this arg.
3830 This is ignored if an argument block has already been allocated.
3832 On a machine that lacks real push insns, ARGS_ADDR is the address of
3833 the bottom of the argument block for this call. We use indexing off there
3834 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3835 argument block has not been preallocated.
3837 ARGS_SO_FAR is the size of args previously pushed for this call.
3839 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3840 for arguments passed in registers. If nonzero, it will be the number
3841 of bytes required. */
3844 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3845 unsigned int align, int partial, rtx reg, int extra,
3846 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3850 enum direction stack_direction
3851 #ifdef STACK_GROWS_DOWNWARD
3857 /* Decide where to pad the argument: `downward' for below,
3858 `upward' for above, or `none' for don't pad it.
3859 Default is below for small data on big-endian machines; else above. */
3860 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3862 /* Invert direction if stack is post-decrement. */
3864 if (STACK_PUSH_CODE == POST_DEC)
3865 if (where_pad != none)
3866 where_pad = (where_pad == downward ? upward : downward);
3868 xinner = x = protect_from_queue (x, 0);
3870 if (mode == BLKmode)
3872 /* Copy a block into the stack, entirely or partially. */
3875 int used = partial * UNITS_PER_WORD;
3876 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3884 /* USED is now the # of bytes we need not copy to the stack
3885 because registers will take care of them. */
3888 xinner = adjust_address (xinner, BLKmode, used);
3890 /* If the partial register-part of the arg counts in its stack size,
3891 skip the part of stack space corresponding to the registers.
3892 Otherwise, start copying to the beginning of the stack space,
3893 by setting SKIP to 0. */
3894 skip = (reg_parm_stack_space == 0) ? 0 : used;
3896 #ifdef PUSH_ROUNDING
3897 /* Do it with several push insns if that doesn't take lots of insns
3898 and if there is no difficulty with push insns that skip bytes
3899 on the stack for alignment purposes. */
3902 && GET_CODE (size) == CONST_INT
3904 && MEM_ALIGN (xinner) >= align
3905 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3906 /* Here we avoid the case of a structure whose weak alignment
3907 forces many pushes of a small amount of data,
3908 and such small pushes do rounding that causes trouble. */
3909 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3910 || align >= BIGGEST_ALIGNMENT
3911 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3912 == (align / BITS_PER_UNIT)))
3913 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3915 /* Push padding now if padding above and stack grows down,
3916 or if padding below and stack grows up.
3917 But if space already allocated, this has already been done. */
3918 if (extra && args_addr == 0
3919 && where_pad != none && where_pad != stack_direction)
3920 anti_adjust_stack (GEN_INT (extra));
3922 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3925 #endif /* PUSH_ROUNDING */
3929 /* Otherwise make space on the stack and copy the data
3930 to the address of that space. */
3932 /* Deduct words put into registers from the size we must copy. */
3935 if (GET_CODE (size) == CONST_INT)
3936 size = GEN_INT (INTVAL (size) - used);
3938 size = expand_binop (GET_MODE (size), sub_optab, size,
3939 GEN_INT (used), NULL_RTX, 0,
3943 /* Get the address of the stack space.
3944 In this case, we do not deal with EXTRA separately.
3945 A single stack adjust will do. */
3948 temp = push_block (size, extra, where_pad == downward);
3951 else if (GET_CODE (args_so_far) == CONST_INT)
3952 temp = memory_address (BLKmode,
3953 plus_constant (args_addr,
3954 skip + INTVAL (args_so_far)));
3956 temp = memory_address (BLKmode,
3957 plus_constant (gen_rtx_PLUS (Pmode,
3962 if (!ACCUMULATE_OUTGOING_ARGS)
3964 /* If the source is referenced relative to the stack pointer,
3965 copy it to another register to stabilize it. We do not need
3966 to do this if we know that we won't be changing sp. */
3968 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3969 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3970 temp = copy_to_reg (temp);
3973 target = gen_rtx_MEM (BLKmode, temp);
3977 set_mem_attributes (target, type, 1);
3978 /* Function incoming arguments may overlap with sibling call
3979 outgoing arguments and we cannot allow reordering of reads
3980 from function arguments with stores to outgoing arguments
3981 of sibling calls. */
3982 set_mem_alias_set (target, 0);
3985 /* ALIGN may well be better aligned than TYPE, e.g. due to
3986 PARM_BOUNDARY. Assume the caller isn't lying. */
3987 set_mem_align (target, align);
3989 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3992 else if (partial > 0)
3994 /* Scalar partly in registers. */
3996 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3999 /* # words of start of argument
4000 that we must make space for but need not store. */
4001 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
4002 int args_offset = INTVAL (args_so_far);
4005 /* Push padding now if padding above and stack grows down,
4006 or if padding below and stack grows up.
4007 But if space already allocated, this has already been done. */
4008 if (extra && args_addr == 0
4009 && where_pad != none && where_pad != stack_direction)
4010 anti_adjust_stack (GEN_INT (extra));
4012 /* If we make space by pushing it, we might as well push
4013 the real data. Otherwise, we can leave OFFSET nonzero
4014 and leave the space uninitialized. */
4018 /* Now NOT_STACK gets the number of words that we don't need to
4019 allocate on the stack. */
4020 not_stack = partial - offset;
4022 /* If the partial register-part of the arg counts in its stack size,
4023 skip the part of stack space corresponding to the registers.
4024 Otherwise, start copying to the beginning of the stack space,
4025 by setting SKIP to 0. */
4026 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4028 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4029 x = validize_mem (force_const_mem (mode, x));
4031 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4032 SUBREGs of such registers are not allowed. */
4033 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4034 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4035 x = copy_to_reg (x);
4037 /* Loop over all the words allocated on the stack for this arg. */
4038 /* We can do it by words, because any scalar bigger than a word
4039 has a size a multiple of a word. */
4040 #ifndef PUSH_ARGS_REVERSED
4041 for (i = not_stack; i < size; i++)
4043 for (i = size - 1; i >= not_stack; i--)
4045 if (i >= not_stack + offset)
4046 emit_push_insn (operand_subword_force (x, i, mode),
4047 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4049 GEN_INT (args_offset + ((i - not_stack + skip)
4051 reg_parm_stack_space, alignment_pad);
4058 /* Push padding now if padding above and stack grows down,
4059 or if padding below and stack grows up.
4060 But if space already allocated, this has already been done. */
4061 if (extra && args_addr == 0
4062 && where_pad != none && where_pad != stack_direction)
4063 anti_adjust_stack (GEN_INT (extra));
4065 #ifdef PUSH_ROUNDING
4066 if (args_addr == 0 && PUSH_ARGS)
4067 emit_single_push_insn (mode, x, type);
4071 if (GET_CODE (args_so_far) == CONST_INT)
4073 = memory_address (mode,
4074 plus_constant (args_addr,
4075 INTVAL (args_so_far)));
4077 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4079 dest = gen_rtx_MEM (mode, addr);
4082 set_mem_attributes (dest, type, 1);
4083 /* Function incoming arguments may overlap with sibling call
4084 outgoing arguments and we cannot allow reordering of reads
4085 from function arguments with stores to outgoing arguments
4086 of sibling calls. */
4087 set_mem_alias_set (dest, 0);
4090 emit_move_insn (dest, x);
4094 /* If part should go in registers, copy that part
4095 into the appropriate registers. Do this now, at the end,
4096 since mem-to-mem copies above may do function calls. */
4097 if (partial > 0 && reg != 0)
4099 /* Handle calls that pass values in multiple non-contiguous locations.
4100 The Irix 6 ABI has examples of this. */
4101 if (GET_CODE (reg) == PARALLEL)
4102 emit_group_load (reg, x, type, -1);
4104 move_block_to_reg (REGNO (reg), x, partial, mode);
4107 if (extra && args_addr == 0 && where_pad == stack_direction)
4108 anti_adjust_stack (GEN_INT (extra));
4110 if (alignment_pad && args_addr == 0)
4111 anti_adjust_stack (alignment_pad);
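/* Worked example (not from the original source): pushing a four-word
   scalar with PARTIAL == 2 and REG set places words 0 and 1 in
   registers via move_block_to_reg at the end, while the loop above
   pushes only words 2 and 3; if reg_parm_stack_space is nonzero,
   SKIP additionally leaves stack room for the register part instead
   of copying over it.  */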
4114 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
4118 get_subtarget (rtx x)
4121 /* Only registers can be subtargets. */
4122 || GET_CODE (x) != REG
4123 /* If the register is readonly, it can't be set more than once. */
4124 || RTX_UNCHANGING_P (x)
4125 /* Don't use hard regs to avoid extending their life. */
4126 || REGNO (x) < FIRST_PSEUDO_REGISTER
4127 /* Avoid subtargets inside loops,
4128 since they hide some invariant expressions. */
4129 || preserve_subexpressions_p ())
4133 /* Expand an assignment that stores the value of FROM into TO.
4134 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4135 (This may contain a QUEUED rtx;
4136 if the value is constant, this rtx is a constant.)
4137 Otherwise, the returned value is NULL_RTX. */
4140 expand_assignment (tree to, tree from, int want_value)
4145 /* Don't crash if the lhs of the assignment was erroneous. */
4147 if (TREE_CODE (to) == ERROR_MARK)
4149 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4150 return want_value ? result : NULL_RTX;
4153 /* Assignment of a structure component needs special treatment
4154 if the structure component's rtx is not simply a MEM.
4155 Assignment of an array element at a constant index, and assignment of
4156 an array element in an unaligned packed structure field, has the same problem. */
4159 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4160 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4161 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4163 enum machine_mode mode1;
4164 HOST_WIDE_INT bitsize, bitpos;
4172 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4173 &unsignedp, &volatilep);
4175 /* If we are going to use store_bit_field and extract_bit_field,
4176 make sure to_rtx will be safe for multiple use. */
4178 if (mode1 == VOIDmode && want_value)
4179 tem = stabilize_reference (tem);
4181 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4185 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4187 if (GET_CODE (to_rtx) != MEM)
4190 #ifdef POINTERS_EXTEND_UNSIGNED
4191 if (GET_MODE (offset_rtx) != Pmode)
4192 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4194 if (GET_MODE (offset_rtx) != ptr_mode)
4195 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4198 /* A constant address in TO_RTX can have VOIDmode, we must not try
4199 to call force_reg for that case. Avoid that case. */
4200 if (GET_CODE (to_rtx) == MEM
4201 && GET_MODE (to_rtx) == BLKmode
4202 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4204 && (bitpos % bitsize) == 0
4205 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4206 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4208 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4212 to_rtx = offset_address (to_rtx, offset_rtx,
4213 highest_pow2_factor_for_type (TREE_TYPE (to),
4217 if (GET_CODE (to_rtx) == MEM)
4219 /* If the field is at offset zero, we could have been given the
4220 DECL_RTX of the parent struct. Don't munge it. */
4221 to_rtx = shallow_copy_rtx (to_rtx);
4223 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4226 /* Deal with volatile and readonly fields. The former is only done
4227 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4228 if (volatilep && GET_CODE (to_rtx) == MEM)
4230 if (to_rtx == orig_to_rtx)
4231 to_rtx = copy_rtx (to_rtx);
4232 MEM_VOLATILE_P (to_rtx) = 1;
4235 if (TREE_CODE (to) == COMPONENT_REF
4236 && TREE_READONLY (TREE_OPERAND (to, 1)))
4238 if (to_rtx == orig_to_rtx)
4239 to_rtx = copy_rtx (to_rtx);
4240 RTX_UNCHANGING_P (to_rtx) = 1;
4243 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4245 if (to_rtx == orig_to_rtx)
4246 to_rtx = copy_rtx (to_rtx);
4247 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4250 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4252 /* Spurious cast for HPUX compiler. */
4253 ? ((enum machine_mode)
4254 TYPE_MODE (TREE_TYPE (to)))
4256 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4258 preserve_temp_slots (result);
4262 /* If the value is meaningful, convert RESULT to the proper mode.
4263 Otherwise, return nothing. */
4264 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4265 TYPE_MODE (TREE_TYPE (from)),
4267 TREE_UNSIGNED (TREE_TYPE (to)))
4271 /* If the rhs is a function call and its value is not an aggregate,
4272 call the function before we start to compute the lhs.
4273 This is needed for correct code for cases such as
4274 val = setjmp (buf) on machines where reference to val
4275 requires loading up part of an address in a separate insn.
4277 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4278 since it might be a promoted variable where the zero- or sign- extension
4279 needs to be done. Handling this in the normal way is safe because no
4280 computation is done before the call. */
4281 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4282 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4283 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4284 && GET_CODE (DECL_RTL (to)) == REG))
4289 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4291 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4293 /* Handle calls that return values in multiple non-contiguous locations.
4294 The Irix 6 ABI has examples of this. */
4295 if (GET_CODE (to_rtx) == PARALLEL)
4296 emit_group_load (to_rtx, value, TREE_TYPE (from),
4297 int_size_in_bytes (TREE_TYPE (from)));
4298 else if (GET_MODE (to_rtx) == BLKmode)
4299 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4302 #ifdef POINTERS_EXTEND_UNSIGNED
4303 if (POINTER_TYPE_P (TREE_TYPE (to))
4304 && GET_MODE (to_rtx) != GET_MODE (value))
4305 value = convert_memory_address (GET_MODE (to_rtx), value);
4307 emit_move_insn (to_rtx, value);
4309 preserve_temp_slots (to_rtx);
4312 return want_value ? to_rtx : NULL_RTX;
4315 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4316 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4319 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4321 /* Don't move directly into a return register. */
4322 if (TREE_CODE (to) == RESULT_DECL
4323 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4328 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4330 if (GET_CODE (to_rtx) == PARALLEL)
4331 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4332 int_size_in_bytes (TREE_TYPE (from)));
4334 emit_move_insn (to_rtx, temp);
4336 preserve_temp_slots (to_rtx);
4339 return want_value ? to_rtx : NULL_RTX;
4342 /* In case we are returning the contents of an object which overlaps
4343 the place the value is being stored, use a safe function when copying
4344 a value through a pointer into a structure value return block. */
4345 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4346 && current_function_returns_struct
4347 && !current_function_returns_pcc_struct)
4352 size = expr_size (from);
4353 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4355 if (TARGET_MEM_FUNCTIONS)
4356 emit_library_call (memmove_libfunc, LCT_NORMAL,
4357 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4358 XEXP (from_rtx, 0), Pmode,
4359 convert_to_mode (TYPE_MODE (sizetype),
4360 size, TREE_UNSIGNED (sizetype)),
4361 TYPE_MODE (sizetype));
4363 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4364 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4365 XEXP (to_rtx, 0), Pmode,
4366 convert_to_mode (TYPE_MODE (integer_type_node),
4368 TREE_UNSIGNED (integer_type_node)),
4369 TYPE_MODE (integer_type_node));
4371 preserve_temp_slots (to_rtx);
4374 return want_value ? to_rtx : NULL_RTX;
4377 /* Compute FROM and store the value in the rtx we got. */
4380 result = store_expr (from, to_rtx, want_value);
4381 preserve_temp_slots (result);
4384 return want_value ? result : NULL_RTX;
4387 /* Generate code for computing expression EXP,
4388 and storing the value into TARGET.
4389 TARGET may contain a QUEUED rtx.
4391 If WANT_VALUE & 1 is nonzero, return a copy of the value
4392 not in TARGET, so that we can be sure to use the proper
4393 value in a containing expression even if TARGET has something
4394 else stored in it. If possible, we copy the value through a pseudo
4395 and return that pseudo. Or, if the value is constant, we try to
4396 return the constant. In some cases, we return a pseudo
4397 copied *from* TARGET.
4399 If the mode is BLKmode then we may return TARGET itself.
4400 It turns out that in BLKmode it doesn't cause a problem,
4401 because C has no operators that could combine two different
4402 assignments into the same BLKmode object with different values
4403 with no sequence point. Will other languages need this to
4404 be more thorough?
4406 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4407 to catch quickly any cases where the caller uses the value
4408 and fails to set WANT_VALUE.
4410 If WANT_VALUE & 2 is set, this is a store into a call param on the
4411 stack, and block moves may need to be treated specially. */
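/* Illustrative usage (added; not part of the original source): a caller
   that needs the stored value back, such as the inner store when
   expanding  x = (y = f ());,  passes WANT_VALUE & 1 set:

       rtx val = store_expr (rhs, target, 1);

   whereas a caller expanding an expression statement passes 0 and
   receives NULL_RTX.  */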
4414 store_expr (tree exp, rtx target, int want_value)
4417 int dont_return_target = 0;
4418 int dont_store_target = 0;
4420 if (VOID_TYPE_P (TREE_TYPE (exp)))
4422 /* C++ can generate ?: expressions with a throw expression in one
4423 branch and an rvalue in the other. Here, we resolve attempts to
4424 store the throw expression's nonexistent result. */
4427 expand_expr (exp, const0_rtx, VOIDmode, 0);
4430 if (TREE_CODE (exp) == COMPOUND_EXPR)
4432 /* Perform first part of compound expression, then assign from second
4433 part. */
4434 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4435 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4437 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4439 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4441 /* For conditional expression, get safe form of the target. Then
4442 test the condition, doing the appropriate assignment on either
4443 side. This avoids the creation of unnecessary temporaries.
4444 For non-BLKmode, it is more efficient not to do this. */
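/* Illustrative example (added; not part of the original source): for a
   BLKmode assignment such as

       struct big s;
       s = cond ? a : b;

   this emits, in effect,

       if (!cond) goto lab1;  s = a;  goto lab2;
       lab1:  s = b;  lab2:;

   storing each arm directly into S instead of going through a
   temporary the size of S.  */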
4446 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4449 target = protect_from_queue (target, 1);
4451 do_pending_stack_adjust ();
4453 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4454 start_cleanup_deferral ();
4455 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4456 end_cleanup_deferral ();
4458 emit_jump_insn (gen_jump (lab2));
4461 start_cleanup_deferral ();
4462 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4463 end_cleanup_deferral ();
4468 return want_value & 1 ? target : NULL_RTX;
4470 else if (queued_subexp_p (target))
4471 /* If target contains a postincrement, let's not risk
4472 using it as the place to generate the rhs. */
4474 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4476 /* Expand EXP into a new pseudo. */
4477 temp = gen_reg_rtx (GET_MODE (target));
4478 temp = expand_expr (exp, temp, GET_MODE (target),
4480 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4483 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4485 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4487 /* If target is volatile, ANSI requires accessing the value
4488 *from* the target, if it is accessed. So make that happen.
4489 In no case return the target itself. */
4490 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4491 dont_return_target = 1;
4493 else if ((want_value & 1) != 0
4494 && GET_CODE (target) == MEM
4495 && ! MEM_VOLATILE_P (target)
4496 && GET_MODE (target) != BLKmode)
4497 /* If target is in memory and caller wants value in a register instead,
4498 arrange that. Pass TARGET as target for expand_expr so that,
4499 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4500 We know expand_expr will not use the target in that case.
4501 Don't do this if TARGET is volatile because we are supposed
4502 to write it and then read it. */
4504 temp = expand_expr (exp, target, GET_MODE (target),
4505 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4506 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4508 /* If TEMP is already in the desired TARGET, only copy it from
4509 memory and don't store it there again. */
4510 if (temp == target
4511 || (rtx_equal_p (temp, target)
4512 && ! side_effects_p (temp) && ! side_effects_p (target)))
4513 dont_store_target = 1;
4514 temp = copy_to_reg (temp);
4516 dont_return_target = 1;
4518 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4519 /* If this is a scalar in a register that is stored in a wider mode
4520 than the declared mode, compute the result into its declared mode
4521 and then convert to the wider mode. Our value is the computed
4524 rtx inner_target = 0;
4526 /* If we don't want a value, we can do the conversion inside EXP,
4527 which will often result in some optimizations. Do the conversion
4528 in two steps: first change the signedness, if needed, then
4529 do the extension. But don't do this if the type of EXP is a subtype
4530 of something else since then the conversion might involve
4531 more than just converting modes. */
4532 if ((want_value & 1) == 0
4533 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4534 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4536 if (TREE_UNSIGNED (TREE_TYPE (exp))
4537 != SUBREG_PROMOTED_UNSIGNED_P (target))
4539 ((*lang_hooks.types.signed_or_unsigned_type)
4540 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4542 exp = convert ((*lang_hooks.types.type_for_mode)
4543 (GET_MODE (SUBREG_REG (target)),
4544 SUBREG_PROMOTED_UNSIGNED_P (target)),
4547 inner_target = SUBREG_REG (target);
4550 temp = expand_expr (exp, inner_target, VOIDmode,
4551 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4553 /* If TEMP is a MEM and we want a result value, make the access
4554 now so it gets done only once. Strictly speaking, this is
4555 only necessary if the MEM is volatile, or if the address
4556 overlaps TARGET. But not performing the load twice also
4557 reduces the amount of rtl we generate and then have to CSE. */
4558 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4559 temp = copy_to_reg (temp);
4561 /* If TEMP is a VOIDmode constant, use convert_modes to make
4562 sure that we properly convert it. */
4563 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4565 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4566 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4567 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4568 GET_MODE (target), temp,
4569 SUBREG_PROMOTED_UNSIGNED_P (target));
4572 convert_move (SUBREG_REG (target), temp,
4573 SUBREG_PROMOTED_UNSIGNED_P (target));
4575 /* If we promoted a constant, change the mode back down to match
4576 target. Otherwise, the caller might get confused by a result whose
4577 mode is larger than expected. */
4579 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4581 if (GET_MODE (temp) != VOIDmode)
4583 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4584 SUBREG_PROMOTED_VAR_P (temp) = 1;
4585 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4586 SUBREG_PROMOTED_UNSIGNED_P (target));
4589 temp = convert_modes (GET_MODE (target),
4590 GET_MODE (SUBREG_REG (target)),
4591 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4594 return want_value & 1 ? temp : NULL_RTX;
4598 temp = expand_expr (exp, target, GET_MODE (target),
4599 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4600 /* Return TARGET if it's a specified hardware register.
4601 If TARGET is a volatile mem ref, either return TARGET
4602 or return a reg copied *from* TARGET; ANSI requires this.
4604 Otherwise, if TEMP is not TARGET, return TEMP
4605 if it is constant (for efficiency),
4606 or if we really want the correct value. */
4607 if (!(target && GET_CODE (target) == REG
4608 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4609 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4610 && ! rtx_equal_p (temp, target)
4611 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4612 dont_return_target = 1;
4615 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4616 the same as that of TARGET, adjust the constant. This is needed, for
4617 example, in case it is a CONST_DOUBLE and we want only a word-sized
4618 value. */
4619 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4620 && TREE_CODE (exp) != ERROR_MARK
4621 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4622 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4623 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4625 /* If value was not generated in the target, store it there.
4626 Convert the value to TARGET's type first if necessary.
4627 If TEMP and TARGET compare equal according to rtx_equal_p, but
4628 one or both of them are volatile memory refs, we have to distinguish
4629 two cases:
4630 - expand_expr has used TARGET. In this case, we must not generate
4631 another copy. This can be detected by TARGET being equal according
4632 to == .
4633 - expand_expr has not used TARGET - that means that the source just
4634 happens to have the same RTX form. Since temp will have been created
4635 by expand_expr, it will compare unequal according to == .
4636 We must generate a copy in this case, to reach the correct number
4637 of volatile memory references. */
4639 if ((! rtx_equal_p (temp, target)
4640 || (temp != target && (side_effects_p (temp)
4641 || side_effects_p (target))))
4642 && TREE_CODE (exp) != ERROR_MARK
4643 && ! dont_store_target
4644 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4645 but TARGET is not a valid memory reference, TEMP will differ
4646 from TARGET although it is really the same location. */
4647 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4648 || target != DECL_RTL_IF_SET (exp))
4649 /* If there's nothing to copy, don't bother. Don't call expr_size
4650 unless necessary, because some front ends' (C++) expr_size hook
4651 aborts on objects that are not supposed to be bit-copied or
4652 bit-initialized. */
4653 && expr_size (exp) != const0_rtx)
4655 target = protect_from_queue (target, 1);
4656 if (GET_MODE (temp) != GET_MODE (target)
4657 && GET_MODE (temp) != VOIDmode)
4659 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4660 if (dont_return_target)
4662 /* In this case, we will return TEMP,
4663 so make sure it has the proper mode.
4664 But don't forget to store the value into TARGET. */
4665 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4666 emit_move_insn (target, temp);
4669 convert_move (target, temp, unsignedp);
4672 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4674 /* Handle copying a string constant into an array. The string
4675 constant may be shorter than the array. So copy just the string's
4676 actual length, and clear the rest. First get the size of the data
4677 type of the string, which is actually the size of the target. */
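/* Illustrative example (added; not part of the original source): for
   char buf[8] = "hi";  SIZE is 8 and TREE_STRING_LENGTH is 3 (it
   counts the terminating NUL), so 3 bytes are block-moved and the
   remaining 5 bytes are cleared below.  */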
4678 rtx size = expr_size (exp);
4680 if (GET_CODE (size) == CONST_INT
4681 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4682 emit_block_move (target, temp, size,
4684 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4687 /* Compute the size of the data to copy from the string. */
4689 = size_binop (MIN_EXPR,
4690 make_tree (sizetype, size),
4691 size_int (TREE_STRING_LENGTH (exp)));
4693 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4695 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4698 /* Copy that much. */
4699 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4700 TREE_UNSIGNED (sizetype));
4701 emit_block_move (target, temp, copy_size_rtx,
4703 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4705 /* Figure out how much is left in TARGET that we have to clear.
4706 Do all calculations in ptr_mode. */
4707 if (GET_CODE (copy_size_rtx) == CONST_INT)
4709 size = plus_constant (size, -INTVAL (copy_size_rtx));
4710 target = adjust_address (target, BLKmode,
4711 INTVAL (copy_size_rtx));
4715 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4716 copy_size_rtx, NULL_RTX, 0,
4719 #ifdef POINTERS_EXTEND_UNSIGNED
4720 if (GET_MODE (copy_size_rtx) != Pmode)
4721 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4722 TREE_UNSIGNED (sizetype));
4725 target = offset_address (target, copy_size_rtx,
4726 highest_pow2_factor (copy_size));
4727 label = gen_label_rtx ();
4728 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4729 GET_MODE (size), 0, label);
4732 if (size != const0_rtx)
4733 clear_storage (target, size);
4739 /* Handle calls that return values in multiple non-contiguous locations.
4740 The Irix 6 ABI has examples of this. */
4741 else if (GET_CODE (target) == PARALLEL)
4742 emit_group_load (target, temp, TREE_TYPE (exp),
4743 int_size_in_bytes (TREE_TYPE (exp)));
4744 else if (GET_MODE (temp) == BLKmode)
4745 emit_block_move (target, temp, expr_size (exp),
4747 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4749 emit_move_insn (target, temp);
4752 /* If we don't want a value, return NULL_RTX. */
4753 if ((want_value & 1) == 0)
4756 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4757 ??? The latter test doesn't seem to make sense. */
4758 else if (dont_return_target && GET_CODE (temp) != MEM)
4761 /* Copy TARGET into a pseudo and return that, unless TARGET is a hard register, in which case TARGET itself is returned. */
4762 else if ((want_value & 1) != 0
4763 && GET_MODE (target) != BLKmode
4764 && ! (GET_CODE (target) == REG
4765 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4766 return copy_to_reg (target);
4772 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4775 is_zeros_p (tree exp)
4779 switch (TREE_CODE (exp))
4783 case NON_LVALUE_EXPR:
4784 case VIEW_CONVERT_EXPR:
4785 return is_zeros_p (TREE_OPERAND (exp, 0));
4788 return integer_zerop (exp);
4792 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4795 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4798 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4799 elt = TREE_CHAIN (elt))
4800 if (!is_zeros_p (TREE_VALUE (elt)))
4806 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4807 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4808 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4809 if (! is_zeros_p (TREE_VALUE (elt)))
4819 /* Return 1 if EXP contains mostly (3/4) zeros. */
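/* Illustrative example (added; not part of the original source): the
   initializer  { 0, 0, 0, 5 }  gives elts == 4 and zeros == 3, and
   4 * 3 >= 3 * 4 holds, so store_constructor will clear the whole
   object first and store only the nonzero element.  */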
4822 mostly_zeros_p (tree exp)
4824 if (TREE_CODE (exp) == CONSTRUCTOR)
4826 int elts = 0, zeros = 0;
4827 tree elt = CONSTRUCTOR_ELTS (exp);
4828 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4830 /* If there are no ranges of true bits, it is all zero. */
4831 return elt == NULL_TREE;
4833 for (; elt; elt = TREE_CHAIN (elt))
4835 /* We do not handle the case where the index is a RANGE_EXPR,
4836 so the statistic will be somewhat inaccurate.
4837 We do make a more accurate count in store_constructor itself,
4838 and since this function is only used for nested array elements,
4839 this should be close enough. */
4840 if (mostly_zeros_p (TREE_VALUE (elt)))
4845 return 4 * zeros >= 3 * elts;
4848 return is_zeros_p (exp);
4851 /* Helper function for store_constructor.
4852 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4853 TYPE is the type of the CONSTRUCTOR, not the element type.
4854 CLEARED is as for store_constructor.
4855 ALIAS_SET is the alias set to use for any stores.
4857 This provides a recursive shortcut back to store_constructor when it isn't
4858 necessary to go through store_field. This is so that we can pass through
4859 the cleared field to let store_constructor know that we may not have to
4860 clear a substructure if the outer structure has already been cleared. */
4863 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4864 HOST_WIDE_INT bitpos, enum machine_mode mode,
4865 tree exp, tree type, int cleared, int alias_set)
4867 if (TREE_CODE (exp) == CONSTRUCTOR
4868 && bitpos % BITS_PER_UNIT == 0
4869 /* If we have a nonzero bitpos for a register target, then we just
4870 let store_field do the bitfield handling. This is unlikely to
4871 generate unnecessary clear instructions anyway. */
4872 && (bitpos == 0 || GET_CODE (target) == MEM))
4874 if (GET_CODE (target) == MEM)
4876 = adjust_address (target,
4877 GET_MODE (target) == BLKmode
4879 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4880 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4883 /* Update the alias set, if required. */
4884 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4885 && MEM_ALIAS_SET (target) != 0)
4887 target = copy_rtx (target);
4888 set_mem_alias_set (target, alias_set);
4891 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4894 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4898 /* Store the value of constructor EXP into the rtx TARGET.
4899 TARGET is either a REG or a MEM; we know it cannot conflict, since
4900 safe_from_p has been called.
4901 CLEARED is true if TARGET is known to have been zero'd.
4902 SIZE is the number of bytes of TARGET we are allowed to modify: this
4903 may not be the same as the size of EXP if we are assigning to a field
4904 which has been packed to exclude padding bits. */
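/* Illustrative example (added; not part of the original source): for

       struct S { int a, b; } s = { 1 };

   the CONSTRUCTOR lists one element but the type has two fields, so
   the list_length/fields_length test below fires, the whole of S is
   cleared, and only field A is stored explicitly.  */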
4907 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4909 tree type = TREE_TYPE (exp);
4910 #ifdef WORD_REGISTER_OPERATIONS
4911 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4914 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4915 || TREE_CODE (type) == QUAL_UNION_TYPE)
4919 /* If size is zero or the target is already cleared, do nothing. */
4920 if (size == 0 || cleared)
4922 /* We either clear the aggregate or indicate the value is dead. */
4923 else if ((TREE_CODE (type) == UNION_TYPE
4924 || TREE_CODE (type) == QUAL_UNION_TYPE)
4925 && ! CONSTRUCTOR_ELTS (exp))
4926 /* If the constructor is empty, clear the union. */
4928 clear_storage (target, expr_size (exp));
4932 /* If we are building a static constructor into a register,
4933 set the initial value as zero so we can fold the value into
4934 a constant. But if more than one register is involved,
4935 this probably loses. */
4936 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4937 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4939 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4943 /* If the constructor has fewer fields than the structure
4944 or if we are initializing the structure to mostly zeros,
4945 clear the whole structure first. Don't do this if TARGET is a
4946 register whose mode size isn't equal to SIZE since clear_storage
4947 can't handle this case. */
4948 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4949 || mostly_zeros_p (exp))
4950 && (GET_CODE (target) != REG
4951 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4954 rtx xtarget = target;
4956 if (readonly_fields_p (type))
4958 xtarget = copy_rtx (xtarget);
4959 RTX_UNCHANGING_P (xtarget) = 1;
4962 clear_storage (xtarget, GEN_INT (size));
4967 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4969 /* Store each element of the constructor into
4970 the corresponding field of TARGET. */
4972 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4974 tree field = TREE_PURPOSE (elt);
4975 tree value = TREE_VALUE (elt);
4976 enum machine_mode mode;
4977 HOST_WIDE_INT bitsize;
4978 HOST_WIDE_INT bitpos = 0;
4980 rtx to_rtx = target;
4982 /* Just ignore missing fields.
4983 We cleared the whole structure, above,
4984 if any fields are missing. */
4988 if (cleared && is_zeros_p (value))
4991 if (host_integerp (DECL_SIZE (field), 1))
4992 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4996 mode = DECL_MODE (field);
4997 if (DECL_BIT_FIELD (field))
5000 offset = DECL_FIELD_OFFSET (field);
5001 if (host_integerp (offset, 0)
5002 && host_integerp (bit_position (field), 0))
5004 bitpos = int_bit_position (field);
5008 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5014 if (CONTAINS_PLACEHOLDER_P (offset))
5015 offset = build (WITH_RECORD_EXPR, sizetype,
5016 offset, make_tree (TREE_TYPE (exp), target));
5018 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5019 if (GET_CODE (to_rtx) != MEM)
5022 #ifdef POINTERS_EXTEND_UNSIGNED
5023 if (GET_MODE (offset_rtx) != Pmode)
5024 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5026 if (GET_MODE (offset_rtx) != ptr_mode)
5027 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5030 to_rtx = offset_address (to_rtx, offset_rtx,
5031 highest_pow2_factor (offset));
5034 if (TREE_READONLY (field))
5036 if (GET_CODE (to_rtx) == MEM)
5037 to_rtx = copy_rtx (to_rtx);
5039 RTX_UNCHANGING_P (to_rtx) = 1;
5042 #ifdef WORD_REGISTER_OPERATIONS
5043 /* If this initializes a field that is smaller than a word, at the
5044 start of a word, try to widen it to a full word.
5045 This special case allows us to output C++ member function
5046 initializations in a form that the optimizers can understand. */
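/* Illustrative example (added; not part of the original source):
   storing the constant 5 into a 32-bit field at bit 0 of a 64-bit
   register target becomes a full-word store of 5, or of 5 << 32 on a
   big-endian target, which later passes can combine more easily than
   a bit-field insertion.  */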
5047 if (GET_CODE (target) == REG
5048 && bitsize < BITS_PER_WORD
5049 && bitpos % BITS_PER_WORD == 0
5050 && GET_MODE_CLASS (mode) == MODE_INT
5051 && TREE_CODE (value) == INTEGER_CST
5053 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5055 tree type = TREE_TYPE (value);
5057 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5059 type = (*lang_hooks.types.type_for_size)
5060 (BITS_PER_WORD, TREE_UNSIGNED (type));
5061 value = convert (type, value);
5064 if (BYTES_BIG_ENDIAN)
5066 = fold (build (LSHIFT_EXPR, type, value,
5067 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5068 bitsize = BITS_PER_WORD;
5073 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5074 && DECL_NONADDRESSABLE_P (field))
5076 to_rtx = copy_rtx (to_rtx);
5077 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5080 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5081 value, type, cleared,
5082 get_alias_set (TREE_TYPE (field)));
5085 else if (TREE_CODE (type) == ARRAY_TYPE
5086 || TREE_CODE (type) == VECTOR_TYPE)
5091 tree domain = TYPE_DOMAIN (type);
5092 tree elttype = TREE_TYPE (type);
5094 HOST_WIDE_INT minelt = 0;
5095 HOST_WIDE_INT maxelt = 0;
5097 /* Vectors are like arrays, but the domain is stored via an array
5098 type indirectly. */
5099 if (TREE_CODE (type) == VECTOR_TYPE)
5101 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5102 the same field as TYPE_DOMAIN, we are not guaranteed that
5103 it always will. */
5104 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5105 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5108 const_bounds_p = (TYPE_MIN_VALUE (domain)
5109 && TYPE_MAX_VALUE (domain)
5110 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5111 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5113 /* If we have constant bounds for the range of the type, get them. */
5116 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5117 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5120 /* If the constructor has fewer elements than the array,
5121 clear the whole array first. Similarly if this is
5122 a static constructor of a non-BLKmode object. */
5123 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5127 HOST_WIDE_INT count = 0, zero_count = 0;
5128 need_to_clear = ! const_bounds_p;
5130 /* This loop is a more accurate version of the loop in
5131 mostly_zeros_p (it handles RANGE_EXPR in an index).
5132 It is also needed to check for missing elements. */
5133 for (elt = CONSTRUCTOR_ELTS (exp);
5134 elt != NULL_TREE && ! need_to_clear;
5135 elt = TREE_CHAIN (elt))
5137 tree index = TREE_PURPOSE (elt);
5138 HOST_WIDE_INT this_node_count;
5140 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5142 tree lo_index = TREE_OPERAND (index, 0);
5143 tree hi_index = TREE_OPERAND (index, 1);
5145 if (! host_integerp (lo_index, 1)
5146 || ! host_integerp (hi_index, 1))
5152 this_node_count = (tree_low_cst (hi_index, 1)
5153 - tree_low_cst (lo_index, 1) + 1);
5156 this_node_count = 1;
5158 count += this_node_count;
5159 if (mostly_zeros_p (TREE_VALUE (elt)))
5160 zero_count += this_node_count;
5163 /* Clear the entire array first if there are any missing elements,
5164 or if the incidence of zero elements is >= 75%. */
5165 if (! need_to_clear
5166 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5167 need_to_clear = 1;
5170 if (need_to_clear && size > 0)
5175 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5177 clear_storage (target, GEN_INT (size));
5181 else if (REG_P (target))
5182 /* Inform later passes that the old value is dead. */
5183 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5185 /* Store each element of the constructor into
5186 the corresponding element of TARGET, determined
5187 by counting the elements. */
5188 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5190 elt = TREE_CHAIN (elt), i++)
5192 enum machine_mode mode;
5193 HOST_WIDE_INT bitsize;
5194 HOST_WIDE_INT bitpos;
5196 tree value = TREE_VALUE (elt);
5197 tree index = TREE_PURPOSE (elt);
5198 rtx xtarget = target;
5200 if (cleared && is_zeros_p (value))
5203 unsignedp = TREE_UNSIGNED (elttype);
5204 mode = TYPE_MODE (elttype);
5205 if (mode == BLKmode)
5206 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5207 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5210 bitsize = GET_MODE_BITSIZE (mode);
5212 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5214 tree lo_index = TREE_OPERAND (index, 0);
5215 tree hi_index = TREE_OPERAND (index, 1);
5216 rtx index_r, pos_rtx, loop_end;
5217 struct nesting *loop;
5218 HOST_WIDE_INT lo, hi, count;
5221 /* If the range is constant and "small", unroll the loop. */
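/* Illustrative example (added; not part of the original source): the
   GNU C range initializer

       int a[8] = { [2 ... 5] = 7 };

   reaches here with LO_INDEX 2 and HI_INDEX 5; the four iterations
   are constant and small, so they are unrolled into four element
   stores.  */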
5222 if (const_bounds_p
5223 && host_integerp (lo_index, 0)
5224 && host_integerp (hi_index, 0)
5225 && (lo = tree_low_cst (lo_index, 0),
5226 hi = tree_low_cst (hi_index, 0),
5227 count = hi - lo + 1,
5228 (GET_CODE (target) != MEM
5230 || (host_integerp (TYPE_SIZE (elttype), 1)
5231 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5234 lo -= minelt; hi -= minelt;
5235 for (; lo <= hi; lo++)
5237 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5239 if (GET_CODE (target) == MEM
5240 && !MEM_KEEP_ALIAS_SET_P (target)
5241 && TREE_CODE (type) == ARRAY_TYPE
5242 && TYPE_NONALIASED_COMPONENT (type))
5244 target = copy_rtx (target);
5245 MEM_KEEP_ALIAS_SET_P (target) = 1;
5248 store_constructor_field
5249 (target, bitsize, bitpos, mode, value, type, cleared,
5250 get_alias_set (elttype));
5255 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5256 loop_end = gen_label_rtx ();
5258 unsignedp = TREE_UNSIGNED (domain);
5260 index = build_decl (VAR_DECL, NULL_TREE, domain);
5263 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5265 SET_DECL_RTL (index, index_r);
5266 if (TREE_CODE (value) == SAVE_EXPR
5267 && SAVE_EXPR_RTL (value) == 0)
5269 /* Make sure value gets expanded once before the
5270 loop. */
5271 expand_expr (value, const0_rtx, VOIDmode, 0);
5274 store_expr (lo_index, index_r, 0);
5275 loop = expand_start_loop (0);
5277 /* Assign value to element index. */
5279 = convert (ssizetype,
5280 fold (build (MINUS_EXPR, TREE_TYPE (index),
5281 index, TYPE_MIN_VALUE (domain))));
5282 position = size_binop (MULT_EXPR, position,
5284 TYPE_SIZE_UNIT (elttype)));
5286 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5287 xtarget = offset_address (target, pos_rtx,
5288 highest_pow2_factor (position));
5289 xtarget = adjust_address (xtarget, mode, 0);
5290 if (TREE_CODE (value) == CONSTRUCTOR)
5291 store_constructor (value, xtarget, cleared,
5292 bitsize / BITS_PER_UNIT);
5294 store_expr (value, xtarget, 0);
5296 expand_exit_loop_if_false (loop,
5297 build (LT_EXPR, integer_type_node,
5300 expand_increment (build (PREINCREMENT_EXPR,
5302 index, integer_one_node), 0, 0);
5304 emit_label (loop_end);
5307 else if ((index != 0 && ! host_integerp (index, 0))
5308 || ! host_integerp (TYPE_SIZE (elttype), 1))
5313 index = ssize_int (1);
5316 index = convert (ssizetype,
5317 fold (build (MINUS_EXPR, index,
5318 TYPE_MIN_VALUE (domain))));
5320 position = size_binop (MULT_EXPR, index,
5322 TYPE_SIZE_UNIT (elttype)));
5323 xtarget = offset_address (target,
5324 expand_expr (position, 0, VOIDmode, 0),
5325 highest_pow2_factor (position));
5326 xtarget = adjust_address (xtarget, mode, 0);
5327 store_expr (value, xtarget, 0);
5332 bitpos = ((tree_low_cst (index, 0) - minelt)
5333 * tree_low_cst (TYPE_SIZE (elttype), 1));
5335 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5337 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5338 && TREE_CODE (type) == ARRAY_TYPE
5339 && TYPE_NONALIASED_COMPONENT (type))
5341 target = copy_rtx (target);
5342 MEM_KEEP_ALIAS_SET_P (target) = 1;
5345 store_constructor_field (target, bitsize, bitpos, mode, value,
5346 type, cleared, get_alias_set (elttype));
5352 /* Set constructor assignments. */
5353 else if (TREE_CODE (type) == SET_TYPE)
5355 tree elt = CONSTRUCTOR_ELTS (exp);
5356 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5357 tree domain = TYPE_DOMAIN (type);
5358 tree domain_min, domain_max, bitlength;
5360 /* The default implementation strategy is to extract the constant
5361 parts of the constructor, use that to initialize the target,
5362 and then "or" in whatever non-constant ranges we need in addition.
5364 If a large set is all zero or all ones, it is
5365 probably better to set it using memset (if available) or bzero.
5366 Also, if a large set has just a single range, it may also be
5367 better to first clear the set (using bzero/memset) and then
5368 set the bits we want. */
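/* Illustrative example (added; not part of the original source, for
   front ends with SET_TYPE such as Pascal-style sets): a 64-bit set
   whose constructor is the single constant range [8 .. 23] can be
   built by clearing the whole set and then storing all-ones into two
   bytes, rather than or-ing bits in word by word.  */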
5370 /* Check for all zeros. */
5371 if (elt == NULL_TREE && size > 0)
5374 clear_storage (target, GEN_INT (size));
5378 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5379 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5380 bitlength = size_binop (PLUS_EXPR,
5381 size_diffop (domain_max, domain_min),
5384 nbits = tree_low_cst (bitlength, 1);
5386 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5387 are "complicated" (more than one range), initialize (the
5388 constant parts) by copying from a constant. */
5389 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5390 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5392 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5393 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5394 char *bit_buffer = alloca (nbits);
5395 HOST_WIDE_INT word = 0;
5396 unsigned int bit_pos = 0;
5397 unsigned int ibit = 0;
5398 unsigned int offset = 0; /* In bytes from beginning of set. */
5400 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5403 if (bit_buffer[ibit])
5405 if (BYTES_BIG_ENDIAN)
5406 word |= (1 << (set_word_size - 1 - bit_pos));
5408 word |= 1 << bit_pos;
5412 if (bit_pos >= set_word_size || ibit == nbits)
5414 if (word != 0 || ! cleared)
5416 rtx datum = GEN_INT (word);
5419 /* The assumption here is that it is safe to use
5420 XEXP if the set is multi-word, but not if
5421 it's single-word. */
5422 if (GET_CODE (target) == MEM)
5423 to_rtx = adjust_address (target, mode, offset);
5424 else if (offset == 0)
5428 emit_move_insn (to_rtx, datum);
5435 offset += set_word_size / BITS_PER_UNIT;
5440 /* Don't bother clearing storage if the set is all ones. */
5441 if (TREE_CHAIN (elt) != NULL_TREE
5442 || (TREE_PURPOSE (elt) == NULL_TREE
5444 : ( ! host_integerp (TREE_VALUE (elt), 0)
5445 || ! host_integerp (TREE_PURPOSE (elt), 0)
5446 || (tree_low_cst (TREE_VALUE (elt), 0)
5447 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5448 != (HOST_WIDE_INT) nbits))))
5449 clear_storage (target, expr_size (exp));
5451 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5453 /* Start of range of element or NULL. */
5454 tree startbit = TREE_PURPOSE (elt);
5455 /* End of range of element, or element value. */
5456 tree endbit = TREE_VALUE (elt);
5457 HOST_WIDE_INT startb, endb;
5458 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5460 bitlength_rtx = expand_expr (bitlength,
5461 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5463 /* Handle non-range tuple element like [ expr ]. */
5464 if (startbit == NULL_TREE)
5466 startbit = save_expr (endbit);
5470 startbit = convert (sizetype, startbit);
5471 endbit = convert (sizetype, endbit);
5472 if (! integer_zerop (domain_min))
5474 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5475 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5477 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5478 EXPAND_CONST_ADDRESS);
5479 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5480 EXPAND_CONST_ADDRESS);
5486 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5487 (GET_MODE (target), 0),
5490 emit_move_insn (targetx, target);
5493 else if (GET_CODE (target) == MEM)
5498 /* Optimization: If startbit and endbit are constants divisible
5499 by BITS_PER_UNIT, call memset instead. */
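/* Illustrative example (added; not part of the original source): for
   STARTBIT 8 and ENDBIT 23 we get startb == 8 and endb == 24, both
   divisible by BITS_PER_UNIT on a byte-addressed target, so two bytes
   at offset 1 into the set are memset to all-ones.  */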
5500 if (TARGET_MEM_FUNCTIONS
5501 && TREE_CODE (startbit) == INTEGER_CST
5502 && TREE_CODE (endbit) == INTEGER_CST
5503 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5504 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5506 emit_library_call (memset_libfunc, LCT_NORMAL,
5508 plus_constant (XEXP (targetx, 0),
5509 startb / BITS_PER_UNIT),
5511 constm1_rtx, TYPE_MODE (integer_type_node),
5512 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5513 TYPE_MODE (sizetype));
5516 emit_library_call (setbits_libfunc, LCT_NORMAL,
5517 VOIDmode, 4, XEXP (targetx, 0),
5518 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5519 startbit_rtx, TYPE_MODE (sizetype),
5520 endbit_rtx, TYPE_MODE (sizetype));
5523 emit_move_insn (target, targetx);
5531 /* Store the value of EXP (an expression tree)
5532 into a subfield of TARGET which has mode MODE and occupies
5533 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5534 If MODE is VOIDmode, it means that we are storing into a bit-field.
5536 If VALUE_MODE is VOIDmode, return nothing in particular.
5537 UNSIGNEDP is not used in this case.
5539 Otherwise, return an rtx for the value stored. This rtx
5540 has mode VALUE_MODE if that is convenient to do.
5541 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5543 TYPE is the type of the underlying object,
5545 ALIAS_SET is the alias set for the destination. This value will
5546 (in general) be different from that for TARGET, since TARGET is a
5547 reference to the containing structure. */
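/* Illustrative example (added; not part of the original source): for

       struct S { unsigned int f : 3; } s;
       s.f = v;

   this is reached with BITSIZE == 3, BITPOS == 0 and MODE == VOIDmode,
   so the store goes through store_bit_field below rather than through
   an ordinary memory reference.  */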
5550 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5551 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5552 int unsignedp, tree type, int alias_set)
5554 HOST_WIDE_INT width_mask = 0;
5556 if (TREE_CODE (exp) == ERROR_MARK)
5559 /* If we have nothing to store, do nothing unless the expression has
5560 side-effects. */
5561 if (bitsize == 0)
5562 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5563 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5564 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5566 /* If we are storing into an unaligned field of an aligned union that is
5567 in a register, we may have the mode of TARGET being an integer mode but
5568 MODE == BLKmode. In that case, get an aligned object whose size and
5569 alignment are the same as TARGET and store TARGET into it (we can avoid
5570 the store if the field being stored is the entire width of TARGET). Then
5571 call ourselves recursively to store the field into a BLKmode version of
5572 that object. Finally, load from the object into TARGET. This is not
5573 very efficient in general, but should only be slightly more expensive
5574 than the otherwise-required unaligned accesses. Perhaps this can be
5575 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5576 twice, once with emit_move_insn and once via store_field. */
5579 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5581 rtx object = assign_temp (type, 0, 1, 1);
5582 rtx blk_object = adjust_address (object, BLKmode, 0);
5584 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5585 emit_move_insn (object, target);
5587 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5590 emit_move_insn (target, object);
5592 /* We want to return the BLKmode version of the data. */
5596 if (GET_CODE (target) == CONCAT)
5598 /* We're storing into a struct containing a single __complex. */
5602 return store_expr (exp, target, 0);
5605 /* If the structure is in a register or if the component
5606 is a bit field, we cannot use addressing to access it.
5607 Use bit-field techniques or SUBREG to store in it. */
5609 if (mode == VOIDmode
5610 || (mode != BLKmode && ! direct_store[(int) mode]
5611 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5612 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5613 || GET_CODE (target) == REG
5614 || GET_CODE (target) == SUBREG
5615 /* If the field isn't aligned enough to store as an ordinary memref,
5616 store it as a bit field. */
5618 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5619 || bitpos % GET_MODE_ALIGNMENT (mode))
5620 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5621 || (bitpos % BITS_PER_UNIT != 0)))
5622 /* If the RHS and field are a constant size and the size of the
5623 RHS isn't the same size as the bitfield, we must use bitfield
5624 operations. */
5625 || (bitsize >= 0
5626 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5627 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5629 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5631 /* If BITSIZE is narrower than the size of the type of EXP
5632 we will be narrowing TEMP. Normally, what's wanted are the
5633 low-order bits. However, if EXP's type is a record and this is
5634 a big-endian machine, we want the upper BITSIZE bits.
5635 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5636 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5637 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5638 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5639 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5643 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5644 MODE. */
5645 if (mode != VOIDmode && mode != BLKmode
5646 && mode != TYPE_MODE (TREE_TYPE (exp)))
5647 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5649 /* If the modes of TARGET and TEMP are both BLKmode, both
5650 must be in memory and BITPOS must be aligned on a byte
5651 boundary. If so, we simply do a block copy. */
5652 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5654 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5655 || bitpos % BITS_PER_UNIT != 0)
5658 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5659 emit_block_move (target, temp,
5660 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5664 return value_mode == VOIDmode ? const0_rtx : target;
5667 /* Store the value in the bitfield. */
5668 store_bit_field (target, bitsize, bitpos, mode, temp,
5669 int_size_in_bytes (type));
5671 if (value_mode != VOIDmode)
5673 /* The caller wants an rtx for the value.
5674 If possible, avoid refetching from the bitfield itself. */
5676 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5679 enum machine_mode tmode;
5681 tmode = GET_MODE (temp);
5682 if (tmode == VOIDmode)
5686 return expand_and (tmode, temp,
5687 gen_int_mode (width_mask, tmode),
5690 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5691 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5692 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5695 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5696 NULL_RTX, value_mode, VOIDmode,
5697 int_size_in_bytes (type));
5703 rtx addr = XEXP (target, 0);
5704 rtx to_rtx = target;
5706 /* If a value is wanted, it must be the lhs;
5707 so make the address stable for multiple use. */
5709 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5710 && ! CONSTANT_ADDRESS_P (addr)
5711 /* A frame-pointer reference is already stable. */
5712 && ! (GET_CODE (addr) == PLUS
5713 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5714 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5715 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5716 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5718 /* Now build a reference to just the desired component. */
5720 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5722 if (to_rtx == target)
5723 to_rtx = copy_rtx (to_rtx);
5725 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5726 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5727 set_mem_alias_set (to_rtx, alias_set);
5729 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5733 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5734 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5735 codes and find the ultimate containing object, which we return.
5737 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5738 bit position, and *PUNSIGNEDP to the signedness of the field.
5739 If the position of the field is variable, we store a tree
5740 giving the variable offset (in units) in *POFFSET.
5741 This offset is in addition to the bit position.
5742 If the position is not variable, we store 0 in *POFFSET.
5744 If any of the extraction expressions is volatile,
5745 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5747 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5748 is a mode that can be used to access the field. In that case, *PBITSIZE
5749 is redundant.
5751 If the field describes a variable-sized object, *PMODE is set to
5752 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5753 this case, but the address of the object can be found. */
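/* Illustrative example (added; not part of the original source): for
   the reference  a.b[i].c  this returns the innermost object A; the
   constant parts of the field and element offsets end up in *PBITPOS,
   while the part that depends on I comes back as a tree in *POFFSET.  */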
5756 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5757 HOST_WIDE_INT *pbitpos, tree *poffset,
5758 enum machine_mode *pmode, int *punsignedp,
5762 enum machine_mode mode = VOIDmode;
5763 tree offset = size_zero_node;
5764 tree bit_offset = bitsize_zero_node;
5765 tree placeholder_ptr = 0;
5768 /* First get the mode, signedness, and size. We do this from just the
5769 outermost expression. */
5770 if (TREE_CODE (exp) == COMPONENT_REF)
5772 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5773 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5774 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5776 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5778 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5780 size_tree = TREE_OPERAND (exp, 1);
5781 *punsignedp = TREE_UNSIGNED (exp);
5785 mode = TYPE_MODE (TREE_TYPE (exp));
5786 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5788 if (mode == BLKmode)
5789 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5791 *pbitsize = GET_MODE_BITSIZE (mode);
5796 if (! host_integerp (size_tree, 1))
5797 mode = BLKmode, *pbitsize = -1;
5799 *pbitsize = tree_low_cst (size_tree, 1);
5802 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5803 and find the ultimate containing object. */
5806 if (TREE_CODE (exp) == BIT_FIELD_REF)
5807 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5808 else if (TREE_CODE (exp) == COMPONENT_REF)
5810 tree field = TREE_OPERAND (exp, 1);
5811 tree this_offset = DECL_FIELD_OFFSET (field);
5813 /* If this field hasn't been filled in yet, don't go
5814 past it. This should only happen when folding expressions
5815 made during type construction. */
5816 if (this_offset == 0)
5818 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5819 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5821 offset = size_binop (PLUS_EXPR, offset, this_offset);
5822 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5823 DECL_FIELD_BIT_OFFSET (field));
5825 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5828 else if (TREE_CODE (exp) == ARRAY_REF
5829 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5831 tree index = TREE_OPERAND (exp, 1);
5832 tree array = TREE_OPERAND (exp, 0);
5833 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5834 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5835 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5837 /* We assume all arrays have sizes that are a multiple of a byte.
5838 First subtract the lower bound, if any, in the type of the
5839 index, then convert to sizetype and multiply by the size of the
5840 element. */
5841 if (low_bound != 0 && ! integer_zerop (low_bound))
5842 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5845 /* If the index has a self-referential type, pass it to a
5846 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5847 component to one. */
5848 if (CONTAINS_PLACEHOLDER_P (index))
5849 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5850 if (CONTAINS_PLACEHOLDER_P (unit_size))
5851 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5853 offset = size_binop (PLUS_EXPR, offset,
5854 size_binop (MULT_EXPR,
5855 convert (sizetype, index),
5859 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5861 tree new = find_placeholder (exp, &placeholder_ptr);
5863 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5864 We might have been called from tree optimization where we
5865 haven't set up an object yet. */
5874 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5875 conversions that don't change the mode, and all view conversions
5876 except those that need to "step up" the alignment. */
5877 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5878 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5879 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5880 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5882 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5883 < BIGGEST_ALIGNMENT)
5884 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5885 || TYPE_ALIGN_OK (TREE_TYPE
5886 (TREE_OPERAND (exp, 0))))))
5887 && ! ((TREE_CODE (exp) == NOP_EXPR
5888 || TREE_CODE (exp) == CONVERT_EXPR)
5889 && (TYPE_MODE (TREE_TYPE (exp))
5890 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5893 /* If any reference in the chain is volatile, the effect is volatile. */
5894 if (TREE_THIS_VOLATILE (exp))
5897 exp = TREE_OPERAND (exp, 0);
5900 /* If OFFSET is constant, see if we can return the whole thing as a
5901 constant bit position. Otherwise, split it up. */
5902 if (host_integerp (offset, 0)
5903 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5905 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5906 && host_integerp (tem, 0))
5907 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5909 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5915 /* Return 1 if T is an expression that get_inner_reference handles. */
5918 handled_component_p (tree t)
5920 switch (TREE_CODE (t))
5925 case ARRAY_RANGE_REF:
5926 case NON_LVALUE_EXPR:
5927 case VIEW_CONVERT_EXPR:
5930 /* ??? Sure they are handled, but get_inner_reference may return
5931 a different PBITSIZE, depending upon whether the expression is
5932 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5935 return (TYPE_MODE (TREE_TYPE (t))
5936 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5943 /* Given an rtx VALUE that may contain additions and multiplications, return
5944 an equivalent value that just refers to a register, memory, or constant.
5945 This is done by generating instructions to perform the arithmetic and
5946 returning a pseudo-register containing the value.
5948 The returned value may be a REG, SUBREG, MEM or constant. */
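/* Illustrative example (added; not part of the original source): given

       (plus:SI (reg:SI 100) (const_int 4))

   force_operand emits an add into a pseudo (or into TARGET, if given)
   and returns that register, which is then valid wherever a general
   operand is required.  */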
5951 force_operand (rtx value, rtx target)
5954 /* Use subtarget as the target for operand 0 of a binary operation. */
5955 rtx subtarget = get_subtarget (target);
5956 enum rtx_code code = GET_CODE (value);
5958 /* Check for a PIC address load. */
5959 if ((code == PLUS || code == MINUS)
5960 && XEXP (value, 0) == pic_offset_table_rtx
5961 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5962 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5963 || GET_CODE (XEXP (value, 1)) == CONST))
5966 subtarget = gen_reg_rtx (GET_MODE (value));
5967 emit_move_insn (subtarget, value);
5971 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5974 target = gen_reg_rtx (GET_MODE (value));
5975 convert_move (target, force_operand (XEXP (value, 0), NULL),
5976 code == ZERO_EXTEND);
5980 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5982 op2 = XEXP (value, 1);
5983 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5985 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5988 op2 = negate_rtx (GET_MODE (value), op2);
5991 /* Check for an addition with OP2 a constant integer and our first
5992 operand a PLUS of a virtual register and something else. In that
5993 case, we want to emit the sum of the virtual register and the
5994 constant first and then add the other value. This allows virtual
5995 register instantiation to simply modify the constant rather than
5996 creating another one around this addition. */
5997 if (code == PLUS && GET_CODE (op2) == CONST_INT
5998 && GET_CODE (XEXP (value, 0)) == PLUS
5999 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
6000 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6001 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6003 rtx temp = expand_simple_binop (GET_MODE (value), code,
6004 XEXP (XEXP (value, 0), 0), op2,
6005 subtarget, 0, OPTAB_LIB_WIDEN);
6006 return expand_simple_binop (GET_MODE (value), code, temp,
6007 force_operand (XEXP (XEXP (value,
6009 target, 0, OPTAB_LIB_WIDEN);
6012 op1 = force_operand (XEXP (value, 0), subtarget);
6013 op2 = force_operand (op2, NULL_RTX);
6017 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6019 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6020 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6021 target, 1, OPTAB_LIB_WIDEN);
6023 return expand_divmod (0,
6024 FLOAT_MODE_P (GET_MODE (value))
6025 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6026 GET_MODE (value), op1, op2, target, 0);
6029 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6033 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6037 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6041 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6042 target, 0, OPTAB_LIB_WIDEN);
6045 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6046 target, 1, OPTAB_LIB_WIDEN);
6049 if (GET_RTX_CLASS (code) == '1')
6051 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6052 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6055 #ifdef INSN_SCHEDULING
6056 /* On machines that have insn scheduling, we want all memory references to be
6057 explicit, so we need to deal with such paradoxical SUBREGs. */
6058 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6059 && (GET_MODE_SIZE (GET_MODE (value))
6060 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6062 = simplify_gen_subreg (GET_MODE (value),
6063 force_reg (GET_MODE (SUBREG_REG (value)),
6064 force_operand (SUBREG_REG (value),
6066 GET_MODE (SUBREG_REG (value)),
6067 SUBREG_BYTE (value));
6073 /* Subroutine of expand_expr: return nonzero iff there is no way that
6074 EXP can reference X, which is being modified. TOP_P is nonzero if this
6075 call is going to be used to determine whether we need a temporary
6076 for EXP, as opposed to a recursive call to this function.
6078 It is always safe for this routine to return zero since it merely
6079 searches for optimization opportunities. */
6082 safe_from_p (rtx x, tree exp, int top_p)
6086 static tree save_expr_list;
6089 /* If EXP has varying size, we MUST use a target since we currently
6090 have no way of allocating temporaries of variable size
6091 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6092 So we assume here that something at a higher level has prevented a
6093 clash. This is somewhat bogus, but the best we can do. Only
6094 do this when X is BLKmode and when we are at the top level. */
6095 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6096 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6097 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6098 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6099 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6101 && GET_MODE (x) == BLKmode)
6102 /* If X is in the outgoing argument area, it is always safe. */
6103 || (GET_CODE (x) == MEM
6104 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6105 || (GET_CODE (XEXP (x, 0)) == PLUS
6106 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6109 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6110 find the underlying pseudo. */
6111 if (GET_CODE (x) == SUBREG)
6114 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6118 /* A SAVE_EXPR might appear many times in the expression passed to the
6119 top-level safe_from_p call, and if it has a complex subexpression,
6120 examining it multiple times could result in a combinatorial explosion.
6121 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6122 with optimization took about 28 minutes to compile -- even though it was
6123 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6124 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6125 we have processed. Note that the only test of top_p was above. */
6134 rtn = safe_from_p (x, exp, 0);
6136 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6137 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6142 /* Now look at our tree code and possibly recurse. */
6143 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6146 exp_rtl = DECL_RTL_IF_SET (exp);
6153 if (TREE_CODE (exp) == TREE_LIST)
6157 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6159 exp = TREE_CHAIN (exp);
6162 if (TREE_CODE (exp) != TREE_LIST)
6163 return safe_from_p (x, exp, 0);
6166 else if (TREE_CODE (exp) == ERROR_MARK)
6167 return 1; /* An already-visited SAVE_EXPR? */
6173 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6178 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6182 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6183 the expression. If it is set, we conflict iff we are that rtx or
6184 both are in memory. Otherwise, we check all operands of the
6185 expression recursively. */
6187 switch (TREE_CODE (exp))
6190 /* If the operand is static or we are static, we can't conflict.
6191 Likewise if we don't conflict with the operand at all. */
6192 if (staticp (TREE_OPERAND (exp, 0))
6193 || TREE_STATIC (exp)
6194 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6197 /* Otherwise, the only way this can conflict is if we are taking
6198 the address of a DECL whose address is part of X, which is
6199 very rare. */
6200 exp = TREE_OPERAND (exp, 0);
6203 if (!DECL_RTL_SET_P (exp)
6204 || GET_CODE (DECL_RTL (exp)) != MEM)
6207 exp_rtl = XEXP (DECL_RTL (exp), 0);
6212 if (GET_CODE (x) == MEM
6213 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6214 get_alias_set (exp)))
6219 /* Assume that the call will clobber all hard registers and
6220 all of memory. */
6221 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6222 || GET_CODE (x) == MEM)
6227 /* If a sequence exists, we would have to scan every instruction
6228 in the sequence to see if it was safe. This is probably not
6229 worthwhile. */
6230 if (RTL_EXPR_SEQUENCE (exp))
6233 exp_rtl = RTL_EXPR_RTL (exp);
6236 case WITH_CLEANUP_EXPR:
6237 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6240 case CLEANUP_POINT_EXPR:
6241 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6244 exp_rtl = SAVE_EXPR_RTL (exp);
6248 /* If we've already scanned this, don't do it again. Otherwise,
6249 show we've scanned it and record for clearing the flag if we're
6250 going on. */
6251 if (TREE_PRIVATE (exp))
6254 TREE_PRIVATE (exp) = 1;
6255 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6257 TREE_PRIVATE (exp) = 0;
6261 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6265 /* The only operand we look at is operand 1. The rest aren't
6266 part of the expression. */
6267 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6273 /* If we have an rtx, we do not need to scan our operands. */
6277 nops = first_rtl_op (TREE_CODE (exp));
6278 for (i = 0; i < nops; i++)
6279 if (TREE_OPERAND (exp, i) != 0
6280 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6283 /* If this is a language-specific tree code, it may require
6284 special handling. */
6285 if ((unsigned int) TREE_CODE (exp)
6286 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6287 && !(*lang_hooks.safe_from_p) (x, exp))
6291 /* If we have an rtl, find any enclosed object. Then see if we conflict
6295 if (GET_CODE (exp_rtl) == SUBREG)
6297 exp_rtl = SUBREG_REG (exp_rtl);
6298 if (GET_CODE (exp_rtl) == REG
6299 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6303 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6304 are memory and they conflict. */
6305 return ! (rtx_equal_p (x, exp_rtl)
6306 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6307 && true_dependence (exp_rtl, VOIDmode, x,
6308 rtx_addr_varies_p)));
6311 /* If we reach here, it is safe. */
6315 /* Subroutine of expand_expr: return rtx if EXP is a
6316 variable or parameter; else return 0. */
6322 switch (TREE_CODE (exp))
6326 return DECL_RTL (exp);
6332 #ifdef MAX_INTEGER_COMPUTATION_MODE
6335 check_max_integer_computation_mode (tree exp)
6337 enum tree_code code;
6338 enum machine_mode mode;
6340 /* Strip any NOPs that don't change the mode. */
6342 code = TREE_CODE (exp);
6344 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6345 if (code == NOP_EXPR
6346 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6349 /* First check the type of the overall operation. We need only look at
6350 unary, binary and relational operations. */
6351 if (TREE_CODE_CLASS (code) == '1'
6352 || TREE_CODE_CLASS (code) == '2'
6353 || TREE_CODE_CLASS (code) == '<')
6355 mode = TYPE_MODE (TREE_TYPE (exp));
6356 if (GET_MODE_CLASS (mode) == MODE_INT
6357 && mode > MAX_INTEGER_COMPUTATION_MODE)
6358 internal_error ("unsupported wide integer operation");
6361 /* Check operand of a unary op. */
6362 if (TREE_CODE_CLASS (code) == '1')
6364 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6365 if (GET_MODE_CLASS (mode) == MODE_INT
6366 && mode > MAX_INTEGER_COMPUTATION_MODE)
6367 internal_error ("unsupported wide integer operation");
6370 /* Check operands of a binary/comparison op. */
6371 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6373 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6374 if (GET_MODE_CLASS (mode) == MODE_INT
6375 && mode > MAX_INTEGER_COMPUTATION_MODE)
6376 internal_error ("unsupported wide integer operation");
6378 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6379 if (GET_MODE_CLASS (mode) == MODE_INT
6380 && mode > MAX_INTEGER_COMPUTATION_MODE)
6381 internal_error ("unsupported wide integer operation");
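/* All three tests above are instances of one predicate; a hedged
   refactoring sketch, for exposition only (check_int_mode is a
   hypothetical name, not an existing helper):

     static void
     check_int_mode (enum machine_mode mode)
     {
       if (GET_MODE_CLASS (mode) == MODE_INT
           && mode > MAX_INTEGER_COMPUTATION_MODE)
         internal_error ("unsupported wide integer operation");
     }

   E.g. with MAX_INTEGER_COMPUTATION_MODE defined as DImode, a TImode
   addition would be rejected here rather than expanded badly.  */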
6386 /* Return the highest power of two that EXP is known to be a multiple of.
6387 This is used in updating alignment of MEMs in array references. */
6389 static unsigned HOST_WIDE_INT
6390 highest_pow2_factor (tree exp)
6392 unsigned HOST_WIDE_INT c0, c1;
6394 switch (TREE_CODE (exp))
6397 /* We can find the lowest bit that's a one. If the low
6398 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6399 We need to handle this case since we can find it in a COND_EXPR,
6400 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6401 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6403 if (TREE_CONSTANT_OVERFLOW (exp))
6404 return BIGGEST_ALIGNMENT;
6407 /* Note: tree_low_cst is intentionally not used here, since
6408 we don't care about the upper bits. */
6409 c0 = TREE_INT_CST_LOW (exp);
6411 return c0 ? c0 : BIGGEST_ALIGNMENT;
6415 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6416 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6417 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6418 return MIN (c0, c1);
6421 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6422 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6425 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6427 if (integer_pow2p (TREE_OPERAND (exp, 1))
6428 && host_integerp (TREE_OPERAND (exp, 1), 1))
6430 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6431 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6432 return MAX (1, c0 / c1);
6436 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6437 case SAVE_EXPR: case WITH_RECORD_EXPR:
6438 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6441 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6444 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6445 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6446 return MIN (c0, c1);
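/* Worked example, for exposition: for i * 12 + 8, the VAR_DECL i falls
   through to the default case (factor 1), the constant 12 contributes
   its lowest set bit (12 & -12 == 4), the MULT_EXPR multiplies the two
   factors (1 * 4 == 4), and the PLUS_EXPR returns MIN (4, 8) == 4: the
   sum is provably a multiple of 4 but of no higher power of two.  */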
6455 /* Similar, except that it is known that the expression must be a multiple
6456 of the alignment of TYPE. */
6458 static unsigned HOST_WIDE_INT
6459 highest_pow2_factor_for_type (tree type, tree exp)
6461 unsigned HOST_WIDE_INT type_align, factor;
6463 factor = highest_pow2_factor (exp);
6464 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6465 return MAX (factor, type_align);
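/* E.g., for exposition: an offset whose expression factor is only 4,
   used to address a type with 64-bit TYPE_ALIGN, yields
   MAX (4, 64 / BITS_PER_UNIT) == 8, since the expression must also be
   a multiple of the type's alignment.  */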
6468 /* Return an object on the placeholder list that matches EXP, a
6469 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6470 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6471 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6472 is a location holding the starting point in the placeholder list (zero
6473 meaning the start of the list); on return, the list position at which
6474 the object was found is stored there. */
6477 find_placeholder (tree exp, tree *plist)
6479 tree type = TREE_TYPE (exp);
6480 tree placeholder_expr;
6482 for (placeholder_expr
6483 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6484 placeholder_expr != 0;
6485 placeholder_expr = TREE_CHAIN (placeholder_expr))
6487 tree need_type = TYPE_MAIN_VARIANT (type);
6490 /* Find the outermost reference that is of the type we want. If none,
6491 see if any object has a type that is a pointer to the type we
6493 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6494 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6495 || TREE_CODE (elt) == COND_EXPR)
6496 ? TREE_OPERAND (elt, 1)
6497 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6498 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6499 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6500 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6501 ? TREE_OPERAND (elt, 0) : 0))
6502 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6505 *plist = placeholder_expr;
6509 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6511 = ((TREE_CODE (elt) == COMPOUND_EXPR
6512 || TREE_CODE (elt) == COND_EXPR)
6513 ? TREE_OPERAND (elt, 1)
6514 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6515 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6516 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6517 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6518 ? TREE_OPERAND (elt, 0) : 0))
6519 if (POINTER_TYPE_P (TREE_TYPE (elt))
6520 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6524 *plist = placeholder_expr;
6525 return build1 (INDIRECT_REF, need_type, elt);
6532 /* Subroutine of expand_expr. Expand the two operands of a binary
6533 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6534 The value may be stored in TARGET if TARGET is nonzero. The
6535 MODIFIER argument is as documented by expand_expr. */
6538 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6539 enum expand_modifier modifier)
6541 if (! safe_from_p (target, exp1, 1))
6543 if (operand_equal_p (exp0, exp1, 0))
6545 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6546 *op1 = copy_rtx (*op0);
6550 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6551 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
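/* Typical use, for exposition: binary-operator cases below call

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, 0);

   and the operand_equal_p shortcut above means an expression such as
   x + x expands its operand once and copies the resulting rtx instead
   of emitting the computation twice.  */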
6556 /* expand_expr: generate code for computing expression EXP.
6557 An rtx for the computed value is returned. The value is never null.
6558 In the case of a void EXP, const0_rtx is returned.
6560 The value may be stored in TARGET if TARGET is nonzero.
6561 TARGET is just a suggestion; callers must assume that
6562 the rtx returned may not be the same as TARGET.
6564 If TARGET is CONST0_RTX, it means that the value will be ignored.
6566 If TMODE is not VOIDmode, it suggests generating the
6567 result in mode TMODE. But this is done only when convenient.
6568 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6569 TMODE is just a suggestion; callers must assume that
6570 the rtx returned may not have mode TMODE.
6572 Note that TARGET may have neither TMODE nor MODE. In that case, it
6573 probably will not be used.
6575 If MODIFIER is EXPAND_SUM then when EXP is an addition
6576 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6577 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6578 products as above, or REG or MEM, or constant.
6579 Ordinarily in such cases we would output mul or add instructions
6580 and then return a pseudo reg containing the sum.
6582 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6583 it also marks a label as absolutely required (it can't be dead).
6584 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6585 This is used for outputting expressions used in initializers.
6587 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6588 with a constant address even if that address is not normally legitimate.
6589 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6591 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6592 a call parameter. Such targets require special care as we haven't yet
6593 marked TARGET so that it's safe from being trashed by libcalls. We
6594 don't want to use TARGET for anything but the final result;
6595 intermediate values must go elsewhere. Additionally, calls to
6596 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
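/* Minimal call sketch, for exposition: a caller that just wants the
   value of EXP in whatever register or memory is convenient writes

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   passing const0_rtx as TARGET when the value is to be ignored, and the
   EXPAND_SUM / EXPAND_INITIALIZER modifiers only in the address and
   initializer contexts described above.  */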
6599 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6600 enum expand_modifier modifier)
6603 tree type = TREE_TYPE (exp);
6604 int unsignedp = TREE_UNSIGNED (type);
6605 enum machine_mode mode;
6606 enum tree_code code = TREE_CODE (exp);
6608 rtx subtarget, original_target;
6612 /* Handle ERROR_MARK before anybody tries to access its type. */
6613 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6615 op0 = CONST0_RTX (tmode);
6621 mode = TYPE_MODE (type);
6622 /* Use subtarget as the target for operand 0 of a binary operation. */
6623 subtarget = get_subtarget (target);
6624 original_target = target;
6625 ignore = (target == const0_rtx
6626 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6627 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6628 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6629 && TREE_CODE (type) == VOID_TYPE));
6631 /* If we are going to ignore this result, we need only do something
6632 if there is a side-effect somewhere in the expression. If there
6633 is, short-circuit the most common cases here. Note that we must
6634 not call expand_expr with anything but const0_rtx in case this
6635 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6639 if (! TREE_SIDE_EFFECTS (exp))
6642 /* Ensure we reference a volatile object even if the value is ignored, but
6643 don't do this if all we are doing is taking its address. */
6644 if (TREE_THIS_VOLATILE (exp)
6645 && TREE_CODE (exp) != FUNCTION_DECL
6646 && mode != VOIDmode && mode != BLKmode
6647 && modifier != EXPAND_CONST_ADDRESS)
6649 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6650 if (GET_CODE (temp) == MEM)
6651 temp = copy_to_reg (temp);
6655 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6656 || code == INDIRECT_REF || code == BUFFER_REF)
6657 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6660 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6661 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6663 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6664 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6667 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6668 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6669 /* If the second operand has no side effects, just evaluate
6671 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6673 else if (code == BIT_FIELD_REF)
6675 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6676 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6677 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6684 #ifdef MAX_INTEGER_COMPUTATION_MODE
6685 /* Only check stuff here if the mode we want is different from the mode
6686 of the expression; if it's the same, check_max_integer_computation_mode
6687 will handle it. Do we really need to check this stuff at all? */
6690 && GET_MODE (target) != mode
6691 && TREE_CODE (exp) != INTEGER_CST
6692 && TREE_CODE (exp) != PARM_DECL
6693 && TREE_CODE (exp) != ARRAY_REF
6694 && TREE_CODE (exp) != ARRAY_RANGE_REF
6695 && TREE_CODE (exp) != COMPONENT_REF
6696 && TREE_CODE (exp) != BIT_FIELD_REF
6697 && TREE_CODE (exp) != INDIRECT_REF
6698 && TREE_CODE (exp) != CALL_EXPR
6699 && TREE_CODE (exp) != VAR_DECL
6700 && TREE_CODE (exp) != RTL_EXPR)
6702 enum machine_mode mode = GET_MODE (target);
6704 if (GET_MODE_CLASS (mode) == MODE_INT
6705 && mode > MAX_INTEGER_COMPUTATION_MODE)
6706 internal_error ("unsupported wide integer operation");
6710 && TREE_CODE (exp) != INTEGER_CST
6711 && TREE_CODE (exp) != PARM_DECL
6712 && TREE_CODE (exp) != ARRAY_REF
6713 && TREE_CODE (exp) != ARRAY_RANGE_REF
6714 && TREE_CODE (exp) != COMPONENT_REF
6715 && TREE_CODE (exp) != BIT_FIELD_REF
6716 && TREE_CODE (exp) != INDIRECT_REF
6717 && TREE_CODE (exp) != VAR_DECL
6718 && TREE_CODE (exp) != CALL_EXPR
6719 && TREE_CODE (exp) != RTL_EXPR
6720 && GET_MODE_CLASS (tmode) == MODE_INT
6721 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6722 internal_error ("unsupported wide integer operation");
6724 check_max_integer_computation_mode (exp);
6727 /* If we will do cse, generate all results into pseudo registers
6728 since 1) that allows cse to find more things
6729 and 2) otherwise cse could produce an insn the machine
6730 cannot support. An exception is a CONSTRUCTOR into a multi-word
6731 MEM: that's much more likely to be most efficient into the MEM.
6732 Another is a CALL_EXPR which must return in memory. */
6734 if (! cse_not_expected && mode != BLKmode && target
6735 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6736 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6737 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6744 tree function = decl_function_context (exp);
6745 /* Labels in containing functions, or labels used from initializers,
6747 if (modifier == EXPAND_INITIALIZER
6748 || (function != current_function_decl
6749 && function != inline_function_decl
6751 temp = force_label_rtx (exp);
6753 temp = label_rtx (exp);
6755 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6756 if (function != current_function_decl
6757 && function != inline_function_decl && function != 0)
6758 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6763 if (!DECL_RTL_SET_P (exp))
6765 error ("%Hprior parameter's size depends on '%D'",
6766 &DECL_SOURCE_LOCATION (exp), exp);
6767 return CONST0_RTX (mode);
6770 /* ... fall through ... */
6773 /* If a static var's type was incomplete when the decl was written,
6774 but the type is complete now, lay out the decl now. */
6775 if (DECL_SIZE (exp) == 0
6776 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6777 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6778 layout_decl (exp, 0);
6780 /* ... fall through ... */
6784 if (DECL_RTL (exp) == 0)
6787 /* Ensure the variable is marked as used even if it doesn't go through
6788 a parser. If it hasn't been used yet, write out an external
6790 if (! TREE_USED (exp))
6792 assemble_external (exp);
6793 TREE_USED (exp) = 1;
6796 /* Show we haven't gotten RTL for this yet. */
6799 /* Handle variables inherited from containing functions. */
6800 context = decl_function_context (exp);
6802 /* We treat inline_function_decl as an alias for the current function
6803 because that is the inline function whose vars, types, etc.
6804 are being merged into the current function.
6805 See expand_inline_function. */
6807 if (context != 0 && context != current_function_decl
6808 && context != inline_function_decl
6809 /* If var is static, we don't need a static chain to access it. */
6810 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6811 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6815 /* Mark as non-local and addressable. */
6816 DECL_NONLOCAL (exp) = 1;
6817 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6819 (*lang_hooks.mark_addressable) (exp);
6820 if (GET_CODE (DECL_RTL (exp)) != MEM)
6822 addr = XEXP (DECL_RTL (exp), 0);
6823 if (GET_CODE (addr) == MEM)
6825 = replace_equiv_address (addr,
6826 fix_lexical_addr (XEXP (addr, 0), exp));
6828 addr = fix_lexical_addr (addr, exp);
6830 temp = replace_equiv_address (DECL_RTL (exp), addr);
6833 /* This is the case of an array whose size is to be determined
6834 from its initializer, while the initializer is still being parsed.
6837 else if (GET_CODE (DECL_RTL (exp)) == MEM
6838 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6839 temp = validize_mem (DECL_RTL (exp));
6841 /* If DECL_RTL is memory, we are in the normal case and either
6842 the address is not valid or it is not a register and -fforce-addr
6843 is specified, get the address into a register. */
6845 else if (GET_CODE (DECL_RTL (exp)) == MEM
6846 && modifier != EXPAND_CONST_ADDRESS
6847 && modifier != EXPAND_SUM
6848 && modifier != EXPAND_INITIALIZER
6849 && (! memory_address_p (DECL_MODE (exp),
6850 XEXP (DECL_RTL (exp), 0))
6852 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6853 temp = replace_equiv_address (DECL_RTL (exp),
6854 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6856 /* If we got something, return it. But first, set the alignment
6857 if the address is a register. */
6860 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6861 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6866 /* If the mode of DECL_RTL does not match that of the decl, it
6867 must be a promoted value. We return a SUBREG of the wanted mode,
6868 but mark it so that we know that it was already extended. */
6870 if (GET_CODE (DECL_RTL (exp)) == REG
6871 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6873 /* Get the signedness used for this variable. Ensure we get the
6874 same mode we got when the variable was declared. */
6875 if (GET_MODE (DECL_RTL (exp))
6876 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6877 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6880 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6881 SUBREG_PROMOTED_VAR_P (temp) = 1;
6882 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6886 return DECL_RTL (exp);
6889 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6890 TREE_INT_CST_HIGH (exp), mode);
6892 /* ??? If overflow is set, fold will have done an incomplete job,
6893 which can result in (plus xx (const_int 0)), which can get
6894 simplified by validate_replace_rtx during virtual register
6895 instantiation, which can result in unrecognizable insns.
6896 Avoid this by forcing all overflows into registers. */
6897 if (TREE_CONSTANT_OVERFLOW (exp)
6898 && modifier != EXPAND_INITIALIZER)
6899 temp = force_reg (mode, temp);
6904 return const_vector_from_tree (exp);
6907 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6910 /* If optimized, generate immediate CONST_DOUBLE
6911 which will be turned into memory by reload if necessary.
6913 We used to force a register so that loop.c could see it. But
6914 this does not allow gen_* patterns to perform optimizations with
6915 the constants. It also produces two insns in cases like "x = 1.0;".
6916 On most machines, floating-point constants are not permitted in
6917 many insns, so we'd end up copying it to a register in any case.
6919 Now, we do the copying in expand_binop, if appropriate. */
6920 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6921 TYPE_MODE (TREE_TYPE (exp)));
6924 /* Handle evaluating a complex constant in a CONCAT target. */
6925 if (original_target && GET_CODE (original_target) == CONCAT)
6927 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6930 rtarg = XEXP (original_target, 0);
6931 itarg = XEXP (original_target, 1);
6933 /* Move the real and imaginary parts separately. */
6934 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6935 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6938 emit_move_insn (rtarg, op0);
6940 emit_move_insn (itarg, op1);
6942 return original_target;
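/* E.g., for exposition: expanding a DCmode complex constant into a
   (concat r i) target emits one DFmode move per part and returns the
   CONCAT itself, rather than spilling the constant through memory.  */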
6945 /* ... fall through ... */
6948 temp = output_constant_def (exp, 1);
6950 /* temp contains a constant address.
6951 On RISC machines where a constant address isn't valid,
6952 make some insns to get that address into a register. */
6953 if (modifier != EXPAND_CONST_ADDRESS
6954 && modifier != EXPAND_INITIALIZER
6955 && modifier != EXPAND_SUM
6956 && (! memory_address_p (mode, XEXP (temp, 0))
6957 || flag_force_addr))
6958 return replace_equiv_address (temp,
6959 copy_rtx (XEXP (temp, 0)));
6962 case EXPR_WITH_FILE_LOCATION:
6965 struct file_stack fs;
6967 fs.location = input_location;
6968 fs.next = expr_wfl_stack;
6969 input_filename = EXPR_WFL_FILENAME (exp);
6970 input_line = EXPR_WFL_LINENO (exp);
6971 expr_wfl_stack = &fs;
6972 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6973 emit_line_note (input_location);
6974 /* Possibly avoid switching back and forth here. */
6975 to_return = expand_expr (EXPR_WFL_NODE (exp),
6976 (ignore ? const0_rtx : target),
6978 if (expr_wfl_stack != &fs)
6980 input_location = fs.location;
6981 expr_wfl_stack = fs.next;
6986 context = decl_function_context (exp);
6988 /* If this SAVE_EXPR was at global context, assume we are an
6989 initialization function and move it into our context. */
6991 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6993 /* We treat inline_function_decl as an alias for the current function
6994 because that is the inline function whose vars, types, etc.
6995 are being merged into the current function.
6996 See expand_inline_function. */
6997 if (context == current_function_decl || context == inline_function_decl)
7000 /* If this is non-local, handle it. */
7003 /* The following call just exists to abort if the context is
7004 not of a containing function. */
7005 find_function_data (context);
7007 temp = SAVE_EXPR_RTL (exp);
7008 if (temp && GET_CODE (temp) == REG)
7010 put_var_into_stack (exp, /*rescan=*/true);
7011 temp = SAVE_EXPR_RTL (exp);
7013 if (temp == 0 || GET_CODE (temp) != MEM)
7016 replace_equiv_address (temp,
7017 fix_lexical_addr (XEXP (temp, 0), exp));
7019 if (SAVE_EXPR_RTL (exp) == 0)
7021 if (mode == VOIDmode)
7024 temp = assign_temp (build_qualified_type (type,
7026 | TYPE_QUAL_CONST)),
7029 SAVE_EXPR_RTL (exp) = temp;
7030 if (!optimize && GET_CODE (temp) == REG)
7031 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7034 /* If the mode of TEMP does not match that of the expression, it
7035 must be a promoted value. We pass store_expr a SUBREG of the
7036 wanted mode but mark it so that we know that it was already
7039 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7041 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7042 promote_mode (type, mode, &unsignedp, 0);
7043 SUBREG_PROMOTED_VAR_P (temp) = 1;
7044 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7047 if (temp == const0_rtx)
7048 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7050 store_expr (TREE_OPERAND (exp, 0), temp,
7051 modifier == EXPAND_STACK_PARM ? 2 : 0);
7053 TREE_USED (exp) = 1;
7056 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7057 must be a promoted value. We return a SUBREG of the wanted mode,
7058 but mark it so that we know that it was already extended. */
7060 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7061 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7063 /* Compute the signedness and make the proper SUBREG. */
7064 promote_mode (type, mode, &unsignedp, 0);
7065 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7066 SUBREG_PROMOTED_VAR_P (temp) = 1;
7067 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7071 return SAVE_EXPR_RTL (exp);
7076 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7077 TREE_OPERAND (exp, 0)
7078 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7082 case PLACEHOLDER_EXPR:
7084 tree old_list = placeholder_list;
7085 tree placeholder_expr = 0;
7087 exp = find_placeholder (exp, &placeholder_expr);
7091 placeholder_list = TREE_CHAIN (placeholder_expr);
7092 temp = expand_expr (exp, original_target, tmode, modifier);
7093 placeholder_list = old_list;
7097 case WITH_RECORD_EXPR:
7098 /* Put the object on the placeholder list, expand our first operand,
7099 and pop the list. */
7100 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7102 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7104 placeholder_list = TREE_CHAIN (placeholder_list);
7108 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7109 expand_goto (TREE_OPERAND (exp, 0));
7111 expand_computed_goto (TREE_OPERAND (exp, 0));
7115 expand_exit_loop_if_false (NULL,
7116 invert_truthvalue (TREE_OPERAND (exp, 0)));
7119 case LABELED_BLOCK_EXPR:
7120 if (LABELED_BLOCK_BODY (exp))
7121 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7122 /* Should perhaps use expand_label, but this is simpler and safer. */
7123 do_pending_stack_adjust ();
7124 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7127 case EXIT_BLOCK_EXPR:
7128 if (EXIT_BLOCK_RETURN (exp))
7129 sorry ("returned value in block_exit_expr");
7130 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7135 expand_start_loop (1);
7136 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7144 tree vars = TREE_OPERAND (exp, 0);
7146 /* Need to open a binding contour here because
7147 if there are any cleanups they must be contained here. */
7148 expand_start_bindings (2);
7150 /* Mark the corresponding BLOCK for output in its proper place. */
7151 if (TREE_OPERAND (exp, 2) != 0
7152 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7153 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7155 /* If VARS have not yet been expanded, expand them now. */
7158 if (!DECL_RTL_SET_P (vars))
7160 expand_decl_init (vars);
7161 vars = TREE_CHAIN (vars);
7164 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7166 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7172 if (RTL_EXPR_SEQUENCE (exp))
7174 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7176 emit_insn (RTL_EXPR_SEQUENCE (exp));
7177 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7179 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7180 free_temps_for_rtl_expr (exp);
7181 return RTL_EXPR_RTL (exp);
7184 /* If we don't need the result, just ensure we evaluate any
7190 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7191 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7196 /* All elts simple constants => refer to a constant in memory. But
7197 if this is a non-BLKmode mode, let it store a field at a time
7198 since that should make a CONST_INT or CONST_DOUBLE when we
7199 fold. Likewise, if we have a target we can use, it is best to
7200 store directly into the target unless the type is large enough
7201 that memcpy will be used. If we are making an initializer and
7202 all operands are constant, put it in memory as well.
7204 FIXME: Avoid trying to fill vector constructors piecemeal.
7205 Output them with output_constant_def below unless we're sure
7206 they're zeros. This should go away when vector initializers
7207 are treated like VECTOR_CST instead of arrays.
7209 else if ((TREE_STATIC (exp)
7210 && ((mode == BLKmode
7211 && ! (target != 0 && safe_from_p (target, exp, 1)))
7212 || TREE_ADDRESSABLE (exp)
7213 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7214 && (! MOVE_BY_PIECES_P
7215 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7217 && ((TREE_CODE (type) == VECTOR_TYPE
7218 && !is_zeros_p (exp))
7219 || ! mostly_zeros_p (exp)))))
7220 || ((modifier == EXPAND_INITIALIZER
7221 || modifier == EXPAND_CONST_ADDRESS)
7222 && TREE_CONSTANT (exp)))
7224 rtx constructor = output_constant_def (exp, 1);
7226 if (modifier != EXPAND_CONST_ADDRESS
7227 && modifier != EXPAND_INITIALIZER
7228 && modifier != EXPAND_SUM)
7229 constructor = validize_mem (constructor);
7235 /* Handle calls that pass values in multiple non-contiguous
7236 locations. The Irix 6 ABI has examples of this. */
7237 if (target == 0 || ! safe_from_p (target, exp, 1)
7238 || GET_CODE (target) == PARALLEL
7239 || modifier == EXPAND_STACK_PARM)
7241 = assign_temp (build_qualified_type (type,
7243 | (TREE_READONLY (exp)
7244 * TYPE_QUAL_CONST))),
7245 0, TREE_ADDRESSABLE (exp), 1);
7247 store_constructor (exp, target, 0, int_expr_size (exp));
7253 tree exp1 = TREE_OPERAND (exp, 0);
7255 tree string = string_constant (exp1, &index);
7257 /* Try to optimize reads from const strings. */
7259 && TREE_CODE (string) == STRING_CST
7260 && TREE_CODE (index) == INTEGER_CST
7261 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7262 && GET_MODE_CLASS (mode) == MODE_INT
7263 && GET_MODE_SIZE (mode) == 1
7264 && modifier != EXPAND_WRITE)
7265 return gen_int_mode (TREE_STRING_POINTER (string)
7266 [TREE_INT_CST_LOW (index)], mode);
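/* E.g., for exposition: reading *("abc" + 1) in QImode satisfies all
   the tests above and folds directly to (const_int 98), the code for
   'b', with no memory reference emitted.  */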
7268 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7269 op0 = memory_address (mode, op0);
7270 temp = gen_rtx_MEM (mode, op0);
7271 set_mem_attributes (temp, exp, 0);
7273 /* If we are writing to this object and its type is a record with
7274 readonly fields, we must mark it as readonly so it will
7275 conflict with readonly references to those fields. */
7276 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7277 RTX_UNCHANGING_P (temp) = 1;
7283 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7287 tree array = TREE_OPERAND (exp, 0);
7288 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7289 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7290 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7293 /* Optimize the special case of a zero lower bound.
7295 We convert the low_bound to sizetype to avoid some problems
7296 with constant folding. (E.g. suppose the lower bound is 1,
7297 and its mode is QI. Without the conversion, (ARRAY
7298 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7299 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7301 if (! integer_zerop (low_bound))
7302 index = size_diffop (index, convert (sizetype, low_bound));
7304 /* Fold an expression like: "foo"[2].
7305 This is not done in fold so it won't happen inside &.
7306 Don't fold if this is for wide characters since it's too
7307 difficult to do correctly and this is a very rare case. */
7309 if (modifier != EXPAND_CONST_ADDRESS
7310 && modifier != EXPAND_INITIALIZER
7311 && modifier != EXPAND_MEMORY
7312 && TREE_CODE (array) == STRING_CST
7313 && TREE_CODE (index) == INTEGER_CST
7314 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7315 && GET_MODE_CLASS (mode) == MODE_INT
7316 && GET_MODE_SIZE (mode) == 1)
7317 return gen_int_mode (TREE_STRING_POINTER (array)
7318 [TREE_INT_CST_LOW (index)], mode);
7320 /* If this is a constant index into a constant array,
7321 just get the value from the array. Handle both the cases when
7322 we have an explicit constructor and when our operand is a variable
7323 that was declared const. */
7325 if (modifier != EXPAND_CONST_ADDRESS
7326 && modifier != EXPAND_INITIALIZER
7327 && modifier != EXPAND_MEMORY
7328 && TREE_CODE (array) == CONSTRUCTOR
7329 && ! TREE_SIDE_EFFECTS (array)
7330 && TREE_CODE (index) == INTEGER_CST
7331 && 0 > compare_tree_int (index,
7332 list_length (CONSTRUCTOR_ELTS
7333 (TREE_OPERAND (exp, 0)))))
7337 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7338 i = TREE_INT_CST_LOW (index);
7339 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7343 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7347 else if (optimize >= 1
7348 && modifier != EXPAND_CONST_ADDRESS
7349 && modifier != EXPAND_INITIALIZER
7350 && modifier != EXPAND_MEMORY
7351 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7352 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7353 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7355 if (TREE_CODE (index) == INTEGER_CST)
7357 tree init = DECL_INITIAL (array);
7359 if (TREE_CODE (init) == CONSTRUCTOR)
7363 for (elem = CONSTRUCTOR_ELTS (init);
7365 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7366 elem = TREE_CHAIN (elem))
7369 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7370 return expand_expr (fold (TREE_VALUE (elem)), target,
7373 else if (TREE_CODE (init) == STRING_CST
7374 && 0 > compare_tree_int (index,
7375 TREE_STRING_LENGTH (init)))
7377 tree type = TREE_TYPE (TREE_TYPE (init));
7378 enum machine_mode mode = TYPE_MODE (type);
7380 if (GET_MODE_CLASS (mode) == MODE_INT
7381 && GET_MODE_SIZE (mode) == 1)
7382 return gen_int_mode (TREE_STRING_POINTER (init)
7383 [TREE_INT_CST_LOW (index)], mode);
7388 goto normal_inner_ref;
7391 /* If the operand is a CONSTRUCTOR, we can just extract the
7392 appropriate field if it is present. */
7393 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7397 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7398 elt = TREE_CHAIN (elt))
7399 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7400 /* We can normally use the value of the field in the
7401 CONSTRUCTOR. However, if this is a bitfield in
7402 an integral mode that we can fit in a HOST_WIDE_INT,
7403 we must mask only the number of bits in the bitfield,
7404 since this is done implicitly by the constructor. If
7405 the bitfield does not meet either of those conditions,
7406 we can't do this optimization. */
7407 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7408 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7410 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7411 <= HOST_BITS_PER_WIDE_INT))))
7413 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7414 && modifier == EXPAND_STACK_PARM)
7416 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7417 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7419 HOST_WIDE_INT bitsize
7420 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7421 enum machine_mode imode
7422 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7424 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7426 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7427 op0 = expand_and (imode, op0, op1, target);
7432 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7435 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7437 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7445 goto normal_inner_ref;
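/* Worked example, for exposition: a signed 3-bit field whose value
   arrives in SImode is shifted left by 32 - 3 == 29 and then
   arithmetic-shifted right by 29, reproducing the sign extension a
   stored constructor would have applied implicitly; the unsigned case
   instead masks with (1 << 3) - 1 as above.  */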
7448 case ARRAY_RANGE_REF:
7451 enum machine_mode mode1;
7452 HOST_WIDE_INT bitsize, bitpos;
7455 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7456 &mode1, &unsignedp, &volatilep);
7459 /* If we got back the original object, something is wrong. Perhaps
7460 we are evaluating an expression too early. In any event, don't
7461 infinitely recurse. */
7465 /* If TEM's type is a union of variable size, pass TARGET to the inner
7466 computation, since it will need a temporary and TARGET is known
7467 to suffice. This occurs in unchecked conversion in Ada. */
7471 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7472 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7474 && modifier != EXPAND_STACK_PARM
7475 ? target : NULL_RTX),
7477 (modifier == EXPAND_INITIALIZER
7478 || modifier == EXPAND_CONST_ADDRESS
7479 || modifier == EXPAND_STACK_PARM)
7480 ? modifier : EXPAND_NORMAL);
7482 /* If this is a constant, put it into a register if it is a
7483 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7484 if (CONSTANT_P (op0))
7486 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7487 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7489 op0 = force_reg (mode, op0);
7491 op0 = validize_mem (force_const_mem (mode, op0));
7494 /* Otherwise, if this object is not in memory and we either have an
7495 offset or a BLKmode result, put it there. This case can't occur in
7496 C, but can in Ada if we have unchecked conversion of an expression
7497 from a scalar type to an array or record type or for an
7498 ARRAY_RANGE_REF whose type is BLKmode. */
7499 else if (GET_CODE (op0) != MEM
7501 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7503 /* If the operand is a SAVE_EXPR, we can deal with this by
7504 forcing the SAVE_EXPR into memory. */
7505 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7507 put_var_into_stack (TREE_OPERAND (exp, 0),
7509 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7514 = build_qualified_type (TREE_TYPE (tem),
7515 (TYPE_QUALS (TREE_TYPE (tem))
7516 | TYPE_QUAL_CONST));
7517 rtx memloc = assign_temp (nt, 1, 1, 1);
7519 emit_move_insn (memloc, op0);
7526 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7529 if (GET_CODE (op0) != MEM)
7532 #ifdef POINTERS_EXTEND_UNSIGNED
7533 if (GET_MODE (offset_rtx) != Pmode)
7534 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7536 if (GET_MODE (offset_rtx) != ptr_mode)
7537 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7540 /* A constant address in OP0 can have VOIDmode; we must not try
7541 to call force_reg for that case, so avoid it. */
7542 if (GET_CODE (op0) == MEM
7543 && GET_MODE (op0) == BLKmode
7544 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7546 && (bitpos % bitsize) == 0
7547 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7548 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7550 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7554 op0 = offset_address (op0, offset_rtx,
7555 highest_pow2_factor (offset));
7558 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7559 record its alignment as BIGGEST_ALIGNMENT. */
7560 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7561 && is_aligning_offset (offset, tem))
7562 set_mem_align (op0, BIGGEST_ALIGNMENT);
7564 /* Don't forget about volatility even if this is a bitfield. */
7565 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7567 if (op0 == orig_op0)
7568 op0 = copy_rtx (op0);
7570 MEM_VOLATILE_P (op0) = 1;
7573 /* The following code doesn't handle CONCAT.
7574 Assume only bitpos == 0 can be used for CONCAT, due to
7575 one-element arrays having the same mode as their element. */
7576 if (GET_CODE (op0) == CONCAT)
7578 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7583 /* In cases where an aligned union has an unaligned object
7584 as a field, we might be extracting a BLKmode value from
7585 an integer-mode (e.g., SImode) object. Handle this case
7586 by doing the extract into an object as wide as the field
7587 (which we know to be the width of a basic mode), then
7588 storing into memory, and changing the mode to BLKmode. */
7589 if (mode1 == VOIDmode
7590 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7591 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7592 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7593 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7594 && modifier != EXPAND_CONST_ADDRESS
7595 && modifier != EXPAND_INITIALIZER)
7596 /* If the field isn't aligned enough to fetch as a memref,
7597 fetch it as a bit field. */
7598 || (mode1 != BLKmode
7599 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7600 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7601 && ((modifier == EXPAND_CONST_ADDRESS
7602 || modifier == EXPAND_INITIALIZER)
7604 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7605 || (bitpos % BITS_PER_UNIT != 0)))
7606 /* If the type and the field are a constant size and the
7607 size of the type isn't the same size as the bitfield,
7608 we must use bitfield operations. */
7610 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7612 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7615 enum machine_mode ext_mode = mode;
7617 if (ext_mode == BLKmode
7618 && ! (target != 0 && GET_CODE (op0) == MEM
7619 && GET_CODE (target) == MEM
7620 && bitpos % BITS_PER_UNIT == 0))
7621 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7623 if (ext_mode == BLKmode)
7625 /* In this case, BITPOS must start at a byte boundary and
7626 TARGET, if specified, must be a MEM. */
7627 if (GET_CODE (op0) != MEM
7628 || (target != 0 && GET_CODE (target) != MEM)
7629 || bitpos % BITS_PER_UNIT != 0)
7632 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7634 target = assign_temp (type, 0, 1, 1);
7636 emit_block_move (target, op0,
7637 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7639 (modifier == EXPAND_STACK_PARM
7640 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7645 op0 = validize_mem (op0);
7647 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7648 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7650 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7651 (modifier == EXPAND_STACK_PARM
7652 ? NULL_RTX : target),
7654 int_size_in_bytes (TREE_TYPE (tem)));
7656 /* If the result is a record type and BITSIZE is narrower than
7657 the mode of OP0, an integral mode, and this is a big endian
7658 machine, we must put the field into the high-order bits. */
7659 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7660 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7661 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7662 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7663 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7667 if (mode == BLKmode)
7669 rtx new = assign_temp (build_qualified_type
7670 ((*lang_hooks.types.type_for_mode)
7672 TYPE_QUAL_CONST), 0, 1, 1);
7674 emit_move_insn (new, op0);
7675 op0 = copy_rtx (new);
7676 PUT_MODE (op0, BLKmode);
7677 set_mem_attributes (op0, exp, 1);
7683 /* If the result is BLKmode, use that to access the object
7685 if (mode == BLKmode)
7688 /* Get a reference to just this component. */
7689 if (modifier == EXPAND_CONST_ADDRESS
7690 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7691 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7693 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7695 if (op0 == orig_op0)
7696 op0 = copy_rtx (op0);
7698 set_mem_attributes (op0, exp, 0);
7699 if (GET_CODE (XEXP (op0, 0)) == REG)
7700 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7702 MEM_VOLATILE_P (op0) |= volatilep;
7703 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7704 || modifier == EXPAND_CONST_ADDRESS
7705 || modifier == EXPAND_INITIALIZER)
7707 else if (target == 0)
7708 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7710 convert_move (target, op0, unsignedp);
7716 rtx insn, before = get_last_insn (), vtbl_ref;
7718 /* Evaluate the interior expression. */
7719 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7722 /* Get or create an instruction off which to hang a note. */
7723 if (REG_P (subtarget))
7726 insn = get_last_insn ();
7729 if (! INSN_P (insn))
7730 insn = prev_nonnote_insn (insn);
7734 target = gen_reg_rtx (GET_MODE (subtarget));
7735 insn = emit_move_insn (target, subtarget);
7738 /* Collect the data for the note. */
7739 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7740 vtbl_ref = plus_constant (vtbl_ref,
7741 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7742 /* Discard the initial CONST that was added. */
7743 vtbl_ref = XEXP (vtbl_ref, 0);
7746 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7751 /* Intended for a reference to a buffer of a file-object in Pascal.
7752 But it's not certain that a special tree code will really be
7753 necessary for these. INDIRECT_REF might work for them. */
7759 /* Pascal set IN expression.
7762 rlo = set_low - (set_low%bits_per_word);
7763 the_word = set [ (index - rlo)/bits_per_word ];
7764 bit_index = index % bits_per_word;
7765 bitmask = 1 << bit_index;
7766 return !!(the_word & bitmask); */
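/* The same test written out as plain C over a byte array, for
   exposition only (set_in is a hypothetical helper; the expansion
   below works in BITS_PER_UNIT-sized chunks rather than words):

     static int
     set_in (const unsigned char *set, long set_low, long index)
     {
       long rlo = set_low - (set_low % 8);
       unsigned char the_word = set[(index - rlo) / 8];
       return (the_word >> (index % 8)) & 1;
     }
*/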
7768 tree set = TREE_OPERAND (exp, 0);
7769 tree index = TREE_OPERAND (exp, 1);
7770 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7771 tree set_type = TREE_TYPE (set);
7772 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7773 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7774 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7775 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7776 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7777 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7778 rtx setaddr = XEXP (setval, 0);
7779 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7781 rtx diff, quo, rem, addr, bit, result;
7783 /* If domain is empty, answer is no. Likewise if index is constant
7784 and out of bounds. */
7785 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7786 && TREE_CODE (set_low_bound) == INTEGER_CST
7787 && tree_int_cst_lt (set_high_bound, set_low_bound))
7788 || (TREE_CODE (index) == INTEGER_CST
7789 && TREE_CODE (set_low_bound) == INTEGER_CST
7790 && tree_int_cst_lt (index, set_low_bound))
7791 || (TREE_CODE (set_high_bound) == INTEGER_CST
7792 && TREE_CODE (index) == INTEGER_CST
7793 && tree_int_cst_lt (set_high_bound, index))))
7797 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7799 /* If we get here, we have to generate the code for both cases
7800 (in range and out of range). */
7802 op0 = gen_label_rtx ();
7803 op1 = gen_label_rtx ();
7805 if (! (GET_CODE (index_val) == CONST_INT
7806 && GET_CODE (lo_r) == CONST_INT))
7807 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7808 GET_MODE (index_val), iunsignedp, op1);
7810 if (! (GET_CODE (index_val) == CONST_INT
7811 && GET_CODE (hi_r) == CONST_INT))
7812 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7813 GET_MODE (index_val), iunsignedp, op1);
7815 /* Calculate the element number of bit zero in the first word
7817 if (GET_CODE (lo_r) == CONST_INT)
7818 rlow = GEN_INT (INTVAL (lo_r)
7819 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7821 rlow = expand_binop (index_mode, and_optab, lo_r,
7822 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7823 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7825 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7826 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7828 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7829 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7830 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7831 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7833 addr = memory_address (byte_mode,
7834 expand_binop (index_mode, add_optab, diff,
7835 setaddr, NULL_RTX, iunsignedp,
7838 /* Extract the bit we want to examine. */
7839 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7840 gen_rtx_MEM (byte_mode, addr),
7841 make_tree (TREE_TYPE (index), rem),
7843 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7844 GET_MODE (target) == byte_mode ? target : 0,
7845 1, OPTAB_LIB_WIDEN);
7847 if (result != target)
7848 convert_move (target, result, 1);
7850 /* Output the code to handle the out-of-range case. */
7853 emit_move_insn (target, const0_rtx);
7858 case WITH_CLEANUP_EXPR:
7859 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7861 WITH_CLEANUP_EXPR_RTL (exp)
7862 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7863 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7864 CLEANUP_EH_ONLY (exp));
7866 /* That's it for this cleanup. */
7867 TREE_OPERAND (exp, 1) = 0;
7869 return WITH_CLEANUP_EXPR_RTL (exp);
7871 case CLEANUP_POINT_EXPR:
7873 /* Start a new binding layer that will keep track of all cleanup
7874 actions to be performed. */
7875 expand_start_bindings (2);
7877 target_temp_slot_level = temp_slot_level;
7879 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7880 /* If we're going to use this value, load it up now. */
7882 op0 = force_not_mem (op0);
7883 preserve_temp_slots (op0);
7884 expand_end_bindings (NULL_TREE, 0, 0);
7889 /* Check for a built-in function. */
7890 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7891 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7893 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7895 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7896 == BUILT_IN_FRONTEND)
7897 return (*lang_hooks.expand_expr) (exp, original_target,
7900 return expand_builtin (exp, target, subtarget, tmode, ignore);
7903 return expand_call (exp, target, ignore);
7905 case NON_LVALUE_EXPR:
7908 case REFERENCE_EXPR:
7909 if (TREE_OPERAND (exp, 0) == error_mark_node)
7912 if (TREE_CODE (type) == UNION_TYPE)
7914 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7916 /* If both input and output are BLKmode, this conversion isn't doing
7917 anything except possibly changing the memory attributes. */
7918 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7920 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7923 result = copy_rtx (result);
7924 set_mem_attributes (result, exp, 0);
7929 target = assign_temp (type, 0, 1, 1);
7931 if (GET_CODE (target) == MEM)
7932 /* Store data into beginning of memory target. */
7933 store_expr (TREE_OPERAND (exp, 0),
7934 adjust_address (target, TYPE_MODE (valtype), 0),
7935 modifier == EXPAND_STACK_PARM ? 2 : 0);
7937 else if (GET_CODE (target) == REG)
7938 /* Store this field into a union of the proper type. */
7939 store_field (target,
7940 MIN ((int_size_in_bytes (TREE_TYPE
7941 (TREE_OPERAND (exp, 0)))
7943 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7944 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7945 VOIDmode, 0, type, 0);
7949 /* Return the entire union. */
7953 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7955 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7958 /* If the signedness of the conversion differs and OP0 is
7959 a promoted SUBREG, clear that indication since we now
7960 have to do the proper extension. */
7961 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7962 && GET_CODE (op0) == SUBREG)
7963 SUBREG_PROMOTED_VAR_P (op0) = 0;
7968 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7969 if (GET_MODE (op0) == mode)
7972 /* If OP0 is a constant, just convert it into the proper mode. */
7973 if (CONSTANT_P (op0))
7975 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7976 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7978 if (modifier == EXPAND_INITIALIZER)
7979 return simplify_gen_subreg (mode, op0, inner_mode,
7980 subreg_lowpart_offset (mode,
7983 return convert_modes (mode, inner_mode, op0,
7984 TREE_UNSIGNED (inner_type));
7987 if (modifier == EXPAND_INITIALIZER)
7988 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7992 convert_to_mode (mode, op0,
7993 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7995 convert_move (target, op0,
7996 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7999 case VIEW_CONVERT_EXPR:
8000 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8002 /* If the input and output modes are both the same, we are done.
8003 Otherwise, if neither mode is BLKmode and both are integral and within
8004 a word, we can use gen_lowpart. If neither is true, make sure the
8005 operand is in memory and convert the MEM to the new mode. */
8006 if (TYPE_MODE (type) == GET_MODE (op0))
8008 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8009 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8010 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
8011 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
8012 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
8013 op0 = gen_lowpart (TYPE_MODE (type), op0);
8014 else if (GET_CODE (op0) != MEM)
8016 /* If the operand is not a MEM, force it into memory. Since we
8017 are going to be changing the mode of the MEM, don't call
8018 force_const_mem for constants because we don't allow pool
8019 constants to change mode. */
8020 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8022 if (TREE_ADDRESSABLE (exp))
8025 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8027 = assign_stack_temp_for_type
8028 (TYPE_MODE (inner_type),
8029 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8031 emit_move_insn (target, op0);
8035 /* At this point, OP0 is in the correct mode. If the output type is such
8036 that the operand is known to be aligned, indicate that it is.
8037 Otherwise, we need only be concerned about alignment for non-BLKmode
8039 if (GET_CODE (op0) == MEM)
8041 op0 = copy_rtx (op0);
8043 if (TYPE_ALIGN_OK (type))
8044 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8045 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8046 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8048 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8049 HOST_WIDE_INT temp_size
8050 = MAX (int_size_in_bytes (inner_type),
8051 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8052 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8053 temp_size, 0, type);
8054 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8056 if (TREE_ADDRESSABLE (exp))
8059 if (GET_MODE (op0) == BLKmode)
8060 emit_block_move (new_with_op0_mode, op0,
8061 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8062 (modifier == EXPAND_STACK_PARM
8063 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8065 emit_move_insn (new_with_op0_mode, op0);
8070 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8076 this_optab = ! unsignedp && flag_trapv
8077 && (GET_MODE_CLASS (mode) == MODE_INT)
8078 ? addv_optab : add_optab;
8080 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8081 something else, make sure we add the register to the constant and
8082 then to the other thing. This case can occur during strength
8083 reduction and doing it this way will produce better code if the
8084 frame pointer or argument pointer is eliminated.
8086 fold-const.c will ensure that the constant is always in the inner
8087 PLUS_EXPR, so the only case we need to do anything about is if
8088 sp, ap, or fp is our second argument, in which case we must swap
8089 the innermost first argument and our second argument. */
8091 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8092 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8093 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8094 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8095 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8096 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8098 tree t = TREE_OPERAND (exp, 1);
8100 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8101 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8104 /* If the result is to be ptr_mode and we are adding an integer to
8105 something, we might be forming a constant. So try to use
8106 plus_constant. If it produces a sum and we can't accept it,
8107 use force_operand. This allows P = &ARR[const] to generate
8108 efficient code on machines where a SYMBOL_REF is not a valid
8111 If this is an EXPAND_SUM call, always return the sum. */
8112 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8113 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8115 if (modifier == EXPAND_STACK_PARM)
8117 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8118 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8119 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8123 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8125 /* Use immed_double_const to ensure that the constant is
8126 truncated according to the mode of OP1, then sign extended
8127 to a HOST_WIDE_INT. Using the constant directly can result
8128 in non-canonical RTL in a 64x32 cross compile. */
8130 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8132 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8133 op1 = plus_constant (op1, INTVAL (constant_part));
8134 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8135 op1 = force_operand (op1, target);
8139 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8140 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8141 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8145 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8146 (modifier == EXPAND_INITIALIZER
8147 ? EXPAND_INITIALIZER : EXPAND_SUM));
8148 if (! CONSTANT_P (op0))
8150 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8151 VOIDmode, modifier);
8152 /* Return a PLUS if modifier says it's OK. */
8153 if (modifier == EXPAND_SUM
8154 || modifier == EXPAND_INITIALIZER)
8155 return simplify_gen_binary (PLUS, mode, op0, op1);
8158 /* Use immed_double_const to ensure that the constant is
8159 truncated according to the mode of OP1, then sign extended
8160 to a HOST_WIDE_INT. Using the constant directly can result
8161 in non-canonical RTL in a 64x32 cross compile. */
8163 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8165 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8166 op0 = plus_constant (op0, INTVAL (constant_part));
8167 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8168 op0 = force_operand (op0, target);
8169 return op0;
8170 }
8171 }
8173 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8174 subtarget = 0;
8176 /* No sense saving up arithmetic to be done
8177 if it's all in the wrong mode to form part of an address.
8178 And force_operand won't know whether to sign-extend or
8179 truncate. */
8180 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8181 || mode != ptr_mode)
8182 {
8183 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8184 subtarget, &op0, &op1, 0);
8185 if (op0 == const0_rtx)
8186 return op1;
8187 if (op1 == const0_rtx)
8188 return op0;
8189 goto binop2;
8190 }
8192 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8193 subtarget, &op0, &op1, modifier);
8194 return simplify_gen_binary (PLUS, mode, op0, op1);
8196 case MINUS_EXPR:
8197 /* For initializers, we are allowed to return a MINUS of two
8198 symbolic constants. Here we handle all cases when both operands
8199 are constant. */
8200 /* Handle difference of two symbolic constants,
8201 for the sake of an initializer. */
8202 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8203 && really_constant_p (TREE_OPERAND (exp, 0))
8204 && really_constant_p (TREE_OPERAND (exp, 1)))
8205 {
8206 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8207 NULL_RTX, &op0, &op1, modifier);
8209 /* If the last operand is a CONST_INT, use plus_constant of
8210 the negated constant. Else make the MINUS. */
8211 if (GET_CODE (op1) == CONST_INT)
8212 return plus_constant (op0, - INTVAL (op1));
8213 else
8214 return gen_rtx_MINUS (mode, op0, op1);
8215 }
8217 this_optab = ! unsignedp && flag_trapv
8218 && (GET_MODE_CLASS(mode) == MODE_INT)
8219 ? subv_optab : sub_optab;
8221 /* No sense saving up arithmetic to be done
8222 if it's all in the wrong mode to form part of an address.
8223 And force_operand won't know whether to sign-extend or
8224 truncate. */
8225 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8226 || mode != ptr_mode)
8227 goto binop;
8229 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8230 subtarget, &op0, &op1, modifier);
8232 /* Convert A - const to A + (-const). */
8233 if (GET_CODE (op1) == CONST_INT)
8234 {
8235 op1 = negate_rtx (mode, op1);
8236 return simplify_gen_binary (PLUS, mode, op0, op1);
8237 }
8239 goto binop2;
8241 case MULT_EXPR:
8242 /* If first operand is constant, swap them.
8243 Thus the following special case checks need only
8244 check the second operand. */
8245 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8246 {
8247 tree t1 = TREE_OPERAND (exp, 0);
8248 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8249 TREE_OPERAND (exp, 1) = t1;
8250 }
8252 /* Attempt to return something suitable for generating an
8253 indexed address, for machines that support that. */
8255 if (modifier == EXPAND_SUM && mode == ptr_mode
8256 && host_integerp (TREE_OPERAND (exp, 1), 0))
8257 {
8258 tree exp1 = TREE_OPERAND (exp, 1);
8260 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8261 EXPAND_SUM);
8263 /* If we knew for certain that this is arithmetic for an array
8264 reference, and we knew the bounds of the array, then we could
8265 apply the distributive law across (PLUS X C) for constant C.
8266 Without such knowledge, we risk overflowing the computation
8267 when both X and C are large, but X+C isn't. */
8268 /* ??? Could perhaps special-case EXP being unsigned and C being
8269 positive. In that case we are certain that X+C is no smaller
8270 than X and so the transformed expression will overflow iff the
8271 original would have. */
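/* Concrete instance of the hazard: for (X + C) * 4 with X == -C + 2,
   the sum X + C is tiny, yet the distributed form X*4 + C*4 computes
   X*4 and C*4 separately, either of which can overflow on its own
   when X and C are large in magnitude.  */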
8273 if (GET_CODE (op0) != REG)
8274 op0 = force_operand (op0, NULL_RTX);
8275 if (GET_CODE (op0) != REG)
8276 op0 = copy_to_mode_reg (mode, op0);
8278 return gen_rtx_MULT (mode, op0,
8279 gen_int_mode (tree_low_cst (exp1, 0),
8280 TYPE_MODE (TREE_TYPE (exp1))));
8281 }
8283 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8284 subtarget = 0;
8286 if (modifier == EXPAND_STACK_PARM)
8287 target = 0;
8289 /* Check for multiplying things that have been extended
8290 from a narrower type. If this machine supports multiplying
8291 in that narrower type with a result in the desired type,
8292 do it that way, and avoid the explicit type-conversion. */
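/* For example, on a 32-bit target that provides a widening multiply
   pattern such as mulsidi3,

     long long ll = (long long) i * (long long) j;

   with int i, j expands to one SImode x SImode -> DImode multiply
   instead of two extensions followed by a full DImode multiply.  */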
8293 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8294 && TREE_CODE (type) == INTEGER_TYPE
8295 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8296 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8297 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8298 && int_fits_type_p (TREE_OPERAND (exp, 1),
8299 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8300 /* Don't use a widening multiply if a shift will do. */
8301 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8302 > HOST_BITS_PER_WIDE_INT)
8303 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8304 ||
8305 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8306 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8307 ==
8308 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8309 /* If both operands are extended, they must either both
8310 be zero-extended or both be sign-extended. */
8311 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8312 ==
8313 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8314 {
8315 enum machine_mode innermode
8316 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8317 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8318 ? smul_widen_optab : umul_widen_optab);
8319 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8320 ? umul_widen_optab : smul_widen_optab);
8321 if (mode == GET_MODE_WIDER_MODE (innermode))
8322 {
8323 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8324 {
8325 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8326 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8327 TREE_OPERAND (exp, 1),
8328 NULL_RTX, &op0, &op1, 0);
8329 else
8330 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8331 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8332 NULL_RTX, &op0, &op1, 0);
8333 goto binop2;
8334 }
8335 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8336 && innermode == word_mode)
8337 {
8338 rtx htem;
8339 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8340 NULL_RTX, VOIDmode, 0);
8341 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8342 op1 = convert_modes (innermode, mode,
8343 expand_expr (TREE_OPERAND (exp, 1),
8344 NULL_RTX, VOIDmode, 0),
8345 unsignedp);
8346 else
8347 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8348 NULL_RTX, VOIDmode, 0);
8349 temp = expand_binop (mode, other_optab, op0, op1, target,
8350 unsignedp, OPTAB_LIB_WIDEN);
8351 htem = expand_mult_highpart_adjust (innermode,
8352 gen_highpart (innermode, temp),
8353 op0, op1,
8354 gen_highpart (innermode, temp),
8355 unsignedp);
8356 emit_move_insn (gen_highpart (innermode, temp), htem);
8357 return temp;
8358 }
8359 }
8360 }
8361 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8362 subtarget, &op0, &op1, 0);
8363 return expand_mult (mode, op0, op1, target, unsignedp);
8365 case TRUNC_DIV_EXPR:
8366 case FLOOR_DIV_EXPR:
8367 case CEIL_DIV_EXPR:
8368 case ROUND_DIV_EXPR:
8369 case EXACT_DIV_EXPR:
8370 if (modifier == EXPAND_STACK_PARM)
8371 target = 0;
8372 /* Possible optimization: compute the dividend with EXPAND_SUM
8373 then if the divisor is constant can optimize the case
8374 where some terms of the dividend have coeffs divisible by it. */
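/* The idea, sketched: for (A*8 + B*4) / 4 a dividend expanded with
   EXPAND_SUM would keep the A*8 and B*4 terms visible, so a constant
   divisor of 4 could be folded in term by term, giving A*2 + B.
   This remains a note only; below, both operands are expanded plainly.  */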
8375 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8376 subtarget, &op0, &op1, 0);
8377 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8379 case RDIV_EXPR:
8380 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
8381 saving an expensive divide. If not, combine will rebuild the
8382 original computation. */
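/* For example, under -funsafe-math-optimizations

     double q1 = a / b, q2 = c / b;

   becomes a * (1/b) and c * (1/b); CSE can then share the single
   reciprocal, trading two divides for one divide plus two multiplies.  */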
8383 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8384 && TREE_CODE (type) == REAL_TYPE
8385 && !real_onep (TREE_OPERAND (exp, 0)))
8386 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8387 build (RDIV_EXPR, type,
8388 build_real (type, dconst1),
8389 TREE_OPERAND (exp, 1))),
8390 target, tmode, modifier);
8391 this_optab = sdiv_optab;
8392 goto binop;
8394 case TRUNC_MOD_EXPR:
8395 case FLOOR_MOD_EXPR:
8396 case CEIL_MOD_EXPR:
8397 case ROUND_MOD_EXPR:
8398 if (modifier == EXPAND_STACK_PARM)
8399 target = 0;
8400 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8401 subtarget, &op0, &op1, 0);
8402 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8404 case FIX_ROUND_EXPR:
8405 case FIX_FLOOR_EXPR:
8406 case FIX_CEIL_EXPR:
8407 abort (); /* Not used for C. */
8409 case FIX_TRUNC_EXPR:
8410 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8411 if (target == 0 || modifier == EXPAND_STACK_PARM)
8412 target = gen_reg_rtx (mode);
8413 expand_fix (target, op0, unsignedp);
8414 return target;
8416 case FLOAT_EXPR:
8417 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8418 if (target == 0 || modifier == EXPAND_STACK_PARM)
8419 target = gen_reg_rtx (mode);
8420 /* expand_float can't figure out what to do if FROM has VOIDmode.
8421 So give it the correct mode. With -O, cse will optimize this. */
8422 if (GET_MODE (op0) == VOIDmode)
8423 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8425 expand_float (target, op0,
8426 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8427 return target;
8429 case NEG_EXPR:
8430 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8431 if (modifier == EXPAND_STACK_PARM)
8432 target = 0;
8433 temp = expand_unop (mode,
8434 ! unsignedp && flag_trapv
8435 && (GET_MODE_CLASS(mode) == MODE_INT)
8436 ? negv_optab : neg_optab, op0, target, 0);
8437 if (temp == 0)
8438 abort ();
8439 return temp;
8441 case ABS_EXPR:
8442 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8443 if (modifier == EXPAND_STACK_PARM)
8444 target = 0;
8446 /* ABS_EXPR is not valid for complex arguments. */
8447 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8448 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8449 abort ();
8451 /* Unsigned abs is simply the operand. Testing here means we don't
8452 risk generating incorrect code below. */
8453 if (TREE_UNSIGNED (type))
8454 return op0;
8456 return expand_abs (mode, op0, target, unsignedp,
8457 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8459 case MAX_EXPR:
8460 case MIN_EXPR:
8461 target = original_target;
8462 if (target == 0
8463 || modifier == EXPAND_STACK_PARM
8464 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8465 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8466 || GET_MODE (target) != mode
8467 || (GET_CODE (target) == REG
8468 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8469 target = gen_reg_rtx (mode);
8470 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8471 target, &op0, &op1, 0);
8473 /* First try to do it with a special MIN or MAX instruction.
8474 If that does not win, use a conditional jump to select the proper
8475 value. */
8476 this_optab = (TREE_UNSIGNED (type)
8477 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8478 : (code == MIN_EXPR ? smin_optab : smax_optab));
8480 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8481 OPTAB_WIDEN);
8482 if (temp != 0)
8483 return temp;
8485 /* At this point, a MEM target is no longer useful; we will get better
8486 code without it. */
8488 if (GET_CODE (target) == MEM)
8489 target = gen_reg_rtx (mode);
8491 if (target != op0)
8492 emit_move_insn (target, op0);
8494 op0 = gen_label_rtx ();
8496 /* If this mode is an integer too wide to compare properly,
8497 compare word by word. Rely on cse to optimize constant cases. */
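/* E.g. a MAX_EXPR on DImode values on a 32-bit machine without a
   DImode compare-and-branch: the high words are compared first and
   the low words only on equality, via do_jump_by_parts_greater_rtx.  */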
8498 if (GET_MODE_CLASS (mode) == MODE_INT
8499 && ! can_compare_p (GE, mode, ccp_jump))
8500 {
8501 if (code == MAX_EXPR)
8502 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8503 target, op1, NULL_RTX, op0);
8505 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8506 op1, target, NULL_RTX, op0);
8507 }
8508 else
8509 {
8510 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8511 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8512 unsignedp, mode, NULL_RTX, NULL_RTX,
8513 op0);
8514 }
8515 emit_move_insn (target, op1);
8516 emit_label (op0);
8517 return target;
8519 case BIT_NOT_EXPR:
8520 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8521 if (modifier == EXPAND_STACK_PARM)
8522 target = 0;
8523 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8524 if (temp == 0)
8525 abort ();
8526 return temp;
8528 /* ??? Can optimize bitwise operations with one arg constant.
8529 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8530 and (a bitwise1 b) bitwise2 b (etc)
8531 but that is probably not worth while. */
8533 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8534 boolean values when we want in all cases to compute both of them. In
8535 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8536 as actual zero-or-1 values and then bitwise anding. In cases where
8537 there cannot be any side effects, better code would be made by
8538 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8539 how to recognize those cases. */
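/* For example, "p && q" (TRUTH_ANDIF_EXPR) must not evaluate Q when
   P is false, whereas a TRUTH_AND_EXPR built for the same operands
   evaluates both as zero-or-one values and simply ands them, just
   like the bitwise "p & q".  */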
8541 case TRUTH_AND_EXPR:
8542 case BIT_AND_EXPR:
8543 this_optab = and_optab;
8544 goto binop;
8546 case TRUTH_OR_EXPR:
8547 case BIT_IOR_EXPR:
8548 this_optab = ior_optab;
8549 goto binop;
8551 case TRUTH_XOR_EXPR:
8552 case BIT_XOR_EXPR:
8553 this_optab = xor_optab;
8554 goto binop;
8556 case LSHIFT_EXPR:
8557 case RSHIFT_EXPR:
8558 case LROTATE_EXPR:
8559 case RROTATE_EXPR:
8560 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8561 subtarget = 0;
8562 if (modifier == EXPAND_STACK_PARM)
8563 target = 0;
8564 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8565 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8566 unsignedp);
8568 /* Could determine the answer when only additive constants differ. Also,
8569 the addition of one can be handled by changing the condition. */
8570 case LT_EXPR:
8571 case LE_EXPR:
8572 case GT_EXPR:
8573 case GE_EXPR:
8574 case EQ_EXPR:
8575 case NE_EXPR:
8576 case UNORDERED_EXPR:
8577 case ORDERED_EXPR:
8578 case UNLT_EXPR:
8579 case UNLE_EXPR:
8580 case UNGT_EXPR:
8581 case UNGE_EXPR:
8582 case UNEQ_EXPR:
8583 temp = do_store_flag (exp,
8584 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8585 tmode != VOIDmode ? tmode : mode, 0);
8586 if (temp != 0)
8587 return temp;
8589 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
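/* I.e. for "flag = (foo != 0);" where FLAG and FOO share a mode:
   load FOO into the target, skip the next insn if it is already
   zero, and otherwise overwrite the target with 1 -- avoiding a
   store-flag instruction entirely.  */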
8590 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8591 && original_target
8592 && GET_CODE (original_target) == REG
8593 && (GET_MODE (original_target)
8594 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8595 {
8596 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8597 VOIDmode, 0);
8599 /* If temp is constant, we can just compute the result. */
8600 if (GET_CODE (temp) == CONST_INT)
8601 {
8602 if (INTVAL (temp) != 0)
8603 emit_move_insn (target, const1_rtx);
8604 else
8605 emit_move_insn (target, const0_rtx);
8607 return target;
8608 }
8610 if (temp != original_target)
8611 {
8612 enum machine_mode mode1 = GET_MODE (temp);
8613 if (mode1 == VOIDmode)
8614 mode1 = tmode != VOIDmode ? tmode : mode;
8616 temp = copy_to_mode_reg (mode1, temp);
8617 }
8619 op1 = gen_label_rtx ();
8620 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8621 GET_MODE (temp), unsignedp, op1);
8622 emit_move_insn (temp, const1_rtx);
8623 emit_label (op1);
8624 return temp;
8625 }
8627 /* If no set-flag instruction, must generate a conditional
8628 store into a temporary variable. Drop through
8629 and handle this like && and ||. */
8631 case TRUTH_ANDIF_EXPR:
8632 case TRUTH_ORIF_EXPR:
8633 if (! ignore
8634 && (target == 0
8635 || modifier == EXPAND_STACK_PARM
8636 || ! safe_from_p (target, exp, 1)
8637 /* Make sure we don't have a hard reg (such as function's return
8638 value) live across basic blocks, if not optimizing. */
8639 || (!optimize && GET_CODE (target) == REG
8640 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8641 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8643 if (target)
8644 emit_clr_insn (target);
8646 op1 = gen_label_rtx ();
8647 jumpifnot (exp, op1);
8649 if (target)
8650 emit_0_to_1_insn (target);
8652 emit_label (op1);
8653 return ignore ? const0_rtx : target;
8655 case TRUTH_NOT_EXPR:
8656 if (modifier == EXPAND_STACK_PARM)
8657 target = 0;
8658 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8659 /* The parser is careful to generate TRUTH_NOT_EXPR
8660 only with operands that are always zero or one. */
8661 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8662 target, 1, OPTAB_LIB_WIDEN);
8663 if (temp == 0)
8664 abort ();
8665 return temp;
8667 case COMPOUND_EXPR:
8668 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8669 emit_queue ();
8670 return expand_expr (TREE_OPERAND (exp, 1),
8671 (ignore ? const0_rtx : target),
8672 VOIDmode, modifier);
8674 case COND_EXPR:
8675 /* If we would have a "singleton" (see below) were it not for a
8676 conversion in each arm, bring that conversion back out. */
8677 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8678 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8679 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8680 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8682 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8683 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8685 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8686 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8687 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8688 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8689 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8690 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8691 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8692 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8693 return expand_expr (build1 (NOP_EXPR, type,
8694 build (COND_EXPR, TREE_TYPE (iftrue),
8695 TREE_OPERAND (exp, 0),
8696 iftrue, iffalse)),
8697 target, tmode, modifier);
8701 /* Note that COND_EXPRs whose type is a structure or union
8702 are required to be constructed to contain assignments of
8703 a temporary variable, so that we can evaluate them here
8704 for side effect only. If type is void, we must do likewise. */
8706 /* If an arm of the branch requires a cleanup,
8707 only that cleanup is performed. */
8708 {
8709 tree singleton = 0;
8710 tree binary_op = 0, unary_op = 0;
8712 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8713 convert it to our mode, if necessary. */
8714 if (integer_onep (TREE_OPERAND (exp, 1))
8715 && integer_zerop (TREE_OPERAND (exp, 2))
8716 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8717 {
8718 if (ignore)
8719 {
8720 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8721 modifier);
8722 return const0_rtx;
8723 }
8725 if (modifier == EXPAND_STACK_PARM)
8726 target = 0;
8727 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8728 if (GET_MODE (op0) == mode)
8729 return op0;
8731 if (target == 0)
8732 target = gen_reg_rtx (mode);
8733 convert_move (target, op0, unsignedp);
8734 return target;
8735 }
8737 /* Check for X ? A + B : A. If we have this, we can copy A to the
8738 output and conditionally add B. Similarly for unary operations.
8739 Don't do this if X has side-effects because those side effects
8740 might affect A or B and the "?" operation is a sequence point in
8741 ANSI. (operand_equal_p tests for side effects.) */
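/* For example, "x ? a + 8 : a" can be expanded as: copy A to the
   output, then add 8 under the condition.  If X had side effects, as
   in "f () ? a + 8 : a", they might change A itself, so the shortcut
   is only taken for side-effect-free conditions.  */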
8743 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8744 && operand_equal_p (TREE_OPERAND (exp, 2),
8745 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8746 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8747 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8748 && operand_equal_p (TREE_OPERAND (exp, 1),
8749 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8750 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8751 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8752 && operand_equal_p (TREE_OPERAND (exp, 2),
8753 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8754 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8755 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8756 && operand_equal_p (TREE_OPERAND (exp, 1),
8757 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8758 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8760 /* If we are not to produce a result, we have no target. Otherwise,
8761 if a target was specified use it; it will not be used as an
8762 intermediate target unless it is safe. If no target, use a
8763 temporary. */
8765 if (ignore)
8766 temp = 0;
8767 else if (modifier == EXPAND_STACK_PARM)
8768 temp = assign_temp (type, 0, 0, 1);
8769 else if (original_target
8770 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8771 || (singleton && GET_CODE (original_target) == REG
8772 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8773 && original_target == var_rtx (singleton)))
8774 && GET_MODE (original_target) == mode
8775 #ifdef HAVE_conditional_move
8776 && (! can_conditionally_move_p (mode)
8777 || GET_CODE (original_target) == REG
8778 || TREE_ADDRESSABLE (type))
8779 #endif
8780 && (GET_CODE (original_target) != MEM
8781 || TREE_ADDRESSABLE (type)))
8782 temp = original_target;
8783 else if (TREE_ADDRESSABLE (type))
8784 abort ();
8785 else
8786 temp = assign_temp (type, 0, 0, 1);
8788 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8789 do the test of X as a store-flag operation, do this as
8790 A + ((X != 0) << log C). Similarly for other simple binary
8791 operators. Only do for C == 1 if BRANCH_COST is low. */
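/* Worked instance: "x ? a + 4 : a" becomes

     a + ((x != 0) << 2)

   one store-flag, one shift, one add, and no branch.  When
   BRANCH_COST is below 3, only the C == 1 form "a + (x != 0)" is
   considered profitable, as the test above encodes.  */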
8792 if (temp && singleton && binary_op
8793 && (TREE_CODE (binary_op) == PLUS_EXPR
8794 || TREE_CODE (binary_op) == MINUS_EXPR
8795 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8796 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8797 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8798 : integer_onep (TREE_OPERAND (binary_op, 1)))
8799 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8800 {
8801 rtx result;
8803 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8804 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8805 ? addv_optab : add_optab)
8806 : TREE_CODE (binary_op) == MINUS_EXPR
8807 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8808 ? subv_optab : sub_optab)
8809 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8810 : xor_optab;
8812 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8813 if (singleton == TREE_OPERAND (exp, 1))
8814 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8815 else
8816 cond = TREE_OPERAND (exp, 0);
8818 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8819 ? temp : NULL_RTX),
8820 mode, BRANCH_COST <= 1);
8822 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8823 result = expand_shift (LSHIFT_EXPR, mode, result,
8824 build_int_2 (tree_log2
8825 (TREE_OPERAND
8826 (binary_op, 1)),
8827 0),
8828 (safe_from_p (temp, singleton, 1)
8829 ? temp : NULL_RTX), 0);
8831 if (result)
8832 return result;
8833 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8834 return expand_binop (mode, boptab, op1, result, temp,
8835 unsignedp, OPTAB_LIB_WIDEN);
8836 }
8839 do_pending_stack_adjust ();
8840 NO_DEFER_POP;
8841 op0 = gen_label_rtx ();
8843 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8844 {
8845 if (temp != 0)
8846 {
8847 /* If the target conflicts with the other operand of the
8848 binary op, we can't use it. Also, we can't use the target
8849 if it is a hard register, because evaluating the condition
8850 might clobber it. */
8851 if ((binary_op
8852 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8853 || (GET_CODE (temp) == REG
8854 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8855 temp = gen_reg_rtx (mode);
8856 store_expr (singleton, temp,
8857 modifier == EXPAND_STACK_PARM ? 2 : 0);
8858 }
8859 else
8860 expand_expr (singleton,
8861 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8862 if (singleton == TREE_OPERAND (exp, 1))
8863 jumpif (TREE_OPERAND (exp, 0), op0);
8864 else
8865 jumpifnot (TREE_OPERAND (exp, 0), op0);
8867 start_cleanup_deferral ();
8868 if (binary_op && temp == 0)
8869 /* Just touch the other operand. */
8870 expand_expr (TREE_OPERAND (binary_op, 1),
8871 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8872 else if (binary_op)
8873 store_expr (build (TREE_CODE (binary_op), type,
8874 make_tree (type, temp),
8875 TREE_OPERAND (binary_op, 1)),
8876 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8877 else
8878 store_expr (build1 (TREE_CODE (unary_op), type,
8879 make_tree (type, temp)),
8880 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8881 op1 = op0;
8882 }
8883 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8884 comparison operator. If we have one of these cases, set the
8885 output to A, branch on A (cse will merge these two references),
8886 then set the output to FOO. */
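/* E.g. "x != 0 ? x : y": store X into the output, branch on that
   same X (cse merges the two references), and only on the
   fall-through path overwrite the output with FOO, here Y.  */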
8887 else if (temp
8888 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8889 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8890 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8891 TREE_OPERAND (exp, 1), 0)
8892 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8893 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8894 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8896 if (GET_CODE (temp) == REG
8897 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8898 temp = gen_reg_rtx (mode);
8899 store_expr (TREE_OPERAND (exp, 1), temp,
8900 modifier == EXPAND_STACK_PARM ? 2 : 0);
8901 jumpif (TREE_OPERAND (exp, 0), op0);
8903 start_cleanup_deferral ();
8904 store_expr (TREE_OPERAND (exp, 2), temp,
8905 modifier == EXPAND_STACK_PARM ? 2 : 0);
8909 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8910 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8911 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8912 TREE_OPERAND (exp, 2), 0)
8913 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8914 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8915 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8916 {
8917 if (GET_CODE (temp) == REG
8918 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8919 temp = gen_reg_rtx (mode);
8920 store_expr (TREE_OPERAND (exp, 2), temp,
8921 modifier == EXPAND_STACK_PARM ? 2 : 0);
8922 jumpifnot (TREE_OPERAND (exp, 0), op0);
8924 start_cleanup_deferral ();
8925 store_expr (TREE_OPERAND (exp, 1), temp,
8926 modifier == EXPAND_STACK_PARM ? 2 : 0);
8927 op1 = op0;
8928 }
8929 else
8930 {
8931 op1 = gen_label_rtx ();
8932 jumpifnot (TREE_OPERAND (exp, 0), op0);
8934 start_cleanup_deferral ();
8936 /* One branch of the cond can be void, if it never returns. For
8937 example A ? throw : E.  */
8938 if (temp != 0
8939 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8940 store_expr (TREE_OPERAND (exp, 1), temp,
8941 modifier == EXPAND_STACK_PARM ? 2 : 0);
8942 else
8943 expand_expr (TREE_OPERAND (exp, 1),
8944 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8945 end_cleanup_deferral ();
8946 emit_queue ();
8947 emit_jump_insn (gen_jump (op1));
8948 emit_label (op0);
8950 start_cleanup_deferral ();
8951 if (temp != 0
8952 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8953 store_expr (TREE_OPERAND (exp, 2), temp,
8954 modifier == EXPAND_STACK_PARM ? 2 : 0);
8955 else
8956 expand_expr (TREE_OPERAND (exp, 2),
8957 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8960 end_cleanup_deferral ();
8962 emit_queue ();
8963 emit_label (op1);
8964 return ignore ? const0_rtx : temp;
8965 }
8967 case TARGET_EXPR:
8968 {
8971 /* Something needs to be initialized, but we didn't know
8972 where that thing was when building the tree. For example,
8973 it could be the return value of a function, or a parameter
8974 to a function which lays down in the stack, or a temporary
8975 variable which must be passed by reference.
8977 We guarantee that the expression will either be constructed
8978 or copied into our original target. */
8980 tree slot = TREE_OPERAND (exp, 0);
8981 tree cleanups = NULL_TREE;
8984 if (TREE_CODE (slot) != VAR_DECL)
8985 abort ();
8987 if (! ignore)
8988 target = original_target;
8990 /* Set this here so that if we get a target that refers to a
8991 register variable that's already been used, put_reg_into_stack
8992 knows that it should fix up those uses. */
8993 TREE_USED (slot) = 1;
8995 if (target == 0)
8996 {
8997 if (DECL_RTL_SET_P (slot))
8998 {
8999 target = DECL_RTL (slot);
9000 /* If we have already expanded the slot, don't do
9001 it again. (mrs) */
9002 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9003 return target;
9004 }
9005 else
9006 {
9007 target = assign_temp (type, 2, 0, 1);
9008 /* All temp slots at this level must not conflict. */
9009 preserve_temp_slots (target);
9010 SET_DECL_RTL (slot, target);
9011 if (TREE_ADDRESSABLE (slot))
9012 put_var_into_stack (slot, /*rescan=*/false);
9014 /* Since SLOT is not known to the called function
9015 to belong to its stack frame, we must build an explicit
9016 cleanup. This case occurs when we must build up a reference
9017 to pass the reference as an argument. In this case,
9018 it is very likely that such a reference need not be
9019 built here. */
9021 if (TREE_OPERAND (exp, 2) == 0)
9022 TREE_OPERAND (exp, 2)
9023 = (*lang_hooks.maybe_build_cleanup) (slot);
9024 cleanups = TREE_OPERAND (exp, 2);
9025 }
9026 }
9027 else
9028 {
9029 /* This case does occur, when expanding a parameter which
9030 needs to be constructed on the stack. The target
9031 is the actual stack address that we want to initialize.
9032 The function we call will perform the cleanup in this case. */
9034 /* If we have already assigned it space, use that space,
9035 not target that we were passed in, as our target
9036 parameter is only a hint. */
9037 if (DECL_RTL_SET_P (slot))
9039 target = DECL_RTL (slot);
9040 /* If we have already expanded the slot, don't do
9041 it again. (mrs) */
9042 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9043 return target;
9044 }
9045 else
9047 SET_DECL_RTL (slot, target);
9048 /* If we must have an addressable slot, then make sure that
9049 the RTL that we just stored in slot is OK. */
9050 if (TREE_ADDRESSABLE (slot))
9051 put_var_into_stack (slot, /*rescan=*/true);
9055 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9056 /* Mark it as expanded. */
9057 TREE_OPERAND (exp, 1) = NULL_TREE;
9059 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9061 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9063 return target;
9064 }
9066 case INIT_EXPR:
9067 {
9068 tree lhs = TREE_OPERAND (exp, 0);
9069 tree rhs = TREE_OPERAND (exp, 1);
9071 temp = expand_assignment (lhs, rhs, ! ignore);
9072 return temp;
9073 }
9075 case MODIFY_EXPR:
9076 {
9077 /* If lhs is complex, expand calls in rhs before computing it.
9078 That's so we don't compute a pointer and save it over a
9079 call. If lhs is simple, compute it first so we can give it
9080 as a target if the rhs is just a call. This avoids an
9081 extra temp and copy and that prevents a partial-subsumption
9082 which makes bad code. Actually we could treat
9083 component_ref's of vars like vars. */
9085 tree lhs = TREE_OPERAND (exp, 0);
9086 tree rhs = TREE_OPERAND (exp, 1);
9090 /* Check for |= or &= of a bitfield of size one into another bitfield
9091 of size 1. In this case, (unless we need the result of the
9092 assignment) we can do this more efficiently with a
9093 test followed by an assignment, if necessary.
9095 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9096 things change so we do, this code should be enhanced to
9097 support it. */
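/* For example (sketch), with

     struct s { unsigned a : 1, b : 1; } x;

   the statement "x.a |= x.b;", when its value is unused, is emitted
   as the cheaper "if (x.b) x.a = 1;", and "x.a &= x.b;" as
   "if (!x.b) x.a = 0;", via the do_jump call below.  */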
9098 if (ignore
9099 && TREE_CODE (lhs) == COMPONENT_REF
9100 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9101 || TREE_CODE (rhs) == BIT_AND_EXPR)
9102 && TREE_OPERAND (rhs, 0) == lhs
9103 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9104 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9105 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9106 {
9107 rtx label = gen_label_rtx ();
9109 do_jump (TREE_OPERAND (rhs, 1),
9110 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9111 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9112 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9113 (TREE_CODE (rhs) == BIT_IOR_EXPR
9114 ? integer_one_node
9115 : integer_zero_node)),
9116 0);
9117 do_pending_stack_adjust ();
9118 emit_label (label);
9119 return const0_rtx;
9120 }
9122 temp = expand_assignment (lhs, rhs, ! ignore);
9123 return temp;
9124 }
9126 case RETURN_EXPR:
9128 if (!TREE_OPERAND (exp, 0))
9129 expand_null_return ();
9130 else
9131 expand_return (TREE_OPERAND (exp, 0));
9132 return const0_rtx;
9134 case PREINCREMENT_EXPR:
9135 case PREDECREMENT_EXPR:
9136 return expand_increment (exp, 0, ignore);
9138 case POSTINCREMENT_EXPR:
9139 case POSTDECREMENT_EXPR:
9140 /* Faster to treat as pre-increment if result is not used. */
9141 return expand_increment (exp, ! ignore, ignore);
9143 case ADDR_EXPR:
9144 if (modifier == EXPAND_STACK_PARM)
9145 target = 0;
9146 /* Are we taking the address of a nested function? */
9147 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9148 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9149 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9150 && ! TREE_STATIC (exp))
9151 {
9152 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9153 op0 = force_operand (op0, target);
9154 }
9155 /* If we are taking the address of something erroneous, just
9156 return a zero. */
9157 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9158 return const0_rtx;
9159 /* If we are taking the address of a constant and are at the
9160 top level, we have to use output_constant_def since we can't
9161 call force_const_mem at top level. */
9162 else if (cfun == 0
9163 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9164 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9165 == 'c')))
9166 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9167 else
9168 {
9169 /* We make sure to pass const0_rtx down if we came in with
9170 ignore set, to avoid doing the cleanups twice for something. */
9171 op0 = expand_expr (TREE_OPERAND (exp, 0),
9172 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9173 (modifier == EXPAND_INITIALIZER
9174 ? modifier : EXPAND_CONST_ADDRESS));
9176 /* If we are going to ignore the result, OP0 will have been set
9177 to const0_rtx, so just return it. Don't get confused and
9178 think we are taking the address of the constant. */
9179 if (ignore)
9180 return op0;
9182 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9183 clever and returns a REG when given a MEM. */
9184 op0 = protect_from_queue (op0, 1);
9186 /* We would like the object in memory. If it is a constant, we can
9187 have it be statically allocated into memory. For a non-constant,
9188 we need to allocate some memory and store the value into it. */
9190 if (CONSTANT_P (op0))
9191 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9192 op0);
9193 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9194 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9195 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9196 {
9197 /* If the operand is a SAVE_EXPR, we can deal with this by
9198 forcing the SAVE_EXPR into memory. */
9199 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9201 put_var_into_stack (TREE_OPERAND (exp, 0),
9202 /*rescan=*/true);
9203 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9204 }
9205 else
9206 {
9207 /* If this object is in a register, it can't be BLKmode. */
9208 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9209 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9211 if (GET_CODE (op0) == PARALLEL)
9212 /* Handle calls that pass values in multiple
9213 non-contiguous locations. The Irix 6 ABI has examples
9214 of this. */
9215 emit_group_store (memloc, op0, inner_type,
9216 int_size_in_bytes (inner_type));
9217 else
9218 emit_move_insn (memloc, op0);
9219 op0 = memloc;
9220 }
9221 }
9224 if (GET_CODE (op0) != MEM)
9225 abort ();
9227 mark_temp_addr_taken (op0);
9228 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9229 {
9230 op0 = XEXP (op0, 0);
9231 #ifdef POINTERS_EXTEND_UNSIGNED
9232 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9233 && mode == ptr_mode)
9234 op0 = convert_memory_address (ptr_mode, op0);
9235 #endif
9237 return op0;
9238 }
9239 /* If OP0 is not aligned as least as much as the type requires, we
9240 need to make a temporary, copy OP0 to it, and take the address of
9241 the temporary. We want to use the alignment of the type, not of
9242 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9243 the test for BLKmode means that can't happen. The test for
9244 BLKmode is because we never make mis-aligned MEMs with
9245 non-BLKmode.
9247 We don't need to do this at all if the machine doesn't have
9248 strict alignment. */
9249 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9250 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9251 > MEM_ALIGN (op0))
9252 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9253 {
9254 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9255 rtx new;
9257 if (TYPE_ALIGN_OK (inner_type))
9258 abort ();
9260 if (TREE_ADDRESSABLE (inner_type))
9261 {
9262 /* We can't make a bitwise copy of this object, so fail. */
9263 error ("cannot take the address of an unaligned member");
9264 return const0_rtx;
9265 }
9267 new = assign_stack_temp_for_type
9268 (TYPE_MODE (inner_type),
9269 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9270 : int_size_in_bytes (inner_type),
9271 1, build_qualified_type (inner_type,
9272 (TYPE_QUALS (inner_type)
9273 | TYPE_QUAL_CONST)));
9275 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9276 (modifier == EXPAND_STACK_PARM
9277 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9278 op0 = new;
9279 }
9282 op0 = force_operand (XEXP (op0, 0), target);
9283 }
9285 if (flag_force_addr
9286 && GET_CODE (op0) != REG
9287 && modifier != EXPAND_CONST_ADDRESS
9288 && modifier != EXPAND_INITIALIZER
9289 && modifier != EXPAND_SUM)
9290 op0 = force_reg (Pmode, op0);
9292 if (GET_CODE (op0) == REG
9293 && ! REG_USERVAR_P (op0))
9294 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9296 #ifdef POINTERS_EXTEND_UNSIGNED
9297 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9298 && mode == ptr_mode)
9299 op0 = convert_memory_address (ptr_mode, op0);
9300 #endif
9302 return op0;
9304 case ENTRY_VALUE_EXPR:
9305 abort ();
9307 /* COMPLEX type for Extended Pascal & Fortran.  */
9308 case COMPLEX_EXPR:
9309 {
9310 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9311 rtx insns;
9313 /* Get the rtx code of the operands. */
9314 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9315 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9317 if (! target)
9318 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9320 start_sequence ();
9322 /* Move the real (op0) and imaginary (op1) parts to their location. */
9323 emit_move_insn (gen_realpart (mode, target), op0);
9324 emit_move_insn (gen_imagpart (mode, target), op1);
9326 insns = get_insns ();
9327 end_sequence ();
9329 /* Complex construction should appear as a single unit. */
9330 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9331 each with a separate pseudo as destination.
9332 It's not correct for flow to treat them as a unit. */
9333 if (GET_CODE (target) != CONCAT)
9334 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9335 else
9336 emit_insn (insns);
9338 return target;
9339 }
9341 case REALPART_EXPR:
9342 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9343 return gen_realpart (mode, op0);
9345 case IMAGPART_EXPR:
9346 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9347 return gen_imagpart (mode, op0);
9349 case CONJ_EXPR:
9350 {
9351 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9352 rtx imag_t;
9353 rtx insns;
9355 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9357 if (! target)
9358 target = gen_reg_rtx (mode);
9360 start_sequence ();
9362 /* Store the realpart and the negated imagpart to target. */
9363 emit_move_insn (gen_realpart (partmode, target),
9364 gen_realpart (partmode, op0));
9366 imag_t = gen_imagpart (partmode, target);
9367 temp = expand_unop (partmode,
9368 ! unsignedp && flag_trapv
9369 && (GET_MODE_CLASS(partmode) == MODE_INT)
9370 ? negv_optab : neg_optab,
9371 gen_imagpart (partmode, op0), imag_t, 0);
9372 if (temp != imag_t)
9373 emit_move_insn (imag_t, temp);
9375 insns = get_insns ();
9376 end_sequence ();
9378 /* Conjugate should appear as a single unit
9379 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9380 each with a separate pseudo as destination.
9381 It's not correct for flow to treat them as a unit. */
9382 if (GET_CODE (target) != CONCAT)
9383 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9384 else
9385 emit_insn (insns);
9387 return target;
9388 }
9390 case TRY_CATCH_EXPR:
9391 {
9392 tree handler = TREE_OPERAND (exp, 1);
9394 expand_eh_region_start ();
9396 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9398 expand_eh_region_end_cleanup (handler);
9400 return op0;
9401 }
9403 case TRY_FINALLY_EXPR:
9404 {
9405 tree try_block = TREE_OPERAND (exp, 0);
9406 tree finally_block = TREE_OPERAND (exp, 1);
9408 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9409 {
9410 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9411 is not sufficient, so we cannot expand the block twice.
9412 So we play games with GOTO_SUBROUTINE_EXPR to let us
9413 expand the thing only once. */
9414 /* When not optimizing, we go ahead with this form since
9415 (1) user breakpoints operate more predictably without
9416 code duplication, and
9417 (2) we're not running any of the global optimizers
9418 that would explode in time/space with the highly
9419 connected CFG created by the indirect branching. */
9421 rtx finally_label = gen_label_rtx ();
9422 rtx done_label = gen_label_rtx ();
9423 rtx return_link = gen_reg_rtx (Pmode);
9424 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9425 (tree) finally_label, (tree) return_link);
9426 TREE_SIDE_EFFECTS (cleanup) = 1;
9428 /* Start a new binding layer that will keep track of all cleanup
9429 actions to be performed. */
9430 expand_start_bindings (2);
9431 target_temp_slot_level = temp_slot_level;
9433 expand_decl_cleanup (NULL_TREE, cleanup);
9434 op0 = expand_expr (try_block, target, tmode, modifier);
9436 preserve_temp_slots (op0);
9437 expand_end_bindings (NULL_TREE, 0, 0);
9438 emit_jump (done_label);
9439 emit_label (finally_label);
9440 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9441 emit_indirect_jump (return_link);
9442 emit_label (done_label);
9443 }
9444 else
9445 {
9446 expand_start_bindings (2);
9447 target_temp_slot_level = temp_slot_level;
9449 expand_decl_cleanup (NULL_TREE, finally_block);
9450 op0 = expand_expr (try_block, target, tmode, modifier);
9452 preserve_temp_slots (op0);
9453 expand_end_bindings (NULL_TREE, 0, 0);
9454 }
9456 return op0;
9457 }
9459 case GOTO_SUBROUTINE_EXPR:
9460 {
9461 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9462 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9463 rtx return_address = gen_label_rtx ();
9464 emit_move_insn (return_link,
9465 gen_rtx_LABEL_REF (Pmode, return_address));
9466 emit_jump (subr);
9467 emit_label (return_address);
9468 return const0_rtx;
9469 }
9471 case VA_ARG_EXPR:
9472 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9474 case EXC_PTR_EXPR:
9475 return get_exception_pointer (cfun);
9477 case FDESC_EXPR:
9478 /* Function descriptors are not valid except as
9479 initialization constants, and should not be expanded. */
9480 abort ();
9482 default:
9483 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9486 /* Here to do an ordinary binary operator, generating an instruction
9487 from the optab already placed in `this_optab'. */
9488 binop:
9489 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9490 subtarget, &op0, &op1, 0);
9491 binop2:
9492 if (modifier == EXPAND_STACK_PARM)
9493 target = 0;
9494 temp = expand_binop (mode, this_optab, op0, op1, target,
9495 unsignedp, OPTAB_LIB_WIDEN);
9496 if (temp == 0)
9497 abort ();
9498 return temp;
9499 }
9501 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9502 when applied to the address of EXP produces an address known to be
9503 aligned more than BIGGEST_ALIGNMENT. */
9505 static int
9506 is_aligning_offset (tree offset, tree exp)
9507 {
9508 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9509 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9510 || TREE_CODE (offset) == NOP_EXPR
9511 || TREE_CODE (offset) == CONVERT_EXPR
9512 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9513 offset = TREE_OPERAND (offset, 0);
9515 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9516 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9517 if (TREE_CODE (offset) != BIT_AND_EXPR
9518 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9519 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9520 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9521 return 0;
9523 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9524 It must be NEGATE_EXPR. Then strip any more conversions. */
9525 offset = TREE_OPERAND (offset, 0);
9526 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9527 || TREE_CODE (offset) == NOP_EXPR
9528 || TREE_CODE (offset) == CONVERT_EXPR)
9529 offset = TREE_OPERAND (offset, 0);
9531 if (TREE_CODE (offset) != NEGATE_EXPR)
9532 return 0;
9534 offset = TREE_OPERAND (offset, 0);
9535 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9536 || TREE_CODE (offset) == NOP_EXPR
9537 || TREE_CODE (offset) == CONVERT_EXPR)
9538 offset = TREE_OPERAND (offset, 0);
9540 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9541 whose type is the same as EXP. */
9542 return (TREE_CODE (offset) == ADDR_EXPR
9543 && (TREE_OPERAND (offset, 0) == exp
9544 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9545 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9546 == TREE_TYPE (exp)))));
9547 }
9549 /* Return the tree node if an ARG corresponds to a string constant or zero
9550 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9551 in bytes within the string that ARG is accessing. The type of the
9552 offset will be `sizetype'. */
9554 tree
9555 string_constant (tree arg, tree *ptr_offset)
9556 {
9557 STRIP_NOPS (arg);
9559 if (TREE_CODE (arg) == ADDR_EXPR
9560 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9562 *ptr_offset = size_zero_node;
9563 return TREE_OPERAND (arg, 0);
9565 else if (TREE_CODE (arg) == PLUS_EXPR)
9567 tree arg0 = TREE_OPERAND (arg, 0);
9568 tree arg1 = TREE_OPERAND (arg, 1);
9570 STRIP_NOPS (arg0);
9571 STRIP_NOPS (arg1);
9573 if (TREE_CODE (arg0) == ADDR_EXPR
9574 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9576 *ptr_offset = convert (sizetype, arg1);
9577 return TREE_OPERAND (arg0, 0);
9579 else if (TREE_CODE (arg1) == ADDR_EXPR
9580 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9582 *ptr_offset = convert (sizetype, arg0);
9583 return TREE_OPERAND (arg1, 0);
9584 }
9585 }
9587 return 0;
9588 }
9590 /* Expand code for a post- or pre- increment or decrement
9591 and return the RTX for the result.
9592 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9594 static rtx
9595 expand_increment (tree exp, int post, int ignore)
9596 {
9597 rtx op0, op1;
9598 rtx temp, value;
9599 tree incremented = TREE_OPERAND (exp, 0);
9600 optab this_optab = add_optab;
9601 int icode;
9602 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9603 int op0_is_copy = 0;
9604 int single_insn = 0;
9605 /* 1 means we can't store into OP0 directly,
9606 because it is a subreg narrower than a word,
9607 and we don't dare clobber the rest of the word. */
9608 int bad_subreg = 0;
9610 /* Stabilize any component ref that might need to be
9611 evaluated more than once below. */
9612 if (!post
9613 || TREE_CODE (incremented) == BIT_FIELD_REF
9614 || (TREE_CODE (incremented) == COMPONENT_REF
9615 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9616 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9617 incremented = stabilize_reference (incremented);
9618 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9619 ones into save exprs so that they don't accidentally get evaluated
9620 more than once by the code below. */
9621 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9622 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9623 incremented = save_expr (incremented);
9625 /* Compute the operands as RTX.
9626 Note whether OP0 is the actual lvalue or a copy of it:
9627 I believe it is a copy iff it is a register or subreg
9628 and insns were generated in computing it. */
9630 temp = get_last_insn ();
9631 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9633 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9634 in place but instead must do sign- or zero-extension during assignment,
9635 so we copy it into a new register and let the code below use it as
9636 a copy.
9638 Note that we can safely modify this SUBREG since it is known not to be
9639 shared (it was made by the expand_expr call above). */
9641 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9644 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9648 else if (GET_CODE (op0) == SUBREG
9649 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9651 /* We cannot increment this SUBREG in place. If we are
9652 post-incrementing, get a copy of the old value. Otherwise,
9653 just mark that we cannot increment in place. */
9654 if (post)
9655 op0 = copy_to_reg (op0);
9656 else
9657 bad_subreg = 1;
9658 }
9660 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9661 && temp != get_last_insn ());
9662 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9664 /* Decide whether incrementing or decrementing. */
9665 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9666 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9667 this_optab = sub_optab;
9669 /* Convert decrement by a constant into a negative increment. */
9670 if (this_optab == sub_optab
9671 && GET_CODE (op1) == CONST_INT)
9673 op1 = GEN_INT (-INTVAL (op1));
9674 this_optab = add_optab;
9677 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9678 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9680 /* For a preincrement, see if we can do this with a single instruction. */
9681 if (!post)
9682 {
9683 icode = (int) this_optab->handlers[(int) mode].insn_code;
9684 if (icode != (int) CODE_FOR_nothing
9685 /* Make sure that OP0 is valid for operands 0 and 1
9686 of the insn we want to queue. */
9687 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9688 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9689 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9690 single_insn = 1;
9691 }
9693 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9694 then we cannot just increment OP0. We must therefore contrive to
9695 increment the original value. Then, for postincrement, we can return
9696 OP0 since it is a copy of the old value. For preincrement, expand here
9697 unless we can do it with a single insn.
9699 Likewise if storing directly into OP0 would clobber high bits
9700 we need to preserve (bad_subreg). */
9701 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9702 {
9703 /* This is the easiest way to increment the value wherever it is.
9704 Problems with multiple evaluation of INCREMENTED are prevented
9705 because either (1) it is a component_ref or preincrement,
9706 in which case it was stabilized above, or (2) it is an array_ref
9707 with constant index in an array in a register, which is
9708 safe to reevaluate. */
9709 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9710 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9711 ? MINUS_EXPR : PLUS_EXPR),
9712 TREE_TYPE (exp),
9713 incremented,
9714 TREE_OPERAND (exp, 1));
9716 while (TREE_CODE (incremented) == NOP_EXPR
9717 || TREE_CODE (incremented) == CONVERT_EXPR)
9718 {
9719 newexp = convert (TREE_TYPE (incremented), newexp);
9720 incremented = TREE_OPERAND (incremented, 0);
9721 }
9723 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9724 return post ? op0 : temp;
9725 }
9729 /* We have a true reference to the value in OP0.
9730 If there is an insn to add or subtract in this mode, queue it.
9731 Queueing the increment insn avoids the register shuffling
9732 that often results if we must increment now and first save
9733 the old value for subsequent use. */
9735 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9736 op0 = stabilize (op0);
9737 #endif
9739 icode = (int) this_optab->handlers[(int) mode].insn_code;
9740 if (icode != (int) CODE_FOR_nothing
9741 /* Make sure that OP0 is valid for operands 0 and 1
9742 of the insn we want to queue. */
9743 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9744 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9745 {
9746 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9747 op1 = force_reg (mode, op1);
9749 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9750 }
9751 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9752 {
9753 rtx addr = (general_operand (XEXP (op0, 0), mode)
9754 ? force_reg (Pmode, XEXP (op0, 0))
9755 : copy_to_reg (XEXP (op0, 0)));
9756 rtx temp, result;
9758 op0 = replace_equiv_address (op0, addr);
9759 temp = force_reg (GET_MODE (op0), op0);
9760 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9761 op1 = force_reg (mode, op1);
9763 /* The increment queue is LIFO, thus we have to `queue'
9764 the instructions in reverse order. */
9765 enqueue_insn (op0, gen_move_insn (op0, temp));
9766 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9767 return result;
9768 }
9771 /* Preincrement, or we can't increment with one simple insn. */
9772 if (post)
9773 /* Save a copy of the value before inc or dec, to return it later. */
9774 temp = value = copy_to_reg (op0);
9775 else
9776 /* Arrange to return the incremented value. */
9777 /* Copy the rtx because expand_binop will protect from the queue,
9778 and the results of that would be invalid for us to return
9779 if our caller does emit_queue before using our result. */
9780 temp = copy_rtx (value = op0);
9782 /* Increment however we can. */
9783 op1 = expand_binop (mode, this_optab, value, op1, op0,
9784 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9786 /* Make sure the value is stored into OP0. */
9787 if (op1 != op0)
9788 emit_move_insn (op0, op1);
9790 return temp;
9791 }
9793 /* Generate code to calculate EXP using a store-flag instruction
9794 and return an rtx for the result. EXP is either a comparison
9795 or a TRUTH_NOT_EXPR whose operand is a comparison.
9797 If TARGET is nonzero, store the result there if convenient.
9799 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9800 cheap.
9802 Return zero if there is no suitable set-flag instruction
9803 available on this machine.
9805 Once expand_expr has been called on the arguments of the comparison,
9806 we are committed to doing the store flag, since it is not safe to
9807 re-evaluate the expression. We emit the store-flag insn by calling
9808 emit_store_flag, but only expand the arguments if we have a reason
9809 to believe that emit_store_flag will be successful. If we think that
9810 it will, but it isn't, we have to simulate the store-flag with a
9811 set/jump/set sequence. */
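/* Shape of the fallback sequence, sketched for "t = (a < b)":

     t = 1;
     if (a < b) goto done;
     t = 0;
   done:

   i.e. set/jump/set, which the tail of this function emits when
   emit_store_flag cannot do the job directly.  */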
9813 static rtx
9814 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9815 {
9816 enum rtx_code code;
9817 tree arg0, arg1, type;
9818 tree tem;
9819 enum machine_mode operand_mode;
9820 int invert = 0;
9821 int unsignedp;
9822 rtx op0, op1;
9823 enum insn_code icode;
9824 rtx subtarget = target;
9825 rtx result, label;
9827 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9828 result at the end. We can't simply invert the test since it would
9829 have already been inverted if it were valid. This case occurs for
9830 some floating-point comparisons. */
9832 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9833 invert = 1, exp = TREE_OPERAND (exp, 0);
9835 arg0 = TREE_OPERAND (exp, 0);
9836 arg1 = TREE_OPERAND (exp, 1);
9838 /* Don't crash if the comparison was erroneous. */
9839 if (arg0 == error_mark_node || arg1 == error_mark_node)
9840 return const0_rtx;
9842 type = TREE_TYPE (arg0);
9843 operand_mode = TYPE_MODE (type);
9844 unsignedp = TREE_UNSIGNED (type);
9846 /* We won't bother with BLKmode store-flag operations because it would mean
9847 passing a lot of information to emit_store_flag. */
9848 if (operand_mode == BLKmode)
9849 return 0;
9851 /* We won't bother with store-flag operations involving function pointers
9852 when function pointers must be canonicalized before comparisons. */
9853 #ifdef HAVE_canonicalize_funcptr_for_compare
9854 if (HAVE_canonicalize_funcptr_for_compare
9855 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9856 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9857 == FUNCTION_TYPE)
9858 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9859 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9860 == FUNCTION_TYPE))))
9861 return 0;
9862 #endif
9864 STRIP_NOPS (arg0);
9865 STRIP_NOPS (arg1);
9867 /* Get the rtx comparison code to use. We know that EXP is a comparison
9868 operation of some type. Some comparisons against 1 and -1 can be
9869 converted to comparisons with zero. Do so here so that the tests
9870 below will be aware that we have a comparison with zero. These
9871 tests will not catch constants in the first operand, but constants
9872 are rarely passed as the first operand. */
9874 switch (TREE_CODE (exp))
9875 {
9876 case EQ_EXPR:
9877 code = EQ;
9878 break;
9879 case NE_EXPR:
9880 code = NE;
9881 break;
9882 case LT_EXPR:
9883 if (integer_onep (arg1))
9884 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9885 else
9886 code = unsignedp ? LTU : LT;
9887 break;
9888 case LE_EXPR:
9889 if (! unsignedp && integer_all_onesp (arg1))
9890 arg1 = integer_zero_node, code = LT;
9891 else
9892 code = unsignedp ? LEU : LE;
9893 break;
9894 case GT_EXPR:
9895 if (! unsignedp && integer_all_onesp (arg1))
9896 arg1 = integer_zero_node, code = GE;
9897 else
9898 code = unsignedp ? GTU : GT;
9899 break;
9900 case GE_EXPR:
9901 if (integer_onep (arg1))
9902 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9903 else
9904 code = unsignedp ? GEU : GE;
9905 break;
9907 case UNORDERED_EXPR:
9933 /* Put a constant second. */
9934 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9935 {
9936 tem = arg0; arg0 = arg1; arg1 = tem;
9937 code = swap_condition (code);
9938 }
9940 /* If this is an equality or inequality test of a single bit, we can
9941 do this by shifting the bit being tested to the low-order bit and
9942 masking the result with the constant 1. If the condition was EQ,
9943 we xor it with 1. This does not require an scc insn and is faster
9944 than an scc insn even if we have it.
9946 The code to make this transformation was moved into fold_single_bit_test,
9947 so we just call into the folder and expand its result. */
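/* For example, "(x & 8) != 0" is rewritten as "(x >> 3) & 1", and
   the EQ form "(x & 8) == 0" additionally xors the result with 1 --
   no scc instruction required.  fold_single_bit_test builds exactly
   that tree.  */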
9949 if ((code == NE || code == EQ)
9950 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9951 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9952 {
9953 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9954 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9955 arg0, arg1, type),
9956 target, VOIDmode, EXPAND_NORMAL);
9957 }
9959 /* Now see if we are likely to be able to do this. Return if not. */
9960 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9961 return 0;
9963 icode = setcc_gen_code[(int) code];
9964 if (icode == CODE_FOR_nothing
9965 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9966 {
9967 /* We can only do this if it is one of the special cases that
9968 can be handled without an scc insn. */
9969 if ((code == LT && integer_zerop (arg1))
9970 || (! only_cheap && code == GE && integer_zerop (arg1)))
9971 ;
9972 else if (BRANCH_COST >= 0
9973 && ! only_cheap && (code == NE || code == EQ)
9974 && TREE_CODE (type) != REAL_TYPE
9975 && ((abs_optab->handlers[(int) operand_mode].insn_code
9976 != CODE_FOR_nothing)
9977 || (ffs_optab->handlers[(int) operand_mode].insn_code
9978 != CODE_FOR_nothing)))
9979 ;
9980 else
9981 return 0;
9982 }
9984 if (! get_subtarget (target)
9985 || GET_MODE (subtarget) != operand_mode
9986 || ! safe_from_p (subtarget, arg1, 1))
9987 subtarget = 0;
9989 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9991 if (target == 0)
9992 target = gen_reg_rtx (mode);
9994 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9995 because, if the emit_store_flag does anything it will succeed and
9996 OP0 and OP1 will not be used subsequently. */
9998 result = emit_store_flag (target, code,
9999 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10000 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10001 operand_mode, unsignedp, 1);
10003 if (result)
10004 {
10005 if (invert)
10006 result = expand_binop (mode, xor_optab, result, const1_rtx,
10007 result, 0, OPTAB_LIB_WIDEN);
10008 return result;
10009 }
10011 /* If this failed, we have to do this with set/compare/jump/set code. */
10012 if (GET_CODE (target) != REG
10013 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10014 target = gen_reg_rtx (GET_MODE (target));
10016 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10017 result = compare_from_rtx (op0, op1, code, unsignedp,
10018 operand_mode, NULL_RTX);
10019 if (GET_CODE (result) == CONST_INT)
10020 return (((result == const0_rtx && ! invert)
10021 || (result != const0_rtx && invert))
10022 ? const0_rtx : const1_rtx);
10024 /* The code of RESULT may not match CODE if compare_from_rtx
10025 decided to swap its operands and reverse the original code.
10027 We know that compare_from_rtx returns either a CONST_INT or
10028 a new comparison code, so it is safe to just extract the
10029 code from RESULT. */
10030 code = GET_CODE (result);
10032 label = gen_label_rtx ();
10033 if (bcc_gen_fctn[(int) code] == 0)
10034 abort ();
10036 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10037 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10038 emit_label (label);
10040 return target;
10041 }
10044 /* Stubs in case we haven't got a casesi insn. */
10045 #ifndef HAVE_casesi
10046 # define HAVE_casesi 0
10047 # define gen_casesi(a, b, c, d, e) (0)
10048 # define CODE_FOR_casesi CODE_FOR_nothing
10049 #endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
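/* For example (illustrative): with a casesi pattern, a switch needs at
   least 4 case values before a dispatch table is considered; without
   one, the bounds check must be open-coded, so the default threshold
   rises to 5.  Targets may supply their own CASE_VALUES_THRESHOLD to
   override this default.  */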
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert ((*lang_hooks.types.type_for_size)
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
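  /* Editorial note on the pattern repeated for each operand below:
     expand the tree, convert the result to the mode the casesi
     pattern's operand demands, and force it into a register whenever
     the operand predicate rejects the expanded form.  This is the
     standard insn-expansion idiom, not something specific to casesi.  */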
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
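  /* Worked example (illustrative): for case values 3..7, the lower
     bound 3 has already been subtracted and RANGE is 4.  An original
     index of 2 yields (unsigned) (2 - 3), i.e. the maximum unsigned
     value, and an index of 9 yields 6; both compare GTU against 4 and
     branch to DEFAULT_LABEL, so one unsigned comparison catches both
     underflow and overflow.  */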
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
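  /* The rtx built above is just the address arithmetic
     `table_label + index * entry_size' (illustrative); e.g. with
     4-byte table entries, entry N lives at TABLE_LABEL + 4*N.  */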
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;
  if (! HAVE_tablejump)
    return 0;
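  /* Illustrative example (not from the original comments): for
     `switch (x)' with case labels 10..20, the fold below builds
     `x - 10', and do_tablejump then tests `(unsigned) (x - 10) > 10'
     before indexing the table.  */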
  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;
  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI when we have no DI
     but do have V2DI, but that case is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
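/* For instance (illustrative): V2SImode is accepted on a target with no
   vector unit as long as SImode moves exist, since a V2SI move can be
   emulated with two SI moves; a vector mode whose inner mode cannot be
   moved at all is rejected.  */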
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);
      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }
  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
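/* Illustrative example: a V4SImode VECTOR_CST whose element list is
   {1, 2, 3} yields the CONST_VECTOR [1, 2, 3, 0]; the element list may
   be shorter than the mode's unit count, and the tail is zero-filled
   by the loop above.  */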
#include "gt-expr.h"