/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */
#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
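
/* The conversion above lets the rest of the file test this target
   capability at run time rather than with preprocessor conditionals.
   An illustrative sketch (not code from this file):

	if (TARGET_MEM_FUNCTIONS)
	  ... generate a call to memcpy/memset ...
	else
	  ... generate a call to bcopy/bzero ...

   which is exactly how emit_block_move_via_libcall and
   init_block_move_fn below choose between the two interfaces.  */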
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to;
  rtx from, from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));

static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
								    tree));

static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
static rtx const_vector_from_tree PARAMS ((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
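
/* For instance, with the default MOVE_RATIO of 15 on a 32-bit target
   whose MOVE_MAX is 4, a word-aligned 8-byte copy costs two SImode
   moves, so MOVE_BY_PIECES_P (8, 32) is true and the copy is expanded
   inline rather than via a movstr pattern or a libcall.  (Illustrative
   numbers; all of these values are target-dependent.)  */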
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;

      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
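
/* An illustrative sketch of how this is used (not a call that appears
   in this file): to expand V++ one might queue the increment and keep
   the QUEUED rtx so the pre-increment value can still be named:

	rtx q = enqueue_insn (v, gen_move_insn (v, plus_constant (v, 1)));
	... protect_from_queue (q, 0) yields the old value of V ...
	emit_queue ();

   The queued increments are actually emitted by emit_queue below.  */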
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
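
/* A typical calling sequence, as used by convert_move below (sketch):
   before putting operands into an insn, do

	to = protect_from_queue (to, 1);
	from = protect_from_queue (from, 0);
	emit_move_insn (to, from);

   passing MODIFY as 1 for operands that will be stored into and 0 for
   operands that are only read.  */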
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
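
/* Illustrative use (a sketch, not code from this file): to sign-extend
   an SImode value into an existing DImode register one would write

	convert_move (di_reg, si_reg, 0);

   passing 1 for UNSIGNEDP requests zero-extension instead.  */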
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
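
/* E.g. (sketch): convert_to_mode (SImode, x, 1) returns X widened or
   narrowed to SImode, zero-extending if widening is needed; it may
   return a piece of X in place rather than a fresh pseudo.  */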
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
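
/* Worked example for the extension arithmetic above (illustrative):
   converting a QImode 0xff to SImode with UNSIGNEDP clear, WIDTH is 8;
   masking gives val = 0xff, the sign bit (1 << 7) is set, so
   val |= -1 << 8 produces -1: the byte is reinterpreted as signed.
   With UNSIGNEDP set, val stays 255.  */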
/* This macro determines the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
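
/* Illustrative call (a sketch; emit_block_move below is the usual
   entry point): copying 16 bytes between word-aligned BLKmode MEMs X
   and Y via

	move_by_pieces (x, y, 16, 32);

   emits four SImode moves on a typical 32-bit target.  */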
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
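
/* Worked example (illustrative, 32-bit target, MOVE_MAX == 4, aligned
   operands): for L == 7 the loop counts one SImode move (leaving 3
   bytes), one HImode move (leaving 1), and one QImode move, so the
   result is 3.  */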
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
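
/* Illustrative use (a sketch): expanding a struct assignment might do

	emit_block_move (target_mem, source_mem, GEN_INT (nbytes),
			 BLOCK_OP_NORMAL);

   where TARGET_MEM and SOURCE_MEM are BLKmode MEMs; the code above
   then picks one of the four strategies (by pieces, movstr pattern,
   libcall, or explicit loop).  */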
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  if (PUSH_ARGS)
    return true;

  {
    /* Check to see whether memcpy takes all register arguments.  */
    static enum {
      takes_regs_uninit, takes_regs_no, takes_regs_yes
    } takes_regs = takes_regs_uninit;

    switch (takes_regs)
      {
      case takes_regs_uninit:
	{
	  CUMULATIVE_ARGS args_so_far;
	  tree fn, arg;

	  fn = emit_block_move_libcall_fn (false);
	  INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	  arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	  for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	    {
	      enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	      rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	      if (!tmp || !REG_P (tmp))
		goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	      if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					      NULL_TREE, 1))
		goto fail_takes_regs;
#endif
	      FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	    }
	}
	takes_regs = takes_regs_yes;
	/* FALLTHRU */

      case takes_regs_yes:
	return true;

      fail_takes_regs:
	takes_regs = takes_regs_no;
	/* FALLTHRU */
      case takes_regs_no:
	return false;

      default:
	abort ();
      }
  }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

#ifdef POINTERS_EXTEND_UNSIGNED
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);
#endif

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (asmspec)
     const char *asmspec;
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}
static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (x, y, size, align)
     rtx x, y, size;
     unsigned int align ATTRIBUTE_UNUSED;
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NULL, NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NULL, NOTE_INSN_LOOP_END);
}
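
/* The RTL emitted above corresponds to this C loop (illustrative):

	for (i = 0; i < size; i++)
	  x[i] = y[i];

   with I kept in ITER and the comparison placed at the bottom of the
   loop.  */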
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (orig)
     rtx orig;
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
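
/* Such a group PARALLEL has the form (illustrative):

	(parallel [(expr_list (reg:DI 100) (const_int 0))
		   (expr_list (reg:DI 101) (const_int 8))])

   where each element pairs a register with its byte offset within the
   value.  */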
2277 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2278 registers represented by a PARALLEL. SSIZE represents the total size of
2279 block SRC in bytes, or -1 if not known. */
2280 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2281 the balance will be in what would be the low-order memory addresses, i.e.
2282 left justified for big endian, right justified for little endian. This
2283 happens to be true for the targets currently using this support. If this
2284 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2288 emit_group_load (dst, orig_src, ssize)
2295 if (GET_CODE (dst) != PARALLEL)
2298 /* Check for a NULL entry, used to indicate that the parameter goes
2299 both on the stack and in registers. */
2300 if (XEXP (XVECEXP (dst, 0, 0), 0))
2305 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2307 /* Process the pieces. */
2308 for (i = start; i < XVECLEN (dst, 0); i++)
2310 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2311 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2312 unsigned int bytelen = GET_MODE_SIZE (mode);
2315 /* Handle trailing fragments that run over the size of the struct. */
2316 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2318 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2319 bytelen = ssize - bytepos;
2324 /* If we won't be loading directly from memory, protect the real source
2325 from strange tricks we might play; but make sure that the source can
2326 be loaded directly into the destination. */
2328 if (GET_CODE (orig_src) != MEM
2329 && (!CONSTANT_P (orig_src)
2330 || (GET_MODE (orig_src) != mode
2331 && GET_MODE (orig_src) != VOIDmode)))
2333 if (GET_MODE (orig_src) == VOIDmode)
2334 src = gen_reg_rtx (mode);
2336 src = gen_reg_rtx (GET_MODE (orig_src));
2338 emit_move_insn (src, orig_src);
2341 /* Optimize the access just a bit. */
2342 if (GET_CODE (src) == MEM
2343 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2344 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2345 && bytelen == GET_MODE_SIZE (mode))
2347 tmps[i] = gen_reg_rtx (mode);
2348 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2350 else if (GET_CODE (src) == CONCAT)
2352 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2353 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2355 if ((bytepos == 0 && bytelen == slen0)
2356 || (bytepos != 0 && bytepos + bytelen <= slen))
2358 /* The following assumes that the concatenated objects all
2359 have the same size. In this case, a simple calculation
2360 can be used to determine the object and the bit field
2361 to be extracted.  */
2362 tmps[i] = XEXP (src, bytepos / slen0);
2363 if (! CONSTANT_P (tmps[i])
2364 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2365 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2366 (bytepos % slen0) * BITS_PER_UNIT,
2367 1, NULL_RTX, mode, mode, ssize);
2369 else if (bytepos == 0)
2371 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2372 emit_move_insn (mem, src);
2373 tmps[i] = adjust_address (mem, mode, 0);
2378 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2379 SIMD register, which is currently broken. While we get GCC
2380 to emit proper RTL for these cases, let's dump to memory. */
2381 else if (VECTOR_MODE_P (GET_MODE (dst))
2382 && GET_CODE (src) == REG)
2384 int slen = GET_MODE_SIZE (GET_MODE (src));
2387 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2388 emit_move_insn (mem, src);
2389 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2391 else if (CONSTANT_P (src)
2392 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2395 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2396 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2399 if (BYTES_BIG_ENDIAN && shift)
2400 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2401 tmps[i], 0, OPTAB_WIDEN);
2406 /* Copy the extracted pieces into the proper (probable) hard regs. */
2407 for (i = start; i < XVECLEN (dst, 0); i++)
2408 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2411 /* Emit code to move a block SRC to block DST, where SRC and DST are
2412 non-consecutive groups of registers, each represented by a PARALLEL. */
2415 emit_group_move (dst, src)
2420 if (GET_CODE (src) != PARALLEL
2421 || GET_CODE (dst) != PARALLEL
2422 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2425 /* Skip first entry if NULL. */
2426 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2427 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2428 XEXP (XVECEXP (src, 0, i), 0));
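/* Note (a summary, not in the original sources): unlike emit_group_load
   and emit_group_store, this routine performs no extraction or shifting.
   It assumes SRC and DST describe the same layout entry for entry, as is
   the case for a group cloned by gen_group_rtx above.  */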
2431 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2432 registers represented by a PARALLEL. SSIZE represents the total size of
2433 block DST, or -1 if not known. */
2436 emit_group_store (orig_dst, src, ssize)
2443 if (GET_CODE (src) != PARALLEL)
2446 /* Check for a NULL entry, used to indicate that the parameter goes
2447 both on the stack and in registers. */
2448 if (XEXP (XVECEXP (src, 0, 0), 0))
2453 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2455 /* Copy the (probable) hard regs into pseudos. */
2456 for (i = start; i < XVECLEN (src, 0); i++)
2458 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2459 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2460 emit_move_insn (tmps[i], reg);
2464 /* If we won't be storing directly into memory, protect the real destination
2465 from strange tricks we might play. */
2467 if (GET_CODE (dst) == PARALLEL)
2471 /* We can get a PARALLEL dst if there is a conditional expression in
2472 a return statement. In that case, the dst and src are the same,
2473 so no action is necessary. */
2474 if (rtx_equal_p (dst, src))
2477 /* It is unclear if we can ever reach here, but we may as well handle
2478 it.  Allocate a temporary, and split this into a store/load to/from
2479 the temporary.  */
2481 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2482 emit_group_store (temp, src, ssize);
2483 emit_group_load (dst, temp, ssize);
2486 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2488 dst = gen_reg_rtx (GET_MODE (orig_dst));
2489 /* Make life a bit easier for combine. */
2490 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2493 /* Process the pieces. */
2494 for (i = start; i < XVECLEN (src, 0); i++)
2496 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2497 enum machine_mode mode = GET_MODE (tmps[i]);
2498 unsigned int bytelen = GET_MODE_SIZE (mode);
2501 /* Handle trailing fragments that run over the size of the struct. */
2502 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2504 if (BYTES_BIG_ENDIAN)
2506 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2507 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2508 tmps[i], 0, OPTAB_WIDEN);
2510 bytelen = ssize - bytepos;
2513 if (GET_CODE (dst) == CONCAT)
2515 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2516 dest = XEXP (dst, 0);
2517 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2519 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2520 dest = XEXP (dst, 1);
2522 else if (bytepos == 0 && XVECLEN (src, 0))
2524 dest = assign_stack_temp (GET_MODE (dest),
2525 GET_MODE_SIZE (GET_MODE (dest)), 0);
2526 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2535 /* Optimize the access just a bit. */
2536 if (GET_CODE (dest) == MEM
2537 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2538 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2539 && bytelen == GET_MODE_SIZE (mode))
2540 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2542 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2543 mode, tmps[i], ssize);
2548 /* Copy from the pseudo into the (probable) hard reg. */
2549 if (orig_dst != dst)
2550 emit_move_insn (orig_dst, dst);
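/* Usage sketch (hypothetical values): given a return value described by
   a PARALLEL RETVAL and a 12-byte BLKmode stack slot SLOT,

     emit_group_store (slot, retval, 12);

   stores each piece at its recorded byte offset, pre-shifting a
   trailing fragment right on big-endian targets as done above.  */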
2553 /* Generate code to copy a BLKmode object of TYPE out of a
2554 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2555 is null, a stack temporary is created. TGTBLK is returned.
2557 The primary purpose of this routine is to handle functions
2558 that return BLKmode structures in registers. Some machines
2559 (the PA for example) want to return all small structures
2560 in registers regardless of the structure's alignment. */
2563 copy_blkmode_from_reg (tgtblk, srcreg, type)
2568 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2569 rtx src = NULL, dst = NULL;
2570 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2571 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2575 tgtblk = assign_temp (build_qualified_type (type,
2577 | TYPE_QUAL_CONST)),
2579 preserve_temp_slots (tgtblk);
2582 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2583 into a new pseudo which is a full word. */
2585 if (GET_MODE (srcreg) != BLKmode
2586 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2587 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2589 /* Structures whose size is not a multiple of a word are aligned
2590 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2591 machine, this means we must skip the empty high order bytes when
2592 calculating the bit offset. */
2593 if (BYTES_BIG_ENDIAN
2594 && bytes % UNITS_PER_WORD)
2595 big_endian_correction
2596 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
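  /* For instance (numbers illustrative): BYTES == 6, UNITS_PER_WORD == 4
     and BITS_PER_WORD == 32 give a correction of 32 - (6 % 4) * 8 == 16,
     so the extraction below starts 16 bits in, past the empty high-order
     bytes.  */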
2598 /* Copy the structure BITSIZE bits at a time.
2600 We could probably emit more efficient code for machines which do not use
2601 strict alignment, but it doesn't seem worth the effort at the current
2602 time.  */
2603 for (bitpos = 0, xbitpos = big_endian_correction;
2604 bitpos < bytes * BITS_PER_UNIT;
2605 bitpos += bitsize, xbitpos += bitsize)
2607 /* We need a new source operand each time xbitpos is on a
2608 word boundary and when xbitpos == big_endian_correction
2609 (the first time through). */
2610 if (xbitpos % BITS_PER_WORD == 0
2611 || xbitpos == big_endian_correction)
2612 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2615 /* We need a new destination operand each time bitpos is on
2616 a word boundary.  */
2617 if (bitpos % BITS_PER_WORD == 0)
2618 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2620 /* Use xbitpos for the source extraction (right justified) and
2621 bitpos for the destination store (left justified).  */
2622 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2623 extract_bit_field (src, bitsize,
2624 xbitpos % BITS_PER_WORD, 1,
2625 NULL_RTX, word_mode, word_mode,
2633 /* Add a USE expression for REG to the (possibly empty) list pointed
2634 to by CALL_FUSAGE. REG must denote a hard register. */
2637 use_reg (call_fusage, reg)
2638 rtx *call_fusage, reg;
2640 if (GET_CODE (reg) != REG
2641 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2645 = gen_rtx_EXPR_LIST (VOIDmode,
2646 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2649 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2650 starting at REGNO. All of these registers must be hard registers. */
2653 use_regs (call_fusage, regno, nregs)
2660 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2663 for (i = 0; i < nregs; i++)
2664 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2667 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2668 PARALLEL REGS. This is for calls that pass values in multiple
2669 non-contiguous locations. The Irix 6 ABI has examples of this. */
2672 use_group_regs (call_fusage, regs)
2678 for (i = 0; i < XVECLEN (regs, 0); i++)
2680 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2682 /* A NULL entry means the parameter goes both on the stack and in
2683 registers. This can also be a MEM for targets that pass values
2684 partially on the stack and partially in registers. */
2685 if (reg != 0 && GET_CODE (reg) == REG)
2686 use_reg (call_fusage, reg);
2691 /* Determine whether the LEN bytes generated by CONSTFUN can be
2692 stored to memory using several move instructions. CONSTFUNDATA is
2693 a pointer which will be passed as argument in every CONSTFUN call.
2694 ALIGN is maximum alignment we can assume. Return nonzero if a
2695 call to store_by_pieces should succeed. */
2698 can_store_by_pieces (len, constfun, constfundata, align)
2699 unsigned HOST_WIDE_INT len;
2700 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2704 unsigned HOST_WIDE_INT max_size, l;
2705 HOST_WIDE_INT offset = 0;
2706 enum machine_mode mode, tmode;
2707 enum insn_code icode;
2711 if (! STORE_BY_PIECES_P (len, align))
2714 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2715 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2716 align = MOVE_MAX * BITS_PER_UNIT;
2718 /* We would first store what we can in the largest integer mode, then go to
2719 successively smaller modes. */
2722 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2727 max_size = STORE_MAX_PIECES + 1;
2728 while (max_size > 1)
2730 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2731 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2732 if (GET_MODE_SIZE (tmode) < max_size)
2735 if (mode == VOIDmode)
2738 icode = mov_optab->handlers[(int) mode].insn_code;
2739 if (icode != CODE_FOR_nothing
2740 && align >= GET_MODE_ALIGNMENT (mode))
2742 unsigned int size = GET_MODE_SIZE (mode);
2749 cst = (*constfun) (constfundata, offset, mode);
2750 if (!LEGITIMATE_CONSTANT_P (cst))
2760 max_size = GET_MODE_SIZE (mode);
2763 /* The code above should have handled everything. */
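/* Usage sketch (READ_STR is a hypothetical callback): callers typically
   pair this predicate with store_by_pieces, e.g.

     if (can_store_by_pieces (len, read_str, (PTR) str, align))
       store_by_pieces (dest_mem, len, read_str, (PTR) str, align);

   where READ_STR returns an rtx constant of the requested mode built
   from the bytes of STR at the given offset.  */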
2771 /* Generate several move instructions to store LEN bytes generated by
2772 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2773 pointer which will be passed as argument in every CONSTFUN call.
2774 ALIGN is maximum alignment we can assume. */
2777 store_by_pieces (to, len, constfun, constfundata, align)
2779 unsigned HOST_WIDE_INT len;
2780 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2784 struct store_by_pieces data;
2786 if (! STORE_BY_PIECES_P (len, align))
2788 to = protect_from_queue (to, 1);
2789 data.constfun = constfun;
2790 data.constfundata = constfundata;
2793 store_by_pieces_1 (&data, align);
2796 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2797 rtx with BLKmode). The caller must pass TO through protect_from_queue
2798 before calling. ALIGN is maximum alignment we can assume. */
2801 clear_by_pieces (to, len, align)
2803 unsigned HOST_WIDE_INT len;
2806 struct store_by_pieces data;
2808 data.constfun = clear_by_pieces_1;
2809 data.constfundata = NULL;
2812 store_by_pieces_1 (&data, align);
2815 /* Callback routine for clear_by_pieces.
2816 Return const0_rtx unconditionally. */
2819 clear_by_pieces_1 (data, offset, mode)
2820 PTR data ATTRIBUTE_UNUSED;
2821 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2822 enum machine_mode mode ATTRIBUTE_UNUSED;
2827 /* Subroutine of clear_by_pieces and store_by_pieces.
2828 Generate several move instructions to store LEN bytes of block TO. (A MEM
2829 rtx with BLKmode). The caller must pass TO through protect_from_queue
2830 before calling. ALIGN is maximum alignment we can assume. */
2833 store_by_pieces_1 (data, align)
2834 struct store_by_pieces *data;
2837 rtx to_addr = XEXP (data->to, 0);
2838 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2839 enum machine_mode mode = VOIDmode, tmode;
2840 enum insn_code icode;
2843 data->to_addr = to_addr;
2845 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2846 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2848 data->explicit_inc_to = 0;
2850 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2852 data->offset = data->len;
2854 /* If storing requires more than two move insns,
2855 copy addresses to registers (to make displacements shorter)
2856 and use post-increment if available. */
2857 if (!data->autinc_to
2858 && move_by_pieces_ninsns (data->len, align) > 2)
2860 /* Determine the main mode we'll be using. */
2861 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2862 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2863 if (GET_MODE_SIZE (tmode) < max_size)
2866 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2868 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2869 data->autinc_to = 1;
2870 data->explicit_inc_to = -1;
2873 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2874 && ! data->autinc_to)
2876 data->to_addr = copy_addr_to_reg (to_addr);
2877 data->autinc_to = 1;
2878 data->explicit_inc_to = 1;
2881 if (!data->autinc_to && CONSTANT_P (to_addr))
2882 data->to_addr = copy_addr_to_reg (to_addr);
2885 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2886 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2887 align = MOVE_MAX * BITS_PER_UNIT;
2889 /* First store what we can in the largest integer mode, then go to
2890 successively smaller modes. */
2892 while (max_size > 1)
2894 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2895 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2896 if (GET_MODE_SIZE (tmode) < max_size)
2899 if (mode == VOIDmode)
2902 icode = mov_optab->handlers[(int) mode].insn_code;
2903 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2904 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2906 max_size = GET_MODE_SIZE (mode);
2909 /* The code above should have handled everything. */
2914 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2915 with move instructions for mode MODE. GENFUN is the gen_... function
2916 to make a move insn for that mode. DATA has all the other info. */
2919 store_by_pieces_2 (genfun, mode, data)
2920 rtx (*genfun) PARAMS ((rtx, ...));
2921 enum machine_mode mode;
2922 struct store_by_pieces *data;
2924 unsigned int size = GET_MODE_SIZE (mode);
2927 while (data->len >= size)
2930 data->offset -= size;
2932 if (data->autinc_to)
2933 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2936 to1 = adjust_address (data->to, mode, data->offset);
2938 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2939 emit_insn (gen_add2_insn (data->to_addr,
2940 GEN_INT (-(HOST_WIDE_INT) size)));
2942 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2943 emit_insn ((*genfun) (to1, cst));
2945 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2946 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2948 if (! data->reverse)
2949 data->offset += size;
2955 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2956 its length in bytes. */
2959 clear_storage (object, size)
2964 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2965 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2967 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2968 just move a zero. Otherwise, do this a piece at a time. */
2969 if (GET_MODE (object) != BLKmode
2970 && GET_CODE (size) == CONST_INT
2971 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2972 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2975 object = protect_from_queue (object, 1);
2976 size = protect_from_queue (size, 0);
2978 if (GET_CODE (size) == CONST_INT
2979 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2980 clear_by_pieces (object, INTVAL (size), align);
2981 else if (clear_storage_via_clrstr (object, size, align))
2984 retval = clear_storage_via_libcall (object, size);
2990 /* A subroutine of clear_storage. Expand a clrstr pattern;
2991 return true if successful. */
2994 clear_storage_via_clrstr (object, size, align)
2998 /* Try the most limited insn first, because there's no point
2999 including more than one in the machine description unless
3000 the more limited one has some advantage. */
3002 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3003 enum machine_mode mode;
3005 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3006 mode = GET_MODE_WIDER_MODE (mode))
3008 enum insn_code code = clrstr_optab[(int) mode];
3009 insn_operand_predicate_fn pred;
3011 if (code != CODE_FOR_nothing
3012 /* We don't need MODE to be narrower than
3013 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3014 the mode mask, as it is returned by the macro, it will
3015 definitely be less than the actual mode mask. */
3016 && ((GET_CODE (size) == CONST_INT
3017 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3018 <= (GET_MODE_MASK (mode) >> 1)))
3019 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3020 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3021 || (*pred) (object, BLKmode))
3022 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3023 || (*pred) (opalign, VOIDmode)))
3026 rtx last = get_last_insn ();
3029 op1 = convert_to_mode (mode, size, 1);
3030 pred = insn_data[(int) code].operand[1].predicate;
3031 if (pred != 0 && ! (*pred) (op1, mode))
3032 op1 = copy_to_mode_reg (mode, op1);
3034 pat = GEN_FCN ((int) code) (object, op1, opalign);
3041 delete_insns_since (last);
3048 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3049 Return the return value of memset, 0 otherwise. */
3052 clear_storage_via_libcall (object, size)
3055 tree call_expr, arg_list, fn, object_tree, size_tree;
3056 enum machine_mode size_mode;
3059 /* OBJECT or SIZE may have been passed through protect_from_queue.
3061 It is unsafe to save the value generated by protect_from_queue
3062 and reuse it later. Consider what happens if emit_queue is
3063 called before the return value from protect_from_queue is used.
3065 Expansion of the CALL_EXPR below will call emit_queue before
3066 we are finished emitting RTL for argument setup. So if we are
3067 not careful we could get the wrong value for an argument.
3069 To avoid this problem we go ahead and emit code to copy OBJECT
3070 and SIZE into new pseudos. We can then place those new pseudos
3071 into an RTL_EXPR and use them later, even after a call to
3074 Note this is not strictly needed for library calls since they
3075 do not call emit_queue before loading their arguments. However,
3076 we may need to have library calls call emit_queue in the future
3077 since failing to do so could cause problems for targets which
3078 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3080 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3082 if (TARGET_MEM_FUNCTIONS)
3083 size_mode = TYPE_MODE (sizetype);
3085 size_mode = TYPE_MODE (unsigned_type_node);
3086 size = convert_to_mode (size_mode, size, 1);
3087 size = copy_to_mode_reg (size_mode, size);
3089 /* It is incorrect to use the libcall calling conventions to call
3090 memset in this context. This could be a user call to memset and
3091 the user may wish to examine the return value from memset. For
3092 targets where libcalls and normal calls have different conventions
3093 for returning pointers, we could end up generating incorrect code.
3095 For convenience, we generate the call to bzero this way as well. */
3097 object_tree = make_tree (ptr_type_node, object);
3098 if (TARGET_MEM_FUNCTIONS)
3099 size_tree = make_tree (sizetype, size);
3101 size_tree = make_tree (unsigned_type_node, size);
3103 fn = clear_storage_libcall_fn (true);
3104 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3105 if (TARGET_MEM_FUNCTIONS)
3106 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3107 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3109 /* Now we have to build up the CALL_EXPR itself. */
3110 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3111 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3112 call_expr, arg_list, NULL_TREE);
3113 TREE_SIDE_EFFECTS (call_expr) = 1;
3115 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3117 /* If we are initializing a readonly value, show the above call
3118 clobbered it. Otherwise, a load from it may erroneously be
3119 hoisted from a loop. */
3120 if (RTX_UNCHANGING_P (object))
3121 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3123 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
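/* In source terms, the CALL_EXPR built above is memset (object, 0, size)
   when TARGET_MEM_FUNCTIONS and bzero (object, size) otherwise, which is
   why the return value is only meaningful in the memset case.  */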
3126 /* A subroutine of clear_storage_via_libcall. Create the tree node
3127 for the function we use for block clears. The first time FOR_CALL
3128 is true, we call assemble_external. */
3130 static GTY(()) tree block_clear_fn;
3133 init_block_clear_fn (asmspec)
3134 const char *asmspec;
3136 if (!block_clear_fn)
3140 if (TARGET_MEM_FUNCTIONS)
3142 fn = get_identifier ("memset");
3143 args = build_function_type_list (ptr_type_node, ptr_type_node,
3144 integer_type_node, sizetype,
3149 fn = get_identifier ("bzero");
3150 args = build_function_type_list (void_type_node, ptr_type_node,
3151 unsigned_type_node, NULL_TREE);
3154 fn = build_decl (FUNCTION_DECL, fn, args);
3155 DECL_EXTERNAL (fn) = 1;
3156 TREE_PUBLIC (fn) = 1;
3157 DECL_ARTIFICIAL (fn) = 1;
3158 TREE_NOTHROW (fn) = 1;
3160 block_clear_fn = fn;
3165 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3166 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3171 clear_storage_libcall_fn (for_call)
3174 static bool emitted_extern;
3176 if (!block_clear_fn)
3177 init_block_clear_fn (NULL);
3179 if (for_call && !emitted_extern)
3181 emitted_extern = true;
3182 make_decl_rtl (block_clear_fn, NULL);
3183 assemble_external (block_clear_fn);
3186 return block_clear_fn;
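/* Note: splitting initialization from first use as above lets a backend
   rename the clearing function (init_block_clear_fn with a nonnull
   ASMSPEC) before the first FOR_CALL use forces the decl's rtl and the
   external reference to be assembled.  */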
3189 /* Generate code to copy Y into X.
3190 Both Y and X must have the same mode, except that
3191 Y can be a constant with VOIDmode.
3192 This mode cannot be BLKmode; use emit_block_move for that.
3194 Return the last instruction emitted. */
3197 emit_move_insn (x, y)
3200 enum machine_mode mode = GET_MODE (x);
3201 rtx y_cst = NULL_RTX;
3204 x = protect_from_queue (x, 1);
3205 y = protect_from_queue (y, 0);
3207 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3210 /* Never force constant_p_rtx to memory. */
3211 if (GET_CODE (y) == CONSTANT_P_RTX)
3213 else if (CONSTANT_P (y))
3216 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3217 && (last_insn = compress_float_constant (x, y)))
3220 if (!LEGITIMATE_CONSTANT_P (y))
3223 y = force_const_mem (mode, y);
3225 /* If the target's cannot_force_const_mem prevented the spill,
3226 assume that the target's move expanders will also take care
3227 of the non-legitimate constant. */
3233 /* If X or Y are memory references, verify that their addresses are valid
3234 for the machine.  */
3235 if (GET_CODE (x) == MEM
3236 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3237 && ! push_operand (x, GET_MODE (x)))
3239 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3240 x = validize_mem (x);
3242 if (GET_CODE (y) == MEM
3243 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3245 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3246 y = validize_mem (y);
3248 if (mode == BLKmode)
3251 last_insn = emit_move_insn_1 (x, y);
3253 if (y_cst && GET_CODE (x) == REG)
3254 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3259 /* Low level part of emit_move_insn.
3260 Called just like emit_move_insn, but assumes X and Y
3261 are basically valid. */
3264 emit_move_insn_1 (x, y)
3267 enum machine_mode mode = GET_MODE (x);
3268 enum machine_mode submode;
3269 enum mode_class class = GET_MODE_CLASS (mode);
3271 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3274 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3276 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3278 /* Expand complex moves by moving real part and imag part, if possible. */
3279 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3280 && BLKmode != (submode = GET_MODE_INNER (mode))
3281 && (mov_optab->handlers[(int) submode].insn_code
3282 != CODE_FOR_nothing))
3284 /* Don't split destination if it is a stack push. */
3285 int stack = push_operand (x, GET_MODE (x));
3287 #ifdef PUSH_ROUNDING
3288 /* In case we output to the stack, but the size is smaller than the machine can
3289 push exactly, we need to use move instructions. */
3291 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3292 != GET_MODE_SIZE (submode)))
3295 HOST_WIDE_INT offset1, offset2;
3297 /* Do not use anti_adjust_stack, since we don't want to update
3298 stack_pointer_delta. */
3299 temp = expand_binop (Pmode,
3300 #ifdef STACK_GROWS_DOWNWARD
3308 (GET_MODE_SIZE (GET_MODE (x)))),
3309 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3311 if (temp != stack_pointer_rtx)
3312 emit_move_insn (stack_pointer_rtx, temp);
3314 #ifdef STACK_GROWS_DOWNWARD
3316 offset2 = GET_MODE_SIZE (submode);
3318 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3319 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3320 + GET_MODE_SIZE (submode));
3323 emit_move_insn (change_address (x, submode,
3324 gen_rtx_PLUS (Pmode,
3326 GEN_INT (offset1))),
3327 gen_realpart (submode, y));
3328 emit_move_insn (change_address (x, submode,
3329 gen_rtx_PLUS (Pmode,
3331 GEN_INT (offset2))),
3332 gen_imagpart (submode, y));
3336 /* If this is a stack, push the highpart first, so it
3337 will be in the argument order.
3339 In that case, change_address is used only to convert
3340 the mode, not to change the address. */
3343 /* Note that the real part always precedes the imag part in memory
3344 regardless of machine's endianness. */
3345 #ifdef STACK_GROWS_DOWNWARD
3346 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3347 (gen_rtx_MEM (submode, XEXP (x, 0)),
3348 gen_imagpart (submode, y)));
3349 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3350 (gen_rtx_MEM (submode, XEXP (x, 0)),
3351 gen_realpart (submode, y)));
3353 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3354 (gen_rtx_MEM (submode, XEXP (x, 0)),
3355 gen_realpart (submode, y)));
3356 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3357 (gen_rtx_MEM (submode, XEXP (x, 0)),
3358 gen_imagpart (submode, y)));
3363 rtx realpart_x, realpart_y;
3364 rtx imagpart_x, imagpart_y;
3366 /* If this is a complex value with each part being smaller than a
3367 word, the usual calling sequence will likely pack the pieces into
3368 a single register. Unfortunately, SUBREG of hard registers only
3369 deals in terms of words, so we have a problem converting input
3370 arguments to the CONCAT of two registers that is used elsewhere
3371 for complex values. If this is before reload, we can copy it into
3372 memory and reload. FIXME, we should see about using extract and
3373 insert on integer registers, but complex short and complex char
3374 variables should be rarely used. */
3375 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3376 && (reload_in_progress | reload_completed) == 0)
3379 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3381 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3383 if (packed_dest_p || packed_src_p)
3385 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3386 ? MODE_FLOAT : MODE_INT);
3388 enum machine_mode reg_mode
3389 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3391 if (reg_mode != BLKmode)
3393 rtx mem = assign_stack_temp (reg_mode,
3394 GET_MODE_SIZE (mode), 0);
3395 rtx cmem = adjust_address (mem, mode, 0);
3398 = N_("function using short complex types cannot be inline");
3402 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3404 emit_move_insn_1 (cmem, y);
3405 return emit_move_insn_1 (sreg, mem);
3409 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3411 emit_move_insn_1 (mem, sreg);
3412 return emit_move_insn_1 (x, cmem);
3418 realpart_x = gen_realpart (submode, x);
3419 realpart_y = gen_realpart (submode, y);
3420 imagpart_x = gen_imagpart (submode, x);
3421 imagpart_y = gen_imagpart (submode, y);
3423 /* Show the output dies here. This is necessary for SUBREGs
3424 of pseudos since we cannot track their lifetimes correctly;
3425 hard regs shouldn't appear here except as return values.
3426 We never want to emit such a clobber after reload. */
3428 && ! (reload_in_progress || reload_completed)
3429 && (GET_CODE (realpart_x) == SUBREG
3430 || GET_CODE (imagpart_x) == SUBREG))
3431 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3433 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3434 (realpart_x, realpart_y));
3435 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3436 (imagpart_x, imagpart_y));
3439 return get_last_insn ();
3442 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3443 find a mode to do it in. If we have a movcc, use it. Otherwise,
3444 find the MODE_INT mode of the same width. */
3445 else if (GET_MODE_CLASS (mode) == MODE_CC
3446 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3448 enum insn_code insn_code;
3449 enum machine_mode tmode = VOIDmode;
3453 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3456 for (tmode = QImode; tmode != VOIDmode;
3457 tmode = GET_MODE_WIDER_MODE (tmode))
3458 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3461 if (tmode == VOIDmode)
3464 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3465 may call change_address which is not appropriate if we were
3466 called when a reload was in progress. We don't have to worry
3467 about changing the address since the size in bytes is supposed to
3468 be the same. Copy the MEM to change the mode and move any
3469 substitutions from the old MEM to the new one. */
3471 if (reload_in_progress)
3473 x = gen_lowpart_common (tmode, x1);
3474 if (x == 0 && GET_CODE (x1) == MEM)
3476 x = adjust_address_nv (x1, tmode, 0);
3477 copy_replacements (x1, x);
3480 y = gen_lowpart_common (tmode, y1);
3481 if (y == 0 && GET_CODE (y1) == MEM)
3483 y = adjust_address_nv (y1, tmode, 0);
3484 copy_replacements (y1, y);
3489 x = gen_lowpart (tmode, x);
3490 y = gen_lowpart (tmode, y);
3493 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3494 return emit_insn (GEN_FCN (insn_code) (x, y));
3497 /* This will handle any multi-word or full-word mode that lacks a move_insn
3498 pattern. However, you will get better code if you define such patterns,
3499 even if they must turn into multiple assembler instructions. */
3500 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3507 #ifdef PUSH_ROUNDING
3509 /* If X is a push on the stack, do the push now and replace
3510 X with a reference to the stack pointer. */
3511 if (push_operand (x, GET_MODE (x)))
3516 /* Do not use anti_adjust_stack, since we don't want to update
3517 stack_pointer_delta. */
3518 temp = expand_binop (Pmode,
3519 #ifdef STACK_GROWS_DOWNWARD
3527 (GET_MODE_SIZE (GET_MODE (x)))),
3528 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3530 if (temp != stack_pointer_rtx)
3531 emit_move_insn (stack_pointer_rtx, temp);
3533 code = GET_CODE (XEXP (x, 0));
3535 /* Just hope that small offsets off SP are OK. */
3536 if (code == POST_INC)
3537 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3538 GEN_INT (-((HOST_WIDE_INT)
3539 GET_MODE_SIZE (GET_MODE (x)))));
3540 else if (code == POST_DEC)
3541 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3542 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3544 temp = stack_pointer_rtx;
3546 x = change_address (x, VOIDmode, temp);
3550 /* If we are in reload, see if either operand is a MEM whose address
3551 is scheduled for replacement. */
3552 if (reload_in_progress && GET_CODE (x) == MEM
3553 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3554 x = replace_equiv_address_nv (x, inner);
3555 if (reload_in_progress && GET_CODE (y) == MEM
3556 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3557 y = replace_equiv_address_nv (y, inner);
3563 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3566 rtx xpart = operand_subword (x, i, 1, mode);
3567 rtx ypart = operand_subword (y, i, 1, mode);
3569 /* If we can't get a part of Y, put Y into memory if it is a
3570 constant. Otherwise, force it into a register. If we still
3571 can't get a part of Y, abort. */
3572 if (ypart == 0 && CONSTANT_P (y))
3574 y = force_const_mem (mode, y);
3575 ypart = operand_subword (y, i, 1, mode);
3577 else if (ypart == 0)
3578 ypart = operand_subword_force (y, i, mode);
3580 if (xpart == 0 || ypart == 0)
3583 need_clobber |= (GET_CODE (xpart) == SUBREG);
3585 last_insn = emit_move_insn (xpart, ypart);
3591 /* Show the output dies here. This is necessary for SUBREGs
3592 of pseudos since we cannot track their lifetimes correctly;
3593 hard regs shouldn't appear here except as return values.
3594 We never want to emit such a clobber after reload. */
3596 && ! (reload_in_progress || reload_completed)
3597 && need_clobber != 0)
3598 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3608 /* If Y is representable exactly in a narrower mode, and the target can
3609 perform the extension directly from constant or memory, then emit the
3610 move as an extension. */
3613 compress_float_constant (x, y)
3616 enum machine_mode dstmode = GET_MODE (x);
3617 enum machine_mode orig_srcmode = GET_MODE (y);
3618 enum machine_mode srcmode;
3621 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3623 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3624 srcmode != orig_srcmode;
3625 srcmode = GET_MODE_WIDER_MODE (srcmode))
3628 rtx trunc_y, last_insn;
3630 /* Skip if the target can't extend this way. */
3631 ic = can_extend_p (dstmode, srcmode, 0);
3632 if (ic == CODE_FOR_nothing)
3635 /* Skip if the narrowed value isn't exact. */
3636 if (! exact_real_truncate (srcmode, &r))
3639 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3641 if (LEGITIMATE_CONSTANT_P (trunc_y))
3643 /* Skip if the target needs extra instructions to perform
3644 the extension.  */
3645 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3648 else if (float_extend_from_mem[dstmode][srcmode])
3649 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3653 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3654 last_insn = get_last_insn ();
3656 if (GET_CODE (x) == REG)
3657 REG_NOTES (last_insn)
3658 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
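/* A concrete instance (illustrative): moving the DFmode constant 1.5
   can be emitted as an SFmode-to-DFmode extension, since 1.5 truncates
   to SFmode exactly; a constant such as 0.1 is skipped because its
   SFmode truncation is not exact.  */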
3666 /* Pushing data onto the stack. */
3668 /* Push a block of length SIZE (perhaps variable)
3669 and return an rtx to address the beginning of the block.
3670 Note that it is not possible for the value returned to be a QUEUED.
3671 The value may be virtual_outgoing_args_rtx.
3673 EXTRA is the number of bytes of padding to push in addition to SIZE.
3674 BELOW nonzero means this padding comes at low addresses;
3675 otherwise, the padding comes at high addresses. */
3678 push_block (size, extra, below)
3684 size = convert_modes (Pmode, ptr_mode, size, 1);
3685 if (CONSTANT_P (size))
3686 anti_adjust_stack (plus_constant (size, extra));
3687 else if (GET_CODE (size) == REG && extra == 0)
3688 anti_adjust_stack (size);
3691 temp = copy_to_mode_reg (Pmode, size);
3693 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3694 temp, 0, OPTAB_LIB_WIDEN);
3695 anti_adjust_stack (temp);
3698 #ifndef STACK_GROWS_DOWNWARD
3704 temp = virtual_outgoing_args_rtx;
3705 if (extra != 0 && below)
3706 temp = plus_constant (temp, extra);
3710 if (GET_CODE (size) == CONST_INT)
3711 temp = plus_constant (virtual_outgoing_args_rtx,
3712 -INTVAL (size) - (below ? 0 : extra));
3713 else if (extra != 0 && !below)
3714 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3715 negate_rtx (Pmode, plus_constant (size, extra)));
3717 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3718 negate_rtx (Pmode, size));
3721 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
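/* Worked example (numbers illustrative): with STACK_GROWS_DOWNWARD,
   SIZE == 16 and EXTRA == 8, anti_adjust_stack reserves 24 bytes.
   BELOW == 0 returns virtual_outgoing_args_rtx - 24, leaving the
   padding at the high end of the block; BELOW != 0 returns
   virtual_outgoing_args_rtx - 16, leaving the padding at the lower
   addresses.  */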
3724 #ifdef PUSH_ROUNDING
3726 /* Emit single push insn. */
3729 emit_single_push_insn (mode, x, type)
3731 enum machine_mode mode;
3735 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3737 enum insn_code icode;
3738 insn_operand_predicate_fn pred;
3740 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3741 /* If there is a push pattern, use it.  Otherwise try the old way of
3742 throwing a MEM representing the push operation to the move expander.  */
3743 icode = push_optab->handlers[(int) mode].insn_code;
3744 if (icode != CODE_FOR_nothing)
3746 if (((pred = insn_data[(int) icode].operand[0].predicate)
3747 && !((*pred) (x, mode))))
3748 x = force_reg (mode, x);
3749 emit_insn (GEN_FCN (icode) (x));
3752 if (GET_MODE_SIZE (mode) == rounded_size)
3753 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3756 #ifdef STACK_GROWS_DOWNWARD
3757 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3758 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3760 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3761 GEN_INT (rounded_size));
3763 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3766 dest = gen_rtx_MEM (mode, dest_addr);
3770 set_mem_attributes (dest, type, 1);
3772 if (flag_optimize_sibling_calls)
3773 /* Function incoming arguments may overlap with sibling call
3774 outgoing arguments and we cannot allow reordering of reads
3775 from function arguments with stores to outgoing arguments
3776 of sibling calls. */
3777 set_mem_alias_set (dest, 0);
3779 emit_move_insn (dest, x);
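/* For example (illustrative): on a downward-growing stack, a push whose
   mode size equals its PUSH_ROUNDING stores through

     (mem:M (pre_dec (reg sp)))

   while a mode that needs rounding to N bytes stores through

     (mem:M (pre_modify (reg sp) (plus (reg sp) (const_int -N))))  */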
3783 /* Generate code to push X onto the stack, assuming it has mode MODE and
3784 type TYPE.
3785 MODE is redundant except when X is a CONST_INT (since they don't
3786 carry mode info).
3787 SIZE is an rtx for the size of data to be copied (in bytes),
3788 needed only if X is BLKmode.
3790 ALIGN (in bits) is maximum alignment we can assume.
3792 If PARTIAL and REG are both nonzero, then copy that many of the first
3793 words of X into registers starting with REG, and push the rest of X.
3794 The amount of space pushed is decreased by PARTIAL words,
3795 rounded *down* to a multiple of PARM_BOUNDARY.
3796 REG must be a hard register in this case.
3797 If REG is zero but PARTIAL is not, take all other actions for an
3798 argument partially in registers, but do not actually load any
3799 registers.
3801 EXTRA is the amount in bytes of extra space to leave next to this arg.
3802 This is ignored if an argument block has already been allocated.
3804 On a machine that lacks real push insns, ARGS_ADDR is the address of
3805 the bottom of the argument block for this call. We use indexing off there
3806 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3807 argument block has not been preallocated.
3809 ARGS_SO_FAR is the size of args previously pushed for this call.
3811 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3812 for arguments passed in registers. If nonzero, it will be the number
3813 of bytes required. */
3816 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3817 args_addr, args_so_far, reg_parm_stack_space,
3820 enum machine_mode mode;
3829 int reg_parm_stack_space;
3833 enum direction stack_direction
3834 #ifdef STACK_GROWS_DOWNWARD
3840 /* Decide where to pad the argument: `downward' for below,
3841 `upward' for above, or `none' for don't pad it.
3842 Default is below for small data on big-endian machines; else above. */
3843 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3845 /* Invert direction if stack is post-decrement.
3846 FIXME: why?  */
3847 if (STACK_PUSH_CODE == POST_DEC)
3848 if (where_pad != none)
3849 where_pad = (where_pad == downward ? upward : downward);
3851 xinner = x = protect_from_queue (x, 0);
3853 if (mode == BLKmode)
3855 /* Copy a block into the stack, entirely or partially. */
3858 int used = partial * UNITS_PER_WORD;
3859 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3867 /* USED is now the # of bytes we need not copy to the stack
3868 because registers will take care of them. */
3871 xinner = adjust_address (xinner, BLKmode, used);
3873 /* If the partial register-part of the arg counts in its stack size,
3874 skip the part of stack space corresponding to the registers.
3875 Otherwise, start copying to the beginning of the stack space,
3876 by setting SKIP to 0. */
3877 skip = (reg_parm_stack_space == 0) ? 0 : used;
3879 #ifdef PUSH_ROUNDING
3880 /* Do it with several push insns if that doesn't take lots of insns
3881 and if there is no difficulty with push insns that skip bytes
3882 on the stack for alignment purposes. */
3885 && GET_CODE (size) == CONST_INT
3887 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3888 /* Here we avoid the case of a structure whose weak alignment
3889 forces many pushes of a small amount of data,
3890 and such small pushes do rounding that causes trouble. */
3891 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3892 || align >= BIGGEST_ALIGNMENT
3893 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3894 == (align / BITS_PER_UNIT)))
3895 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3897 /* Push padding now if padding above and stack grows down,
3898 or if padding below and stack grows up.
3899 But if space already allocated, this has already been done. */
3900 if (extra && args_addr == 0
3901 && where_pad != none && where_pad != stack_direction)
3902 anti_adjust_stack (GEN_INT (extra));
3904 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3907 #endif /* PUSH_ROUNDING */
3911 /* Otherwise make space on the stack and copy the data
3912 to the address of that space. */
3914 /* Deduct words put into registers from the size we must copy. */
3917 if (GET_CODE (size) == CONST_INT)
3918 size = GEN_INT (INTVAL (size) - used);
3920 size = expand_binop (GET_MODE (size), sub_optab, size,
3921 GEN_INT (used), NULL_RTX, 0,
3925 /* Get the address of the stack space.
3926 In this case, we do not deal with EXTRA separately.
3927 A single stack adjust will do. */
3930 temp = push_block (size, extra, where_pad == downward);
3933 else if (GET_CODE (args_so_far) == CONST_INT)
3934 temp = memory_address (BLKmode,
3935 plus_constant (args_addr,
3936 skip + INTVAL (args_so_far)));
3938 temp = memory_address (BLKmode,
3939 plus_constant (gen_rtx_PLUS (Pmode,
3944 if (!ACCUMULATE_OUTGOING_ARGS)
3946 /* If the source is referenced relative to the stack pointer,
3947 copy it to another register to stabilize it. We do not need
3948 to do this if we know that we won't be changing sp. */
3950 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3951 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3952 temp = copy_to_reg (temp);
3955 target = gen_rtx_MEM (BLKmode, temp);
3959 set_mem_attributes (target, type, 1);
3960 /* Function incoming arguments may overlap with sibling call
3961 outgoing arguments and we cannot allow reordering of reads
3962 from function arguments with stores to outgoing arguments
3963 of sibling calls. */
3964 set_mem_alias_set (target, 0);
3967 /* ALIGN may well be larger than the alignment of TYPE, e.g. due to
3968 PARM_BOUNDARY. Assume the caller isn't lying. */
3969 set_mem_align (target, align);
3971 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3974 else if (partial > 0)
3976 /* Scalar partly in registers. */
3978 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3981 /* # words of start of argument
3982 that we must make space for but need not store. */
3983 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3984 int args_offset = INTVAL (args_so_far);
3987 /* Push padding now if padding above and stack grows down,
3988 or if padding below and stack grows up.
3989 But if space already allocated, this has already been done. */
3990 if (extra && args_addr == 0
3991 && where_pad != none && where_pad != stack_direction)
3992 anti_adjust_stack (GEN_INT (extra));
3994 /* If we make space by pushing it, we might as well push
3995 the real data. Otherwise, we can leave OFFSET nonzero
3996 and leave the space uninitialized. */
4000 /* Now NOT_STACK gets the number of words that we don't need to
4001 allocate on the stack. */
4002 not_stack = partial - offset;
4004 /* If the partial register-part of the arg counts in its stack size,
4005 skip the part of stack space corresponding to the registers.
4006 Otherwise, start copying to the beginning of the stack space,
4007 by setting SKIP to 0. */
4008 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4010 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4011 x = validize_mem (force_const_mem (mode, x));
4013 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4014 SUBREGs of such registers are not allowed. */
4015 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4016 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4017 x = copy_to_reg (x);
4019 /* Loop over all the words allocated on the stack for this arg. */
4020 /* We can do it by words, because any scalar bigger than a word
4021 has a size a multiple of a word. */
4022 #ifndef PUSH_ARGS_REVERSED
4023 for (i = not_stack; i < size; i++)
4025 for (i = size - 1; i >= not_stack; i--)
4027 if (i >= not_stack + offset)
4028 emit_push_insn (operand_subword_force (x, i, mode),
4029 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4031 GEN_INT (args_offset + ((i - not_stack + skip)
4033 reg_parm_stack_space, alignment_pad);
4040 /* Push padding now if padding above and stack grows down,
4041 or if padding below and stack grows up.
4042 But if space already allocated, this has already been done. */
4043 if (extra && args_addr == 0
4044 && where_pad != none && where_pad != stack_direction)
4045 anti_adjust_stack (GEN_INT (extra));
4047 #ifdef PUSH_ROUNDING
4048 if (args_addr == 0 && PUSH_ARGS)
4049 emit_single_push_insn (mode, x, type);
4053 if (GET_CODE (args_so_far) == CONST_INT)
4055 = memory_address (mode,
4056 plus_constant (args_addr,
4057 INTVAL (args_so_far)));
4059 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4061 dest = gen_rtx_MEM (mode, addr);
4064 set_mem_attributes (dest, type, 1);
4065 /* Function incoming arguments may overlap with sibling call
4066 outgoing arguments and we cannot allow reordering of reads
4067 from function arguments with stores to outgoing arguments
4068 of sibling calls. */
4069 set_mem_alias_set (dest, 0);
4072 emit_move_insn (dest, x);
4076 /* If part should go in registers, copy that part
4077 into the appropriate registers. Do this now, at the end,
4078 since mem-to-mem copies above may do function calls. */
4079 if (partial > 0 && reg != 0)
4081 /* Handle calls that pass values in multiple non-contiguous locations.
4082 The Irix 6 ABI has examples of this. */
4083 if (GET_CODE (reg) == PARALLEL)
4084 emit_group_load (reg, x, -1); /* ??? size? */
4086 move_block_to_reg (REGNO (reg), x, partial, mode);
4089 if (extra && args_addr == 0 && where_pad == stack_direction)
4090 anti_adjust_stack (GEN_INT (extra));
4092 if (alignment_pad && args_addr == 0)
4093 anti_adjust_stack (alignment_pad);
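/* Summary of the cases above (not in the original sources): BLKmode
   data is pushed by pieces or block-moved into stack space; a scalar
   partly in registers is pushed word by word; anything else becomes
   either a single push insn or a plain move into the preallocated
   argument block, with the register part loaded last.  */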
4096 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4097 operations.  */
4104 /* Only registers can be subtargets. */
4105 || GET_CODE (x) != REG
4106 /* If the register is readonly, it can't be set more than once. */
4107 || RTX_UNCHANGING_P (x)
4108 /* Don't use hard regs to avoid extending their life. */
4109 || REGNO (x) < FIRST_PSEUDO_REGISTER
4110 /* Avoid subtargets inside loops,
4111 since they hide some invariant expressions. */
4112 || preserve_subexpressions_p ())
4116 /* Expand an assignment that stores the value of FROM into TO.
4117 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4118 (This may contain a QUEUED rtx;
4119 if the value is constant, this rtx is a constant.)
4120 Otherwise, the returned value is NULL_RTX.
4122 SUGGEST_REG is no longer actually used.
4123 It used to mean, copy the value through a register
4124 and return that register, if that is possible.
4125 We now use WANT_VALUE to decide whether to do this. */
4128 expand_assignment (to, from, want_value, suggest_reg)
4131 int suggest_reg ATTRIBUTE_UNUSED;
4136 /* Don't crash if the lhs of the assignment was erroneous. */
4138 if (TREE_CODE (to) == ERROR_MARK)
4140 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4141 return want_value ? result : NULL_RTX;
4144 /* Assignment of a structure component needs special treatment
4145 if the structure component's rtx is not simply a MEM.
4146 Assignment of an array element at a constant index, and assignment of
4147 an array element in an unaligned packed structure field, has the same
4148 problem.  */
4150 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4151 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4152 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4154 enum machine_mode mode1;
4155 HOST_WIDE_INT bitsize, bitpos;
4163 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4164 &unsignedp, &volatilep);
4166 /* If we are going to use store_bit_field and extract_bit_field,
4167 make sure to_rtx will be safe for multiple use. */
4169 if (mode1 == VOIDmode && want_value)
4170 tem = stabilize_reference (tem);
4172 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4176 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4178 if (GET_CODE (to_rtx) != MEM)
4181 #ifdef POINTERS_EXTEND_UNSIGNED
4182 if (GET_MODE (offset_rtx) != Pmode)
4183 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4185 if (GET_MODE (offset_rtx) != ptr_mode)
4186 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4189 /* A constant address in TO_RTX can have VOIDmode, we must not try
4190 to call force_reg for that case. Avoid that case. */
4191 if (GET_CODE (to_rtx) == MEM
4192 && GET_MODE (to_rtx) == BLKmode
4193 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4195 && (bitpos % bitsize) == 0
4196 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4197 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4199 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4203 to_rtx = offset_address (to_rtx, offset_rtx,
4204 highest_pow2_factor_for_type (TREE_TYPE (to),
4208 if (GET_CODE (to_rtx) == MEM)
4210 /* If the field is at offset zero, we could have been given the
4211 DECL_RTX of the parent struct. Don't munge it. */
4212 to_rtx = shallow_copy_rtx (to_rtx);
4214 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4217 /* Deal with volatile and readonly fields. The former is only done
4218 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4219 if (volatilep && GET_CODE (to_rtx) == MEM)
4221 if (to_rtx == orig_to_rtx)
4222 to_rtx = copy_rtx (to_rtx);
4223 MEM_VOLATILE_P (to_rtx) = 1;
4226 if (TREE_CODE (to) == COMPONENT_REF
4227 && TREE_READONLY (TREE_OPERAND (to, 1)))
4229 if (to_rtx == orig_to_rtx)
4230 to_rtx = copy_rtx (to_rtx);
4231 RTX_UNCHANGING_P (to_rtx) = 1;
4234 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4236 if (to_rtx == orig_to_rtx)
4237 to_rtx = copy_rtx (to_rtx);
4238 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4241 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4243 /* Spurious cast for HPUX compiler. */
4244 ? ((enum machine_mode)
4245 TYPE_MODE (TREE_TYPE (to)))
4247 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4249 preserve_temp_slots (result);
4253 /* If the value is meaningful, convert RESULT to the proper mode.
4254 Otherwise, return nothing. */
4255 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4256 TYPE_MODE (TREE_TYPE (from)),
4258 TREE_UNSIGNED (TREE_TYPE (to)))
4262 /* If the rhs is a function call and its value is not an aggregate,
4263 call the function before we start to compute the lhs.
4264 This is needed for correct code for cases such as
4265 val = setjmp (buf) on machines where reference to val
4266 requires loading up part of an address in a separate insn.
4268 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4269 since it might be a promoted variable where the zero- or sign- extension
4270 needs to be done. Handling this in the normal way is safe because no
4271 computation is done before the call. */
4272 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4273 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4274 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4275 && GET_CODE (DECL_RTL (to)) == REG))
4280 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4282 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4284 /* Handle calls that return values in multiple non-contiguous locations.
4285 The Irix 6 ABI has examples of this. */
4286 if (GET_CODE (to_rtx) == PARALLEL)
4287 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4288 else if (GET_MODE (to_rtx) == BLKmode)
4289 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4292 #ifdef POINTERS_EXTEND_UNSIGNED
4293 if (POINTER_TYPE_P (TREE_TYPE (to))
4294 && GET_MODE (to_rtx) != GET_MODE (value))
4295 value = convert_memory_address (GET_MODE (to_rtx), value);
4297 emit_move_insn (to_rtx, value);
4299 preserve_temp_slots (to_rtx);
4302 return want_value ? to_rtx : NULL_RTX;
4305 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4306 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4309 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4311 /* Don't move directly into a return register. */
4312 if (TREE_CODE (to) == RESULT_DECL
4313 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4318 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4320 if (GET_CODE (to_rtx) == PARALLEL)
4321 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4323 emit_move_insn (to_rtx, temp);
4325 preserve_temp_slots (to_rtx);
4328 return want_value ? to_rtx : NULL_RTX;
4331 /* In case we are returning the contents of an object which overlaps
4332 the place the value is being stored, use a safe function when copying
4333 a value through a pointer into a structure value return block. */
4334 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4335 && current_function_returns_struct
4336 && !current_function_returns_pcc_struct)
4341 size = expr_size (from);
4342 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4344 if (TARGET_MEM_FUNCTIONS)
4345 emit_library_call (memmove_libfunc, LCT_NORMAL,
4346 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4347 XEXP (from_rtx, 0), Pmode,
4348 convert_to_mode (TYPE_MODE (sizetype),
4349 size, TREE_UNSIGNED (sizetype)),
4350 TYPE_MODE (sizetype));
4352 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4353 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4354 XEXP (to_rtx, 0), Pmode,
4355 convert_to_mode (TYPE_MODE (integer_type_node),
4357 TREE_UNSIGNED (integer_type_node)),
4358 TYPE_MODE (integer_type_node));
4360 preserve_temp_slots (to_rtx);
4363 return want_value ? to_rtx : NULL_RTX;
4366 /* Compute FROM and store the value in the rtx we got. */
4369 result = store_expr (from, to_rtx, want_value);
4370 preserve_temp_slots (result);
4373 return want_value ? result : NULL_RTX;
4376 /* Generate code for computing expression EXP,
4377 and storing the value into TARGET.
4378 TARGET may contain a QUEUED rtx.
4380 If WANT_VALUE & 1 is nonzero, return a copy of the value
4381 not in TARGET, so that we can be sure to use the proper
4382 value in a containing expression even if TARGET has something
4383 else stored in it. If possible, we copy the value through a pseudo
4384 and return that pseudo. Or, if the value is constant, we try to
4385 return the constant. In some cases, we return a pseudo
4386 copied *from* TARGET.
4388 If the mode is BLKmode then we may return TARGET itself.
4389 It turns out that in BLKmode it doesn't cause a problem,
4390 because C has no operators that could combine two different
4391 assignments into the same BLKmode object with different values
4392 with no sequence point. Will other languages need this to
4393 be more thorough?
4395 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4396 to catch quickly any cases where the caller uses the value
4397 and fails to set WANT_VALUE.
4399 If WANT_VALUE & 2 is set, this is a store into a call param on the
4400 stack, and block moves may need to be treated specially. */
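/* Illustrative recap of the WANT_VALUE bits (descriptive only, not
   macros from these sources):

     want_value & 1  the caller needs the stored value back
     want_value & 2  the store targets a call parameter on the stack

   so a call like store_expr (exp, target, 1 | 2) both returns the
   value and selects the special stack-parameter block-move handling.  */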
4403 store_expr (exp, target, want_value)
4409 int dont_return_target = 0;
4410 int dont_store_target = 0;
4412 if (VOID_TYPE_P (TREE_TYPE (exp)))
4414 /* C++ can generate ?: expressions with a throw expression in one
4415 branch and an rvalue in the other. Here, we resolve attempts to
4416 store the throw expression's nonexistent result. */
4419 expand_expr (exp, const0_rtx, VOIDmode, 0);
4422 if (TREE_CODE (exp) == COMPOUND_EXPR)
4424 /* Perform first part of compound expression, then assign from second
4426 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4427 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4429 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4431 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4433 /* For conditional expression, get safe form of the target. Then
4434 test the condition, doing the appropriate assignment on either
4435 side. This avoids the creation of unnecessary temporaries.
4436 For non-BLKmode, it is more efficient not to do this. */
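/* E.g. for BLKmode aggregates the C assignment

     struct big s, a, b;
     s = cond ? a : b;

   expands as a jump on COND followed by a block store of A or B
   straight into S, rather than building the chosen value in a
   temporary and copying it over.  */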
4438 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4441 target = protect_from_queue (target, 1);
4443 do_pending_stack_adjust ();
4445 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4446 start_cleanup_deferral ();
4447 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4448 end_cleanup_deferral ();
4450 emit_jump_insn (gen_jump (lab2));
4453 start_cleanup_deferral ();
4454 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4455 end_cleanup_deferral ();
4460 return want_value & 1 ? target : NULL_RTX;
4462 else if (queued_subexp_p (target))
4463 /* If target contains a postincrement, let's not risk
4464 using it as the place to generate the rhs. */
4466 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4468 /* Expand EXP into a new pseudo. */
4469 temp = gen_reg_rtx (GET_MODE (target));
4470 temp = expand_expr (exp, temp, GET_MODE (target),
4472 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4475 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4477 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4479 /* If target is volatile, ANSI requires accessing the value
4480 *from* the target, if it is accessed. So make that happen.
4481 In no case return the target itself. */
4482 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4483 dont_return_target = 1;
4485 else if ((want_value & 1) != 0
4486 && GET_CODE (target) == MEM
4487 && ! MEM_VOLATILE_P (target)
4488 && GET_MODE (target) != BLKmode)
4489 /* If target is in memory and caller wants value in a register instead,
4490 arrange that. Pass TARGET as target for expand_expr so that,
4491 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4492 We know expand_expr will not use the target in that case.
4493 Don't do this if TARGET is volatile because we are supposed
4494 to write it and then read it. */
4496 temp = expand_expr (exp, target, GET_MODE (target),
4497 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4498 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4500 /* If TEMP is already in the desired TARGET, only copy it from
4501 memory and don't store it there again. */
4503 || (rtx_equal_p (temp, target)
4504 && ! side_effects_p (temp) && ! side_effects_p (target)))
4505 dont_store_target = 1;
4506 temp = copy_to_reg (temp);
4508 dont_return_target = 1;
4510 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4511 /* If this is a scalar in a register that is stored in a wider mode
4512 than the declared mode, compute the result into its declared mode
4513 and then convert to the wider mode. Our value is the computed
4516 rtx inner_target = 0;
4518 /* If we don't want a value, we can do the conversion inside EXP,
4519 which will often result in some optimizations. Do the conversion
4520 in two steps: first change the signedness, if needed, then
4521 the extend. But don't do this if the type of EXP is a subtype
4522 of something else since then the conversion might involve
4523 more than just converting modes. */
4524 if ((want_value & 1) == 0
4525 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4526 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4528 if (TREE_UNSIGNED (TREE_TYPE (exp))
4529 != SUBREG_PROMOTED_UNSIGNED_P (target))
4531 ((*lang_hooks.types.signed_or_unsigned_type)
4532 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4534 exp = convert ((*lang_hooks.types.type_for_mode)
4535 (GET_MODE (SUBREG_REG (target)),
4536 SUBREG_PROMOTED_UNSIGNED_P (target)),
4539 inner_target = SUBREG_REG (target);
4542 temp = expand_expr (exp, inner_target, VOIDmode,
4543 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4545 /* If TEMP is a MEM and we want a result value, make the access
4546 now so it gets done only once. Strictly speaking, this is
4547 only necessary if the MEM is volatile, or if the address
4548 overlaps TARGET. But not performing the load twice also
4549 reduces the amount of rtl we generate and then have to CSE. */
4550 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4551 temp = copy_to_reg (temp);
4553 /* If TEMP is a VOIDmode constant, use convert_modes to make
4554 sure that we properly convert it. */
4555 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4557 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4558 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4559 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4560 GET_MODE (target), temp,
4561 SUBREG_PROMOTED_UNSIGNED_P (target));
4564 convert_move (SUBREG_REG (target), temp,
4565 SUBREG_PROMOTED_UNSIGNED_P (target));
4567 /* If we promoted a constant, change the mode back down to match
4568 target. Otherwise, the caller might get confused by a result whose
4569 mode is larger than expected. */
4571 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4573 if (GET_MODE (temp) != VOIDmode)
4575 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4576 SUBREG_PROMOTED_VAR_P (temp) = 1;
4577 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4578 SUBREG_PROMOTED_UNSIGNED_P (target));
4581 temp = convert_modes (GET_MODE (target),
4582 GET_MODE (SUBREG_REG (target)),
4583 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4586 return want_value & 1 ? temp : NULL_RTX;
4590 temp = expand_expr (exp, target, GET_MODE (target),
4591 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4592 /* Return TARGET if it's a specified hardware register.
4593 If TARGET is a volatile mem ref, either return TARGET
4594 or return a reg copied *from* TARGET; ANSI requires this.
4596 Otherwise, if TEMP is not TARGET, return TEMP
4597 if it is constant (for efficiency),
4598 or if we really want the correct value. */
4599 if (!(target && GET_CODE (target) == REG
4600 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4601 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4602 && ! rtx_equal_p (temp, target)
4603 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4604 dont_return_target = 1;
4607 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4608 the same as that of TARGET, adjust the constant. This is needed, for
4609 example, in case it is a CONST_DOUBLE and we want only a word-sized
4611 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4612 && TREE_CODE (exp) != ERROR_MARK
4613 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4614 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4615 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4617 /* If value was not generated in the target, store it there.
4618 Convert the value to TARGET's type first if necessary.
4619 If TEMP and TARGET compare equal according to rtx_equal_p, but
4620 one or both of them are volatile memory refs, we have to distinguish
4622 - expand_expr has used TARGET. In this case, we must not generate
4623 another copy. This can be detected by TARGET being equal according
4625 - expand_expr has not used TARGET - that means that the source just
4626 happens to have the same RTX form. Since temp will have been created
4627 by expand_expr, it will compare unequal according to == .
4628 We must generate a copy in this case, to reach the correct number
4629 of volatile memory references. */
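/* Concrete case: for "volatile int *p; *p = *p;" the source and
   destination are distinct MEMs that rtx_equal_p considers equal;
   only the pointer inequality TEMP != TARGET (together with
   side_effects_p) reveals that TARGET was not reused, so the copy,
   and with it both volatile accesses, must still be emitted.  */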
4631 if ((! rtx_equal_p (temp, target)
4632 || (temp != target && (side_effects_p (temp)
4633 || side_effects_p (target))))
4634 && TREE_CODE (exp) != ERROR_MARK
4635 && ! dont_store_target
4636 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4637 but TARGET is not valid memory reference, TEMP will differ
4638 from TARGET although it is really the same location. */
4639 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4640 || target != DECL_RTL_IF_SET (exp))
4641 /* If there's nothing to copy, don't bother. Don't call expr_size
4642 unless necessary, because some front ends' (e.g. C++) expr_size hook
4643 aborts on objects that are not supposed to be bit-copied or
4645 && expr_size (exp) != const0_rtx)
4647 target = protect_from_queue (target, 1);
4648 if (GET_MODE (temp) != GET_MODE (target)
4649 && GET_MODE (temp) != VOIDmode)
4651 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4652 if (dont_return_target)
4654 /* In this case, we will return TEMP,
4655 so make sure it has the proper mode.
4656 But don't forget to store the value into TARGET. */
4657 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4658 emit_move_insn (target, temp);
4661 convert_move (target, temp, unsignedp);
4664 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4666 /* Handle copying a string constant into an array. The string
4667 constant may be shorter than the array. So copy just the string's
4668 actual length, and clear the rest. First get the size of the data
4669 type of the string, which is actually the size of the target. */
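/* For example, for

     char buf[8] = "abc";

   expr_size is 8 while TREE_STRING_LENGTH is 4 (the characters plus
   the terminating nul), so 4 bytes are block-copied and the
   remaining 4 bytes of BUF are cleared below.  */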
4670 rtx size = expr_size (exp);
4672 if (GET_CODE (size) == CONST_INT
4673 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4674 emit_block_move (target, temp, size,
4676 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4679 /* Compute the size of the data to copy from the string. */
4681 = size_binop (MIN_EXPR,
4682 make_tree (sizetype, size),
4683 size_int (TREE_STRING_LENGTH (exp)));
4685 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4687 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4690 /* Copy that much. */
4691 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4692 TREE_UNSIGNED (sizetype));
4693 emit_block_move (target, temp, copy_size_rtx,
4695 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4697 /* Figure out how much is left in TARGET that we have to clear.
4698 Do all calculations in ptr_mode. */
4699 if (GET_CODE (copy_size_rtx) == CONST_INT)
4701 size = plus_constant (size, -INTVAL (copy_size_rtx));
4702 target = adjust_address (target, BLKmode,
4703 INTVAL (copy_size_rtx));
4707 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4708 copy_size_rtx, NULL_RTX, 0,
4711 #ifdef POINTERS_EXTEND_UNSIGNED
4712 if (GET_MODE (copy_size_rtx) != Pmode)
4713 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4714 TREE_UNSIGNED (sizetype));
4717 target = offset_address (target, copy_size_rtx,
4718 highest_pow2_factor (copy_size));
4719 label = gen_label_rtx ();
4720 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4721 GET_MODE (size), 0, label);
4724 if (size != const0_rtx)
4725 clear_storage (target, size);
4731 /* Handle calls that return values in multiple non-contiguous locations.
4732 The Irix 6 ABI has examples of this. */
4733 else if (GET_CODE (target) == PARALLEL)
4734 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4735 else if (GET_MODE (temp) == BLKmode)
4736 emit_block_move (target, temp, expr_size (exp),
4738 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4740 emit_move_insn (target, temp);
4743 /* If we don't want a value, return NULL_RTX. */
4744 if ((want_value & 1) == 0)
4747 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4748 ??? The latter test doesn't seem to make sense. */
4749 else if (dont_return_target && GET_CODE (temp) != MEM)
4752 /* Return TARGET itself if it is a hard register. */
4753 else if ((want_value & 1) != 0
4754 && GET_MODE (target) != BLKmode
4755 && ! (GET_CODE (target) == REG
4756 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4757 return copy_to_reg (target);
4763 /* Return 1 if EXP just contains zeros. */
4771 switch (TREE_CODE (exp))
4775 case NON_LVALUE_EXPR:
4776 case VIEW_CONVERT_EXPR:
4777 return is_zeros_p (TREE_OPERAND (exp, 0));
4780 return integer_zerop (exp);
4784 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4787 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4790 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4791 elt = TREE_CHAIN (elt))
4792 if (!is_zeros_p (TREE_VALUE (elt)))
4798 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4799 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4800 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4801 if (! is_zeros_p (TREE_VALUE (elt)))
4811 /* Return 1 if EXP contains mostly (3/4) zeros. */
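/* The 3/4 test is done without division: ELTS elements of which
   ZEROS are zero qualify when 4 * zeros >= 3 * elts. E.g. 6 zeros
   out of 8 elements gives 24 >= 24, so the constructor counts as
   mostly zeros.  */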
4814 mostly_zeros_p (exp)
4817 if (TREE_CODE (exp) == CONSTRUCTOR)
4819 int elts = 0, zeros = 0;
4820 tree elt = CONSTRUCTOR_ELTS (exp);
4821 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4823 /* If there are no ranges of true bits, it is all zero. */
4824 return elt == NULL_TREE;
4826 for (; elt; elt = TREE_CHAIN (elt))
4828 /* We do not handle the case where the index is a RANGE_EXPR,
4829 so the statistic will be somewhat inaccurate.
4830 We do make a more accurate count in store_constructor itself,
4831 and since this function is only used for nested array elements,
4832 this should be close enough. */
4833 if (mostly_zeros_p (TREE_VALUE (elt)))
4838 return 4 * zeros >= 3 * elts;
4841 return is_zeros_p (exp);
4844 /* Helper function for store_constructor.
4845 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4846 TYPE is the type of the CONSTRUCTOR, not the element type.
4847 CLEARED is as for store_constructor.
4848 ALIAS_SET is the alias set to use for any stores.
4850 This provides a recursive shortcut back to store_constructor when it isn't
4851 necessary to go through store_field. This is so that we can pass through
4852 the cleared field to let store_constructor know that we may not have to
4853 clear a substructure if the outer structure has already been cleared. */
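/* For instance, when expanding

     struct pair { int a, b; };
     struct outer { struct pair in; int c; } v = { { 0, 0 }, 1 };

   the nested CONSTRUCTOR for V.IN recurses into store_constructor
   with CLEARED still set, so the already-zeroed substructure is not
   cleared a second time.  */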
4856 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4859 unsigned HOST_WIDE_INT bitsize;
4860 HOST_WIDE_INT bitpos;
4861 enum machine_mode mode;
4866 if (TREE_CODE (exp) == CONSTRUCTOR
4867 && bitpos % BITS_PER_UNIT == 0
4868 /* If we have a nonzero bitpos for a register target, then we just
4869 let store_field do the bitfield handling. This is unlikely to
4870 generate unnecessary clear instructions anyway. */
4871 && (bitpos == 0 || GET_CODE (target) == MEM))
4873 if (GET_CODE (target) == MEM)
4875 = adjust_address (target,
4876 GET_MODE (target) == BLKmode
4878 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4879 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4882 /* Update the alias set, if required. */
4883 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4884 && MEM_ALIAS_SET (target) != 0)
4886 target = copy_rtx (target);
4887 set_mem_alias_set (target, alias_set);
4890 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4893 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4897 /* Store the value of constructor EXP into the rtx TARGET.
4898 TARGET is either a REG or a MEM; we know it cannot conflict, since
4899 safe_from_p has been called.
4900 CLEARED is true if TARGET is known to have been zeroed.
4901 SIZE is the number of bytes of TARGET we are allowed to modify: this
4902 may not be the same as the size of EXP if we are assigning to a field
4903 which has been packed to exclude padding bits. */
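/* Sketch of the SIZE distinction (layout numbers are only
   illustrative): if EXP's type occupies 8 bytes including 3 bytes
   of tail padding, but the field being assigned was packed down to
   5 bytes, SIZE is 5 and the padding bytes must not be written.  */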
4906 store_constructor (exp, target, cleared, size)
4912 tree type = TREE_TYPE (exp);
4913 #ifdef WORD_REGISTER_OPERATIONS
4914 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4917 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4918 || TREE_CODE (type) == QUAL_UNION_TYPE)
4922 /* We either clear the aggregate or indicate the value is dead. */
4923 if ((TREE_CODE (type) == UNION_TYPE
4924 || TREE_CODE (type) == QUAL_UNION_TYPE)
4926 && ! CONSTRUCTOR_ELTS (exp))
4927 /* If the constructor is empty, clear the union. */
4929 clear_storage (target, expr_size (exp));
4933 /* If we are building a static constructor into a register,
4934 set the initial value as zero so we can fold the value into
4935 a constant. But if more than one register is involved,
4936 this probably loses. */
4937 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4938 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4940 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4944 /* If the constructor has fewer fields than the structure
4945 or if we are initializing the structure to mostly zeros,
4946 clear the whole structure first. Don't do this if TARGET is a
4947 register whose mode size isn't equal to SIZE since clear_storage
4948 can't handle this case. */
4949 else if (! cleared && size > 0
4950 && ((list_length (CONSTRUCTOR_ELTS (exp))
4951 != fields_length (type))
4952 || mostly_zeros_p (exp))
4953 && (GET_CODE (target) != REG
4954 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4957 rtx xtarget = target;
4959 if (readonly_fields_p (type))
4961 xtarget = copy_rtx (xtarget);
4962 RTX_UNCHANGING_P (xtarget) = 1;
4965 clear_storage (xtarget, GEN_INT (size));
4970 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4972 /* Store each element of the constructor into
4973 the corresponding field of TARGET. */
4975 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4977 tree field = TREE_PURPOSE (elt);
4978 tree value = TREE_VALUE (elt);
4979 enum machine_mode mode;
4980 HOST_WIDE_INT bitsize;
4981 HOST_WIDE_INT bitpos = 0;
4983 rtx to_rtx = target;
4985 /* Just ignore missing fields.
4986 We cleared the whole structure, above,
4987 if any fields are missing. */
4991 if (cleared && is_zeros_p (value))
4994 if (host_integerp (DECL_SIZE (field), 1))
4995 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4999 mode = DECL_MODE (field);
5000 if (DECL_BIT_FIELD (field))
5003 offset = DECL_FIELD_OFFSET (field);
5004 if (host_integerp (offset, 0)
5005 && host_integerp (bit_position (field), 0))
5007 bitpos = int_bit_position (field);
5011 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5017 if (contains_placeholder_p (offset))
5018 offset = build (WITH_RECORD_EXPR, sizetype,
5019 offset, make_tree (TREE_TYPE (exp), target));
5021 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5022 if (GET_CODE (to_rtx) != MEM)
5025 #ifdef POINTERS_EXTEND_UNSIGNED
5026 if (GET_MODE (offset_rtx) != Pmode)
5027 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5029 if (GET_MODE (offset_rtx) != ptr_mode)
5030 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5033 to_rtx = offset_address (to_rtx, offset_rtx,
5034 highest_pow2_factor (offset));
5037 if (TREE_READONLY (field))
5039 if (GET_CODE (to_rtx) == MEM)
5040 to_rtx = copy_rtx (to_rtx);
5042 RTX_UNCHANGING_P (to_rtx) = 1;
5045 #ifdef WORD_REGISTER_OPERATIONS
5046 /* If this initializes a field that is smaller than a word, at the
5047 start of a word, try to widen it to a full word.
5048 This special case allows us to output C++ member function
5049 initializations in a form that the optimizers can understand. */
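/* E.g. storing the INTEGER_CST 3 into the 16-bit field S of a
   register-held

     struct { short s; short t; } x;

   is widened to a full-word store of 3 (shifted up on big-endian
   targets), which the optimizers follow far more easily than a
   bit-field insertion.  */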
5050 if (GET_CODE (target) == REG
5051 && bitsize < BITS_PER_WORD
5052 && bitpos % BITS_PER_WORD == 0
5053 && GET_MODE_CLASS (mode) == MODE_INT
5054 && TREE_CODE (value) == INTEGER_CST
5056 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5058 tree type = TREE_TYPE (value);
5060 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5062 type = (*lang_hooks.types.type_for_size)
5063 (BITS_PER_WORD, TREE_UNSIGNED (type));
5064 value = convert (type, value);
5067 if (BYTES_BIG_ENDIAN)
5069 = fold (build (LSHIFT_EXPR, type, value,
5070 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5071 bitsize = BITS_PER_WORD;
5076 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5077 && DECL_NONADDRESSABLE_P (field))
5079 to_rtx = copy_rtx (to_rtx);
5080 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5083 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5084 value, type, cleared,
5085 get_alias_set (TREE_TYPE (field)));
5088 else if (TREE_CODE (type) == ARRAY_TYPE
5089 || TREE_CODE (type) == VECTOR_TYPE)
5094 tree domain = TYPE_DOMAIN (type);
5095 tree elttype = TREE_TYPE (type);
5097 HOST_WIDE_INT minelt = 0;
5098 HOST_WIDE_INT maxelt = 0;
5100 /* Vectors are like arrays, but the domain is stored via an array
5102 if (TREE_CODE (type) == VECTOR_TYPE)
5104 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5105 the same field as TYPE_DOMAIN, we are not guaranteed that
5107 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5108 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5111 const_bounds_p = (TYPE_MIN_VALUE (domain)
5112 && TYPE_MAX_VALUE (domain)
5113 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5114 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5116 /* If we have constant bounds for the range of the type, get them. */
5119 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5120 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5123 /* If the constructor has fewer elements than the array,
5124 clear the whole array first. Similarly if this is
5125 a static constructor of a non-BLKmode object. */
5126 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5130 HOST_WIDE_INT count = 0, zero_count = 0;
5131 need_to_clear = ! const_bounds_p;
5133 /* This loop is a more accurate version of the loop in
5134 mostly_zeros_p (it handles RANGE_EXPR in an index).
5135 It is also needed to check for missing elements. */
5136 for (elt = CONSTRUCTOR_ELTS (exp);
5137 elt != NULL_TREE && ! need_to_clear;
5138 elt = TREE_CHAIN (elt))
5140 tree index = TREE_PURPOSE (elt);
5141 HOST_WIDE_INT this_node_count;
5143 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5145 tree lo_index = TREE_OPERAND (index, 0);
5146 tree hi_index = TREE_OPERAND (index, 1);
5148 if (! host_integerp (lo_index, 1)
5149 || ! host_integerp (hi_index, 1))
5155 this_node_count = (tree_low_cst (hi_index, 1)
5156 - tree_low_cst (lo_index, 1) + 1);
5159 this_node_count = 1;
5161 count += this_node_count;
5162 if (mostly_zeros_p (TREE_VALUE (elt)))
5163 zero_count += this_node_count;
5166 /* Clear the entire array first if there are any missing elements,
5167 or if the incidence of zero elements is >= 75%. */
5169 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5173 if (need_to_clear && size > 0)
5178 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5180 clear_storage (target, GEN_INT (size));
5184 else if (REG_P (target))
5185 /* Inform later passes that the old value is dead. */
5186 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5188 /* Store each element of the constructor into
5189 the corresponding element of TARGET, determined
5190 by counting the elements. */
5191 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5193 elt = TREE_CHAIN (elt), i++)
5195 enum machine_mode mode;
5196 HOST_WIDE_INT bitsize;
5197 HOST_WIDE_INT bitpos;
5199 tree value = TREE_VALUE (elt);
5200 tree index = TREE_PURPOSE (elt);
5201 rtx xtarget = target;
5203 if (cleared && is_zeros_p (value))
5206 unsignedp = TREE_UNSIGNED (elttype);
5207 mode = TYPE_MODE (elttype);
5208 if (mode == BLKmode)
5209 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5210 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5213 bitsize = GET_MODE_BITSIZE (mode);
5215 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5217 tree lo_index = TREE_OPERAND (index, 0);
5218 tree hi_index = TREE_OPERAND (index, 1);
5219 rtx index_r, pos_rtx, loop_end;
5220 struct nesting *loop;
5221 HOST_WIDE_INT lo, hi, count;
5224 /* If the range is constant and "small", unroll the loop. */
5226 && host_integerp (lo_index, 0)
5227 && host_integerp (hi_index, 0)
5228 && (lo = tree_low_cst (lo_index, 0),
5229 hi = tree_low_cst (hi_index, 0),
5230 count = hi - lo + 1,
5231 (GET_CODE (target) != MEM
5233 || (host_integerp (TYPE_SIZE (elttype), 1)
5234 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5237 lo -= minelt; hi -= minelt;
5238 for (; lo <= hi; lo++)
5240 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5242 if (GET_CODE (target) == MEM
5243 && !MEM_KEEP_ALIAS_SET_P (target)
5244 && TREE_CODE (type) == ARRAY_TYPE
5245 && TYPE_NONALIASED_COMPONENT (type))
5247 target = copy_rtx (target);
5248 MEM_KEEP_ALIAS_SET_P (target) = 1;
5251 store_constructor_field
5252 (target, bitsize, bitpos, mode, value, type, cleared,
5253 get_alias_set (elttype));
5258 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5259 loop_end = gen_label_rtx ();
5261 unsignedp = TREE_UNSIGNED (domain);
5263 index = build_decl (VAR_DECL, NULL_TREE, domain);
5266 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5268 SET_DECL_RTL (index, index_r);
5269 if (TREE_CODE (value) == SAVE_EXPR
5270 && SAVE_EXPR_RTL (value) == 0)
5272 /* Make sure value gets expanded once before the
5274 expand_expr (value, const0_rtx, VOIDmode, 0);
5277 store_expr (lo_index, index_r, 0);
5278 loop = expand_start_loop (0);
5280 /* Assign value to element index. */
5282 = convert (ssizetype,
5283 fold (build (MINUS_EXPR, TREE_TYPE (index),
5284 index, TYPE_MIN_VALUE (domain))));
5285 position = size_binop (MULT_EXPR, position,
5287 TYPE_SIZE_UNIT (elttype)));
5289 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5290 xtarget = offset_address (target, pos_rtx,
5291 highest_pow2_factor (position));
5292 xtarget = adjust_address (xtarget, mode, 0);
5293 if (TREE_CODE (value) == CONSTRUCTOR)
5294 store_constructor (value, xtarget, cleared,
5295 bitsize / BITS_PER_UNIT);
5297 store_expr (value, xtarget, 0);
5299 expand_exit_loop_if_false (loop,
5300 build (LT_EXPR, integer_type_node,
5303 expand_increment (build (PREINCREMENT_EXPR,
5305 index, integer_one_node), 0, 0);
5307 emit_label (loop_end);
5310 else if ((index != 0 && ! host_integerp (index, 0))
5311 || ! host_integerp (TYPE_SIZE (elttype), 1))
5316 index = ssize_int (1);
5319 index = convert (ssizetype,
5320 fold (build (MINUS_EXPR, index,
5321 TYPE_MIN_VALUE (domain))));
5323 position = size_binop (MULT_EXPR, index,
5325 TYPE_SIZE_UNIT (elttype)));
5326 xtarget = offset_address (target,
5327 expand_expr (position, 0, VOIDmode, 0),
5328 highest_pow2_factor (position));
5329 xtarget = adjust_address (xtarget, mode, 0);
5330 store_expr (value, xtarget, 0);
5335 bitpos = ((tree_low_cst (index, 0) - minelt)
5336 * tree_low_cst (TYPE_SIZE (elttype), 1));
5338 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5340 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5341 && TREE_CODE (type) == ARRAY_TYPE
5342 && TYPE_NONALIASED_COMPONENT (type))
5344 target = copy_rtx (target);
5345 MEM_KEEP_ALIAS_SET_P (target) = 1;
5348 store_constructor_field (target, bitsize, bitpos, mode, value,
5349 type, cleared, get_alias_set (elttype));
5355 /* Set constructor assignments. */
5356 else if (TREE_CODE (type) == SET_TYPE)
5358 tree elt = CONSTRUCTOR_ELTS (exp);
5359 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5360 tree domain = TYPE_DOMAIN (type);
5361 tree domain_min, domain_max, bitlength;
5363 /* The default implementation strategy is to extract the constant
5364 parts of the constructor, use that to initialize the target,
5365 and then "or" in whatever non-constant ranges we need in addition.
5367 If a large set is all zero or all ones, it is
5368 probably better to set it using memset (if available) or bzero.
5369 Also, if a large set has just a single range, it may also be
5370 better to first clear the whole set (using
5371 bzero/memset) and then set the bits we want. */
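/* SET_TYPE constructors come from languages with set types, e.g. a
   Pascal-style "set of 0..31"; a constructor denoting { 3, 10..12 }
   becomes a constant word with bits 3 and 10-12 set, while any
   run-time bounds would be or'ed in afterwards via setbits.  */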
5373 /* Check for all zeros. */
5374 if (elt == NULL_TREE && size > 0)
5377 clear_storage (target, GEN_INT (size));
5381 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5382 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5383 bitlength = size_binop (PLUS_EXPR,
5384 size_diffop (domain_max, domain_min),
5387 nbits = tree_low_cst (bitlength, 1);
5389 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5390 are "complicated" (more than one range), initialize (the
5391 constant parts) by copying from a constant. */
5392 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5393 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5395 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5396 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5397 char *bit_buffer = (char *) alloca (nbits);
5398 HOST_WIDE_INT word = 0;
5399 unsigned int bit_pos = 0;
5400 unsigned int ibit = 0;
5401 unsigned int offset = 0; /* In bytes from beginning of set. */
5403 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5406 if (bit_buffer[ibit])
5408 if (BYTES_BIG_ENDIAN)
5409 word |= (1 << (set_word_size - 1 - bit_pos));
5411 word |= 1 << bit_pos;
5415 if (bit_pos >= set_word_size || ibit == nbits)
5417 if (word != 0 || ! cleared)
5419 rtx datum = GEN_INT (word);
5422 /* The assumption here is that it is safe to use
5423 XEXP if the set is multi-word, but not if
5424 it's single-word. */
5425 if (GET_CODE (target) == MEM)
5426 to_rtx = adjust_address (target, mode, offset);
5427 else if (offset == 0)
5431 emit_move_insn (to_rtx, datum);
5438 offset += set_word_size / BITS_PER_UNIT;
5443 /* Don't bother clearing storage if the set is all ones. */
5444 if (TREE_CHAIN (elt) != NULL_TREE
5445 || (TREE_PURPOSE (elt) == NULL_TREE
5447 : ( ! host_integerp (TREE_VALUE (elt), 0)
5448 || ! host_integerp (TREE_PURPOSE (elt), 0)
5449 || (tree_low_cst (TREE_VALUE (elt), 0)
5450 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5451 != (HOST_WIDE_INT) nbits))))
5452 clear_storage (target, expr_size (exp));
5454 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5456 /* Start of range of element or NULL. */
5457 tree startbit = TREE_PURPOSE (elt);
5458 /* End of range of element, or element value. */
5459 tree endbit = TREE_VALUE (elt);
5460 HOST_WIDE_INT startb, endb;
5461 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5463 bitlength_rtx = expand_expr (bitlength,
5464 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5466 /* Handle non-range tuple element like [ expr ]. */
5467 if (startbit == NULL_TREE)
5469 startbit = save_expr (endbit);
5473 startbit = convert (sizetype, startbit);
5474 endbit = convert (sizetype, endbit);
5475 if (! integer_zerop (domain_min))
5477 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5478 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5480 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5481 EXPAND_CONST_ADDRESS);
5482 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5483 EXPAND_CONST_ADDRESS);
5489 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5490 (GET_MODE (target), 0),
5493 emit_move_insn (targetx, target);
5496 else if (GET_CODE (target) == MEM)
5501 /* Optimization: If startbit and endbit are constants divisible
5502 by BITS_PER_UNIT, call memset instead. */
5503 if (TARGET_MEM_FUNCTIONS
5504 && TREE_CODE (startbit) == INTEGER_CST
5505 && TREE_CODE (endbit) == INTEGER_CST
5506 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5507 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5509 emit_library_call (memset_libfunc, LCT_NORMAL,
5511 plus_constant (XEXP (targetx, 0),
5512 startb / BITS_PER_UNIT),
5514 constm1_rtx, TYPE_MODE (integer_type_node),
5515 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5516 TYPE_MODE (sizetype));
5519 emit_library_call (setbits_libfunc, LCT_NORMAL,
5520 VOIDmode, 4, XEXP (targetx, 0),
5521 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5522 startbit_rtx, TYPE_MODE (sizetype),
5523 endbit_rtx, TYPE_MODE (sizetype));
5526 emit_move_insn (target, targetx);
5534 /* Store the value of EXP (an expression tree)
5535 into a subfield of TARGET which has mode MODE and occupies
5536 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5537 If MODE is VOIDmode, it means that we are storing into a bit-field.
5539 If VALUE_MODE is VOIDmode, return nothing in particular.
5540 UNSIGNEDP is not used in this case.
5542 Otherwise, return an rtx for the value stored. This rtx
5543 has mode VALUE_MODE if that is convenient to do.
5544 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5546 TYPE is the type of the underlying object,
5548 ALIAS_SET is the alias set for the destination. This value will
5549 (in general) be different from that for TARGET, since TARGET is a
5550 reference to the containing structure. */
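/* For example (assuming a typical little-endian bit-field layout),

     struct { unsigned a : 3, b : 9; } x;
     x.b = 42;

   enters store_field with BITPOS 3, BITSIZE 9 and MODE VOIDmode,
   while TYPE and ALIAS_SET describe the containing object X.  */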
5553 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5556 HOST_WIDE_INT bitsize;
5557 HOST_WIDE_INT bitpos;
5558 enum machine_mode mode;
5560 enum machine_mode value_mode;
5565 HOST_WIDE_INT width_mask = 0;
5567 if (TREE_CODE (exp) == ERROR_MARK)
5570 /* If we have nothing to store, do nothing unless the expression has
5573 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5574 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5575 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5577 /* If we are storing into an unaligned field of an aligned union that is
5578 in a register, we may have the mode of TARGET being an integer mode but
5579 MODE == BLKmode. In that case, get an aligned object whose size and
5580 alignment are the same as TARGET and store TARGET into it (we can avoid
5581 the store if the field being stored is the entire width of TARGET). Then
5582 call ourselves recursively to store the field into a BLKmode version of
5583 that object. Finally, load from the object into TARGET. This is not
5584 very efficient in general, but should only be slightly more expensive
5585 than the otherwise-required unaligned accesses. Perhaps this can be
5586 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5587 twice, once with emit_move_insn and once via store_field. */
5590 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5592 rtx object = assign_temp (type, 0, 1, 1);
5593 rtx blk_object = adjust_address (object, BLKmode, 0);
5595 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5596 emit_move_insn (object, target);
5598 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5601 emit_move_insn (target, object);
5603 /* We want to return the BLKmode version of the data. */
5607 if (GET_CODE (target) == CONCAT)
5609 /* We're storing into a struct containing a single __complex. */
5613 return store_expr (exp, target, 0);
5616 /* If the structure is in a register or if the component
5617 is a bit field, we cannot use addressing to access it.
5618 Use bit-field techniques or SUBREG to store in it. */
5620 if (mode == VOIDmode
5621 || (mode != BLKmode && ! direct_store[(int) mode]
5622 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5623 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5624 || GET_CODE (target) == REG
5625 || GET_CODE (target) == SUBREG
5626 /* If the field isn't aligned enough to store as an ordinary memref,
5627 store it as a bit field. */
5629 && ((SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5630 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)))
5631 || bitpos % GET_MODE_ALIGNMENT (mode)))
5632 /* If the RHS and field are a constant size and the size of the
5633 RHS isn't the same size as the bitfield, we must use bitfield
5636 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5637 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5639 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5641 /* If BITSIZE is narrower than the size of the type of EXP
5642 we will be narrowing TEMP. Normally, what's wanted are the
5643 low-order bits. However, if EXP's type is a record and this is
5644 a big-endian machine, we want the upper BITSIZE bits.
5645 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5646 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5647 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5648 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5649 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5653 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5655 if (mode != VOIDmode && mode != BLKmode
5656 && mode != TYPE_MODE (TREE_TYPE (exp)))
5657 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5659 /* If the modes of TARGET and TEMP are both BLKmode, both
5660 must be in memory and BITPOS must be aligned on a byte
5661 boundary. If so, we simply do a block copy. */
5662 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5664 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5665 || bitpos % BITS_PER_UNIT != 0)
5668 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5669 emit_block_move (target, temp,
5670 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5674 return value_mode == VOIDmode ? const0_rtx : target;
5677 /* Store the value in the bitfield. */
5678 store_bit_field (target, bitsize, bitpos, mode, temp,
5679 int_size_in_bytes (type));
5681 if (value_mode != VOIDmode)
5683 /* The caller wants an rtx for the value.
5684 If possible, avoid refetching from the bitfield itself. */
5686 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5689 enum machine_mode tmode;
5691 tmode = GET_MODE (temp);
5692 if (tmode == VOIDmode)
5696 return expand_and (tmode, temp,
5697 gen_int_mode (width_mask, tmode),
5700 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5701 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5702 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5705 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5706 NULL_RTX, value_mode, VOIDmode,
5707 int_size_in_bytes (type));
5713 rtx addr = XEXP (target, 0);
5714 rtx to_rtx = target;
5716 /* If a value is wanted, it must be the lhs;
5717 so make the address stable for multiple use. */
5719 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5720 && ! CONSTANT_ADDRESS_P (addr)
5721 /* A frame-pointer reference is already stable. */
5722 && ! (GET_CODE (addr) == PLUS
5723 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5724 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5725 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5726 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5728 /* Now build a reference to just the desired component. */
5730 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5732 if (to_rtx == target)
5733 to_rtx = copy_rtx (to_rtx);
5735 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5736 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5737 set_mem_alias_set (to_rtx, alias_set);
5739 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5743 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5744 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5745 codes and find the ultimate containing object, which we return.
5747 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5748 bit position, and *PUNSIGNEDP to the signedness of the field.
5749 If the position of the field is variable, we store a tree
5750 giving the variable offset (in units) in *POFFSET.
5751 This offset is in addition to the bit position.
5752 If the position is not variable, we store 0 in *POFFSET.
5754 If any of the extraction expressions is volatile,
5755 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5757 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5758 is a mode that can be used to access the field. In that case, *PBITSIZE
5761 If the field describes a variable-sized object, *PMODE is set to
5762 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5763 this case, but the address of the object can be found. */
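/* Worked example (typical 32-bit layout): given

     struct { int pad; short f[4]; } *p;

   the reference p->f[2] decomposes into the containing object *P
   with *PBITSIZE 16, *PBITPOS 64 (bytes 4 + 2*2, in bits),
   *POFFSET 0 and *PMODE the 16-bit integer mode.  */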
5766 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5767 punsignedp, pvolatilep)
5769 HOST_WIDE_INT *pbitsize;
5770 HOST_WIDE_INT *pbitpos;
5772 enum machine_mode *pmode;
5777 enum machine_mode mode = VOIDmode;
5778 tree offset = size_zero_node;
5779 tree bit_offset = bitsize_zero_node;
5780 tree placeholder_ptr = 0;
5783 /* First get the mode, signedness, and size. We do this from just the
5784 outermost expression. */
5785 if (TREE_CODE (exp) == COMPONENT_REF)
5787 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5788 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5789 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5791 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5793 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5795 size_tree = TREE_OPERAND (exp, 1);
5796 *punsignedp = TREE_UNSIGNED (exp);
5800 mode = TYPE_MODE (TREE_TYPE (exp));
5801 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5803 if (mode == BLKmode)
5804 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5806 *pbitsize = GET_MODE_BITSIZE (mode);
5811 if (! host_integerp (size_tree, 1))
5812 mode = BLKmode, *pbitsize = -1;
5814 *pbitsize = tree_low_cst (size_tree, 1);
5817 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5818 and find the ultimate containing object. */
5821 if (TREE_CODE (exp) == BIT_FIELD_REF)
5822 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5823 else if (TREE_CODE (exp) == COMPONENT_REF)
5825 tree field = TREE_OPERAND (exp, 1);
5826 tree this_offset = DECL_FIELD_OFFSET (field);
5828 /* If this field hasn't been filled in yet, don't go
5829 past it. This should only happen when folding expressions
5830 made during type construction. */
5831 if (this_offset == 0)
5833 else if (! TREE_CONSTANT (this_offset)
5834 && contains_placeholder_p (this_offset))
5835 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5837 offset = size_binop (PLUS_EXPR, offset, this_offset);
5838 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5839 DECL_FIELD_BIT_OFFSET (field));
5841 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5844 else if (TREE_CODE (exp) == ARRAY_REF
5845 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5847 tree index = TREE_OPERAND (exp, 1);
5848 tree array = TREE_OPERAND (exp, 0);
5849 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5850 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5851 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5853 /* We assume all arrays have sizes that are a multiple of a byte.
5854 First subtract the lower bound, if any, in the type of the
5855 index, then convert to sizetype and multiply by the size of the
5857 if (low_bound != 0 && ! integer_zerop (low_bound))
5858 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5861 /* If the index has a self-referential type, pass it to a
5862 WITH_RECORD_EXPR; if the component size is self-referential, pass
5863 our component to one. */
5864 if (! TREE_CONSTANT (index)
5865 && contains_placeholder_p (index))
5866 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5867 if (! TREE_CONSTANT (unit_size)
5868 && contains_placeholder_p (unit_size))
5869 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5871 offset = size_binop (PLUS_EXPR, offset,
5872 size_binop (MULT_EXPR,
5873 convert (sizetype, index),
5877 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5879 tree new = find_placeholder (exp, &placeholder_ptr);
5881 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5882 We might have been called from tree optimization where we
5883 haven't set up an object yet. */
5891 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5892 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5893 && ! ((TREE_CODE (exp) == NOP_EXPR
5894 || TREE_CODE (exp) == CONVERT_EXPR)
5895 && (TYPE_MODE (TREE_TYPE (exp))
5896 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5899 /* If any reference in the chain is volatile, the effect is volatile. */
5900 if (TREE_THIS_VOLATILE (exp))
5903 exp = TREE_OPERAND (exp, 0);
5906 /* If OFFSET is constant, see if we can return the whole thing as a
5907 constant bit position. Otherwise, split it up. */
5908 if (host_integerp (offset, 0)
5909 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5911 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5912 && host_integerp (tem, 0))
5913 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5915 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5921 /* Return 1 if T is an expression that get_inner_reference handles. */
5924 handled_component_p (t)
5927 switch (TREE_CODE (t))
5932 case ARRAY_RANGE_REF:
5933 case NON_LVALUE_EXPR:
5934 case VIEW_CONVERT_EXPR:
5939 return (TYPE_MODE (TREE_TYPE (t))
5940 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5947 /* Given an rtx VALUE that may contain additions and multiplications, return
5948 an equivalent value that just refers to a register, memory, or constant.
5949 This is done by generating instructions to perform the arithmetic and
5950 returning a pseudo-register containing the value.
5952 The returned value may be a REG, SUBREG, MEM or constant. */
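/* E.g. called on (plus:SI (mult:SI (reg A) (const_int 4)) (reg B)),
   force_operand emits the multiply (or shift) and the add, and
   returns the pseudo holding A * 4 + B, so the caller sees a plain
   register rather than an arithmetic expression.  */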
5955 force_operand (value, target)
5959 /* Use subtarget as the target for operand 0 of a binary operation. */
5960 rtx subtarget = get_subtarget (target);
5961 enum rtx_code code = GET_CODE (value);
5963 /* Check for a PIC address load. */
5964 if ((code == PLUS || code == MINUS)
5965 && XEXP (value, 0) == pic_offset_table_rtx
5966 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5967 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5968 || GET_CODE (XEXP (value, 1)) == CONST))
5971 subtarget = gen_reg_rtx (GET_MODE (value));
5972 emit_move_insn (subtarget, value);
5976 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5979 target = gen_reg_rtx (GET_MODE (value));
5980 convert_move (target, force_operand (XEXP (value, 0), NULL),
5981 code == ZERO_EXTEND);
5985 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5987 op2 = XEXP (value, 1);
5988 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5990 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5993 op2 = negate_rtx (GET_MODE (value), op2);
5996 /* Check for an addition with OP2 a constant integer and our first
5997 operand a PLUS of a virtual register and something else. In that
5998 case, we want to emit the sum of the virtual register and the
5999 constant first and then add the other value. This allows virtual
6000 register instantiation to simply modify the constant rather than
6001 creating another one around this addition. */
6002 if (code == PLUS && GET_CODE (op2) == CONST_INT
6003 && GET_CODE (XEXP (value, 0)) == PLUS
6004 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
6005 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6006 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6008 rtx temp = expand_simple_binop (GET_MODE (value), code,
6009 XEXP (XEXP (value, 0), 0), op2,
6010 subtarget, 0, OPTAB_LIB_WIDEN);
6011 return expand_simple_binop (GET_MODE (value), code, temp,
6012 force_operand (XEXP (XEXP (value,
6014 target, 0, OPTAB_LIB_WIDEN);
6017 op1 = force_operand (XEXP (value, 0), subtarget);
6018 op2 = force_operand (op2, NULL_RTX);
6022 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6024 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6025 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6026 target, 1, OPTAB_LIB_WIDEN);
6028 return expand_divmod (0,
6029 FLOAT_MODE_P (GET_MODE (value))
6030 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6031 GET_MODE (value), op1, op2, target, 0);
6034 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6038 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6042 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6046 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6047 target, 0, OPTAB_LIB_WIDEN);
6050 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6051 target, 1, OPTAB_LIB_WIDEN);
6054 if (GET_RTX_CLASS (code) == '1')
6056 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6057 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6060 #ifdef INSN_SCHEDULING
6061 /* On machines that have insn scheduling, we want all memory references to be
6062 explicit, so we need to deal with such paradoxical SUBREGs. */
6063 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6064 && (GET_MODE_SIZE (GET_MODE (value))
6065 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6067 = simplify_gen_subreg (GET_MODE (value),
6068 force_reg (GET_MODE (SUBREG_REG (value)),
6069 force_operand (SUBREG_REG (value),
6071 GET_MODE (SUBREG_REG (value)),
6072 SUBREG_BYTE (value));
6078 /* Subroutine of expand_expr: return nonzero iff there is no way that
6079 EXP can reference X, which is being modified. TOP_P is nonzero if this
6080 call is going to be used to determine whether we need a temporary
6081 for EXP, as opposed to a recursive call to this function.
6083 It is always safe for this routine to return zero since it merely
6084 searches for optimization opportunities. */
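/* For instance, when expanding "x = y + f ()" with X in memory,
   safe_from_p returns 0 for the CALL_EXPR, since a call is assumed
   to clobber all memory, so the sum is built in a temporary rather
   than directly in X.  */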
6087 safe_from_p (x, exp, top_p)
6094 static tree save_expr_list;
6097 /* If EXP has varying size, we MUST use a target since we currently
6098 have no way of allocating temporaries of variable size
6099 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6100 So we assume here that something at a higher level has prevented a
6101 clash. This is somewhat bogus, but the best we can do. Only
6102 do this when X is BLKmode and when we are at the top level. */
6103 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6104 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6105 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6106 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6107 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6109 && GET_MODE (x) == BLKmode)
6110 /* If X is in the outgoing argument area, it is always safe. */
6111 || (GET_CODE (x) == MEM
6112 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6113 || (GET_CODE (XEXP (x, 0)) == PLUS
6114 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6117 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6118 find the underlying pseudo. */
6119 if (GET_CODE (x) == SUBREG)
6122 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6126 /* A SAVE_EXPR might appear many times in the expression passed to the
6127 top-level safe_from_p call, and if it has a complex subexpression,
6128 examining it multiple times could result in a combinatorial explosion.
6129 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6130 with optimization took about 28 minutes to compile -- even though it was
6131 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6132 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6133 we have processed. Note that the only test of top_p was above. */
6142 rtn = safe_from_p (x, exp, 0);
6144 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6145 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6150 /* Now look at our tree code and possibly recurse. */
6151 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6154 exp_rtl = DECL_RTL_IF_SET (exp);
6161 if (TREE_CODE (exp) == TREE_LIST)
6165 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6167 exp = TREE_CHAIN (exp);
6170 if (TREE_CODE (exp) != TREE_LIST)
6171 return safe_from_p (x, exp, 0);
6174 else if (TREE_CODE (exp) == ERROR_MARK)
6175 return 1; /* An already-visited SAVE_EXPR? */
6181 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6186 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6190 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6191 the expression. If it is set, we conflict iff we are that rtx or
6192 both are in memory. Otherwise, we check all operands of the
6193 expression recursively. */
6195 switch (TREE_CODE (exp))
6198 /* If the operand is static or we are static, we can't conflict.
6199 Likewise if we don't conflict with the operand at all. */
6200 if (staticp (TREE_OPERAND (exp, 0))
6201 || TREE_STATIC (exp)
6202 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6205 /* Otherwise, the only way this can conflict is if we are taking
6206 the address of a DECL and that address is part of X, which is
6208 exp = TREE_OPERAND (exp, 0);
6211 if (!DECL_RTL_SET_P (exp)
6212 || GET_CODE (DECL_RTL (exp)) != MEM)
6215 exp_rtl = XEXP (DECL_RTL (exp), 0);
6220 if (GET_CODE (x) == MEM
6221 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6222 get_alias_set (exp)))
6227 /* Assume that the call will clobber all hard registers and
6229 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6230 || GET_CODE (x) == MEM)
6235 /* If a sequence exists, we would have to scan every instruction
6236 in the sequence to see if it was safe. This is probably not
6238 if (RTL_EXPR_SEQUENCE (exp))
6241 exp_rtl = RTL_EXPR_RTL (exp);
6244 case WITH_CLEANUP_EXPR:
6245 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6248 case CLEANUP_POINT_EXPR:
6249 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6252 exp_rtl = SAVE_EXPR_RTL (exp);
6256 /* If we've already scanned this, don't do it again. Otherwise,
6257 show we've scanned it and record for clearing the flag if we're
6259 if (TREE_PRIVATE (exp))
6262 TREE_PRIVATE (exp) = 1;
6263 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6265 TREE_PRIVATE (exp) = 0;
6269 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6273 /* The only operand we look at is operand 1. The rest aren't
6274 part of the expression. */
6275 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6277 case METHOD_CALL_EXPR:
6278 /* This takes an rtx argument, but shouldn't appear here. */
6285 /* If we have an rtx, we do not need to scan our operands. */
6289 nops = first_rtl_op (TREE_CODE (exp));
6290 for (i = 0; i < nops; i++)
6291 if (TREE_OPERAND (exp, i) != 0
6292 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6295 /* If this is a language-specific tree code, it may require
6296 special handling. */
6297 if ((unsigned int) TREE_CODE (exp)
6298 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6299 && !(*lang_hooks.safe_from_p) (x, exp))
6303 /* If we have an rtl, find any enclosed object. Then see if we conflict
6307 if (GET_CODE (exp_rtl) == SUBREG)
6309 exp_rtl = SUBREG_REG (exp_rtl);
6310 if (GET_CODE (exp_rtl) == REG
6311 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6315 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6316 are memory and they conflict. */
6317 return ! (rtx_equal_p (x, exp_rtl)
6318 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6319 && true_dependence (exp_rtl, VOIDmode, x,
6320 rtx_addr_varies_p)));
6323 /* If we reach here, it is safe. */
6327 /* Subroutine of expand_expr: return rtx if EXP is a
6328 variable or parameter; else return 0. */
6335 switch (TREE_CODE (exp))
6339 return DECL_RTL (exp);
6345 #ifdef MAX_INTEGER_COMPUTATION_MODE
6348 check_max_integer_computation_mode (exp)
6351 enum tree_code code;
6352 enum machine_mode mode;
6354 /* Strip any NOPs that don't change the mode. */
6356 code = TREE_CODE (exp);
6358 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6359 if (code == NOP_EXPR
6360 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6363 /* First check the type of the overall operation. We need only look at
6364 unary, binary and relational operations. */
6365 if (TREE_CODE_CLASS (code) == '1'
6366 || TREE_CODE_CLASS (code) == '2'
6367 || TREE_CODE_CLASS (code) == '<')
6369 mode = TYPE_MODE (TREE_TYPE (exp));
6370 if (GET_MODE_CLASS (mode) == MODE_INT
6371 && mode > MAX_INTEGER_COMPUTATION_MODE)
6372 internal_error ("unsupported wide integer operation");
6375 /* Check operand of a unary op. */
6376 if (TREE_CODE_CLASS (code) == '1')
6378 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6379 if (GET_MODE_CLASS (mode) == MODE_INT
6380 && mode > MAX_INTEGER_COMPUTATION_MODE)
6381 internal_error ("unsupported wide integer operation");
6384 /* Check operands of a binary/comparison op. */
6385 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6387 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6388 if (GET_MODE_CLASS (mode) == MODE_INT
6389 && mode > MAX_INTEGER_COMPUTATION_MODE)
6390 internal_error ("unsupported wide integer operation");
6392 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6393 if (GET_MODE_CLASS (mode) == MODE_INT
6394 && mode > MAX_INTEGER_COMPUTATION_MODE)
6395 internal_error ("unsupported wide integer operation");
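/* Illustration (hypothetical configuration): if a target defined
   MAX_INTEGER_COMPUTATION_MODE as DImode, a front end handing us a
   TImode addition, or a TImode operand of one, would be rejected by
   one of the internal_error calls above. */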
6400 /* Return the highest power of two that EXP is known to be a multiple of.
6401 This is used in updating alignment of MEMs in array references. */
6403 static unsigned HOST_WIDE_INT
6404 highest_pow2_factor (exp)
6407 unsigned HOST_WIDE_INT c0, c1;
6409 switch (TREE_CODE (exp))
6412 /* We can find the lowest bit that's a one. If the low
6413 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6414 We need to handle this case since we can find it in a COND_EXPR,
a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
erroneous program, so return BIGGEST_ALIGNMENT to avoid any
later ICE. */
6418 if (TREE_CONSTANT_OVERFLOW (exp))
6419 return BIGGEST_ALIGNMENT;
6422 /* Note: tree_low_cst is intentionally not used here,
6423 we don't care about the upper bits. */
c0 = TREE_INT_CST_LOW (exp);
c0 &= -c0;
return c0 ? c0 : BIGGEST_ALIGNMENT;
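/* Illustration: for c0 == 24 (binary 11000), c0 & -c0 isolates the
   lowest set bit and yields 8, the largest power of two dividing 24.
   A zero low part carries no information, hence the
   BIGGEST_ALIGNMENT fallback. */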
6430 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6431 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6432 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6433 return MIN (c0, c1);
case MULT_EXPR:
  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
  c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
  return c0 * c1;
6440 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6442 if (integer_pow2p (TREE_OPERAND (exp, 1))
6443 && host_integerp (TREE_OPERAND (exp, 1), 1))
6445 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6446 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6447 return MAX (1, c0 / c1);
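/* Illustration: for (x * 16) / 4, c0 is 16 and c1 is 4, so the
   quotient is known to be a multiple of MAX (1, 16 / 4) == 4. */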
6451 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6452 case SAVE_EXPR: case WITH_RECORD_EXPR:
6453 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6456 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6459 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6460 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6461 return MIN (c0, c1);
6470 /* Similar, except that it is known that the expression must be a multiple
6471 of the alignment of TYPE. */
6473 static unsigned HOST_WIDE_INT
6474 highest_pow2_factor_for_type (type, exp)
6478 unsigned HOST_WIDE_INT type_align, factor;
6480 factor = highest_pow2_factor (exp);
6481 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6482 return MAX (factor, type_align);
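/* Illustration: if EXP is only known to be a multiple of 2 but TYPE
   is aligned to 8 bytes, the result is MAX (2, 8) == 8, since any
   valid object of TYPE must sit at the type's alignment. */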
6485 /* Return an object on the placeholder list that matches EXP, a
6486 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6487 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6488 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6489 is a location which initially points to a starting location in the
6490 placeholder list (zero means start of the list) and where a pointer into
6491 the placeholder list at which the object is found is placed. */
6494 find_placeholder (exp, plist)
6498 tree type = TREE_TYPE (exp);
6499 tree placeholder_expr;
6501 for (placeholder_expr
6502 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6503 placeholder_expr != 0;
6504 placeholder_expr = TREE_CHAIN (placeholder_expr))
6506 tree need_type = TYPE_MAIN_VARIANT (type);
6509 /* Find the outermost reference that is of the type we want. If none,
see if any object has a type that is a pointer to the type we
want. */
6512 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6513 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6514 || TREE_CODE (elt) == COND_EXPR)
6515 ? TREE_OPERAND (elt, 1)
6516 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6517 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6518 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6519 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6520 ? TREE_OPERAND (elt, 0) : 0))
if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
  {
    if (plist)
      *plist = placeholder_expr;
    return elt;
  }
6528 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6531 || TREE_CODE (elt) == COND_EXPR)
6532 ? TREE_OPERAND (elt, 1)
6533 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6534 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6535 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6536 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6537 ? TREE_OPERAND (elt, 0) : 0))
6538 if (POINTER_TYPE_P (TREE_TYPE (elt))
&& (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
    == need_type))
  {
    if (plist)
      *plist = placeholder_expr;
    return build1 (INDIRECT_REF, need_type, elt);
  }
}

return 0;
6551 /* expand_expr: generate code for computing expression EXP.
6552 An rtx for the computed value is returned. The value is never null.
6553 In the case of a void EXP, const0_rtx is returned.
6555 The value may be stored in TARGET if TARGET is nonzero.
6556 TARGET is just a suggestion; callers must assume that
6557 the rtx returned may not be the same as TARGET.
6559 If TARGET is CONST0_RTX, it means that the value will be ignored.
6561 If TMODE is not VOIDmode, it suggests generating the
6562 result in mode TMODE. But this is done only when convenient.
Otherwise, TMODE is ignored and the value is generated in its natural mode.
6564 TMODE is just a suggestion; callers must assume that
6565 the rtx returned may not have mode TMODE.
6567 Note that TARGET may have neither TMODE nor MODE. In that case, it
6568 probably will not be used.
6570 If MODIFIER is EXPAND_SUM then when EXP is an addition
6571 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6572 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6573 products as above, or REG or MEM, or constant.
6574 Ordinarily in such cases we would output mul or add instructions
6575 and then return a pseudo reg containing the sum.
6577 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6578 it also marks a label as absolutely required (it can't be dead).
6579 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6580 This is used for outputting expressions used in initializers.
6582 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6583 with a constant address even if that address is not normally legitimate.
6584 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6586 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6587 a call parameter. Such targets require special care as we haven't yet
6588 marked TARGET so that it's safe from being trashed by libcalls. We
6589 don't want to use TARGET for anything but the final result;
intermediate values must go elsewhere. Additionally, calls to
6591 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
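/* Illustrative sketch of the EXPAND_SUM contract (hypothetical RTL):
   expanding "a + i*4" with EXPAND_SUM may return

       (plus (mult (reg i) (const_int 4)) (symbol_ref "a"))

   instead of emitting an add and returning a pseudo; a caller such
   as memory_address can then fold the whole sum into an address. */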
6594 expand_expr (exp, target, tmode, modifier)
6597 enum machine_mode tmode;
6598 enum expand_modifier modifier;
6601 tree type = TREE_TYPE (exp);
6602 int unsignedp = TREE_UNSIGNED (type);
6603 enum machine_mode mode;
6604 enum tree_code code = TREE_CODE (exp);
6606 rtx subtarget, original_target;
6610 /* Handle ERROR_MARK before anybody tries to access its type. */
if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
  {
    op0 = CONST0_RTX (tmode);
    if (op0 != 0)
      return op0;
    return const0_rtx;
  }
6619 mode = TYPE_MODE (type);
6620 /* Use subtarget as the target for operand 0 of a binary operation. */
6621 subtarget = get_subtarget (target);
6622 original_target = target;
6623 ignore = (target == const0_rtx
6624 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6625 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6626 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6627 && TREE_CODE (type) == VOID_TYPE));
6629 /* If we are going to ignore this result, we need only do something
6630 if there is a side-effect somewhere in the expression. If there
6631 is, short-circuit the most common cases here. Note that we must
6632 not call expand_expr with anything but const0_rtx in case this
6633 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
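/* E.g., if the value of "x + f ()" is ignored, only the call to f
   needs to be expanded; the addition itself is dropped (illustrative
   example). */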
if (ignore)
  {
    if (! TREE_SIDE_EFFECTS (exp))
      return const0_rtx;
/* Ensure we reference a volatile object even if the value is ignored,
   but don't do this if all we are doing is taking its address. */
6642 if (TREE_THIS_VOLATILE (exp)
6643 && TREE_CODE (exp) != FUNCTION_DECL
6644 && mode != VOIDmode && mode != BLKmode
6645 && modifier != EXPAND_CONST_ADDRESS)
6647 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6648 if (GET_CODE (temp) == MEM)
temp = copy_to_reg (temp);
return const0_rtx;
}
6653 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6654 || code == INDIRECT_REF || code == BUFFER_REF)
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                    modifier);
6658 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6659 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6661 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
return const0_rtx;
}
6665 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6666 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
/* If the second operand has no side effects, just evaluate
   the first. */
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                    modifier);
6671 else if (code == BIT_FIELD_REF)
6673 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6674 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
return const0_rtx;
}
6682 #ifdef MAX_INTEGER_COMPUTATION_MODE
6683 /* Only check stuff here if the mode we want is different from the mode
6684 of the expression; if it's the same, check_max_integer_computation_mode
6685 will handle it. Do we really need to check this stuff at all? */
if (target
    && GET_MODE (target) != mode
6689 && TREE_CODE (exp) != INTEGER_CST
6690 && TREE_CODE (exp) != PARM_DECL
6691 && TREE_CODE (exp) != ARRAY_REF
6692 && TREE_CODE (exp) != ARRAY_RANGE_REF
6693 && TREE_CODE (exp) != COMPONENT_REF
6694 && TREE_CODE (exp) != BIT_FIELD_REF
6695 && TREE_CODE (exp) != INDIRECT_REF
6696 && TREE_CODE (exp) != CALL_EXPR
6697 && TREE_CODE (exp) != VAR_DECL
6698 && TREE_CODE (exp) != RTL_EXPR)
{
  enum machine_mode mode = GET_MODE (target);
6702 if (GET_MODE_CLASS (mode) == MODE_INT
6703 && mode > MAX_INTEGER_COMPUTATION_MODE)
6704 internal_error ("unsupported wide integer operation");
}

if (tmode != mode
    && TREE_CODE (exp) != INTEGER_CST
6709 && TREE_CODE (exp) != PARM_DECL
6710 && TREE_CODE (exp) != ARRAY_REF
6711 && TREE_CODE (exp) != ARRAY_RANGE_REF
6712 && TREE_CODE (exp) != COMPONENT_REF
6713 && TREE_CODE (exp) != BIT_FIELD_REF
6714 && TREE_CODE (exp) != INDIRECT_REF
6715 && TREE_CODE (exp) != VAR_DECL
6716 && TREE_CODE (exp) != CALL_EXPR
6717 && TREE_CODE (exp) != RTL_EXPR
6718 && GET_MODE_CLASS (tmode) == MODE_INT
6719 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6720 internal_error ("unsupported wide integer operation");
6722 check_max_integer_computation_mode (exp);
/* If we will do cse, generate all results into pseudo registers
6726 since 1) that allows cse to find more things
6727 and 2) otherwise cse could produce an insn the machine
6728 cannot support. An exception is a CONSTRUCTOR into a multi-word
6729 MEM: that's much more likely to be most efficient into the MEM.
6730 Another is a CALL_EXPR which must return in memory. */
6732 if (! cse_not_expected && mode != BLKmode && target
6733 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6734 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
&& ! (code == CALL_EXPR && aggregate_value_p (exp)))
  target = 0;
6742 tree function = decl_function_context (exp);
6743 /* Handle using a label in a containing function. */
6744 if (function != current_function_decl
6745 && function != inline_function_decl && function != 0)
6747 struct function *p = find_function_data (function);
6748 p->expr->x_forced_labels
6749 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6750 p->expr->x_forced_labels);
if (modifier == EXPAND_INITIALIZER)
  forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
                                     forced_labels);
6760 temp = gen_rtx_MEM (FUNCTION_MODE,
6761 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6762 if (function != current_function_decl
6763 && function != inline_function_decl && function != 0)
6764 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
return temp;
}

case PARM_DECL:
if (!DECL_RTL_SET_P (exp))
6771 error_with_decl (exp, "prior parameter's size depends on `%s'");
6772 return CONST0_RTX (mode);
6775 /* ... fall through ... */
6778 /* If a static var's type was incomplete when the decl was written,
6779 but the type is complete now, lay out the decl now. */
6780 if (DECL_SIZE (exp) == 0
6781 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6782 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6783 layout_decl (exp, 0);
6785 /* ... fall through ... */
6789 if (DECL_RTL (exp) == 0)
/* Ensure the variable is marked as used even if it doesn't go
   through a parser. If it hasn't been used yet, write out an
   external definition. */
if (! TREE_USED (exp))
  {
6797 assemble_external (exp);
6798 TREE_USED (exp) = 1;
/* Show we haven't gotten RTL for this yet. */
temp = 0;
6804 /* Handle variables inherited from containing functions. */
6805 context = decl_function_context (exp);
6807 /* We treat inline_function_decl as an alias for the current function
6808 because that is the inline function whose vars, types, etc.
6809 are being merged into the current function.
6810 See expand_inline_function. */
6812 if (context != 0 && context != current_function_decl
6813 && context != inline_function_decl
6814 /* If var is static, we don't need a static chain to access it. */
6815 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6816 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6820 /* Mark as non-local and addressable. */
6821 DECL_NONLOCAL (exp) = 1;
if (DECL_NO_STATIC_CHAIN (current_function_decl))
  abort ();
(*lang_hooks.mark_addressable) (exp);
if (GET_CODE (DECL_RTL (exp)) != MEM)
  abort ();
addr = XEXP (DECL_RTL (exp), 0);
if (GET_CODE (addr) == MEM)
  addr = replace_equiv_address (addr,
                                fix_lexical_addr (XEXP (addr, 0), exp));
else
  addr = fix_lexical_addr (addr, exp);
6835 temp = replace_equiv_address (DECL_RTL (exp), addr);
6838 /* This is the case of an array whose size is to be determined
from its initializer, while the initializer is still being parsed.
See expand_decl. */
6842 else if (GET_CODE (DECL_RTL (exp)) == MEM
6843 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6844 temp = validize_mem (DECL_RTL (exp));
6846 /* If DECL_RTL is memory, we are in the normal case and either
6847 the address is not valid or it is not a register and -fforce-addr
6848 is specified, get the address into a register. */
6850 else if (GET_CODE (DECL_RTL (exp)) == MEM
6851 && modifier != EXPAND_CONST_ADDRESS
6852 && modifier != EXPAND_SUM
6853 && modifier != EXPAND_INITIALIZER
6854 && (! memory_address_p (DECL_MODE (exp),
6855 XEXP (DECL_RTL (exp), 0))
6857 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6858 temp = replace_equiv_address (DECL_RTL (exp),
6859 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6861 /* If we got something, return it. But first, set the alignment
6862 if the address is a register. */
if (temp != 0)
  {
    if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
      mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
    return temp;
  }
6871 /* If the mode of DECL_RTL does not match that of the decl, it
6872 must be a promoted value. We return a SUBREG of the wanted mode,
6873 but mark it so that we know that it was already extended. */
6875 if (GET_CODE (DECL_RTL (exp)) == REG
6876 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6878 /* Get the signedness used for this variable. Ensure we get the
6879 same mode we got when the variable was declared. */
6880 if (GET_MODE (DECL_RTL (exp))
6881 != promote_mode (type, DECL_MODE (exp), &unsignedp,
(TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
  abort ();
6885 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6886 SUBREG_PROMOTED_VAR_P (temp) = 1;
6887 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6891 return DECL_RTL (exp);
6894 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6895 TREE_INT_CST_HIGH (exp), mode);
6897 /* ??? If overflow is set, fold will have done an incomplete job,
6898 which can result in (plus xx (const_int 0)), which can get
6899 simplified by validate_replace_rtx during virtual register
6900 instantiation, which can result in unrecognizable insns.
6901 Avoid this by forcing all overflows into registers. */
6902 if (TREE_CONSTANT_OVERFLOW (exp)
6903 && modifier != EXPAND_INITIALIZER)
6904 temp = force_reg (mode, temp);
6909 return const_vector_from_tree (exp);
6912 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6915 /* If optimized, generate immediate CONST_DOUBLE
6916 which will be turned into memory by reload if necessary.
6918 We used to force a register so that loop.c could see it. But
6919 this does not allow gen_* patterns to perform optimizations with
6920 the constants. It also produces two insns in cases like "x = 1.0;".
6921 On most machines, floating-point constants are not permitted in
6922 many insns, so we'd end up copying it to a register in any case.
6924 Now, we do the copying in expand_binop, if appropriate. */
6925 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6926 TYPE_MODE (TREE_TYPE (exp)));
6930 temp = output_constant_def (exp, 1);
6932 /* temp contains a constant address.
6933 On RISC machines where a constant address isn't valid,
6934 make some insns to get that address into a register. */
6935 if (modifier != EXPAND_CONST_ADDRESS
6936 && modifier != EXPAND_INITIALIZER
6937 && modifier != EXPAND_SUM
6938 && (! memory_address_p (mode, XEXP (temp, 0))
6939 || flag_force_addr))
6940 return replace_equiv_address (temp,
6941 copy_rtx (XEXP (temp, 0)));
6944 case EXPR_WITH_FILE_LOCATION:
6947 location_t saved_loc = input_location;
6948 input_filename = EXPR_WFL_FILENAME (exp);
6949 input_line = EXPR_WFL_LINENO (exp);
6950 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6951 emit_line_note (input_filename, input_line);
6952 /* Possibly avoid switching back and forth here. */
6953 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6954 input_location = saved_loc;
6959 context = decl_function_context (exp);
6961 /* If this SAVE_EXPR was at global context, assume we are an
6962 initialization function and move it into our context. */
if (context == 0)
  SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6966 /* We treat inline_function_decl as an alias for the current function
6967 because that is the inline function whose vars, types, etc.
6968 are being merged into the current function.
6969 See expand_inline_function. */
if (context == current_function_decl || context == inline_function_decl)
  context = 0;

/* If this is non-local, handle it. */
if (context)
  {
    /* The following call just exists to abort if the context is
       not of a containing function. */
    find_function_data (context);
6980 temp = SAVE_EXPR_RTL (exp);
if (temp && GET_CODE (temp) == REG)
  {
    put_var_into_stack (exp, /*rescan=*/true);
    temp = SAVE_EXPR_RTL (exp);
  }
if (temp == 0 || GET_CODE (temp) != MEM)
  abort ();
return
  replace_equiv_address (temp,
                         fix_lexical_addr (XEXP (temp, 0), exp));
}
6992 if (SAVE_EXPR_RTL (exp) == 0)
6994 if (mode == VOIDmode)
6997 temp = assign_temp (build_qualified_type (type,
6999 | TYPE_QUAL_CONST)),
7002 SAVE_EXPR_RTL (exp) = temp;
7003 if (!optimize && GET_CODE (temp) == REG)
7004 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7007 /* If the mode of TEMP does not match that of the expression, it
7008 must be a promoted value. We pass store_expr a SUBREG of the
7009 wanted mode but mark it so that we know that it was already
7012 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7014 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7015 promote_mode (type, mode, &unsignedp, 0);
7016 SUBREG_PROMOTED_VAR_P (temp) = 1;
7017 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7020 if (temp == const0_rtx)
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
else
  store_expr (TREE_OPERAND (exp, 0), temp,
7024 modifier == EXPAND_STACK_PARM ? 2 : 0);
7026 TREE_USED (exp) = 1;
7029 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7030 must be a promoted value. We return a SUBREG of the wanted mode,
7031 but mark it so that we know that it was already extended. */
7033 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7034 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7036 /* Compute the signedness and make the proper SUBREG. */
7037 promote_mode (type, mode, &unsignedp, 0);
7038 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7039 SUBREG_PROMOTED_VAR_P (temp) = 1;
7040 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7044 return SAVE_EXPR_RTL (exp);
7049 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7050 TREE_OPERAND (exp, 0)
7051 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7055 case PLACEHOLDER_EXPR:
7057 tree old_list = placeholder_list;
7058 tree placeholder_expr = 0;
exp = find_placeholder (exp, &placeholder_expr);
if (exp == 0)
  abort ();
7064 placeholder_list = TREE_CHAIN (placeholder_expr);
7065 temp = expand_expr (exp, original_target, tmode, modifier);
placeholder_list = old_list;
return temp;
}
7070 case WITH_RECORD_EXPR:
7071 /* Put the object on the placeholder list, expand our first operand,
7072 and pop the list. */
placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                              placeholder_list);
7075 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7077 placeholder_list = TREE_CHAIN (placeholder_list);
7081 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
expand_goto (TREE_OPERAND (exp, 0));
else
  expand_computed_goto (TREE_OPERAND (exp, 0));
return const0_rtx;
7088 expand_exit_loop_if_false (NULL,
7089 invert_truthvalue (TREE_OPERAND (exp, 0)));
7092 case LABELED_BLOCK_EXPR:
7093 if (LABELED_BLOCK_BODY (exp))
7094 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7095 /* Should perhaps use expand_label, but this is simpler and safer. */
7096 do_pending_stack_adjust ();
7097 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7100 case EXIT_BLOCK_EXPR:
7101 if (EXIT_BLOCK_RETURN (exp))
7102 sorry ("returned value in block_exit_expr");
7103 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7108 expand_start_loop (1);
expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
expand_end_loop ();
7117 tree vars = TREE_OPERAND (exp, 0);
7119 /* Need to open a binding contour here because
7120 if there are any cleanups they must be contained here. */
7121 expand_start_bindings (2);
7123 /* Mark the corresponding BLOCK for output in its proper place. */
7124 if (TREE_OPERAND (exp, 2) != 0
7125 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7126 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7128 /* If VARS have not yet been expanded, expand them now. */
while (vars)
  {
    if (!DECL_RTL_SET_P (vars))
      expand_decl (vars);
    expand_decl_init (vars);
    vars = TREE_CHAIN (vars);
  }
7137 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
return temp;
}
if (RTL_EXPR_SEQUENCE (exp))
  {
    if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
      abort ();
    emit_insn (RTL_EXPR_SEQUENCE (exp));
    RTL_EXPR_SEQUENCE (exp) = const0_rtx;
  }
7152 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7153 free_temps_for_rtl_expr (exp);
7154 return RTL_EXPR_RTL (exp);
/* If we don't need the result, just ensure we evaluate any
   subexpressions. */
7163 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7164 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7169 /* All elts simple constants => refer to a constant in memory. But
7170 if this is a non-BLKmode mode, let it store a field at a time
7171 since that should make a CONST_INT or CONST_DOUBLE when we
7172 fold. Likewise, if we have a target we can use, it is best to
7173 store directly into the target unless the type is large enough
7174 that memcpy will be used. If we are making an initializer and
7175 all operands are constant, put it in memory as well.
7177 FIXME: Avoid trying to fill vector constructors piece-meal.
7178 Output them with output_constant_def below unless we're sure
7179 they're zeros. This should go away when vector initializers
7180 are treated like VECTOR_CST instead of arrays.
7182 else if ((TREE_STATIC (exp)
7183 && ((mode == BLKmode
7184 && ! (target != 0 && safe_from_p (target, exp, 1)))
7185 || TREE_ADDRESSABLE (exp)
7186 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7187 && (! MOVE_BY_PIECES_P
7188 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7190 && ((TREE_CODE (type) == VECTOR_TYPE
7191 && !is_zeros_p (exp))
7192 || ! mostly_zeros_p (exp)))))
7193 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7195 rtx constructor = output_constant_def (exp, 1);
7197 if (modifier != EXPAND_CONST_ADDRESS
7198 && modifier != EXPAND_INITIALIZER
7199 && modifier != EXPAND_SUM)
constructor = validize_mem (constructor);
return constructor;
}
7206 /* Handle calls that pass values in multiple non-contiguous
7207 locations. The Irix 6 ABI has examples of this. */
7208 if (target == 0 || ! safe_from_p (target, exp, 1)
7209 || GET_CODE (target) == PARALLEL
7210 || modifier == EXPAND_STACK_PARM)
7212 = assign_temp (build_qualified_type (type,
7214 | (TREE_READONLY (exp)
7215 * TYPE_QUAL_CONST))),
7216 0, TREE_ADDRESSABLE (exp), 1);
store_constructor (exp, target, 0, int_expr_size (exp));
return target;
}
tree exp1 = TREE_OPERAND (exp, 0);
tree index;
tree string = string_constant (exp1, &index);
7228 /* Try to optimize reads from const strings. */
if (string
    && TREE_CODE (string) == STRING_CST
7231 && TREE_CODE (index) == INTEGER_CST
7232 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7233 && GET_MODE_CLASS (mode) == MODE_INT
7234 && GET_MODE_SIZE (mode) == 1
7235 && modifier != EXPAND_WRITE)
7236 return gen_int_mode (TREE_STRING_POINTER (string)
7237 [TREE_INT_CST_LOW (index)], mode);
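/* Illustration: a QImode read of *("abc" + 1) is folded by the code
   above to (const_int 98), i.e. 'b', with no memory reference
   emitted. */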
7239 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7240 op0 = memory_address (mode, op0);
7241 temp = gen_rtx_MEM (mode, op0);
7242 set_mem_attributes (temp, exp, 0);
7244 /* If we are writing to this object and its type is a record with
7245 readonly fields, we must mark it as readonly so it will
7246 conflict with readonly references to those fields. */
7247 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
RTX_UNCHANGING_P (temp) = 1;

return temp;
}
if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
  abort ();
7258 tree array = TREE_OPERAND (exp, 0);
7259 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7260 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7261 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
/* Optimize the special case of a zero lower bound.
7266 We convert the low_bound to sizetype to avoid some problems
7267 with constant folding. (E.g. suppose the lower bound is 1,
7268 and its mode is QI. Without the conversion, (ARRAY
7269 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7270 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7272 if (! integer_zerop (low_bound))
7273 index = size_diffop (index, convert (sizetype, low_bound));
7275 /* Fold an expression like: "foo"[2].
7276 This is not done in fold so it won't happen inside &.
7277 Don't fold if this is for wide characters since it's too
7278 difficult to do correctly and this is a very rare case. */
7280 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7281 && TREE_CODE (array) == STRING_CST
7282 && TREE_CODE (index) == INTEGER_CST
7283 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7284 && GET_MODE_CLASS (mode) == MODE_INT
7285 && GET_MODE_SIZE (mode) == 1)
7286 return gen_int_mode (TREE_STRING_POINTER (array)
7287 [TREE_INT_CST_LOW (index)], mode);
7289 /* If this is a constant index into a constant array,
7290 just get the value from the array. Handle both the cases when
7291 we have an explicit constructor and when our operand is a variable
7292 that was declared const. */
7294 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7295 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7296 && TREE_CODE (index) == INTEGER_CST
7297 && 0 > compare_tree_int (index,
7298 list_length (CONSTRUCTOR_ELTS
7299 (TREE_OPERAND (exp, 0)))))
7303 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7304 i = TREE_INT_CST_LOW (index);
elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
  ;

if (elem)
  return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
                      modifier);
7313 else if (optimize >= 1
7314 && modifier != EXPAND_CONST_ADDRESS
7315 && modifier != EXPAND_INITIALIZER
7316 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7317 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7318 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7320 if (TREE_CODE (index) == INTEGER_CST)
7322 tree init = DECL_INITIAL (array);
7324 if (TREE_CODE (init) == CONSTRUCTOR)
7328 for (elem = CONSTRUCTOR_ELTS (init);
(elem
 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
elem = TREE_CHAIN (elem))
  ;
7334 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
return expand_expr (fold (TREE_VALUE (elem)), target,
                    tmode, modifier);
7338 else if (TREE_CODE (init) == STRING_CST
7339 && 0 > compare_tree_int (index,
7340 TREE_STRING_LENGTH (init)))
7342 tree type = TREE_TYPE (TREE_TYPE (init));
7343 enum machine_mode mode = TYPE_MODE (type);
7345 if (GET_MODE_CLASS (mode) == MODE_INT
7346 && GET_MODE_SIZE (mode) == 1)
7347 return gen_int_mode (TREE_STRING_POINTER (init)
7348 [TREE_INT_CST_LOW (index)], mode);
7353 goto normal_inner_ref;
7356 /* If the operand is a CONSTRUCTOR, we can just extract the
7357 appropriate field if it is present. */
7358 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7362 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7363 elt = TREE_CHAIN (elt))
7364 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7365 /* We can normally use the value of the field in the
7366 CONSTRUCTOR. However, if this is a bitfield in
7367 an integral mode that we can fit in a HOST_WIDE_INT,
7368 we must mask only the number of bits in the bitfield,
7369 since this is done implicitly by the constructor. If
7370 the bitfield does not meet either of those conditions,
7371 we can't do this optimization. */
7372 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7373 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7375 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7376 <= HOST_BITS_PER_WIDE_INT))))
7378 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
&& modifier == EXPAND_STACK_PARM)
  target = 0;
7381 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7382 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7384 HOST_WIDE_INT bitsize
7385 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7386 enum machine_mode imode
7387 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
  {
    op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
    op0 = expand_and (imode, op0, op1, target);
  }
else
  {
    tree count
      = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);

    op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                        target, 0);
    op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                        target, 0);
  }
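/* Illustration (hypothetical values): extracting a signed 3-bit
   field holding the bits 101 shifts the field to the top of IMODE
   and then arithmetically back down, yielding -3, whereas the
   unsigned branch masks with (1 << 3) - 1 == 7 and yields 5. */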
7410 goto normal_inner_ref;
7413 case ARRAY_RANGE_REF:
7416 enum machine_mode mode1;
HOST_WIDE_INT bitsize, bitpos;
tree offset;
int volatilep = 0;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                &mode1, &unsignedp, &volatilep);
rtx orig_op0;
7424 /* If we got back the original object, something is wrong. Perhaps
7425 we are evaluating an expression too early. In any event, don't
infinitely recurse. */
if (tem == exp)
  abort ();
7430 /* If TEM's type is a union of variable size, pass TARGET to the inner
7431 computation, since it will need a temporary and TARGET is known
to suffice. This occurs in unchecked conversion in Ada. */
7436 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7437 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7439 && modifier != EXPAND_STACK_PARM
7440 ? target : NULL_RTX),
7442 (modifier == EXPAND_INITIALIZER
7443 || modifier == EXPAND_CONST_ADDRESS
7444 || modifier == EXPAND_STACK_PARM)
7445 ? modifier : EXPAND_NORMAL);
/* If this is a constant, put it into a register if it is a
   legitimate constant and OFFSET is 0; if not, put it into memory. */
7449 if (CONSTANT_P (op0))
7451 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7452 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7454 op0 = force_reg (mode, op0);
7456 op0 = validize_mem (force_const_mem (mode, op0));
7461 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7464 /* If this object is in a register, put it into memory.
7465 This case can't occur in C, but can in Ada if we have
7466 unchecked conversion of an expression from a scalar type to
7467 an array or record type. */
7468 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7469 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7471 /* If the operand is a SAVE_EXPR, we can deal with this by
7472 forcing the SAVE_EXPR into memory. */
7473 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7475 put_var_into_stack (TREE_OPERAND (exp, 0),
7477 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7482 = build_qualified_type (TREE_TYPE (tem),
7483 (TYPE_QUALS (TREE_TYPE (tem))
7484 | TYPE_QUAL_CONST));
7485 rtx memloc = assign_temp (nt, 1, 1, 1);
7487 emit_move_insn (memloc, op0);
7492 if (GET_CODE (op0) != MEM)
7495 #ifdef POINTERS_EXTEND_UNSIGNED
7496 if (GET_MODE (offset_rtx) != Pmode)
7497 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7499 if (GET_MODE (offset_rtx) != ptr_mode)
7500 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
/* A constant address in OP0 can have VOIDmode; we must not try
   to call force_reg in that case, so avoid it. */
7505 if (GET_CODE (op0) == MEM
7506 && GET_MODE (op0) == BLKmode
7507 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7509 && (bitpos % bitsize) == 0
7510 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7511 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7513 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7517 op0 = offset_address (op0, offset_rtx,
7518 highest_pow2_factor (offset));
7521 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7522 record its alignment as BIGGEST_ALIGNMENT. */
7523 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7524 && is_aligning_offset (offset, tem))
7525 set_mem_align (op0, BIGGEST_ALIGNMENT);
7527 /* Don't forget about volatility even if this is a bitfield. */
7528 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7530 if (op0 == orig_op0)
7531 op0 = copy_rtx (op0);
7533 MEM_VOLATILE_P (op0) = 1;
7536 /* The following code doesn't handle CONCAT.
7537 Assume only bitpos == 0 can be used for CONCAT, due to
one-element arrays having the same mode as their element. */
7539 if (GET_CODE (op0) == CONCAT)
if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
  abort ();
return op0;
}
7546 /* In cases where an aligned union has an unaligned object
7547 as a field, we might be extracting a BLKmode value from
7548 an integer-mode (e.g., SImode) object. Handle this case
7549 by doing the extract into an object as wide as the field
7550 (which we know to be the width of a basic mode), then
7551 storing into memory, and changing the mode to BLKmode. */
7552 if (mode1 == VOIDmode
7553 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7554 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7555 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7556 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7557 && modifier != EXPAND_CONST_ADDRESS
7558 && modifier != EXPAND_INITIALIZER)
7559 /* If the field isn't aligned enough to fetch as a memref,
7560 fetch it as a bit field. */
7561 || (mode1 != BLKmode
7562 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7563 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7564 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7565 /* If the type and the field are a constant size and the
7566 size of the type isn't the same size as the bitfield,
7567 we must use bitfield operations. */
7569 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7571 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7574 enum machine_mode ext_mode = mode;
7576 if (ext_mode == BLKmode
7577 && ! (target != 0 && GET_CODE (op0) == MEM
7578 && GET_CODE (target) == MEM
7579 && bitpos % BITS_PER_UNIT == 0))
7580 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7582 if (ext_mode == BLKmode)
7584 /* In this case, BITPOS must start at a byte boundary and
7585 TARGET, if specified, must be a MEM. */
if (GET_CODE (op0) != MEM
    || (target != 0 && GET_CODE (target) != MEM)
    || bitpos % BITS_PER_UNIT != 0)
  abort ();
7591 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7593 target = assign_temp (type, 0, 1, 1);
7595 emit_block_move (target, op0,
7596 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7598 (modifier == EXPAND_STACK_PARM
7599 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7604 op0 = validize_mem (op0);
7606 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7607 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7609 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7610 (modifier == EXPAND_STACK_PARM
7611 ? NULL_RTX : target),
7613 int_size_in_bytes (TREE_TYPE (tem)));
7615 /* If the result is a record type and BITSIZE is narrower than
7616 the mode of OP0, an integral mode, and this is a big endian
7617 machine, we must put the field into the high-order bits. */
7618 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7619 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7620 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7621 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7622 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7626 if (mode == BLKmode)
7628 rtx new = assign_temp (build_qualified_type
7629 ((*lang_hooks.types.type_for_mode)
7631 TYPE_QUAL_CONST), 0, 1, 1);
7633 emit_move_insn (new, op0);
7634 op0 = copy_rtx (new);
7635 PUT_MODE (op0, BLKmode);
7636 set_mem_attributes (op0, exp, 1);
/* If the result is BLKmode, use that to access the object
   now as well. */
if (mode == BLKmode)
  mode1 = BLKmode;
7647 /* Get a reference to just this component. */
7648 if (modifier == EXPAND_CONST_ADDRESS
7649 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7650 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7652 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7654 if (op0 == orig_op0)
7655 op0 = copy_rtx (op0);
7657 set_mem_attributes (op0, exp, 0);
7658 if (GET_CODE (XEXP (op0, 0)) == REG)
7659 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7661 MEM_VOLATILE_P (op0) |= volatilep;
7662 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7663 || modifier == EXPAND_CONST_ADDRESS
7664 || modifier == EXPAND_INITIALIZER)
7666 else if (target == 0)
7667 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7669 convert_move (target, op0, unsignedp);
7675 rtx insn, before = get_last_insn (), vtbl_ref;
7677 /* Evaluate the interior expression. */
7678 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7681 /* Get or create an instruction off which to hang a note. */
7682 if (REG_P (subtarget))
7685 insn = get_last_insn ();
7688 if (! INSN_P (insn))
7689 insn = prev_nonnote_insn (insn);
7693 target = gen_reg_rtx (GET_MODE (subtarget));
7694 insn = emit_move_insn (target, subtarget);
7697 /* Collect the data for the note. */
7698 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7699 vtbl_ref = plus_constant (vtbl_ref,
7700 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7701 /* Discard the initial CONST that was added. */
7702 vtbl_ref = XEXP (vtbl_ref, 0);
7705 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7710 /* Intended for a reference to a buffer of a file-object in Pascal.
7711 But it's not certain that a special tree code will really be
7712 necessary for these. INDIRECT_REF might work for them. */
7718 /* Pascal set IN expression.
7721 rlo = set_low - (set_low%bits_per_word);
7722 the_word = set [ (index - rlo)/bits_per_word ];
7723 bit_index = index % bits_per_word;
7724 bitmask = 1 << bit_index;
7725 return !!(the_word & bitmask); */
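/* Worked example (hypothetical, 8-bit units): with set_low == 0 and
   index == 10, the_word is set[10 / 8] == set[1], bit_index is
   10 % 8 == 2, bitmask is 1 << 2, so the result tests bit 2 of the
   set's second byte. */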
7727 tree set = TREE_OPERAND (exp, 0);
7728 tree index = TREE_OPERAND (exp, 1);
7729 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7730 tree set_type = TREE_TYPE (set);
7731 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7732 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7733 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7734 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7735 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7736 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7737 rtx setaddr = XEXP (setval, 0);
7738 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7740 rtx diff, quo, rem, addr, bit, result;
7742 /* If domain is empty, answer is no. Likewise if index is constant
7743 and out of bounds. */
7744 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7745 && TREE_CODE (set_low_bound) == INTEGER_CST
7746 && tree_int_cst_lt (set_high_bound, set_low_bound))
7747 || (TREE_CODE (index) == INTEGER_CST
7748 && TREE_CODE (set_low_bound) == INTEGER_CST
7749 && tree_int_cst_lt (index, set_low_bound))
7750 || (TREE_CODE (set_high_bound) == INTEGER_CST
7751 && TREE_CODE (index) == INTEGER_CST
7752 && tree_int_cst_lt (set_high_bound, index))))
7756 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7758 /* If we get here, we have to generate the code for both cases
7759 (in range and out of range). */
7761 op0 = gen_label_rtx ();
7762 op1 = gen_label_rtx ();
7764 if (! (GET_CODE (index_val) == CONST_INT
7765 && GET_CODE (lo_r) == CONST_INT))
7766 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7767 GET_MODE (index_val), iunsignedp, op1);
7769 if (! (GET_CODE (index_val) == CONST_INT
7770 && GET_CODE (hi_r) == CONST_INT))
7771 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7772 GET_MODE (index_val), iunsignedp, op1);
/* Calculate the element number of bit zero in the first word
   of the set. */
7776 if (GET_CODE (lo_r) == CONST_INT)
7777 rlow = GEN_INT (INTVAL (lo_r)
7778 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7780 rlow = expand_binop (index_mode, and_optab, lo_r,
7781 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7782 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7784 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7785 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7787 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7788 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7789 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7790 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7792 addr = memory_address (byte_mode,
7793 expand_binop (index_mode, add_optab, diff,
7794 setaddr, NULL_RTX, iunsignedp,
7797 /* Extract the bit we want to examine. */
7798 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7799 gen_rtx_MEM (byte_mode, addr),
7800 make_tree (TREE_TYPE (index), rem),
7802 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7803 GET_MODE (target) == byte_mode ? target : 0,
7804 1, OPTAB_LIB_WIDEN);
7806 if (result != target)
7807 convert_move (target, result, 1);
/* Output the code to handle the out-of-range case. */
emit_jump (op0);
emit_label (op1);
emit_move_insn (target, const0_rtx);
emit_label (op0);
return target;
7817 case WITH_CLEANUP_EXPR:
7818 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7820 WITH_CLEANUP_EXPR_RTL (exp)
7821 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7822 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7823 CLEANUP_EH_ONLY (exp));
7825 /* That's it for this cleanup. */
7826 TREE_OPERAND (exp, 1) = 0;
7828 return WITH_CLEANUP_EXPR_RTL (exp);
7830 case CLEANUP_POINT_EXPR:
7832 /* Start a new binding layer that will keep track of all cleanup
7833 actions to be performed. */
7834 expand_start_bindings (2);
7836 target_temp_slot_level = temp_slot_level;
7838 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
/* If we're going to use this value, load it up now. */
if (! ignore)
  op0 = force_not_mem (op0);
7842 preserve_temp_slots (op0);
7843 expand_end_bindings (NULL_TREE, 0, 0);
7848 /* Check for a built-in function. */
7849 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7850 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7852 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7854 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7855 == BUILT_IN_FRONTEND)
7856 return (*lang_hooks.expand_expr) (exp, original_target,
7859 return expand_builtin (exp, target, subtarget, tmode, ignore);
7862 return expand_call (exp, target, ignore);
7864 case NON_LVALUE_EXPR:
7867 case REFERENCE_EXPR:
7868 if (TREE_OPERAND (exp, 0) == error_mark_node)
7871 if (TREE_CODE (type) == UNION_TYPE)
7873 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7875 /* If both input and output are BLKmode, this conversion isn't doing
anything except possibly changing memory attributes. */
7877 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7879 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7882 result = copy_rtx (result);
7883 set_mem_attributes (result, exp, 0);
7888 target = assign_temp (type, 0, 1, 1);
7890 if (GET_CODE (target) == MEM)
7891 /* Store data into beginning of memory target. */
7892 store_expr (TREE_OPERAND (exp, 0),
7893 adjust_address (target, TYPE_MODE (valtype), 0),
7894 modifier == EXPAND_STACK_PARM ? 2 : 0);
7896 else if (GET_CODE (target) == REG)
7897 /* Store this field into a union of the proper type. */
7898 store_field (target,
7899 MIN ((int_size_in_bytes (TREE_TYPE
7900 (TREE_OPERAND (exp, 0)))
7902 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7903 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7904 VOIDmode, 0, type, 0);
/* Return the entire union. */
return target;
}
7912 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7914 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7917 /* If the signedness of the conversion differs and OP0 is
7918 a promoted SUBREG, clear that indication since we now
7919 have to do the proper extension. */
7920 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7921 && GET_CODE (op0) == SUBREG)
SUBREG_PROMOTED_VAR_P (op0) = 0;

return op0;
}
7927 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
if (GET_MODE (op0) == mode)
  return op0;
7931 /* If OP0 is a constant, just convert it into the proper mode. */
7932 if (CONSTANT_P (op0))
7934 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7935 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7937 if (modifier == EXPAND_INITIALIZER)
7938 return simplify_gen_subreg (mode, op0, inner_mode,
7939 subreg_lowpart_offset (mode,
7942 return convert_modes (mode, inner_mode, op0,
7943 TREE_UNSIGNED (inner_type));
7946 if (modifier == EXPAND_INITIALIZER)
7947 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7951 convert_to_mode (mode, op0,
7952 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7954 convert_move (target, op0,
7955 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7958 case VIEW_CONVERT_EXPR:
7959 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7961 /* If the input and output modes are both the same, we are done.
7962 Otherwise, if neither mode is BLKmode and both are integral and within
7963 a word, we can use gen_lowpart. If neither is true, make sure the
7964 operand is in memory and convert the MEM to the new mode. */
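/* Illustrative cases: a view of an SImode int as an SImode enum is
   done at once; a view between two sub-word integral modes can go
   through gen_lowpart; a view of a float as an int is not
   integral-to-integral and takes the memory path below. */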
7965 if (TYPE_MODE (type) == GET_MODE (op0))
7967 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7968 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7969 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7970 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7971 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7972 op0 = gen_lowpart (TYPE_MODE (type), op0);
7973 else if (GET_CODE (op0) != MEM)
7975 /* If the operand is not a MEM, force it into memory. Since we
are going to be changing the mode of the MEM, don't call
7977 force_const_mem for constants because we don't allow pool
7978 constants to change mode. */
7979 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
if (TREE_ADDRESSABLE (exp))
  abort ();
7984 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7986 = assign_stack_temp_for_type
7987 (TYPE_MODE (inner_type),
7988 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7990 emit_move_insn (target, op0);
7994 /* At this point, OP0 is in the correct mode. If the output type is such
7995 that the operand is known to be aligned, indicate that it is.
Otherwise, we need only be concerned about alignment for non-BLKmode
results. */
7998 if (GET_CODE (op0) == MEM)
8000 op0 = copy_rtx (op0);
8002 if (TYPE_ALIGN_OK (type))
8003 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8004 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8005 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8007 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8008 HOST_WIDE_INT temp_size
8009 = MAX (int_size_in_bytes (inner_type),
8010 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8011 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8012 temp_size, 0, type);
8013 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
if (TREE_ADDRESSABLE (exp))
  abort ();
8018 if (GET_MODE (op0) == BLKmode)
8019 emit_block_move (new_with_op0_mode, op0,
8020 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8021 (modifier == EXPAND_STACK_PARM
8022 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8024 emit_move_insn (new_with_op0_mode, op0);
8029 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8035 this_optab = ! unsignedp && flag_trapv
8036 && (GET_MODE_CLASS (mode) == MODE_INT)
8037 ? addv_optab : add_optab;
8039 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8040 something else, make sure we add the register to the constant and
8041 then to the other thing. This case can occur during strength
8042 reduction and doing it this way will produce better code if the
8043 frame pointer or argument pointer is eliminated.
8045 fold-const.c will ensure that the constant is always in the inner
8046 PLUS_EXPR, so the only case we need to do anything about is if
8047 sp, ap, or fp is our second argument, in which case we must swap
8048 the innermost first argument and our second argument. */
8050 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8051 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8052 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8053 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8054 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8055 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8057 tree t = TREE_OPERAND (exp, 1);
8059 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8060 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8063 /* If the result is to be ptr_mode and we are adding an integer to
8064 something, we might be forming a constant. So try to use
8065 plus_constant. If it produces a sum and we can't accept it,
8066 use force_operand. This allows P = &ARR[const] to generate
efficient code on machines where a SYMBOL_REF is not a valid
address.
8070 If this is an EXPAND_SUM call, always return the sum. */
8071 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8072 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
if (modifier == EXPAND_STACK_PARM)
  target = 0;
8076 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8077 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8078 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8082 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8084 /* Use immed_double_const to ensure that the constant is
8085 truncated according to the mode of OP1, then sign extended
8086 to a HOST_WIDE_INT. Using the constant directly can result
8087 in non-canonical RTL in a 64x32 cross compile. */
8089 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8091 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8092 op1 = plus_constant (op1, INTVAL (constant_part));
8093 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8094 op1 = force_operand (op1, target);
8098 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8099 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8100 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8104 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8105 (modifier == EXPAND_INITIALIZER
8106 ? EXPAND_INITIALIZER : EXPAND_SUM));
8107 if (! CONSTANT_P (op0))
8109 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8110 VOIDmode, modifier);
8111 /* Don't go to both_summands if modifier
8112 says it's not right to return a PLUS. */
8113 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8117 /* Use immed_double_const to ensure that the constant is
8118 truncated according to the mode of OP1, then sign extended
8119 to a HOST_WIDE_INT. Using the constant directly can result
8120 in non-canonical RTL in a 64x32 cross compile. */
8122 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8124 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8125 op0 = plus_constant (op0, INTVAL (constant_part));
8126 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8127 op0 = force_operand (op0, target);
8132 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8135 /* No sense saving up arithmetic to be done
8136 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
zero-extend. */
8139 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8140 || mode != ptr_mode)
8142 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8143 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
if (op0 == const0_rtx)
  return op1;
if (op1 == const0_rtx)
  return op0;
goto binop2;
}
8151 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8152 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
/* We come here from MINUS_EXPR when the second operand is a
   constant. */
both_summands:
8157 /* Make sure any term that's a sum with a constant comes last. */
8158 if (GET_CODE (op0) == PLUS
&& CONSTANT_P (XEXP (op0, 1)))
  {
    temp = op0;
    op0 = op1;
    op1 = temp;
  }
8165 /* If adding to a sum including a constant,
8166 associate it to put the constant outside. */
8167 if (GET_CODE (op1) == PLUS
8168 && CONSTANT_P (XEXP (op1, 1)))
8170 rtx constant_term = const0_rtx;
temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
if (temp != 0)
  op0 = temp;
8175 /* Ensure that MULT comes first if there is one. */
8176 else if (GET_CODE (op0) == MULT)
8177 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8179 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8181 /* Let's also eliminate constants from op0 if possible. */
8182 op0 = eliminate_constant_term (op0, &constant_term);
8184 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8185 their sum should be a constant. Form it into OP1, since the
8186 result we want will then be OP0 + OP1. */
temp = simplify_binary_operation (PLUS, mode, constant_term,
                                  XEXP (op1, 1));
if (temp != 0)
  op1 = temp;
else
  op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8196 /* Put a constant term last and put a multiplication first. */
8197 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8198 temp = op1, op1 = op0, op0 = temp;
8200 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8201 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
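/* Net effect (illustrative RTL): (plus (reg 100) (const_int 4))
   added to (plus (reg 101) (const_int 8)) is reassociated so the
   constants meet, giving
   (plus (plus (reg 100) (reg 101)) (const_int 12)), a shape the
   addressing machinery can digest. */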
/* For initializers, we are allowed to return a MINUS of two
   symbolic constants; here we handle the case when both operands
   are constant, for the sake of an initializer. */
8209 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8210 && really_constant_p (TREE_OPERAND (exp, 0))
8211 && really_constant_p (TREE_OPERAND (exp, 1)))
8213 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8215 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8218 /* If the last operand is a CONST_INT, use plus_constant of
8219 the negated constant. Else make the MINUS. */
8220 if (GET_CODE (op1) == CONST_INT)
8221 return plus_constant (op0, - INTVAL (op1));
8223 return gen_rtx_MINUS (mode, op0, op1);
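/* Illustration (hypothetical symbols): for a static initializer
   computing the byte difference of two arrays, this path can return
   (minus (symbol_ref "b") (symbol_ref "a")) directly, leaving the
   subtraction to the assembler or linker. */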
8226 this_optab = ! unsignedp && flag_trapv
8227 && (GET_MODE_CLASS(mode) == MODE_INT)
8228 ? subv_optab : sub_optab;
8230 /* No sense saving up arithmetic to be done
8231 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
zero-extend. */
8234 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8235 || mode != ptr_mode)
8238 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8241 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8242 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8244 /* Convert A - const to A + (-const). */
8245 if (GET_CODE (op1) == CONST_INT)
op1 = negate_rtx (mode, op1);
goto both_summands;
}
8254 /* If first operand is constant, swap them.
8255 Thus the following special case checks need only
8256 check the second operand. */
8257 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8258 {
8259 tree t1 = TREE_OPERAND (exp, 0);
8260 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8261 TREE_OPERAND (exp, 1) = t1;
8262 }
8264 /* Attempt to return something suitable for generating an
8265 indexed address, for machines that support that. */
8267 if (modifier == EXPAND_SUM && mode == ptr_mode
8268 && host_integerp (TREE_OPERAND (exp, 1), 0))
8269 {
8270 tree exp1 = TREE_OPERAND (exp, 1);
8272 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8273 EXPAND_SUM);
8275 /* If we knew for certain that this is arithmetic for an array
8276 reference, and we knew the bounds of the array, then we could
8277 apply the distributive law across (PLUS X C) for constant C.
8278 Without such knowledge, we risk overflowing the computation
8279 when both X and C are large, but X+C isn't. */
8280 /* ??? Could perhaps special-case EXP being unsigned and C being
8281 positive. In that case we are certain that X+C is no smaller
8282 than X and so the transformed expression will overflow iff the
8283 original would have. */
8285 if (GET_CODE (op0) != REG)
8286 op0 = force_operand (op0, NULL_RTX);
8287 if (GET_CODE (op0) != REG)
8288 op0 = copy_to_mode_reg (mode, op0);
8290 return gen_rtx_MULT (mode, op0,
8291 gen_int_mode (tree_low_cst (exp1, 0),
8292 TYPE_MODE (TREE_TYPE (exp1))));
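      /* Illustrative sketch (editor's note, not in the original source):
	 for `short *p; ... p[i]' expanded with EXPAND_SUM, the MULT built
	 above is e.g. (mult:SI (reg:SI i) (const_int 2)), which the caller
	 can fold into an indexed address instead of emitting a real
	 multiply instruction.  */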
8293 }
8295 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8296 subtarget = 0;
8298 if (modifier == EXPAND_STACK_PARM)
8299 target = 0;
8301 /* Check for multiplying things that have been extended
8302 from a narrower type. If this machine supports multiplying
8303 in that narrower type with a result in the desired type,
8304 do it that way, and avoid the explicit type-conversion. */
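      /* Illustrative sketch (editor's note, not in the original source):
	 for

	     short a, b;
	     int c = (int) a * (int) b;

	 the test below strips the widening NOP_EXPRs and, on a target with
	 a mulhisi3-style widening-multiply pattern, multiplies in HImode
	 with an SImode result instead of extending both operands first.  */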
8305 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8306 && TREE_CODE (type) == INTEGER_TYPE
8307 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8308 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8309 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8310 && int_fits_type_p (TREE_OPERAND (exp, 1),
8311 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8312 /* Don't use a widening multiply if a shift will do. */
8313 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8314 > HOST_BITS_PER_WIDE_INT)
8315 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8316 ||
8317 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8318 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8319 ==
8320 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8321 /* If both operands are extended, they must either both
8322 be zero-extended or both be sign-extended.  */
8323 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8324 ==
8325 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8326 {
8327 enum machine_mode innermode
8328 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8329 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8330 ? smul_widen_optab : umul_widen_optab);
8331 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8332 ? umul_widen_optab : smul_widen_optab);
8333 if (mode == GET_MODE_WIDER_MODE (innermode))
8334 {
8335 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8336 {
8337 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8338 NULL_RTX, VOIDmode, 0);
8339 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8340 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8341 VOIDmode, 0);
8342 else
8343 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8344 NULL_RTX, VOIDmode, 0);
8345 goto binop2;
8346 }
8347 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8348 && innermode == word_mode)
8349 {
8350 rtx htem;
8351 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8352 NULL_RTX, VOIDmode, 0);
8353 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8354 op1 = convert_modes (innermode, mode,
8355 expand_expr (TREE_OPERAND (exp, 1),
8356 NULL_RTX, VOIDmode, 0),
8357 unsignedp);
8358 else
8359 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8360 NULL_RTX, VOIDmode, 0);
8361 temp = expand_binop (mode, other_optab, op0, op1, target,
8362 unsignedp, OPTAB_LIB_WIDEN);
8363 htem = expand_mult_highpart_adjust (innermode,
8364 gen_highpart (innermode, temp),
8365 op0, op1,
8366 gen_highpart (innermode, temp),
8367 unsignedp);
8368 emit_move_insn (gen_highpart (innermode, temp), htem);
8369 return temp;
8370 }
8371 }
8373 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8374 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8375 return expand_mult (mode, op0, op1, target, unsignedp);
8377 case TRUNC_DIV_EXPR:
8378 case FLOOR_DIV_EXPR:
8379 case CEIL_DIV_EXPR:
8380 case ROUND_DIV_EXPR:
8381 case EXACT_DIV_EXPR:
8382 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8383 subtarget = 0;
8384 if (modifier == EXPAND_STACK_PARM)
8385 target = 0;
8386 /* Possible optimization: compute the dividend with EXPAND_SUM
8387 then if the divisor is constant can optimize the case
8388 where some terms of the dividend have coeffs divisible by it. */
8389 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8390 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8391 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8393 case RDIV_EXPR:
8394 /* Emit a/b as a*(1/b).  Later, CSE may be able to share the
8395 reciprocal, saving an expensive divide.  If not, combine will
8396 rebuild the original division.  */
8397 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8398 && TREE_CODE (type) == REAL_TYPE
8399 && !real_onep (TREE_OPERAND (exp, 0)))
8400 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8401 build (RDIV_EXPR, type,
8402 build_real (type, dconst1),
8403 TREE_OPERAND (exp, 1))),
8404 target, tmode, modifier);
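	  /* Illustrative sketch (editor's note, not in the original source):
	     under -funsafe-math-optimizations the transformation above turns

		 double f (double x) { return x / 3.0; }

	     into x * (1.0 / 3.0); CSE can then share the reciprocal among
	     several divisions by the same constant, and if it cannot,
	     combine is expected to rebuild the division.  */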
8405 this_optab = sdiv_optab;
8406 goto binop;
8408 case TRUNC_MOD_EXPR:
8409 case FLOOR_MOD_EXPR:
8410 case CEIL_MOD_EXPR:
8411 case ROUND_MOD_EXPR:
8412 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8413 subtarget = 0;
8414 if (modifier == EXPAND_STACK_PARM)
8415 target = 0;
8416 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8417 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8418 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8420 case FIX_ROUND_EXPR:
8421 case FIX_FLOOR_EXPR:
8422 case FIX_CEIL_EXPR:
8423 abort (); /* Not used for C. */
8425 case FIX_TRUNC_EXPR:
8426 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8427 if (target == 0 || modifier == EXPAND_STACK_PARM)
8428 target = gen_reg_rtx (mode);
8429 expand_fix (target, op0, unsignedp);
8430 return target;
8432 case FLOAT_EXPR:
8433 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8434 if (target == 0 || modifier == EXPAND_STACK_PARM)
8435 target = gen_reg_rtx (mode);
8436 /* expand_float can't figure out what to do if FROM has VOIDmode.
8437 So give it the correct mode. With -O, cse will optimize this. */
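      /* Illustrative sketch (editor's note, not in the original source):
	 in `(double) 5' the operand expands to (const_int 5), whose mode is
	 VOIDmode; copying it into a register of the operand type's mode
	 (e.g. SImode) gives expand_float a properly typed source.  */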
8438 if (GET_MODE (op0) == VOIDmode)
8439 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8440 op0);
8441 expand_float (target, op0,
8442 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8443 return target;
8445 case NEGATE_EXPR:
8446 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8447 if (modifier == EXPAND_STACK_PARM)
8448 target = 0;
8449 temp = expand_unop (mode,
8450 ! unsignedp && flag_trapv
8451 && (GET_MODE_CLASS(mode) == MODE_INT)
8452 ? negv_optab : neg_optab, op0, target, 0);
8453 if (temp == 0)
8454 abort ();
8455 return temp;
8457 case ABS_EXPR:
8458 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8459 if (modifier == EXPAND_STACK_PARM)
8460 target = 0;
8462 /* Handle complex values specially. */
8463 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8464 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8465 return expand_complex_abs (mode, op0, target, unsignedp);
8467 /* Unsigned abs is simply the operand. Testing here means we don't
8468 risk generating incorrect code below. */
8469 if (TREE_UNSIGNED (type))
8470 return op0;
8472 return expand_abs (mode, op0, target, unsignedp,
8473 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8475 case MAX_EXPR:
8476 case MIN_EXPR:
8477 target = original_target;
8478 if (target == 0
8479 || modifier == EXPAND_STACK_PARM
8480 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8481 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8482 || GET_MODE (target) != mode
8483 || (GET_CODE (target) == REG
8484 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8485 target = gen_reg_rtx (mode);
8486 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8487 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8489 /* First try to do it with a special MIN or MAX instruction.
8490 If that does not win, use a conditional jump to select the proper
8491 value.  */
8492 this_optab = (TREE_UNSIGNED (type)
8493 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8494 : (code == MIN_EXPR ? smin_optab : smax_optab));
8496 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8497 OPTAB_WIDEN);
8498 if (temp != 0)
8499 return temp;
8501 /* At this point, a MEM target is no longer useful; we will get better
8502 code without it.  */
8504 if (GET_CODE (target) == MEM)
8505 target = gen_reg_rtx (mode);
8507 if (target != op0)
8508 emit_move_insn (target, op0);
8510 op0 = gen_label_rtx ();
8512 /* If this mode is an integer too wide to compare properly,
8513 compare word by word. Rely on cse to optimize constant cases. */
8514 if (GET_MODE_CLASS (mode) == MODE_INT
8515 && ! can_compare_p (GE, mode, ccp_jump))
8516 {
8517 if (code == MAX_EXPR)
8518 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8519 target, op1, NULL_RTX, op0);
8521 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8522 op1, target, NULL_RTX, op0);
8523 }
8524 else
8525 {
8526 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8527 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8528 unsignedp, mode, NULL_RTX, NULL_RTX,
8529 op0);
8530 }
8531 emit_move_insn (target, op1);
8532 emit_label (op0);
8533 return target;
8535 case BIT_NOT_EXPR:
8536 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8537 if (modifier == EXPAND_STACK_PARM)
8538 target = 0;
8539 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8540 if (temp == 0)
8541 abort ();
8542 return temp;
8544 case FFS_EXPR:
8545 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8546 if (modifier == EXPAND_STACK_PARM)
8547 target = 0;
8548 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8549 if (temp == 0)
8550 abort ();
8551 return temp;
8553 case CLZ_EXPR:
8554 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8555 temp = expand_unop (mode, clz_optab, op0, target, 1);
8556 if (temp == 0)
8557 abort ();
8558 return temp;
8560 case CTZ_EXPR:
8561 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8562 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8563 if (temp == 0)
8564 abort ();
8565 return temp;
8567 case POPCOUNT_EXPR:
8568 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8569 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8570 if (temp == 0)
8571 abort ();
8572 return temp;
8574 case PARITY_EXPR:
8575 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8576 temp = expand_unop (mode, parity_optab, op0, target, 1);
8577 if (temp == 0)
8578 abort ();
8579 return temp;
8581 /* ??? Can optimize bitwise operations with one arg constant.
8582 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8583 and (a bitwise1 b) bitwise2 b (etc)
8584 but that is probably not worth while. */
8586 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8587 boolean values when we want in all cases to compute both of them. In
8588 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8589 as actual zero-or-1 values and then bitwise anding. In cases where
8590 there cannot be any side effects, better code would be made by
8591 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8592 how to recognize those cases. */
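      /* Illustrative sketch (editor's note, not in the original source):
	 for `int r = a && b;' a front end must use TRUTH_ANDIF_EXPR, since
	 b may not be evaluated when a is zero; `r = (a != 0) & (b != 0)'
	 can be TRUTH_AND_EXPR and is compiled below as an ordinary bitwise
	 AND of two 0/1 values, with no branches.  */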
8594 case TRUTH_AND_EXPR:
8595 case BIT_AND_EXPR:
8596 this_optab = and_optab;
8597 goto binop;
8599 case TRUTH_OR_EXPR:
8600 case BIT_IOR_EXPR:
8601 this_optab = ior_optab;
8602 goto binop;
8604 case TRUTH_XOR_EXPR:
8605 case BIT_XOR_EXPR:
8606 this_optab = xor_optab;
8607 goto binop;
8609 case LSHIFT_EXPR:
8610 case RSHIFT_EXPR:
8611 case LROTATE_EXPR:
8612 case RROTATE_EXPR:
8613 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8614 subtarget = 0;
8615 if (modifier == EXPAND_STACK_PARM)
8616 target = 0;
8617 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8618 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8619 unsignedp);
8621 /* Could determine the answer when only additive constants differ. Also,
8622 the addition of one can be handled by changing the condition. */
8623 case LT_EXPR:
8624 case LE_EXPR:
8625 case GT_EXPR:
8626 case GE_EXPR:
8627 case EQ_EXPR:
8628 case NE_EXPR:
8629 case UNORDERED_EXPR:
8630 case ORDERED_EXPR:
8631 case UNLT_EXPR:
8632 case UNLE_EXPR:
8633 case UNGT_EXPR:
8634 case UNGE_EXPR:
8635 case UNEQ_EXPR:
8636 temp = do_store_flag (exp,
8637 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8638 tmode != VOIDmode ? tmode : mode, 0);
8639 if (temp != 0)
8640 return temp;
8642 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8643 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8644 && original_target
8645 && GET_CODE (original_target) == REG
8646 && (GET_MODE (original_target)
8647 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8648 {
8649 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8650 VOIDmode, 0);
8652 /* If temp is constant, we can just compute the result.  */
8653 if (GET_CODE (temp) == CONST_INT)
8654 {
8655 if (INTVAL (temp) != 0)
8656 emit_move_insn (target, const1_rtx);
8657 else
8658 emit_move_insn (target, const0_rtx);
8660 return target;
8661 }
8663 if (temp != original_target)
8664 {
8665 enum machine_mode mode1 = GET_MODE (temp);
8666 if (mode1 == VOIDmode)
8667 mode1 = tmode != VOIDmode ? tmode : mode;
8669 temp = copy_to_mode_reg (mode1, temp);
8670 }
8672 op1 = gen_label_rtx ();
8673 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8674 GET_MODE (temp), unsignedp, op1);
8675 emit_move_insn (temp, const1_rtx);
8676 emit_label (op1);
8677 return temp;
8678 }
8680 /* If no set-flag instruction, must generate a conditional
8681 store into a temporary variable. Drop through
8682 and handle this like && and ||. */
8684 case TRUTH_ANDIF_EXPR:
8685 case TRUTH_ORIF_EXPR:
8686 if (! ignore
8687 && (target == 0
8688 || modifier == EXPAND_STACK_PARM
8689 || ! safe_from_p (target, exp, 1)
8690 /* Make sure we don't have a hard reg (such as function's return
8691 value) live across basic blocks, if not optimizing. */
8692 || (!optimize && GET_CODE (target) == REG
8693 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8694 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8696 if (target)
8697 emit_clr_insn (target);
8699 op1 = gen_label_rtx ();
8700 jumpifnot (exp, op1);
8702 if (target)
8703 emit_0_to_1_insn (target);
8705 emit_label (op1);
8706 return ignore ? const0_rtx : target;
8708 case TRUTH_NOT_EXPR:
8709 if (modifier == EXPAND_STACK_PARM)
8710 target = 0;
8711 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8712 /* The parser is careful to generate TRUTH_NOT_EXPR
8713 only with operands that are always zero or one. */
8714 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8715 target, 1, OPTAB_LIB_WIDEN);
8716 if (temp == 0)
8717 abort ();
8718 return temp;
8720 case COMPOUND_EXPR:
8721 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8722 emit_queue ();
8723 return expand_expr (TREE_OPERAND (exp, 1),
8724 (ignore ? const0_rtx : target),
8725 VOIDmode, modifier);
8727 case COND_EXPR:
8728 /* If we would have a "singleton" (see below) were it not for a
8729 conversion in each arm, bring that conversion back out. */
8730 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8731 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8732 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8733 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8735 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8736 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8738 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8739 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8740 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8741 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8742 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8743 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8744 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8745 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8746 return expand_expr (build1 (NOP_EXPR, type,
8747 build (COND_EXPR, TREE_TYPE (iftrue),
8748 TREE_OPERAND (exp, 0),
8749 iftrue, iffalse)),
8750 target, tmode, modifier);
8751 }
8753 {
8754 /* Note that COND_EXPRs whose type is a structure or union
8755 are required to be constructed to contain assignments of
8756 a temporary variable, so that we can evaluate them here
8757 for side effect only. If type is void, we must do likewise. */
8759 /* If an arm of the branch requires a cleanup,
8760 only that cleanup is performed. */
8763 tree binary_op = 0, unary_op = 0;
8765 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8766 convert it to our mode, if necessary. */
8767 if (integer_onep (TREE_OPERAND (exp, 1))
8768 && integer_zerop (TREE_OPERAND (exp, 2))
8769 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8770 {
8771 if (ignore)
8772 {
8773 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8774 modifier);
8775 return const0_rtx;
8776 }
8778 if (modifier == EXPAND_STACK_PARM)
8779 target = 0;
8780 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8781 if (GET_MODE (op0) == mode)
8782 return op0;
8784 if (target == 0)
8785 target = gen_reg_rtx (mode);
8786 convert_move (target, op0, unsignedp);
8787 return target;
8788 }
8790 /* Check for X ? A + B : A. If we have this, we can copy A to the
8791 output and conditionally add B. Similarly for unary operations.
8792 Don't do this if X has side-effects because those side effects
8793 might affect A or B and the "?" operation is a sequence point in
8794 ANSI. (operand_equal_p tests for side effects.) */
8796 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8797 && operand_equal_p (TREE_OPERAND (exp, 2),
8798 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8799 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8800 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8801 && operand_equal_p (TREE_OPERAND (exp, 1),
8802 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8803 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8804 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8805 && operand_equal_p (TREE_OPERAND (exp, 2),
8806 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8807 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8808 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8809 && operand_equal_p (TREE_OPERAND (exp, 1),
8810 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8811 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8813 /* If we are not to produce a result, we have no target. Otherwise,
8814 if a target was specified use it; it will not be used as an
8815 intermediate target unless it is safe. If no target, use a
8820 else if (modifier == EXPAND_STACK_PARM)
8821 temp = assign_temp (type, 0, 0, 1);
8822 else if (original_target
8823 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8824 || (singleton && GET_CODE (original_target) == REG
8825 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8826 && original_target == var_rtx (singleton)))
8827 && GET_MODE (original_target) == mode
8828 #ifdef HAVE_conditional_move
8829 && (! can_conditionally_move_p (mode)
8830 || GET_CODE (original_target) == REG
8831 || TREE_ADDRESSABLE (type))
8833 && (GET_CODE (original_target) != MEM
8834 || TREE_ADDRESSABLE (type)))
8835 temp = original_target;
8836 else if (TREE_ADDRESSABLE (type))
8839 temp = assign_temp (type, 0, 0, 1);
8841 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8842 do the test of X as a store-flag operation, do this as
8843 A + ((X != 0) << log C). Similarly for other simple binary
8844 operators. Only do for C == 1 if BRANCH_COST is low. */
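	/* Illustrative sketch (editor's note, not in the original source):
	   with a store-flag insn, `x ? a + 4 : a' becomes

	       a + ((x != 0) << 2)

	   i.e. do_store_flag computes the 0/1 value of X, the shift scales
	   it to the power-of-2 constant C, and the binary operator folds it
	   into A, all without a conditional branch.  */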
8845 if (temp && singleton && binary_op
8846 && (TREE_CODE (binary_op) == PLUS_EXPR
8847 || TREE_CODE (binary_op) == MINUS_EXPR
8848 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8849 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8850 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8851 : integer_onep (TREE_OPERAND (binary_op, 1)))
8852 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8856 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8857 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8858 ? addv_optab : add_optab)
8859 : TREE_CODE (binary_op) == MINUS_EXPR
8860 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8861 ? subv_optab : sub_optab)
8862 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8865 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8866 if (singleton == TREE_OPERAND (exp, 1))
8867 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8869 cond = TREE_OPERAND (exp, 0);
8871 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8872 ? temp : NULL_RTX),
8873 mode, BRANCH_COST <= 1);
8875 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8876 result = expand_shift (LSHIFT_EXPR, mode, result,
8877 build_int_2 (tree_log2
8881 (safe_from_p (temp, singleton, 1)
8882 ? temp : NULL_RTX), 0);
8886 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8887 return expand_binop (mode, boptab, op1, result, temp,
8888 unsignedp, OPTAB_LIB_WIDEN);
8892 do_pending_stack_adjust ();
8894 op0 = gen_label_rtx ();
8896 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8900 /* If the target conflicts with the other operand of the
8901 binary op, we can't use it. Also, we can't use the target
8902 if it is a hard register, because evaluating the condition
8903 might clobber it. */
8905 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8906 || (GET_CODE (temp) == REG
8907 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8908 temp = gen_reg_rtx (mode);
8909 store_expr (singleton, temp,
8910 modifier == EXPAND_STACK_PARM ? 2 : 0);
8913 expand_expr (singleton,
8914 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8915 if (singleton == TREE_OPERAND (exp, 1))
8916 jumpif (TREE_OPERAND (exp, 0), op0);
8918 jumpifnot (TREE_OPERAND (exp, 0), op0);
8920 start_cleanup_deferral ();
8921 if (binary_op && temp == 0)
8922 /* Just touch the other operand. */
8923 expand_expr (TREE_OPERAND (binary_op, 1),
8924 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8926 store_expr (build (TREE_CODE (binary_op), type,
8927 make_tree (type, temp),
8928 TREE_OPERAND (binary_op, 1)),
8929 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8931 store_expr (build1 (TREE_CODE (unary_op), type,
8932 make_tree (type, temp)),
8933 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8936 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8937 comparison operator. If we have one of these cases, set the
8938 output to A, branch on A (cse will merge these two references),
8939 then set the output to FOO. */
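	/* Illustrative sketch (editor's note, not in the original source):
	   `x != 0 ? x : y' matches this pattern; we store X into the
	   result, branch on the same X (cse merges the two uses), and only
	   store Y on the fall-through path, so the common arm costs a
	   single move.  */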
8940 else if (temp
8941 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8942 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8943 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8944 TREE_OPERAND (exp, 1), 0)
8945 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8946 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8947 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8949 if (GET_CODE (temp) == REG
8950 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8951 temp = gen_reg_rtx (mode);
8952 store_expr (TREE_OPERAND (exp, 1), temp,
8953 modifier == EXPAND_STACK_PARM ? 2 : 0);
8954 jumpif (TREE_OPERAND (exp, 0), op0);
8956 start_cleanup_deferral ();
8957 store_expr (TREE_OPERAND (exp, 2), temp,
8958 modifier == EXPAND_STACK_PARM ? 2 : 0);
8962 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8963 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8964 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8965 TREE_OPERAND (exp, 2), 0)
8966 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8967 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8968 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8970 if (GET_CODE (temp) == REG
8971 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8972 temp = gen_reg_rtx (mode);
8973 store_expr (TREE_OPERAND (exp, 2), temp,
8974 modifier == EXPAND_STACK_PARM ? 2 : 0);
8975 jumpifnot (TREE_OPERAND (exp, 0), op0);
8977 start_cleanup_deferral ();
8978 store_expr (TREE_OPERAND (exp, 1), temp,
8979 modifier == EXPAND_STACK_PARM ? 2 : 0);
8984 op1 = gen_label_rtx ();
8985 jumpifnot (TREE_OPERAND (exp, 0), op0);
8987 start_cleanup_deferral ();
8989 /* One branch of the cond can be void, if it never returns. For
8990 example A ? throw : E */
8992 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8993 store_expr (TREE_OPERAND (exp, 1), temp,
8994 modifier == EXPAND_STACK_PARM ? 2 : 0);
8996 expand_expr (TREE_OPERAND (exp, 1),
8997 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8998 end_cleanup_deferral ();
9000 emit_jump_insn (gen_jump (op1));
9003 start_cleanup_deferral ();
9005 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
9006 store_expr (TREE_OPERAND (exp, 2), temp,
9007 modifier == EXPAND_STACK_PARM ? 2 : 0);
9009 expand_expr (TREE_OPERAND (exp, 2),
9010 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9011 }
9013 end_cleanup_deferral ();
9015 emit_queue ();
9016 emit_label (op1);
9017 OK_DEFER_POP;
9019 return temp;
9020 }
9022 case TARGET_EXPR:
9023 {
9024 /* Something needs to be initialized, but we didn't know
9025 where that thing was when building the tree. For example,
9026 it could be the return value of a function, or a parameter
9027 to a function which lays down in the stack, or a temporary
9028 variable which must be passed by reference.
9030 We guarantee that the expression will either be constructed
9031 or copied into our original target. */
9033 tree slot = TREE_OPERAND (exp, 0);
9034 tree cleanups = NULL_TREE;
9037 if (TREE_CODE (slot) != VAR_DECL)
9041 target = original_target;
9043 /* Set this here so that if we get a target that refers to a
9044 register variable that's already been used, put_reg_into_stack
9045 knows that it should fix up those uses. */
9046 TREE_USED (slot) = 1;
9050 if (DECL_RTL_SET_P (slot))
9052 target = DECL_RTL (slot);
9053 /* We have already expanded the slot, so don't do
9054 anything else.  */
9055 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9056 return target;
9057 }
9058 else
9059 {
9060 target = assign_temp (type, 2, 0, 1);
9061 /* All temp slots at this level must not conflict. */
9062 preserve_temp_slots (target);
9063 SET_DECL_RTL (slot, target);
9064 if (TREE_ADDRESSABLE (slot))
9065 put_var_into_stack (slot, /*rescan=*/false);
9067 /* Since SLOT is not known to the called function
9068 to belong to its stack frame, we must build an explicit
9069 cleanup. This case occurs when we must build up a reference
9070 to pass the reference as an argument. In this case,
9071 it is very likely that such a reference need not be
9074 if (TREE_OPERAND (exp, 2) == 0)
9075 TREE_OPERAND (exp, 2)
9076 = (*lang_hooks.maybe_build_cleanup) (slot);
9077 cleanups = TREE_OPERAND (exp, 2);
9082 /* This case does occur, when expanding a parameter which
9083 needs to be constructed on the stack. The target
9084 is the actual stack address that we want to initialize.
9085 The function we call will perform the cleanup in this case. */
9087 /* If we have already assigned it space, use that space,
9088 not target that we were passed in, as our target
9089 parameter is only a hint. */
9090 if (DECL_RTL_SET_P (slot))
9092 target = DECL_RTL (slot);
9093 /* We have already expanded the slot, so don't do
9094 anything else.  */
9095 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9096 return target;
9097 }
9099 else
9100 SET_DECL_RTL (slot, target);
9101 /* If we must have an addressable slot, then make sure that
9102 the RTL that we just stored in slot is OK. */
9103 if (TREE_ADDRESSABLE (slot))
9104 put_var_into_stack (slot, /*rescan=*/true);
9108 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9109 /* Mark it as expanded. */
9110 TREE_OPERAND (exp, 1) = NULL_TREE;
9112 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9114 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9116 return target;
9117 }
9119 case INIT_EXPR:
9120 {
9121 tree lhs = TREE_OPERAND (exp, 0);
9122 tree rhs = TREE_OPERAND (exp, 1);
9124 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9125 return temp;
9126 }
9128 case MODIFY_EXPR:
9129 {
9130 /* If lhs is complex, expand calls in rhs before computing it.
9131 That's so we don't compute a pointer and save it over a
9132 call. If lhs is simple, compute it first so we can give it
9133 as a target if the rhs is just a call. This avoids an
9134 extra temp and copy and that prevents a partial-subsumption
9135 which makes bad code. Actually we could treat
9136 component_ref's of vars like vars. */
9138 tree lhs = TREE_OPERAND (exp, 0);
9139 tree rhs = TREE_OPERAND (exp, 1);
9143 /* Check for |= or &= of a bitfield of size one into another bitfield
9144 of size 1. In this case, (unless we need the result of the
9145 assignment) we can do this more efficiently with a
9146 test followed by an assignment, if necessary.
9148 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9149 things change so we do, this code should be enhanced to
9150 support it.  */
9151 if (ignore
9152 && TREE_CODE (lhs) == COMPONENT_REF
9153 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9154 || TREE_CODE (rhs) == BIT_AND_EXPR)
9155 && TREE_OPERAND (rhs, 0) == lhs
9156 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9157 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9158 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9160 rtx label = gen_label_rtx ();
9162 do_jump (TREE_OPERAND (rhs, 1),
9163 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9164 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9165 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9166 (TREE_CODE (rhs) == BIT_IOR_EXPR
9167 ? integer_one_node
9168 : integer_zero_node)),
9169 0, 0);
9170 do_pending_stack_adjust ();
9171 emit_label (label);
9172 return const0_rtx;
9173 }
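	    /* Illustrative sketch (editor's note, not in the original
	       source): for

		   struct s { unsigned a : 1, b : 1; } x;
		   ... x.a |= x.b ...

	       the code above emits the equivalent of `if (x.b) x.a = 1;'
	       (a test plus a conditional store of a constant) rather than
	       loading, OR-ing and re-inserting both one-bit fields.  */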
9175 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9176 return temp;
9177 }
9179 case RETURN_EXPR:
9181 if (!TREE_OPERAND (exp, 0))
9182 expand_null_return ();
9183 else
9184 expand_return (TREE_OPERAND (exp, 0));
9185 return const0_rtx;
9187 case PREINCREMENT_EXPR:
9188 case PREDECREMENT_EXPR:
9189 return expand_increment (exp, 0, ignore);
9191 case POSTINCREMENT_EXPR:
9192 case POSTDECREMENT_EXPR:
9193 /* Faster to treat as pre-increment if result is not used. */
9194 return expand_increment (exp, ! ignore, ignore);
9196 case ADDR_EXPR:
9197 if (modifier == EXPAND_STACK_PARM)
9198 target = 0;
9199 /* Are we taking the address of a nested function? */
9200 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9201 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9202 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9203 && ! TREE_STATIC (exp))
9204 {
9205 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9206 op0 = force_operand (op0, target);
9207 }
9208 /* If we are taking the address of something erroneous, just
9209 return a zero.  */
9210 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9211 return const0_rtx;
9212 /* If we are taking the address of a constant and are at the
9213 top level, we have to use output_constant_def since we can't
9214 call force_const_mem at top level. */
9215 else if (cfun == 0
9216 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9217 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9218 == 'c')))
9219 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9220 else
9221 {
9222 /* We make sure to pass const0_rtx down if we came in with
9223 ignore set, to avoid doing the cleanups twice for something. */
9224 op0 = expand_expr (TREE_OPERAND (exp, 0),
9225 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9226 (modifier == EXPAND_INITIALIZER
9227 ? modifier : EXPAND_CONST_ADDRESS));
9229 /* If we are going to ignore the result, OP0 will have been set
9230 to const0_rtx, so just return it. Don't get confused and
9231 think we are taking the address of the constant.  */
9232 if (ignore)
9233 return op0;
9235 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9236 clever and returns a REG when given a MEM. */
9237 op0 = protect_from_queue (op0, 1);
9239 /* We would like the object in memory. If it is a constant, we can
9240 have it be statically allocated into memory. For a non-constant,
9241 we need to allocate some memory and store the value into it. */
9243 if (CONSTANT_P (op0))
9244 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9246 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9247 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9248 || GET_CODE (op0) == PARALLEL)
9250 /* If the operand is a SAVE_EXPR, we can deal with this by
9251 forcing the SAVE_EXPR into memory. */
9252 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9254 put_var_into_stack (TREE_OPERAND (exp, 0),
9256 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9260 /* If this object is in a register, it can't be BLKmode. */
9261 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9262 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9264 if (GET_CODE (op0) == PARALLEL)
9265 /* Handle calls that pass values in multiple
9266 non-contiguous locations. The Irix 6 ABI has examples
9268 emit_group_store (memloc, op0,
9269 int_size_in_bytes (inner_type));
9271 emit_move_insn (memloc, op0);
9277 if (GET_CODE (op0) != MEM)
9280 mark_temp_addr_taken (op0);
9281 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9283 op0 = XEXP (op0, 0);
9284 #ifdef POINTERS_EXTEND_UNSIGNED
9285 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9286 && mode == ptr_mode)
9287 op0 = convert_memory_address (ptr_mode, op0);
9292 /* If OP0 is not aligned as least as much as the type requires, we
9293 need to make a temporary, copy OP0 to it, and take the address of
9294 the temporary. We want to use the alignment of the type, not of
9295 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9296 the test for BLKmode means that can't happen. The test for
9297 BLKmode is because we never make mis-aligned MEMs with
9300 We don't need to do this at all if the machine doesn't have
9301 strict alignment. */
9302 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9303 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9305 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9307 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9310 if (TYPE_ALIGN_OK (inner_type))
9313 if (TREE_ADDRESSABLE (inner_type))
9315 /* We can't make a bitwise copy of this object, so fail. */
9316 error ("cannot take the address of an unaligned member");
9320 new = assign_stack_temp_for_type
9321 (TYPE_MODE (inner_type),
9322 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9323 : int_size_in_bytes (inner_type),
9324 1, build_qualified_type (inner_type,
9325 (TYPE_QUALS (inner_type)
9326 | TYPE_QUAL_CONST)));
9328 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9329 (modifier == EXPAND_STACK_PARM
9330 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9335 op0 = force_operand (XEXP (op0, 0), target);
9339 && GET_CODE (op0) != REG
9340 && modifier != EXPAND_CONST_ADDRESS
9341 && modifier != EXPAND_INITIALIZER
9342 && modifier != EXPAND_SUM)
9343 op0 = force_reg (Pmode, op0);
9345 if (GET_CODE (op0) == REG
9346 && ! REG_USERVAR_P (op0))
9347 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9349 #ifdef POINTERS_EXTEND_UNSIGNED
9350 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9351 && mode == ptr_mode)
9352 op0 = convert_memory_address (ptr_mode, op0);
9357 case ENTRY_VALUE_EXPR:
9358 abort ();
9360 /* COMPLEX type for Extended Pascal & Fortran  */
9361 case COMPLEX_EXPR:
9362 {
9363 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9366 /* Get the rtx code of the operands. */
9367 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9368 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9371 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9375 /* Move the real (op0) and imaginary (op1) parts to their location. */
9376 emit_move_insn (gen_realpart (mode, target), op0);
9377 emit_move_insn (gen_imagpart (mode, target), op1);
9379 insns = get_insns ();
9382 /* Complex construction should appear as a single unit. */
9383 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9384 each with a separate pseudo as destination.
9385 It's not correct for flow to treat them as a unit. */
9386 if (GET_CODE (target) != CONCAT)
9387 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9388 else
9389 emit_insn (insns);
9391 return target;
9392 }
9394 case REALPART_EXPR:
9395 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9396 return gen_realpart (mode, op0);
9398 case IMAGPART_EXPR:
9399 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9400 return gen_imagpart (mode, op0);
9402 case CONJ_EXPR:
9403 {
9404 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9408 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9411 target = gen_reg_rtx (mode);
9415 /* Store the realpart and the negated imagpart to target. */
9416 emit_move_insn (gen_realpart (partmode, target),
9417 gen_realpart (partmode, op0));
9419 imag_t = gen_imagpart (partmode, target);
9420 temp = expand_unop (partmode,
9421 ! unsignedp && flag_trapv
9422 && (GET_MODE_CLASS(partmode) == MODE_INT)
9423 ? negv_optab : neg_optab,
9424 gen_imagpart (partmode, op0), imag_t, 0);
9426 emit_move_insn (imag_t, temp);
9428 insns = get_insns ();
9431 /* Conjugate should appear as a single unit
9432 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9433 each with a separate pseudo as destination.
9434 It's not correct for flow to treat them as a unit. */
9435 if (GET_CODE (target) != CONCAT)
9436 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9443 case TRY_CATCH_EXPR:
9445 tree handler = TREE_OPERAND (exp, 1);
9447 expand_eh_region_start ();
9449 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9451 expand_eh_region_end_cleanup (handler);
9456 case TRY_FINALLY_EXPR:
9458 tree try_block = TREE_OPERAND (exp, 0);
9459 tree finally_block = TREE_OPERAND (exp, 1);
9461 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9463 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9464 is not sufficient, so we cannot expand the block twice.
9465 So we play games with GOTO_SUBROUTINE_EXPR to let us
9466 expand the thing only once. */
9467 /* When not optimizing, we go ahead with this form since
9468 (1) user breakpoints operate more predictably without
9469 code duplication, and
9470 (2) we're not running any of the global optimizers
9471 that would explode in time/space with the highly
9472 connected CFG created by the indirect branching. */
9474 rtx finally_label = gen_label_rtx ();
9475 rtx done_label = gen_label_rtx ();
9476 rtx return_link = gen_reg_rtx (Pmode);
9477 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9478 (tree) finally_label, (tree) return_link);
9479 TREE_SIDE_EFFECTS (cleanup) = 1;
9481 /* Start a new binding layer that will keep track of all cleanup
9482 actions to be performed. */
9483 expand_start_bindings (2);
9484 target_temp_slot_level = temp_slot_level;
9486 expand_decl_cleanup (NULL_TREE, cleanup);
9487 op0 = expand_expr (try_block, target, tmode, modifier);
9489 preserve_temp_slots (op0);
9490 expand_end_bindings (NULL_TREE, 0, 0);
9491 emit_jump (done_label);
9492 emit_label (finally_label);
9493 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9494 emit_indirect_jump (return_link);
9495 emit_label (done_label);
9499 expand_start_bindings (2);
9500 target_temp_slot_level = temp_slot_level;
9502 expand_decl_cleanup (NULL_TREE, finally_block);
9503 op0 = expand_expr (try_block, target, tmode, modifier);
9505 preserve_temp_slots (op0);
9506 expand_end_bindings (NULL_TREE, 0, 0);
9512 case GOTO_SUBROUTINE_EXPR:
9514 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9515 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9516 rtx return_address = gen_label_rtx ();
9517 emit_move_insn (return_link,
9518 gen_rtx_LABEL_REF (Pmode, return_address));
9520 emit_label (return_address);
9525 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9528 return get_exception_pointer (cfun);
9530 case FDESC_EXPR:
9531 /* Function descriptors are not valid except for as
9532 initialization constants, and should not be expanded.  */
9533 abort ();
9535 default:
9536 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9537 }
9539 /* Here to do an ordinary binary operator, generating an instruction
9540 from the optab already placed in `this_optab'.  */
9541 binop:
9542 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9543 subtarget = 0;
9544 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9545 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9546 binop2:
9547 if (modifier == EXPAND_STACK_PARM)
9548 target = 0;
9549 temp = expand_binop (mode, this_optab, op0, op1, target,
9550 unsignedp, OPTAB_LIB_WIDEN);
9551 if (temp == 0)
9552 abort ();
9553 return temp;
9554 }
9556 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9557 when applied to the address of EXP produces an address known to be
9558 aligned more than BIGGEST_ALIGNMENT. */
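/* Illustrative sketch (editor's note, not in the original source): such
   an offset typically comes from layout code that over-aligns an object,
   e.g. the expression

       (-addr) & (ALIGN - 1)

   added to ADDR with ALIGN larger than BIGGEST_ALIGNMENT; the checks
   below verify exactly this BIT_AND_EXPR-of-NEGATE_EXPR shape.  */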
9560 static int
9561 is_aligning_offset (offset, exp)
9562 tree offset;
9563 tree exp;
9564 {
9565 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9566 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9567 || TREE_CODE (offset) == NOP_EXPR
9568 || TREE_CODE (offset) == CONVERT_EXPR
9569 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9570 offset = TREE_OPERAND (offset, 0);
9572 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9573 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9574 if (TREE_CODE (offset) != BIT_AND_EXPR
9575 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9576 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9577 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9578 return 0;
9580 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9581 It must be NEGATE_EXPR. Then strip any more conversions. */
9582 offset = TREE_OPERAND (offset, 0);
9583 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9584 || TREE_CODE (offset) == NOP_EXPR
9585 || TREE_CODE (offset) == CONVERT_EXPR)
9586 offset = TREE_OPERAND (offset, 0);
9588 if (TREE_CODE (offset) != NEGATE_EXPR)
9589 return 0;
9591 offset = TREE_OPERAND (offset, 0);
9592 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9593 || TREE_CODE (offset) == NOP_EXPR
9594 || TREE_CODE (offset) == CONVERT_EXPR)
9595 offset = TREE_OPERAND (offset, 0);
9597 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9598 whose type is the same as EXP. */
9599 return (TREE_CODE (offset) == ADDR_EXPR
9600 && (TREE_OPERAND (offset, 0) == exp
9601 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9602 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9603 == TREE_TYPE (exp)))));
9606 /* Return the tree node if an ARG corresponds to a string constant or zero
9607 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9608 in bytes within the string that ARG is accessing. The type of the
9609 offset will be `sizetype'. */
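/* Illustrative sketch (editor's note, not in the original source): for an
   argument of the form `"hello" + 2' (an ADDR_EXPR of a STRING_CST plus a
   constant), this returns the STRING_CST and sets *PTR_OFFSET to 2, which
   lets callers such as the builtin string expanders fold the access at
   compile time.  */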
9611 tree
9612 string_constant (arg, ptr_offset)
9613 tree arg;
9614 tree *ptr_offset;
9615 {
9616 STRIP_NOPS (arg);
9618 if (TREE_CODE (arg) == ADDR_EXPR
9619 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9621 *ptr_offset = size_zero_node;
9622 return TREE_OPERAND (arg, 0);
9624 else if (TREE_CODE (arg) == PLUS_EXPR)
9625 {
9626 tree arg0 = TREE_OPERAND (arg, 0);
9627 tree arg1 = TREE_OPERAND (arg, 1);
9629 STRIP_NOPS (arg0);
9630 STRIP_NOPS (arg1);
9632 if (TREE_CODE (arg0) == ADDR_EXPR
9633 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9635 *ptr_offset = convert (sizetype, arg1);
9636 return TREE_OPERAND (arg0, 0);
9638 else if (TREE_CODE (arg1) == ADDR_EXPR
9639 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9640 {
9641 *ptr_offset = convert (sizetype, arg0);
9642 return TREE_OPERAND (arg1, 0);
9643 }
9644 }
9646 return 0;
9647 }
9649 /* Expand code for a post- or pre- increment or decrement
9650 and return the RTX for the result.
9651 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
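/* Illustrative sketch (editor's note, not in the original source): for
   `int i; ... use (i++);' the increment is queued so the old value can be
   consumed first, while `++i' (POST == 0) tries to emit a single add insn
   and returns the updated value directly.  */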
9653 static rtx
9654 expand_increment (exp, post, ignore)
9655 tree exp;
9656 int post, ignore;
9657 {
9658 rtx op0, op1;
9659 rtx temp, value;
9660 tree incremented = TREE_OPERAND (exp, 0);
9661 optab this_optab = add_optab;
9662 int icode;
9663 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9664 int op0_is_copy = 0;
9665 int single_insn = 0;
9666 /* 1 means we can't store into OP0 directly,
9667 because it is a subreg narrower than a word,
9668 and we don't dare clobber the rest of the word.  */
9669 int bad_subreg = 0;
9671 /* Stabilize any component ref that might need to be
9672 evaluated more than once below. */
9673 if (!post
9674 || TREE_CODE (incremented) == BIT_FIELD_REF
9675 || (TREE_CODE (incremented) == COMPONENT_REF
9676 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9677 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9678 incremented = stabilize_reference (incremented);
9679 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9680 ones into save exprs so that they don't accidentally get evaluated
9681 more than once by the code below. */
9682 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9683 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9684 incremented = save_expr (incremented);
9686 /* Compute the operands as RTX.
9687 Note whether OP0 is the actual lvalue or a copy of it:
9688 I believe it is a copy iff it is a register or subreg
9689 and insns were generated in computing it. */
9691 temp = get_last_insn ();
9692 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9694 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9695 in place but instead must do sign- or zero-extension during assignment,
9696 so we copy it into a new register and let the code below use it as
9699 Note that we can safely modify this SUBREG since it is known not to be
9700 shared (it was made by the expand_expr call above). */
9702 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9705 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9709 else if (GET_CODE (op0) == SUBREG
9710 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9712 /* We cannot increment this SUBREG in place. If we are
9713 post-incrementing, get a copy of the old value. Otherwise,
9714 just mark that we cannot increment in place. */
9716 op0 = copy_to_reg (op0);
9721 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9722 && temp != get_last_insn ());
9723 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9725 /* Decide whether incrementing or decrementing. */
9726 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9727 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9728 this_optab = sub_optab;
9730 /* Convert decrement by a constant into a negative increment. */
9731 if (this_optab == sub_optab
9732 && GET_CODE (op1) == CONST_INT)
9734 op1 = GEN_INT (-INTVAL (op1));
9735 this_optab = add_optab;
9738 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9739 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9741 /* For a preincrement, see if we can do this with a single instruction. */
9744 icode = (int) this_optab->handlers[(int) mode].insn_code;
9745 if (icode != (int) CODE_FOR_nothing
9746 /* Make sure that OP0 is valid for operands 0 and 1
9747 of the insn we want to queue. */
9748 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9749 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9750 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9754 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9755 then we cannot just increment OP0. We must therefore contrive to
9756 increment the original value. Then, for postincrement, we can return
9757 OP0 since it is a copy of the old value. For preincrement, expand here
9758 unless we can do it with a single insn.
9760 Likewise if storing directly into OP0 would clobber high bits
9761 we need to preserve (bad_subreg). */
9762 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9764 /* This is the easiest way to increment the value wherever it is.
9765 Problems with multiple evaluation of INCREMENTED are prevented
9766 because either (1) it is a component_ref or preincrement,
9767 in which case it was stabilized above, or (2) it is an array_ref
9768 with constant index in an array in a register, which is
9769 safe to reevaluate. */
9770 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9771 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9772 ? MINUS_EXPR : PLUS_EXPR),
9775 TREE_OPERAND (exp, 1));
9777 while (TREE_CODE (incremented) == NOP_EXPR
9778 || TREE_CODE (incremented) == CONVERT_EXPR)
9780 newexp = convert (TREE_TYPE (incremented), newexp);
9781 incremented = TREE_OPERAND (incremented, 0);
9784 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9785 return post ? op0 : temp;
9786 }
9790 /* We have a true reference to the value in OP0.
9791 If there is an insn to add or subtract in this mode, queue it.
9792 Queueing the increment insn avoids the register shuffling
9793 that often results if we must increment now and first save
9794 the old value for subsequent use. */
9796 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9797 op0 = stabilize (op0);
9800 icode = (int) this_optab->handlers[(int) mode].insn_code;
9801 if (icode != (int) CODE_FOR_nothing
9802 /* Make sure that OP0 is valid for operands 0 and 1
9803 of the insn we want to queue. */
9804 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9805 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9807 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9808 op1 = force_reg (mode, op1);
9810 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9812 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9814 rtx addr = (general_operand (XEXP (op0, 0), mode)
9815 ? force_reg (Pmode, XEXP (op0, 0))
9816 : copy_to_reg (XEXP (op0, 0)));
9819 op0 = replace_equiv_address (op0, addr);
9820 temp = force_reg (GET_MODE (op0), op0);
9821 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9822 op1 = force_reg (mode, op1);
9824 /* The increment queue is LIFO, thus we have to `queue'
9825 the instructions in reverse order. */
9826 enqueue_insn (op0, gen_move_insn (op0, temp));
9827 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9832 /* Preincrement, or we can't increment with one simple insn. */
9833 if (post)
9834 /* Save a copy of the value before inc or dec, to return it later.  */
9835 temp = value = copy_to_reg (op0);
9836 else
9837 /* Arrange to return the incremented value. */
9838 /* Copy the rtx because expand_binop will protect from the queue,
9839 and the results of that would be invalid for us to return
9840 if our caller does emit_queue before using our result. */
9841 temp = copy_rtx (value = op0);
9843 /* Increment however we can. */
9844 op1 = expand_binop (mode, this_optab, value, op1, op0,
9845 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9847 /* Make sure the value is stored into OP0. */
9848 if (op1 != op0)
9849 emit_move_insn (op0, op1);
9851 return temp;
9852 }
9854 /* Generate code to calculate EXP using a store-flag instruction
9855 and return an rtx for the result. EXP is either a comparison
9856 or a TRUTH_NOT_EXPR whose operand is a comparison.
9858 If TARGET is nonzero, store the result there if convenient.
9860 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9863 Return zero if there is no suitable set-flag instruction
9864 available on this machine.
9866 Once expand_expr has been called on the arguments of the comparison,
9867 we are committed to doing the store flag, since it is not safe to
9868 re-evaluate the expression. We emit the store-flag insn by calling
9869 emit_store_flag, but only expand the arguments if we have a reason
9870 to believe that emit_store_flag will be successful. If we think that
9871 it will, but it isn't, we have to simulate the store-flag with a
9872 set/jump/set sequence. */
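/* Illustrative sketch (editor's note, not in the original source): on a
   machine with an scc-style store-flag insn, `r = (a == b);' becomes a
   compare followed by a store-flag; the set/jump/set fallback mentioned
   above is the equivalent of

       r = 1; if (a == b) goto L; r = 0; L:;

   with the two constants swapped when the test must be inverted.  */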
9874 static rtx
9875 do_store_flag (exp, target, mode, only_cheap)
9876 tree exp;
9877 rtx target;
9878 enum machine_mode mode;
9879 int only_cheap;
9880 {
9881 enum rtx_code code;
9882 tree arg0, arg1, type;
9883 tree tem;
9884 enum machine_mode operand_mode;
9885 int invert = 0;
9886 int unsignedp;
9887 rtx op0, op1;
9888 enum insn_code icode;
9889 rtx subtarget = target;
9890 rtx result, label;
9892 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9893 result at the end. We can't simply invert the test since it would
9894 have already been inverted if it were valid. This case occurs for
9895 some floating-point comparisons. */
9897 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9898 invert = 1, exp = TREE_OPERAND (exp, 0);
9900 arg0 = TREE_OPERAND (exp, 0);
9901 arg1 = TREE_OPERAND (exp, 1);
9903 /* Don't crash if the comparison was erroneous. */
9904 if (arg0 == error_mark_node || arg1 == error_mark_node)
9905 return const0_rtx;
9907 type = TREE_TYPE (arg0);
9908 operand_mode = TYPE_MODE (type);
9909 unsignedp = TREE_UNSIGNED (type);
9911 /* We won't bother with BLKmode store-flag operations because it would mean
9912 passing a lot of information to emit_store_flag. */
9913 if (operand_mode == BLKmode)
9914 return 0;
9916 /* We won't bother with store-flag operations involving function pointers
9917 when function pointers must be canonicalized before comparisons. */
9918 #ifdef HAVE_canonicalize_funcptr_for_compare
9919 if (HAVE_canonicalize_funcptr_for_compare
9920 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9921 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9923 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9924 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9925 == FUNCTION_TYPE))))
9926 return 0;
9927 #endif
9932 /* Get the rtx comparison code to use. We know that EXP is a comparison
9933 operation of some type. Some comparisons against 1 and -1 can be
9934 converted to comparisons with zero. Do so here so that the tests
9935 below will be aware that we have a comparison with zero. These
9936 tests will not catch constants in the first operand, but constants
9937 are rarely passed as the first operand. */
9939 switch (TREE_CODE (exp))
9940 {
9941 case EQ_EXPR:
9942 code = EQ;
9943 break;
9944 case NE_EXPR:
9945 code = NE;
9946 break;
9947 case LT_EXPR:
9948 if (integer_onep (arg1))
9949 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9950 else
9951 code = unsignedp ? LTU : LT;
9952 break;
9953 case LE_EXPR:
9954 if (! unsignedp && integer_all_onesp (arg1))
9955 arg1 = integer_zero_node, code = LT;
9956 else
9957 code = unsignedp ? LEU : LE;
9958 break;
9959 case GT_EXPR:
9960 if (! unsignedp && integer_all_onesp (arg1))
9961 arg1 = integer_zero_node, code = GE;
9962 else
9963 code = unsignedp ? GTU : GT;
9964 break;
9965 case GE_EXPR:
9966 if (integer_onep (arg1))
9967 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9968 else
9969 code = unsignedp ? GEU : GE;
9970 break;
9972 case UNORDERED_EXPR:
9973 code = UNORDERED;
9974 break;
9975 case ORDERED_EXPR:
9976 code = ORDERED;
9977 break;
9978 case UNLT_EXPR:
9979 code = UNLT;
9980 break;
9981 case UNLE_EXPR:
9982 code = UNLE;
9983 break;
9984 case UNGT_EXPR:
9985 code = UNGT;
9986 break;
9987 case UNGE_EXPR:
9988 code = UNGE;
9989 break;
9990 case UNEQ_EXPR:
9991 code = UNEQ;
9992 break;
9993 default:
9994 abort ();
9995 }
9998 /* Put a constant second. */
9999 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10000 {
10001 tem = arg0; arg0 = arg1; arg1 = tem;
10002 code = swap_condition (code);
10003 }
10005 /* If this is an equality or inequality test of a single bit, we can
10006 do this by shifting the bit being tested to the low-order bit and
10007 masking the result with the constant 1. If the condition was EQ,
10008 we xor it with 1. This does not require an scc insn and is faster
10009 than an scc insn even if we have it. */
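  /* Illustrative sketch (editor's note, not in the original source):
     `(x & 8) != 0' is compiled here as `(x >> 3) & 1', and `(x & 8) == 0'
     as the same shifted value XORed with 1, avoiding a compare and
     store-flag sequence entirely.  */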
10011 if ((code == NE || code == EQ)
10012 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10013 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10015 tree inner = TREE_OPERAND (arg0, 0);
10016 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10019 /* If INNER is a right shift of a constant and it plus BITNUM does
10020 not overflow, adjust BITNUM and INNER. */
10022 if (TREE_CODE (inner) == RSHIFT_EXPR
10023 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10024 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10025 && bitnum < TYPE_PRECISION (type)
10026 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10027 bitnum - TYPE_PRECISION (type)))
10029 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10030 inner = TREE_OPERAND (inner, 0);
10033 /* If we are going to be able to omit the AND below, we must do our
10034 operations as unsigned. If we must use the AND, we have a choice.
10035 Normally unsigned is faster, but for some machines signed is. */
10036 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10037 #ifdef LOAD_EXTEND_OP
10038 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10044 if (! get_subtarget (subtarget)
10045 || GET_MODE (subtarget) != operand_mode
10046 || ! safe_from_p (subtarget, inner, 1))
10049 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10052 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10053 size_int (bitnum), subtarget, ops_unsignedp);
10055 if (GET_MODE (op0) != mode)
10056 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10058 if ((code == EQ && ! invert) || (code == NE && invert))
10059 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10060 ops_unsignedp, OPTAB_LIB_WIDEN);
10062 /* Put the AND last so it can combine with more things. */
10063 if (bitnum != TYPE_PRECISION (type) - 1)
10064 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10069 /* Now see if we are likely to be able to do this. Return if not. */
10070 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10073 icode = setcc_gen_code[(int) code];
10074 if (icode == CODE_FOR_nothing
10075 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10077 /* We can only do this if it is one of the special cases that
10078 can be handled without an scc insn. */
10079 if ((code == LT && integer_zerop (arg1))
10080 || (! only_cheap && code == GE && integer_zerop (arg1)))
10082 else if (BRANCH_COST >= 0
10083 && ! only_cheap && (code == NE || code == EQ)
10084 && TREE_CODE (type) != REAL_TYPE
10085 && ((abs_optab->handlers[(int) operand_mode].insn_code
10086 != CODE_FOR_nothing)
10087 || (ffs_optab->handlers[(int) operand_mode].insn_code
10088 != CODE_FOR_nothing)))
10094 if (! get_subtarget (target)
10095 || GET_MODE (subtarget) != operand_mode
10096 || ! safe_from_p (subtarget, arg1, 1))
10099 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10100 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10103 target = gen_reg_rtx (mode);
10105 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10106 because, if the emit_store_flag does anything it will succeed and
10107 OP0 and OP1 will not be used subsequently. */
10109 result = emit_store_flag (target, code,
10110 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10111 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10112 operand_mode, unsignedp, 1);
10114 if (result)
10115 {
10116 if (invert)
10117 result = expand_binop (mode, xor_optab, result, const1_rtx,
10118 result, 0, OPTAB_LIB_WIDEN);
10119 return result;
10120 }
10122 /* If this failed, we have to do this with set/compare/jump/set code. */
10123 if (GET_CODE (target) != REG
10124 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10125 target = gen_reg_rtx (GET_MODE (target));
10127 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10128 result = compare_from_rtx (op0, op1, code, unsignedp,
10129 operand_mode, NULL_RTX);
10130 if (GET_CODE (result) == CONST_INT)
10131 return (((result == const0_rtx && ! invert)
10132 || (result != const0_rtx && invert))
10133 ? const0_rtx : const1_rtx);
10135 /* The code of RESULT may not match CODE if compare_from_rtx
10136 decided to swap its operands and reverse the original code.
10138 We know that compare_from_rtx returns either a CONST_INT or
10139 a new comparison code, so it is safe to just extract the
10140 code from RESULT. */
10141 code = GET_CODE (result);
10143 label = gen_label_rtx ();
10144 if (bcc_gen_fctn[(int) code] == 0)
10145 abort ();
10147 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10148 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10149 emit_label (label);
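
/* For example, for "t = (a < b)" on a machine with no usable scc
   pattern, the fallback above emits roughly (register numbers made up):

	(set (reg T) (const_int 1))	;; assume the test succeeds
	... comparison of A and B ...
	(jump_insn ...)			;; branch to L if A < B
	(set (reg T) (const_int 0))	;; test failed
     L:

   With INVERT nonzero the two constants are exchanged.  */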
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
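
/* For example, with the default definition above, the switch expansion
   code in stmt.c will not use a dispatch table for a switch with fewer
   than four case values (five when there is no casesi pattern); smaller
   switches become a tree of compare-and-branch insns instead.  */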
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
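
/* For example, for a switch whose cases run from 3 to 10, the code
   above expands roughly to

	index = x;			;; widened/narrowed to SImode
	casesi index, 3, 7, Ltable, Ldefault

   where 3 is MINVAL and 7 is RANGE (upper bound minus lower bound);
   the bounds check and the indexed jump are left to the casesi insn
   itself.  */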
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;
  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
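  /* For example, if the case values run from 3 to 10, INDEX already
     holds x - 3 and RANGE is 7; a single unsigned "(x - 3) > 7" then
     rejects both x < 3 (the subtraction wrapped around to a huge
     unsigned value) and x > 10, doing the work of two signed
     comparisons.  */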
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
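
/* For example, with a 4-byte CASE_VECTOR_MODE and INDEX == 2, the
   address formed above is

	(plus (mult (reg INDEX) (const_int 4)) (label_ref Ltable))

   i.e. the third slot of the dispatch table; its contents are loaded
   into TEMP and used as the jump target.  */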
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI
     but do have V2DI, but that case is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
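
/* For example, V2DImode is accepted here even on a target whose
   VECTOR_MODE_SUPPORTED_P rejects it, as long as DImode moves are
   available: a V2DI value can then be emulated with a pair of DImode
   operations.  */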
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (exp)
     tree exp;
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));
  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);
  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
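
/* For example, the VECTOR_CST for the V4SImode constant {1, 2, 3, 4}
   becomes

	(const_vector:V4SI [(const_int 1) (const_int 2)
			    (const_int 3) (const_int 4)])

   while an all-zeros vector is short-circuited to CONST0_RTX of the
   vector mode.  */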
#include "gt-expr.h"