/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces

  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
extern struct obstack permanent_obstack;

static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
					enum machine_mode,
					struct store_by_pieces *));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);

	    if (! HARD_REGNO_MODE_OK (regno, mode))

	    reg = gen_rtx_REG (mode, regno);

	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
/* This is run at the start of compiling a function.  */

  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;

  struct expr_status *p;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function ()
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)

  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
protect_from_queue (x, modify)

  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)

      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  rtx temp = gen_reg_rtx (GET_MODE (x));

	  emit_insn_before (gen_move_insn (temp, new),

      /* Copy the address into a pseudo, so that the returned value
	 remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */

      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))

  else if (code == PLUS || code == MULT)

      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));

      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));

/* Perform all the pending incrementations.  */

  while ((p = pending_chain))

      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)

	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));

	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
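
#if 0
/* Usage sketch (illustrative only, not part of this file): any rtx
   that may contain a QUEUED must be passed through protect_from_queue
   before it goes into an insn; emit_queue then flushes the pending
   increments.  X and TARGET are hypothetical operands from expansion.  */
x = protect_from_queue (x, /*modify=*/0);
emit_move_insn (target, x);
emit_queue ();
#endif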
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (to, from, unsignedp)

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))

      emit_move_insn (to, from);
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))

      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (to_real != from_real)
#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfqf2
  if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
  if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncsftqf2
  if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdftqf2
  if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxftqf2
  if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctftqf2
  if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	libcall = extendsfdf2_libfunc;

	libcall = extendsfxf2_libfunc;

	libcall = extendsftf2_libfunc;

	libcall = truncdfsf2_libfunc;

	libcall = extenddfxf2_libfunc;

	libcall = extenddftf2_libfunc;

	libcall = truncxfsf2_libfunc;

	libcall = truncxfdf2_libfunc;

	libcall = trunctfsf2_libfunc;

	libcall = trunctfdf2_libfunc;

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */
    abort ();

  value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
  insns = get_insns ();

  emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)

      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)

	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))

	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;

	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)

	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));

	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
	      fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)

	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)

      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)

      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)

	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);

#endif /* HAVE_truncqipqi2 */

  if (from_mode == PQImode)

      if (to_mode != QImode)

	  from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_extendpqiqi2
      if (HAVE_extendpqiqi2)

	  emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);

#endif /* HAVE_extendpqiqi2 */

  if (to_mode == PSImode)

      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)

	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);

#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)

      if (to_mode != SImode)

	  from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (! unsignedp && HAVE_extendpsisi2)

	  emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);

#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
      if (unsignedp && HAVE_zero_extendpsisi2)

	  emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);

#endif /* HAVE_zero_extendpsisi2 */

  if (to_mode == PDImode)

      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)

	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);

#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)

      if (to_mode != DImode)

	  from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)

	  emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);

#endif /* HAVE_extendpdidi2 */
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))

      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))

      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)

	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);

	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))

		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
	  emit_move_insn (to, tmp);
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)

#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)

	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)

#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)

	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)

#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)

	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)

#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)

	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)

#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)

	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)

#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)

	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)

#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)

	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)

#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)

	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)

#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)

	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)

#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)

	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);

      convert_move (to, force_reg (from_mode, from), unsignedp);

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))

      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
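
#if 0
/* Usage sketch (illustrative only, not part of this file): widen a
   QImode pseudo into an SImode pseudo, zero-extending because
   UNSIGNEDP is nonzero.  */
rtx narrow = gen_reg_rtx (QImode);
rtx wide = gen_reg_rtx (SImode);
convert_move (wide, narrow, /*unsignedp=*/1);
#endif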
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;

  return convert_modes (mode, VOIDmode, x, unsignedp);

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
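
  /* For example (illustrative numbers only): with a 32-bit
     HOST_WIDE_INT, converting the CONST_INT -1 to an unsigned 64-bit
     mode must yield the double-word constant 0x00000000FFFFFFFF, not
     all ones; hence the explicit zero high-order word below.  */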
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)

      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))

	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))

      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))

	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
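
#if 0
/* Usage sketch (illustrative only, not part of this file): get X as
   an SImode value, zero-extending if X is narrower.  X is a
   hypothetical rtx whose mode is known to be nonvoid.  */
rtx val = convert_to_mode (SImode, x, /*unsignedp=*/1);
#endif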
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

move_by_pieces (to, from, len, align)
     unsigned HOST_WIDE_INT len;

  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.from_addr = from_addr;

      to_addr = XEXP (to, 0);

      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);

#ifdef STACK_GROWS_DOWNWARD

      data.to_addr = to_addr;

  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)

      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)

	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;

      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)

	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;

      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)

	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.explicit_inc_to = -1;

      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)

	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.explicit_inc_to = 1;

      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
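
#if 0
/* Usage sketch (illustrative only, not part of this file): copy 32
   bytes between two word-aligned BLKmode MEMs.  DESTMEM and SRCMEM are
   hypothetical MEMs already passed through protect_from_queue; the
   alignment argument is in bits.  */
move_by_pieces (destmem, srcmem, 32, BITS_PER_WORD);
#endif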
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;

  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)

      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;

  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)

      if (data->reverse)
	data->offset -= size;

	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
	  else
	    to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

	emit_insn ((*genfun) (to1, from1));

#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

emit_block_move (x, y, size)

#ifdef TARGET_MEM_FUNCTIONS
  tree call_expr, arg_list;

  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)

  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)

  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))

	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))

	      rtx last = get_last_insn ();

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);

	      delete_insns_since (last);

  /* X, Y, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy X, Y &
     SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
  x = copy_to_mode_reg (Pmode, XEXP (x, 0));
  y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);

  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			  TREE_UNSIGNED (integer_type_node));
  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);

#ifdef TARGET_MEM_FUNCTIONS
  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.

     This could be a user call to memcpy and the user may wish to
     examine the return value from memcpy.

     For targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     So instead of using a libcall sequence we build up a suitable
     CALL_EXPR and expand the call in the normal fashion.  */
  if (fn == NULL_TREE)

      /* This was copied from except.c, I don't know if all this is
	 necessary in this context or not.  */
      fn = get_identifier ("memcpy");
      fntype = build_pointer_type (void_type_node);
      fntype = build_function_type (fntype, NULL_TREE);
      fn = build_decl (FUNCTION_DECL, fn, fntype);
      ggc_add_tree_root (&fn, 1);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);

  /* We need to make an argument list for the function call.

     memcpy has three arguments, the first two are void * addresses and
     the last is a size_t byte count for the copy.  */
  arg_list
    = build_tree_list (NULL_TREE,
		       make_tree (build_pointer_type (void_type_node), x));
  TREE_CHAIN (arg_list)
    = build_tree_list (NULL_TREE,
		       make_tree (build_pointer_type (void_type_node), y));
  TREE_CHAIN (TREE_CHAIN (arg_list))
    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  emit_library_call (bcopy_libfunc, LCT_NORMAL,
		     VOIDmode, 3, y, Pmode, x, Pmode,
		     convert_to_mode (TYPE_MODE (integer_type_node), size,
				      TREE_UNSIGNED (integer_type_node)),
		     TYPE_MODE (integer_type_node));

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be hoisted
     from a loop.  */
  if (RTX_UNCHANGING_P (x))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
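
#if 0
/* Usage sketch (illustrative only, not part of this file): copy N
   bytes from BLKmode MEM Y to BLKmode MEM X; the helper chooses
   between move_by_pieces, a movstr pattern, and a library call on
   its own.  N is a hypothetical compile-time byte count.  */
emit_block_move (x, y, GEN_INT (n));
#endif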
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)

      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,

	  delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
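
#if 0
/* Usage sketch (illustrative only, not part of this file): fill two
   consecutive hard registers starting at REGNO from the value X.  */
move_block_to_reg (regno, x, 2, mode);
#endif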
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)

#ifdef HAVE_store_multiple

  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)

      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)

      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)

      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),

	  delete_insns_since (last);

  for (i = 0; i < nregs; i++)

      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

emit_group_load (dst, orig_src, ssize)

  if (GET_CODE (dst) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)

      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)

	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */

      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))

	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))

	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));

      else if (GET_CODE (src) == CONCAT)

	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else if (bytepos == 0)

	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);

      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))

	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
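
#if 0
/* Usage sketch (illustrative only, not part of this file): scatter a
   16-byte struct SRCMEM into DSTPAR, a hypothetical PARALLEL of
   (reg, byte-offset) pairs such as a target's FUNCTION_ARG machinery
   builds for multi-register values.  */
emit_group_load (dstpar, srcmem, 16);
#endif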
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

emit_group_store (orig_dst, src, ssize)

  if (GET_CODE (src) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)

      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */

  if (GET_CODE (dst) == PARALLEL)

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);

  else if (GET_CODE (dst) != MEM)

      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)

      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)

	  if (BYTES_BIG_ENDIAN)

	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);

	  bytelen = ssize - bytepos;

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
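
#if 0
/* Usage sketch (illustrative only, not part of this file): the inverse
   of the above; gather the pieces of SRCPAR, a hypothetical PARALLEL
   of (reg, byte-offset) pairs, back into the 16-byte block DSTMEM.  */
emit_group_store (dstmem, srcpar, 16);
#endif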
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

copy_blkmode_from_reg (tgtblk, srcreg, type)

  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
      preserve_temp_slots (tgtblk);

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)

      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)

      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
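
#if 0
/* Usage sketch (illustrative only, not part of this file): unpack a
   BLKmode value returned in hard register SRCREG into a fresh stack
   temporary of type TYPE (TGTBLK passed as NULL_RTX).  */
rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, type);
#endif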
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

use_reg (call_fusage, reg)
     rtx *call_fusage, reg;

  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

use_regs (call_fusage, regno, nregs)

  if (regno + nregs > FIRST_PSEUDO_REGISTER)

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

use_group_regs (call_fusage, regs)

  for (i = 0; i < XVECLEN (regs, 0); i++)

      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
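
#if 0
/* Usage sketch (illustrative only, not part of this file): record that
   a call uses hard register 4 (a hypothetical argument register) so
   flow keeps it live across the CALL_INSN being built.  */
use_reg (&call_fusage, gen_rtx_REG (SImode, 4));
#endif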
2290 can_store_by_pieces (len, constfun, constfundata, align)
2291 unsigned HOST_WIDE_INT len;
2292 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2296 unsigned HOST_WIDE_INT max_size, l;
2297 HOST_WIDE_INT offset = 0;
2298 enum machine_mode mode, tmode;
2299 enum insn_code icode;
2303 if (! MOVE_BY_PIECES_P (len, align))
2306 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2307 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2308 align = MOVE_MAX * BITS_PER_UNIT;
2310 /* We would first store what we can in the largest integer mode, then go to
2311 successively smaller modes. */
2314 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2319 max_size = MOVE_MAX_PIECES + 1;
2320 while (max_size > 1)
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324 if (GET_MODE_SIZE (tmode) < max_size)
2327 if (mode == VOIDmode)
2330 icode = mov_optab->handlers[(int) mode].insn_code;
2331 if (icode != CODE_FOR_nothing
2332 && align >= GET_MODE_ALIGNMENT (mode))
2334 unsigned int size = GET_MODE_SIZE (mode);
2341 cst = (*constfun) (constfundata, offset, mode);
2342 if (!LEGITIMATE_CONSTANT_P (cst))
2352 max_size = GET_MODE_SIZE (mode);
2355 /* The code above should have handled everything. */
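/* As an illustration of the mode walk above: storing, say, 7 bytes with
   MOVE_MAX_PIECES == 4 tries one SImode piece, then one HImode piece,
   then one QImode piece, checking at each step that the constant
   returned by CONSTFUN is legitimate for that mode.  */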
2363 /* Generate several move instructions to store LEN bytes generated by
2364 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2365 pointer which will be passed as argument in every CONSTFUN call.
2366 ALIGN is maximum alignment we can assume. */
2369 store_by_pieces (to, len, constfun, constfundata, align)
2371 unsigned HOST_WIDE_INT len;
2372 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2376 struct store_by_pieces data;
2378 if (! MOVE_BY_PIECES_P (len, align))
2380 to = protect_from_queue (to, 1);
2381 data.constfun = constfun;
2382 data.constfundata = constfundata;
2385 store_by_pieces_1 (&data, align);
2388 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2389 rtx with BLKmode). The caller must pass TO through protect_from_queue
2390 before calling. ALIGN is maximum alignment we can assume. */
2393 clear_by_pieces (to, len, align)
2395 unsigned HOST_WIDE_INT len;
2398 struct store_by_pieces data;
2400 data.constfun = clear_by_pieces_1;
2401 data.constfundata = NULL;
2404 store_by_pieces_1 (&data, align);
2407 /* Callback routine for clear_by_pieces.
2408 Return const0_rtx unconditionally. */
2411 clear_by_pieces_1 (data, offset, mode)
2412 PTR data ATTRIBUTE_UNUSED;
2413 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2414 enum machine_mode mode ATTRIBUTE_UNUSED;
2419 /* Subroutine of clear_by_pieces and store_by_pieces.
2420 Generate several move instructions to store LEN bytes of block TO. (A MEM
2421 rtx with BLKmode). The caller must pass TO through protect_from_queue
2422 before calling. ALIGN is maximum alignment we can assume. */
2425 store_by_pieces_1 (data, align)
2426 struct store_by_pieces *data;
2429 rtx to_addr = XEXP (data->to, 0);
2430 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2431 enum machine_mode mode = VOIDmode, tmode;
2432 enum insn_code icode;
2435 data->to_addr = to_addr;
2437 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2438 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2440 data->explicit_inc_to = 0;
2442 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2444 data->offset = data->len;
2446 /* If storing requires more than two move insns,
2447 copy addresses to registers (to make displacements shorter)
2448 and use post-increment if available. */
2449 if (!data->autinc_to
2450 && move_by_pieces_ninsns (data->len, align) > 2)
2452 /* Determine the main mode we'll be using. */
2453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2455 if (GET_MODE_SIZE (tmode) < max_size)
2458 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2460 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = -1;
2465 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2466 && ! data->autinc_to)
2468 data->to_addr = copy_addr_to_reg (to_addr);
2469 data->autinc_to = 1;
2470 data->explicit_inc_to = 1;
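/* With pre-decrement the address register starts past the end of the
   block (TO_ADDR + LEN) and is decremented before each store; with
   post-increment it starts at TO_ADDR and is bumped after each store.
   Either way the explicit add insns are emitted by store_by_pieces_2
   below.  */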
2473 if (!data->autinc_to && CONSTANT_P (to_addr))
2474 data->to_addr = copy_addr_to_reg (to_addr);
2477 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2478 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2479 align = MOVE_MAX * BITS_PER_UNIT;
2481 /* First store what we can in the largest integer mode, then go to
2482 successively smaller modes. */
2484 while (max_size > 1)
2486 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2487 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2488 if (GET_MODE_SIZE (tmode) < max_size)
2491 if (mode == VOIDmode)
2494 icode = mov_optab->handlers[(int) mode].insn_code;
2495 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2496 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2498 max_size = GET_MODE_SIZE (mode);
2501 /* The code above should have handled everything. */
2506 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2507 with move instructions for mode MODE. GENFUN is the gen_... function
2508 to make a move insn for that mode. DATA has all the other info. */
2511 store_by_pieces_2 (genfun, mode, data)
2512 rtx (*genfun) PARAMS ((rtx, ...));
2513 enum machine_mode mode;
2514 struct store_by_pieces *data;
2516 unsigned int size = GET_MODE_SIZE (mode);
2519 while (data->len >= size)
2522 data->offset -= size;
2524 if (data->autinc_to)
2525 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2528 to1 = adjust_address (data->to, mode, data->offset);
2530 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2531 emit_insn (gen_add2_insn (data->to_addr,
2532 GEN_INT (-(HOST_WIDE_INT) size)));
2534 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2535 emit_insn ((*genfun) (to1, cst));
2537 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2538 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2540 if (! data->reverse)
2541 data->offset += size;
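/* When data->reverse is set, OFFSET is decremented before each store and
   walks down from LEN; otherwise it is incremented after each store and
   walks up from 0, so every MODE-sized chunk of the block is written
   exactly once.  */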
2547 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2548 its length in bytes. */
2551 clear_storage (object, size)
2555 #ifdef TARGET_MEM_FUNCTIONS
2557 tree call_expr, arg_list;
2560 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2561 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2563 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2564 just move a zero. Otherwise, do this a piece at a time. */
2565 if ((GET_MODE (object) != BLKmode
2566 && !VECTOR_MODE_P (GET_MODE (object)))
2567 && GET_CODE (size) == CONST_INT
2568 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2569 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2572 object = protect_from_queue (object, 1);
2573 size = protect_from_queue (size, 0);
2575 if (GET_CODE (size) == CONST_INT
2576 && MOVE_BY_PIECES_P (INTVAL (size), align))
2577 clear_by_pieces (object, INTVAL (size), align);
2580 /* Try the most limited insn first, because there's no point
2581 including more than one in the machine description unless
2582 the more limited one has some advantage. */
2584 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2585 enum machine_mode mode;
2587 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2588 mode = GET_MODE_WIDER_MODE (mode))
2590 enum insn_code code = clrstr_optab[(int) mode];
2591 insn_operand_predicate_fn pred;
2593 if (code != CODE_FOR_nothing
2594 /* We don't need MODE to be narrower than
2595 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2596 the mode mask, as it is returned by the macro, it will
2597 definitely be less than the actual mode mask. */
2598 && ((GET_CODE (size) == CONST_INT
2599 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2600 <= (GET_MODE_MASK (mode) >> 1)))
2601 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2602 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2603 || (*pred) (object, BLKmode))
2604 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2605 || (*pred) (opalign, VOIDmode)))
2608 rtx last = get_last_insn ();
2611 op1 = convert_to_mode (mode, size, 1);
2612 pred = insn_data[(int) code].operand[1].predicate;
2613 if (pred != 0 && ! (*pred) (op1, mode))
2614 op1 = copy_to_mode_reg (mode, op1);
2616 pat = GEN_FCN ((int) code) (object, op1, opalign);
2623 delete_insns_since (last);
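/* The expander for this mode failed; discard any partial RTL and try
   the next wider mode.  If no clrstr pattern succeeds, control falls
   through to the memset/bzero call below.  */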
2627 /* OBJECT or SIZE may have been passed through protect_from_queue.
2629 It is unsafe to save the value generated by protect_from_queue
2630 and reuse it later. Consider what happens if emit_queue is
2631 called before the return value from protect_from_queue is used.
2633 Expansion of the CALL_EXPR below will call emit_queue before
2634 we are finished emitting RTL for argument setup. So if we are
2635 not careful we could get the wrong value for an argument.
2637 To avoid this problem we go ahead and emit code to copy OBJECT
2638 and SIZE into new pseudos. We can then place those new pseudos
2639 into an RTL_EXPR and use them later, even after a call to emit_queue.
2642 Note this is not strictly needed for library calls since they
2643 do not call emit_queue before loading their arguments. However,
2644 we may need to have library calls call emit_queue in the future
2645 since failing to do so could cause problems for targets which
2646 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2647 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2649 #ifdef TARGET_MEM_FUNCTIONS
2650 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2652 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2653 TREE_UNSIGNED (integer_type_node));
2654 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2657 #ifdef TARGET_MEM_FUNCTIONS
2658 /* It is incorrect to use the libcall calling conventions to call
2659 memset in this context.
2661 This could be a user call to memset and the user may wish to
2662 examine the return value from memset.
2664 For targets where libcalls and normal calls have different
2665 conventions for returning pointers, we could end up generating incorrect code.
2668 So instead of using a libcall sequence we build up a suitable
2669 CALL_EXPR and expand the call in the normal fashion. */
2670 if (fn == NULL_TREE)
2674 /* This was copied from except.c; I don't know whether all of it is
2675 necessary in this context or not. */
2676 fn = get_identifier ("memset");
2677 fntype = build_pointer_type (void_type_node);
2678 fntype = build_function_type (fntype, NULL_TREE);
2679 fn = build_decl (FUNCTION_DECL, fn, fntype);
2680 ggc_add_tree_root (&fn, 1);
2681 DECL_EXTERNAL (fn) = 1;
2682 TREE_PUBLIC (fn) = 1;
2683 DECL_ARTIFICIAL (fn) = 1;
2684 TREE_NOTHROW (fn) = 1;
2685 make_decl_rtl (fn, NULL);
2686 assemble_external (fn);
2689 /* We need to make an argument list for the function call.
2691 memset has three arguments: the first is a void * address, the
2692 second an integer with the initialization value, and the last a
2693 size_t byte count for the copy. */
2695 = build_tree_list (NULL_TREE,
2696 make_tree (build_pointer_type (void_type_node),
2698 TREE_CHAIN (arg_list)
2699 = build_tree_list (NULL_TREE,
2700 make_tree (integer_type_node, const0_rtx));
2701 TREE_CHAIN (TREE_CHAIN (arg_list))
2702 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2703 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2705 /* Now we have to build up the CALL_EXPR itself. */
2706 call_expr = build1 (ADDR_EXPR,
2707 build_pointer_type (TREE_TYPE (fn)), fn);
2708 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2709 call_expr, arg_list, NULL_TREE);
2710 TREE_SIDE_EFFECTS (call_expr) = 1;
2712 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2714 emit_library_call (bzero_libfunc, LCT_NORMAL,
2715 VOIDmode, 2, object, Pmode, size,
2716 TYPE_MODE (integer_type_node));
2719 /* If we are initializing a readonly value, show the above call
2720 clobbered it. Otherwise, a load from it may erroneously be
2721 hoisted from a loop. */
2722 if (RTX_UNCHANGING_P (object))
2723 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2730 /* Generate code to copy Y into X.
2731 Both Y and X must have the same mode, except that
2732 Y can be a constant with VOIDmode.
2733 This mode cannot be BLKmode; use emit_block_move for that.
2735 Return the last instruction emitted. */
2738 emit_move_insn (x, y)
2741 enum machine_mode mode = GET_MODE (x);
2742 rtx y_cst = NULL_RTX;
2745 x = protect_from_queue (x, 1);
2746 y = protect_from_queue (y, 0);
2748 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2751 /* Never force constant_p_rtx to memory. */
2752 if (GET_CODE (y) == CONSTANT_P_RTX)
2754 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2757 y = force_const_mem (mode, y);
2760 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2762 if (GET_CODE (x) == MEM
2763 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2764 && ! push_operand (x, GET_MODE (x)))
2766 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2767 x = validize_mem (x);
2769 if (GET_CODE (y) == MEM
2770 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2772 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2773 y = validize_mem (y);
2775 if (mode == BLKmode)
2778 last_insn = emit_move_insn_1 (x, y);
2780 if (y_cst && GET_CODE (x) == REG)
2781 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2786 /* Low level part of emit_move_insn.
2787 Called just like emit_move_insn, but assumes X and Y
2788 are basically valid. */
2791 emit_move_insn_1 (x, y)
2794 enum machine_mode mode = GET_MODE (x);
2795 enum machine_mode submode;
2796 enum mode_class class = GET_MODE_CLASS (mode);
2798 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2801 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2803 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2805 /* Expand complex moves by moving real part and imag part, if possible. */
2806 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2807 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2809 (class == MODE_COMPLEX_INT
2810 ? MODE_INT : MODE_FLOAT),
2812 && (mov_optab->handlers[(int) submode].insn_code
2813 != CODE_FOR_nothing))
2815 /* Don't split destination if it is a stack push. */
2816 int stack = push_operand (x, GET_MODE (x));
2818 #ifdef PUSH_ROUNDING
2819 /* In case we output to the stack, but the size is smaller than what the
2820 machine can push exactly, we need to use move instructions. */
2822 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2823 != GET_MODE_SIZE (submode)))
2826 HOST_WIDE_INT offset1, offset2;
2828 /* Do not use anti_adjust_stack, since we don't want to update
2829 stack_pointer_delta. */
2830 temp = expand_binop (Pmode,
2831 #ifdef STACK_GROWS_DOWNWARD
2839 (GET_MODE_SIZE (GET_MODE (x)))),
2840 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2842 if (temp != stack_pointer_rtx)
2843 emit_move_insn (stack_pointer_rtx, temp);
2845 #ifdef STACK_GROWS_DOWNWARD
2847 offset2 = GET_MODE_SIZE (submode);
2849 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2850 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2851 + GET_MODE_SIZE (submode));
2854 emit_move_insn (change_address (x, submode,
2855 gen_rtx_PLUS (Pmode,
2857 GEN_INT (offset1))),
2858 gen_realpart (submode, y));
2859 emit_move_insn (change_address (x, submode,
2860 gen_rtx_PLUS (Pmode,
2862 GEN_INT (offset2))),
2863 gen_imagpart (submode, y));
2867 /* If this is a stack push, push the highpart first, so it
2868 will be in the argument order.
2870 In that case, change_address is used only to convert
2871 the mode, not to change the address. */
2874 /* Note that the real part always precedes the imag part in memory
2875 regardless of machine's endianness. */
2876 #ifdef STACK_GROWS_DOWNWARD
2877 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2878 (gen_rtx_MEM (submode, XEXP (x, 0)),
2879 gen_imagpart (submode, y)));
2880 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2881 (gen_rtx_MEM (submode, XEXP (x, 0)),
2882 gen_realpart (submode, y)));
2884 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2885 (gen_rtx_MEM (submode, XEXP (x, 0)),
2886 gen_realpart (submode, y)));
2887 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2888 (gen_rtx_MEM (submode, XEXP (x, 0)),
2889 gen_imagpart (submode, y)));
2894 rtx realpart_x, realpart_y;
2895 rtx imagpart_x, imagpart_y;
2897 /* If this is a complex value with each part being smaller than a
2898 word, the usual calling sequence will likely pack the pieces into
2899 a single register. Unfortunately, SUBREG of hard registers only
2900 deals in terms of words, so we have a problem converting input
2901 arguments to the CONCAT of two registers that is used elsewhere
2902 for complex values. If this is before reload, we can copy it into
2903 memory and reload. FIXME, we should see about using extract and
2904 insert on integer registers, but complex short and complex char
2905 variables should be rarely used. */
2906 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2907 && (reload_in_progress | reload_completed) == 0)
2910 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2912 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2914 if (packed_dest_p || packed_src_p)
2916 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2917 ? MODE_FLOAT : MODE_INT);
2919 enum machine_mode reg_mode
2920 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2922 if (reg_mode != BLKmode)
2924 rtx mem = assign_stack_temp (reg_mode,
2925 GET_MODE_SIZE (mode), 0);
2926 rtx cmem = adjust_address (mem, mode, 0);
2929 = N_("function using short complex types cannot be inline");
2933 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2935 emit_move_insn_1 (cmem, y);
2936 return emit_move_insn_1 (sreg, mem);
2940 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2942 emit_move_insn_1 (mem, sreg);
2943 return emit_move_insn_1 (x, cmem);
2949 realpart_x = gen_realpart (submode, x);
2950 realpart_y = gen_realpart (submode, y);
2951 imagpart_x = gen_imagpart (submode, x);
2952 imagpart_y = gen_imagpart (submode, y);
2954 /* Show the output dies here. This is necessary for SUBREGs
2955 of pseudos since we cannot track their lifetimes correctly;
2956 hard regs shouldn't appear here except as return values.
2957 We never want to emit such a clobber after reload. */
2959 && ! (reload_in_progress || reload_completed)
2960 && (GET_CODE (realpart_x) == SUBREG
2961 || GET_CODE (imagpart_x) == SUBREG))
2962 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2964 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2965 (realpart_x, realpart_y));
2966 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2967 (imagpart_x, imagpart_y));
2970 return get_last_insn ();
2973 /* This will handle any multi-word mode that lacks a move_insn pattern.
2974 However, you will get better code if you define such patterns,
2975 even if they must turn into multiple assembler instructions. */
2976 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2983 #ifdef PUSH_ROUNDING
2985 /* If X is a push on the stack, do the push now and replace
2986 X with a reference to the stack pointer. */
2987 if (push_operand (x, GET_MODE (x)))
2992 /* Do not use anti_adjust_stack, since we don't want to update
2993 stack_pointer_delta. */
2994 temp = expand_binop (Pmode,
2995 #ifdef STACK_GROWS_DOWNWARD
3003 (GET_MODE_SIZE (GET_MODE (x)))),
3004 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3006 if (temp != stack_pointer_rtx)
3007 emit_move_insn (stack_pointer_rtx, temp);
3009 code = GET_CODE (XEXP (x, 0));
3011 /* Just hope that small offsets off SP are OK. */
3012 if (code == POST_INC)
3013 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3014 GEN_INT (-((HOST_WIDE_INT)
3015 GET_MODE_SIZE (GET_MODE (x)))));
3016 else if (code == POST_DEC)
3017 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3018 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3020 temp = stack_pointer_rtx;
3022 x = change_address (x, VOIDmode, temp);
3026 /* If we are in reload, see if either operand is a MEM whose address
3027 is scheduled for replacement. */
3028 if (reload_in_progress && GET_CODE (x) == MEM
3029 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3030 x = replace_equiv_address_nv (x, inner);
3031 if (reload_in_progress && GET_CODE (y) == MEM
3032 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3033 y = replace_equiv_address_nv (y, inner);
3039 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3042 rtx xpart = operand_subword (x, i, 1, mode);
3043 rtx ypart = operand_subword (y, i, 1, mode);
3045 /* If we can't get a part of Y, put Y into memory if it is a
3046 constant. Otherwise, force it into a register. If we still
3047 can't get a part of Y, abort. */
3048 if (ypart == 0 && CONSTANT_P (y))
3050 y = force_const_mem (mode, y);
3051 ypart = operand_subword (y, i, 1, mode);
3053 else if (ypart == 0)
3054 ypart = operand_subword_force (y, i, mode);
3056 if (xpart == 0 || ypart == 0)
3059 need_clobber |= (GET_CODE (xpart) == SUBREG);
3061 last_insn = emit_move_insn (xpart, ypart);
3064 seq = gen_sequence ();
3067 /* Show the output dies here. This is necessary for SUBREGs
3068 of pseudos since we cannot track their lifetimes correctly;
3069 hard regs shouldn't appear here except as return values.
3070 We never want to emit such a clobber after reload. */
3072 && ! (reload_in_progress || reload_completed)
3073 && need_clobber != 0)
3074 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3084 /* Pushing data onto the stack. */
3086 /* Push a block of length SIZE (perhaps variable)
3087 and return an rtx to address the beginning of the block.
3088 Note that it is not possible for the value returned to be a QUEUED.
3089 The value may be virtual_outgoing_args_rtx.
3091 EXTRA is the number of bytes of padding to push in addition to SIZE.
3092 BELOW nonzero means this padding comes at low addresses;
3093 otherwise, the padding comes at high addresses. */
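/* For instance, on a downward-growing stack, a constant SIZE of 16 with
   EXTRA of 4 adjusts the stack by 20 bytes and returns
   virtual_outgoing_args_rtx, offset by the 4 padding bytes only when
   BELOW is nonzero so that the padding sits beneath the block.  */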
3096 push_block (size, extra, below)
3102 size = convert_modes (Pmode, ptr_mode, size, 1);
3103 if (CONSTANT_P (size))
3104 anti_adjust_stack (plus_constant (size, extra));
3105 else if (GET_CODE (size) == REG && extra == 0)
3106 anti_adjust_stack (size);
3109 temp = copy_to_mode_reg (Pmode, size);
3111 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3112 temp, 0, OPTAB_LIB_WIDEN);
3113 anti_adjust_stack (temp);
3116 #ifndef STACK_GROWS_DOWNWARD
3122 temp = virtual_outgoing_args_rtx;
3123 if (extra != 0 && below)
3124 temp = plus_constant (temp, extra);
3128 if (GET_CODE (size) == CONST_INT)
3129 temp = plus_constant (virtual_outgoing_args_rtx,
3130 -INTVAL (size) - (below ? 0 : extra));
3131 else if (extra != 0 && !below)
3132 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3133 negate_rtx (Pmode, plus_constant (size, extra)));
3135 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3136 negate_rtx (Pmode, size));
3139 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3142 #ifdef PUSH_ROUNDING
3144 /* Emit single push insn. */
3147 emit_single_push_insn (mode, x, type)
3149 enum machine_mode mode;
3153 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3155 enum insn_code icode;
3156 insn_operand_predicate_fn pred;
3158 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3159 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3160 a MEM representing the push operation to the move expander. */
3161 icode = push_optab->handlers[(int) mode].insn_code;
3162 if (icode != CODE_FOR_nothing)
3164 if (((pred = insn_data[(int) icode].operand[0].predicate)
3165 && !((*pred) (x, mode))))
3166 x = force_reg (mode, x);
3167 emit_insn (GEN_FCN (icode) (x));
3170 if (GET_MODE_SIZE (mode) == rounded_size)
3171 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3174 #ifdef STACK_GROWS_DOWNWARD
3175 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3176 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3178 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3179 GEN_INT (rounded_size));
3181 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
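/* E.g. pushing a QImode value when PUSH_ROUNDING rounds 1 byte up to 4
   on a downward-growing stack yields the address
   (pre_modify (reg sp) (plus (reg sp) (const_int -4))),
   so the stack pointer moves by the rounded size while the store itself
   is QImode.  */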
3184 dest = gen_rtx_MEM (mode, dest_addr);
3188 set_mem_attributes (dest, type, 1);
3190 if (flag_optimize_sibling_calls)
3191 /* Function incoming arguments may overlap with sibling call
3192 outgoing arguments and we cannot allow reordering of reads
3193 from function arguments with stores to outgoing arguments
3194 of sibling calls. */
3195 set_mem_alias_set (dest, 0);
3197 emit_move_insn (dest, x);
3201 /* Generate code to push X onto the stack, assuming it has mode MODE and
3203 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3205 SIZE is an rtx for the size of data to be copied (in bytes),
3206 needed only if X is BLKmode.
3208 ALIGN (in bits) is maximum alignment we can assume.
3210 If PARTIAL and REG are both nonzero, then copy that many of the first
3211 words of X into registers starting with REG, and push the rest of X.
3212 The amount of space pushed is decreased by PARTIAL words,
3213 rounded *down* to a multiple of PARM_BOUNDARY.
3214 REG must be a hard register in this case.
3215 If REG is zero but PARTIAL is not, take all other actions for an
3216 argument partially in registers, but do not actually load any registers.
3219 EXTRA is the amount in bytes of extra space to leave next to this arg.
3220 This is ignored if an argument block has already been allocated.
3222 On a machine that lacks real push insns, ARGS_ADDR is the address of
3223 the bottom of the argument block for this call. We use indexing off there
3224 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3225 argument block has not been preallocated.
3227 ARGS_SO_FAR is the size of args previously pushed for this call.
3229 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3230 for arguments passed in registers. If nonzero, it will be the number
3231 of bytes required. */
3234 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3235 args_addr, args_so_far, reg_parm_stack_space,
3238 enum machine_mode mode;
3247 int reg_parm_stack_space;
3251 enum direction stack_direction
3252 #ifdef STACK_GROWS_DOWNWARD
3258 /* Decide where to pad the argument: `downward' for below,
3259 `upward' for above, or `none' for don't pad it.
3260 Default is below for small data on big-endian machines; else above. */
3261 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3263 /* Invert direction if stack is post-decrement.
3265 if (STACK_PUSH_CODE == POST_DEC)
3266 if (where_pad != none)
3267 where_pad = (where_pad == downward ? upward : downward);
3269 xinner = x = protect_from_queue (x, 0);
3271 if (mode == BLKmode)
3273 /* Copy a block into the stack, entirely or partially. */
3276 int used = partial * UNITS_PER_WORD;
3277 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3285 /* USED is now the # of bytes we need not copy to the stack
3286 because registers will take care of them. */
3289 xinner = adjust_address (xinner, BLKmode, used);
3291 /* If the partial register-part of the arg counts in its stack size,
3292 skip the part of stack space corresponding to the registers.
3293 Otherwise, start copying to the beginning of the stack space,
3294 by setting SKIP to 0. */
3295 skip = (reg_parm_stack_space == 0) ? 0 : used;
3297 #ifdef PUSH_ROUNDING
3298 /* Do it with several push insns if that doesn't take lots of insns
3299 and if there is no difficulty with push insns that skip bytes
3300 on the stack for alignment purposes. */
3303 && GET_CODE (size) == CONST_INT
3305 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3306 /* Here we avoid the case of a structure whose weak alignment
3307 forces many pushes of a small amount of data,
3308 and such small pushes do rounding that causes trouble. */
3309 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3310 || align >= BIGGEST_ALIGNMENT
3311 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3312 == (align / BITS_PER_UNIT)))
3313 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3315 /* Push padding now if padding above and stack grows down,
3316 or if padding below and stack grows up.
3317 But if space already allocated, this has already been done. */
3318 if (extra && args_addr == 0
3319 && where_pad != none && where_pad != stack_direction)
3320 anti_adjust_stack (GEN_INT (extra));
3322 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
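/* A null TO here tells move_by_pieces to push each piece onto the stack
   instead of storing through a destination address.  */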
3325 #endif /* PUSH_ROUNDING */
3329 /* Otherwise make space on the stack and copy the data
3330 to the address of that space. */
3332 /* Deduct words put into registers from the size we must copy. */
3335 if (GET_CODE (size) == CONST_INT)
3336 size = GEN_INT (INTVAL (size) - used);
3338 size = expand_binop (GET_MODE (size), sub_optab, size,
3339 GEN_INT (used), NULL_RTX, 0,
3343 /* Get the address of the stack space.
3344 In this case, we do not deal with EXTRA separately.
3345 A single stack adjust will do. */
3348 temp = push_block (size, extra, where_pad == downward);
3351 else if (GET_CODE (args_so_far) == CONST_INT)
3352 temp = memory_address (BLKmode,
3353 plus_constant (args_addr,
3354 skip + INTVAL (args_so_far)));
3356 temp = memory_address (BLKmode,
3357 plus_constant (gen_rtx_PLUS (Pmode,
3361 target = gen_rtx_MEM (BLKmode, temp);
3365 set_mem_attributes (target, type, 1);
3366 /* Function incoming arguments may overlap with sibling call
3367 outgoing arguments and we cannot allow reordering of reads
3368 from function arguments with stores to outgoing arguments
3369 of sibling calls. */
3370 set_mem_alias_set (target, 0);
3373 set_mem_align (target, align);
3375 /* TEMP is the address of the block. Copy the data there. */
3376 if (GET_CODE (size) == CONST_INT
3377 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3379 move_by_pieces (target, xinner, INTVAL (size), align);
3384 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3385 enum machine_mode mode;
3387 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3389 mode = GET_MODE_WIDER_MODE (mode))
3391 enum insn_code code = movstr_optab[(int) mode];
3392 insn_operand_predicate_fn pred;
3394 if (code != CODE_FOR_nothing
3395 && ((GET_CODE (size) == CONST_INT
3396 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3397 <= (GET_MODE_MASK (mode) >> 1)))
3398 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3399 && (!(pred = insn_data[(int) code].operand[0].predicate)
3400 || ((*pred) (target, BLKmode)))
3401 && (!(pred = insn_data[(int) code].operand[1].predicate)
3402 || ((*pred) (xinner, BLKmode)))
3403 && (!(pred = insn_data[(int) code].operand[3].predicate)
3404 || ((*pred) (opalign, VOIDmode))))
3406 rtx op2 = convert_to_mode (mode, size, 1);
3407 rtx last = get_last_insn ();
3410 pred = insn_data[(int) code].operand[2].predicate;
3411 if (pred != 0 && ! (*pred) (op2, mode))
3412 op2 = copy_to_mode_reg (mode, op2);
3414 pat = GEN_FCN ((int) code) (target, xinner,
3422 delete_insns_since (last);
3427 if (!ACCUMULATE_OUTGOING_ARGS)
3429 /* If the source is referenced relative to the stack pointer,
3430 copy it to another register to stabilize it. We do not need
3431 to do this if we know that we won't be changing sp. */
3433 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3434 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3435 temp = copy_to_reg (temp);
3438 /* Make inhibit_defer_pop nonzero around the library call
3439 to force it to pop the bcopy-arguments right away. */
3441 #ifdef TARGET_MEM_FUNCTIONS
3442 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3443 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3444 convert_to_mode (TYPE_MODE (sizetype),
3445 size, TREE_UNSIGNED (sizetype)),
3446 TYPE_MODE (sizetype));
3448 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3449 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3450 convert_to_mode (TYPE_MODE (integer_type_node),
3452 TREE_UNSIGNED (integer_type_node)),
3453 TYPE_MODE (integer_type_node));
3458 else if (partial > 0)
3460 /* Scalar partly in registers. */
3462 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3465 /* # words of start of argument
3466 that we must make space for but need not store. */
3467 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3468 int args_offset = INTVAL (args_so_far);
3471 /* Push padding now if padding above and stack grows down,
3472 or if padding below and stack grows up.
3473 But if space already allocated, this has already been done. */
3474 if (extra && args_addr == 0
3475 && where_pad != none && where_pad != stack_direction)
3476 anti_adjust_stack (GEN_INT (extra));
3478 /* If we make space by pushing it, we might as well push
3479 the real data. Otherwise, we can leave OFFSET nonzero
3480 and leave the space uninitialized. */
3484 /* Now NOT_STACK gets the number of words that we don't need to
3485 allocate on the stack. */
3486 not_stack = partial - offset;
3488 /* If the partial register-part of the arg counts in its stack size,
3489 skip the part of stack space corresponding to the registers.
3490 Otherwise, start copying to the beginning of the stack space,
3491 by setting SKIP to 0. */
3492 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3494 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3495 x = validize_mem (force_const_mem (mode, x));
3497 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3498 SUBREGs of such registers are not allowed. */
3499 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3500 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3501 x = copy_to_reg (x);
3503 /* Loop over all the words allocated on the stack for this arg. */
3504 /* We can do it by words, because any scalar bigger than a word
3505 has a size a multiple of a word. */
3506 #ifndef PUSH_ARGS_REVERSED
3507 for (i = not_stack; i < size; i++)
3509 for (i = size - 1; i >= not_stack; i--)
3511 if (i >= not_stack + offset)
3512 emit_push_insn (operand_subword_force (x, i, mode),
3513 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3515 GEN_INT (args_offset + ((i - not_stack + skip)
3517 reg_parm_stack_space, alignment_pad);
3522 rtx target = NULL_RTX;
3525 /* Push padding now if padding above and stack grows down,
3526 or if padding below and stack grows up.
3527 But if space already allocated, this has already been done. */
3528 if (extra && args_addr == 0
3529 && where_pad != none && where_pad != stack_direction)
3530 anti_adjust_stack (GEN_INT (extra));
3532 #ifdef PUSH_ROUNDING
3533 if (args_addr == 0 && PUSH_ARGS)
3534 emit_single_push_insn (mode, x, type);
3538 if (GET_CODE (args_so_far) == CONST_INT)
3540 = memory_address (mode,
3541 plus_constant (args_addr,
3542 INTVAL (args_so_far)));
3544 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3547 dest = gen_rtx_MEM (mode, addr);
3550 set_mem_attributes (dest, type, 1);
3551 /* Function incoming arguments may overlap with sibling call
3552 outgoing arguments and we cannot allow reordering of reads
3553 from function arguments with stores to outgoing arguments
3554 of sibling calls. */
3555 set_mem_alias_set (dest, 0);
3558 emit_move_insn (dest, x);
3564 /* If part should go in registers, copy that part
3565 into the appropriate registers. Do this now, at the end,
3566 since mem-to-mem copies above may do function calls. */
3567 if (partial > 0 && reg != 0)
3569 /* Handle calls that pass values in multiple non-contiguous locations.
3570 The Irix 6 ABI has examples of this. */
3571 if (GET_CODE (reg) == PARALLEL)
3572 emit_group_load (reg, x, -1); /* ??? size? */
3574 move_block_to_reg (REGNO (reg), x, partial, mode);
3577 if (extra && args_addr == 0 && where_pad == stack_direction)
3578 anti_adjust_stack (GEN_INT (extra));
3580 if (alignment_pad && args_addr == 0)
3581 anti_adjust_stack (alignment_pad);
3584 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3592 /* Only registers can be subtargets. */
3593 || GET_CODE (x) != REG
3594 /* If the register is readonly, it can't be set more than once. */
3595 || RTX_UNCHANGING_P (x)
3596 /* Don't use hard regs to avoid extending their life. */
3597 || REGNO (x) < FIRST_PSEUDO_REGISTER
3598 /* Avoid subtargets inside loops,
3599 since they hide some invariant expressions. */
3600 || preserve_subexpressions_p ())
3604 /* Expand an assignment that stores the value of FROM into TO.
3605 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3606 (This may contain a QUEUED rtx;
3607 if the value is constant, this rtx is a constant.)
3608 Otherwise, the returned value is NULL_RTX.
3610 SUGGEST_REG is no longer actually used.
3611 It used to mean, copy the value through a register
3612 and return that register, if that is possible.
3613 We now use WANT_VALUE to decide whether to do this. */
3616 expand_assignment (to, from, want_value, suggest_reg)
3619 int suggest_reg ATTRIBUTE_UNUSED;
3624 /* Don't crash if the lhs of the assignment was erroneous. */
3626 if (TREE_CODE (to) == ERROR_MARK)
3628 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3629 return want_value ? result : NULL_RTX;
3632 /* Assignment of a structure component needs special treatment
3633 if the structure component's rtx is not simply a MEM.
3634 Assignment of an array element at a constant index, and assignment of
3635 an array element in an unaligned packed structure field, has the same problem. */
3638 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3639 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3641 enum machine_mode mode1;
3642 HOST_WIDE_INT bitsize, bitpos;
3650 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3651 &unsignedp, &volatilep);
3653 /* If we are going to use store_bit_field and extract_bit_field,
3654 make sure to_rtx will be safe for multiple use. */
3656 if (mode1 == VOIDmode && want_value)
3657 tem = stabilize_reference (tem);
3659 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3663 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3665 if (GET_CODE (to_rtx) != MEM)
3668 if (GET_MODE (offset_rtx) != ptr_mode)
3669 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3671 #ifdef POINTERS_EXTEND_UNSIGNED
3672 if (GET_MODE (offset_rtx) != Pmode)
3673 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3676 /* A constant address in TO_RTX can have VOIDmode; we must not try
3677 to call force_reg for that case. Avoid that case. */
3678 if (GET_CODE (to_rtx) == MEM
3679 && GET_MODE (to_rtx) == BLKmode
3680 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3682 && (bitpos % bitsize) == 0
3683 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3684 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3687 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3689 if (GET_CODE (XEXP (temp, 0)) == REG)
3692 to_rtx = (replace_equiv_address
3693 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3698 to_rtx = offset_address (to_rtx, offset_rtx,
3699 highest_pow2_factor (offset));
3702 if (GET_CODE (to_rtx) == MEM)
3704 tree old_expr = MEM_EXPR (to_rtx);
3706 /* If the field is at offset zero, we could have been given the
3707 DECL_RTX of the parent struct. Don't munge it. */
3708 to_rtx = shallow_copy_rtx (to_rtx);
3710 set_mem_attributes (to_rtx, to, 0);
3712 /* If we changed MEM_EXPR, that means we're now referencing
3713 the COMPONENT_REF, which means that MEM_OFFSET must be
3714 relative to that field. But we've not yet reflected BITPOS
3715 in TO_RTX. This will be done in store_field. Adjust for
3716 that by biasing MEM_OFFSET by -bitpos. */
3717 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3718 && (bitpos / BITS_PER_UNIT) != 0)
3719 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3720 - (bitpos / BITS_PER_UNIT)));
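/* E.g. for a field at byte offset 4, MEM_OFFSET is biased by -4 here;
   store_field later adjusts TO_RTX forward by BITPOS, bringing the
   recorded offset back in line with the actual address.  */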
3723 /* Deal with volatile and readonly fields. The former is only done
3724 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3725 if (volatilep && GET_CODE (to_rtx) == MEM)
3727 if (to_rtx == orig_to_rtx)
3728 to_rtx = copy_rtx (to_rtx);
3729 MEM_VOLATILE_P (to_rtx) = 1;
3732 if (TREE_CODE (to) == COMPONENT_REF
3733 && TREE_READONLY (TREE_OPERAND (to, 1)))
3735 if (to_rtx == orig_to_rtx)
3736 to_rtx = copy_rtx (to_rtx);
3737 RTX_UNCHANGING_P (to_rtx) = 1;
3740 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3742 if (to_rtx == orig_to_rtx)
3743 to_rtx = copy_rtx (to_rtx);
3744 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3747 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3749 /* Spurious cast for HPUX compiler. */
3750 ? ((enum machine_mode)
3751 TYPE_MODE (TREE_TYPE (to)))
3753 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3755 preserve_temp_slots (result);
3759 /* If the value is meaningful, convert RESULT to the proper mode.
3760 Otherwise, return nothing. */
3761 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3762 TYPE_MODE (TREE_TYPE (from)),
3764 TREE_UNSIGNED (TREE_TYPE (to)))
3768 /* If the rhs is a function call and its value is not an aggregate,
3769 call the function before we start to compute the lhs.
3770 This is needed for correct code for cases such as
3771 val = setjmp (buf) on machines where reference to val
3772 requires loading up part of an address in a separate insn.
3774 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3775 since it might be a promoted variable where the zero- or sign- extension
3776 needs to be done. Handling this in the normal way is safe because no
3777 computation is done before the call. */
3778 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3779 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3780 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3781 && GET_CODE (DECL_RTL (to)) == REG))
3786 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3788 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3790 /* Handle calls that return values in multiple non-contiguous locations.
3791 The Irix 6 ABI has examples of this. */
3792 if (GET_CODE (to_rtx) == PARALLEL)
3793 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3794 else if (GET_MODE (to_rtx) == BLKmode)
3795 emit_block_move (to_rtx, value, expr_size (from));
3798 #ifdef POINTERS_EXTEND_UNSIGNED
3799 if (POINTER_TYPE_P (TREE_TYPE (to))
3800 && GET_MODE (to_rtx) != GET_MODE (value))
3801 value = convert_memory_address (GET_MODE (to_rtx), value);
3803 emit_move_insn (to_rtx, value);
3805 preserve_temp_slots (to_rtx);
3808 return want_value ? to_rtx : NULL_RTX;
3811 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3812 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3815 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3817 /* Don't move directly into a return register. */
3818 if (TREE_CODE (to) == RESULT_DECL
3819 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3824 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3826 if (GET_CODE (to_rtx) == PARALLEL)
3827 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3829 emit_move_insn (to_rtx, temp);
3831 preserve_temp_slots (to_rtx);
3834 return want_value ? to_rtx : NULL_RTX;
3837 /* In case we are returning the contents of an object which overlaps
3838 the place the value is being stored, use a safe function when copying
3839 a value through a pointer into a structure value return block. */
3840 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3841 && current_function_returns_struct
3842 && !current_function_returns_pcc_struct)
3847 size = expr_size (from);
3848 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3850 #ifdef TARGET_MEM_FUNCTIONS
3851 emit_library_call (memmove_libfunc, LCT_NORMAL,
3852 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3853 XEXP (from_rtx, 0), Pmode,
3854 convert_to_mode (TYPE_MODE (sizetype),
3855 size, TREE_UNSIGNED (sizetype)),
3856 TYPE_MODE (sizetype));
3858 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3859 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3860 XEXP (to_rtx, 0), Pmode,
3861 convert_to_mode (TYPE_MODE (integer_type_node),
3862 size, TREE_UNSIGNED (integer_type_node)),
3863 TYPE_MODE (integer_type_node));
3866 preserve_temp_slots (to_rtx);
3869 return want_value ? to_rtx : NULL_RTX;
3872 /* Compute FROM and store the value in the rtx we got. */
3875 result = store_expr (from, to_rtx, want_value);
3876 preserve_temp_slots (result);
3879 return want_value ? result : NULL_RTX;
3882 /* Generate code for computing expression EXP,
3883 and storing the value into TARGET.
3884 TARGET may contain a QUEUED rtx.
3886 If WANT_VALUE is nonzero, return a copy of the value
3887 not in TARGET, so that we can be sure to use the proper
3888 value in a containing expression even if TARGET has something
3889 else stored in it. If possible, we copy the value through a pseudo
3890 and return that pseudo. Or, if the value is constant, we try to
3891 return the constant. In some cases, we return a pseudo
3892 copied *from* TARGET.
3894 If the mode is BLKmode then we may return TARGET itself.
3895 It turns out that in BLKmode it doesn't cause a problem,
3896 because C has no operators that could combine two different
3897 assignments into the same BLKmode object with different values
3898 with no sequence point. Will other languages need this to be more thorough?
3901 If WANT_VALUE is 0, we return NULL, to make sure
3902 to catch quickly any cases where the caller uses the value
3903 and fails to set WANT_VALUE. */
3906 store_expr (exp, target, want_value)
3912 int dont_return_target = 0;
3913 int dont_store_target = 0;
3915 if (TREE_CODE (exp) == COMPOUND_EXPR)
3917 /* Perform first part of compound expression, then assign from second part. */
3919 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3921 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3923 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3925 /* For conditional expression, get safe form of the target. Then
3926 test the condition, doing the appropriate assignment on either
3927 side. This avoids the creation of unnecessary temporaries.
3928 For non-BLKmode, it is more efficient not to do this. */
3930 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3933 target = protect_from_queue (target, 1);
3935 do_pending_stack_adjust ();
3937 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3938 start_cleanup_deferral ();
3939 store_expr (TREE_OPERAND (exp, 1), target, 0);
3940 end_cleanup_deferral ();
3942 emit_jump_insn (gen_jump (lab2));
3945 start_cleanup_deferral ();
3946 store_expr (TREE_OPERAND (exp, 2), target, 0);
3947 end_cleanup_deferral ();
3952 return want_value ? target : NULL_RTX;
3954 else if (queued_subexp_p (target))
3955 /* If target contains a postincrement, let's not risk
3956 using it as the place to generate the rhs. */
3958 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3960 /* Expand EXP into a new pseudo. */
3961 temp = gen_reg_rtx (GET_MODE (target));
3962 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3965 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3967 /* If target is volatile, ANSI requires accessing the value
3968 *from* the target, if it is accessed. So make that happen.
3969 In no case return the target itself. */
3970 if (! MEM_VOLATILE_P (target) && want_value)
3971 dont_return_target = 1;
3973 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3974 && GET_MODE (target) != BLKmode)
3975 /* If target is in memory and caller wants value in a register instead,
3976 arrange that. Pass TARGET as target for expand_expr so that,
3977 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3978 We know expand_expr will not use the target in that case.
3979 Don't do this if TARGET is volatile because we are supposed
3980 to write it and then read it. */
3982 temp = expand_expr (exp, target, GET_MODE (target), 0);
3983 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3985 /* If TEMP is already in the desired TARGET, only copy it from
3986 memory and don't store it there again. */
3988 || (rtx_equal_p (temp, target)
3989 && ! side_effects_p (temp) && ! side_effects_p (target)))
3990 dont_store_target = 1;
3991 temp = copy_to_reg (temp);
3993 dont_return_target = 1;
3995 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3996 /* If this is a scalar in a register that is stored in a wider mode
3997 than the declared mode, compute the result into its declared mode
3998 and then convert to the wider mode. Our value is the computed expression. */
4001 /* If we don't want a value, we can do the conversion inside EXP,
4002 which will often result in some optimizations. Do the conversion
4003 in two steps: first change the signedness, if needed, then
4004 the extend. But don't do this if the type of EXP is a subtype
4005 of something else since then the conversion might involve
4006 more than just converting modes. */
4007 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4008 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4010 if (TREE_UNSIGNED (TREE_TYPE (exp))
4011 != SUBREG_PROMOTED_UNSIGNED_P (target))
4014 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4018 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4019 SUBREG_PROMOTED_UNSIGNED_P (target)),
4023 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4025 /* If TEMP is a volatile MEM and we want a result value, make
4026 the access now so it gets done only once. Likewise if
4027 it contains TARGET. */
4028 if (GET_CODE (temp) == MEM && want_value
4029 && (MEM_VOLATILE_P (temp)
4030 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4031 temp = copy_to_reg (temp);
4033 /* If TEMP is a VOIDmode constant, use convert_modes to make
4034 sure that we properly convert it. */
4035 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4037 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4038 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4039 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4040 GET_MODE (target), temp,
4041 SUBREG_PROMOTED_UNSIGNED_P (target));
4044 convert_move (SUBREG_REG (target), temp,
4045 SUBREG_PROMOTED_UNSIGNED_P (target));
4047 /* If we promoted a constant, change the mode back down to match
4048 target. Otherwise, the caller might get confused by a result whose
4049 mode is larger than expected. */
4051 if (want_value && GET_MODE (temp) != GET_MODE (target))
4053 if (GET_MODE (temp) != VOIDmode)
4055 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4056 SUBREG_PROMOTED_VAR_P (temp) = 1;
4057 SUBREG_PROMOTED_UNSIGNED_P (temp)
4058 = SUBREG_PROMOTED_UNSIGNED_P (target);
4061 temp = convert_modes (GET_MODE (target),
4062 GET_MODE (SUBREG_REG (target)),
4063 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4066 return want_value ? temp : NULL_RTX;
4070 temp = expand_expr (exp, target, GET_MODE (target), 0);
4071 /* Return TARGET if it's a specified hardware register.
4072 If TARGET is a volatile mem ref, either return TARGET
4073 or return a reg copied *from* TARGET; ANSI requires this.
4075 Otherwise, if TEMP is not TARGET, return TEMP
4076 if it is constant (for efficiency),
4077 or if we really want the correct value. */
4078 if (!(target && GET_CODE (target) == REG
4079 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4080 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4081 && ! rtx_equal_p (temp, target)
4082 && (CONSTANT_P (temp) || want_value))
4083 dont_return_target = 1;
4086 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4087 the same as that of TARGET, adjust the constant. This is needed, for
4088 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4090 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4091 && TREE_CODE (exp) != ERROR_MARK
4092 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4093 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4094 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4096 /* If value was not generated in the target, store it there.
4097 Convert the value to TARGET's type first if necessary.
4098 If TEMP and TARGET compare equal according to rtx_equal_p, but
4099 one or both of them are volatile memory refs, we have to distinguish two cases:
4101 - expand_expr has used TARGET. In this case, we must not generate
4102 another copy. This can be detected by TARGET being equal according to ==.
4104 - expand_expr has not used TARGET - that means that the source just
4105 happens to have the same RTX form. Since temp will have been created
4106 by expand_expr, it will compare unequal according to == .
4107 We must generate a copy in this case, to reach the correct number
4108 of volatile memory references. */
4110 if ((! rtx_equal_p (temp, target)
4111 || (temp != target && (side_effects_p (temp)
4112 || side_effects_p (target))))
4113 && TREE_CODE (exp) != ERROR_MARK
4114 && ! dont_store_target)
4116 target = protect_from_queue (target, 1);
4117 if (GET_MODE (temp) != GET_MODE (target)
4118 && GET_MODE (temp) != VOIDmode)
4120 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4121 if (dont_return_target)
4123 /* In this case, we will return TEMP,
4124 so make sure it has the proper mode.
4125 But don't forget to store the value into TARGET. */
4126 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4127 emit_move_insn (target, temp);
4130 convert_move (target, temp, unsignedp);
4133 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4135 /* Handle copying a string constant into an array. The string
4136 constant may be shorter than the array. So copy just the string's
4137 actual length, and clear the rest. First get the size of the data
4138 type of the string, which is actually the size of the target. */
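/* For example, initializing char buf[10] from the string "abc" copies
   TREE_STRING_LENGTH == 4 bytes (including the terminating null) and
   then clears the remaining 6 bytes below.  */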
4139 rtx size = expr_size (exp);
4141 if (GET_CODE (size) == CONST_INT
4142 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4143 emit_block_move (target, temp, size);
4146 /* Compute the size of the data to copy from the string. */
4148 = size_binop (MIN_EXPR,
4149 make_tree (sizetype, size),
4150 size_int (TREE_STRING_LENGTH (exp)));
4151 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4155 /* Copy that much. */
4156 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4157 emit_block_move (target, temp, copy_size_rtx);
4159 /* Figure out how much is left in TARGET that we have to clear.
4160 Do all calculations in ptr_mode. */
4161 if (GET_CODE (copy_size_rtx) == CONST_INT)
4163 size = plus_constant (size, -INTVAL (copy_size_rtx));
4164 target = adjust_address (target, BLKmode,
4165 INTVAL (copy_size_rtx));
4169 size = expand_binop (ptr_mode, sub_optab, size,
4170 copy_size_rtx, NULL_RTX, 0,
4173 #ifdef POINTERS_EXTEND_UNSIGNED
4174 if (GET_MODE (copy_size_rtx) != Pmode)
4175 copy_size_rtx = convert_memory_address (Pmode,
4179 target = offset_address (target, copy_size_rtx,
4180 highest_pow2_factor (copy_size));
4181 label = gen_label_rtx ();
4182 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4183 GET_MODE (size), 0, label);
4186 if (size != const0_rtx)
4187 clear_storage (target, size);
4193 /* Handle calls that return values in multiple non-contiguous locations.
4194 The Irix 6 ABI has examples of this. */
4195 else if (GET_CODE (target) == PARALLEL)
4196 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4197 else if (GET_MODE (temp) == BLKmode)
4198 emit_block_move (target, temp, expr_size (exp));
4200 emit_move_insn (target, temp);
4203 /* If we don't want a value, return NULL_RTX. */
4207 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4208 ??? The latter test doesn't seem to make sense. */
4209 else if (dont_return_target && GET_CODE (temp) != MEM)
4212 /* Return TARGET itself if it is a hard register. */
4213 else if (want_value && GET_MODE (target) != BLKmode
4214 && ! (GET_CODE (target) == REG
4215 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4216 return copy_to_reg (target);
4222 /* Return 1 if EXP just contains zeros. */
4230 switch (TREE_CODE (exp))
4234 case NON_LVALUE_EXPR:
4235 case VIEW_CONVERT_EXPR:
4236 return is_zeros_p (TREE_OPERAND (exp, 0));
4239 return integer_zerop (exp);
4243 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4246 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4249 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4250 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4251 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4252 if (! is_zeros_p (TREE_VALUE (elt)))
4262 /* Return 1 if EXP contains mostly (3/4) zeros. */
4265 mostly_zeros_p (exp)
4268 if (TREE_CODE (exp) == CONSTRUCTOR)
4270 int elts = 0, zeros = 0;
4271 tree elt = CONSTRUCTOR_ELTS (exp);
4272 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4274 /* If there are no ranges of true bits, it is all zero. */
4275 return elt == NULL_TREE;
4277 for (; elt; elt = TREE_CHAIN (elt))
4279 /* We do not handle the case where the index is a RANGE_EXPR,
4280 so the statistic will be somewhat inaccurate.
4281 We do make a more accurate count in store_constructor itself,
4282 but since this function is used only for nested array elements,
4283 this should be close enough. */
4284 if (mostly_zeros_p (TREE_VALUE (elt)))
4289 return 4 * zeros >= 3 * elts;
4292 return is_zeros_p (exp);
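/* Worked example (sketch): for a constructor with 16 elements of which
   13 are zero, the test above compares 4 * 13 == 52 against
   3 * 16 == 48 and answers yes; it is the fraction test
   zeros / elts >= 3/4 rewritten in integer arithmetic to avoid
   division and rounding.  */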
4295 /* Helper function for store_constructor.
4296 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4297 TYPE is the type of the CONSTRUCTOR, not the element type.
4298 CLEARED is as for store_constructor.
4299 ALIAS_SET is the alias set to use for any stores.
4301 This provides a recursive shortcut back to store_constructor when it isn't
4302 necessary to go through store_field. This is so that we can pass through
4303 the cleared field to let store_constructor know that we may not have to
4304 clear a substructure if the outer structure has already been cleared. */
4307 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4310 unsigned HOST_WIDE_INT bitsize;
4311 HOST_WIDE_INT bitpos;
4312 enum machine_mode mode;
4317 if (TREE_CODE (exp) == CONSTRUCTOR
4318 && bitpos % BITS_PER_UNIT == 0
4319 /* If we have a non-zero bitpos for a register target, then we just
4320 let store_field do the bitfield handling. This is unlikely to
4321 generate unnecessary clear instructions anyway. */
4322 && (bitpos == 0 || GET_CODE (target) == MEM))
4324 if (GET_CODE (target) == MEM)
4326 = adjust_address (target,
4327 GET_MODE (target) == BLKmode
4329 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4330 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4333 /* Update the alias set, if required. */
4334 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4335 && MEM_ALIAS_SET (target) != 0)
4337 target = copy_rtx (target);
4338 set_mem_alias_set (target, alias_set);
4341 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4344 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4348 /* Store the value of constructor EXP into the rtx TARGET.
4349 TARGET is either a REG or a MEM; we know it cannot conflict, since
4350 safe_from_p has been called.
4351 CLEARED is true if TARGET is known to have been zero'd.
4352 SIZE is the number of bytes of TARGET we are allowed to modify: this
4353 may not be the same as the size of EXP if we are assigning to a field
4354 which has been packed to exclude padding bits. */
4357 store_constructor (exp, target, cleared, size)
4363 tree type = TREE_TYPE (exp);
4364 #ifdef WORD_REGISTER_OPERATIONS
4365 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4368 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4369 || TREE_CODE (type) == QUAL_UNION_TYPE)
4373 /* We either clear the aggregate or indicate the value is dead. */
4374 if ((TREE_CODE (type) == UNION_TYPE
4375 || TREE_CODE (type) == QUAL_UNION_TYPE)
4377 && ! CONSTRUCTOR_ELTS (exp))
4378 /* If the constructor is empty, clear the union. */
4380 clear_storage (target, expr_size (exp));
4384 /* If we are building a static constructor into a register,
4385 set the initial value as zero so we can fold the value into
4386 a constant. But if more than one register is involved,
4387 this probably loses. */
4388 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4389 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4391 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4395 /* If the constructor has fewer fields than the structure
4396 or if we are initializing the structure to mostly zeros,
4397 clear the whole structure first. Don't do this if TARGET is a
4398 register whose mode size isn't equal to SIZE since clear_storage
4399 can't handle this case. */
4400 else if (! cleared && size > 0
4401 && ((list_length (CONSTRUCTOR_ELTS (exp))
4402 != fields_length (type))
4403 || mostly_zeros_p (exp))
4404 && (GET_CODE (target) != REG
4405 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4408 clear_storage (target, GEN_INT (size));
4413 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4415 /* Store each element of the constructor into
4416 the corresponding field of TARGET. */
4418 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4420 tree field = TREE_PURPOSE (elt);
4421 tree value = TREE_VALUE (elt);
4422 enum machine_mode mode;
4423 HOST_WIDE_INT bitsize;
4424 HOST_WIDE_INT bitpos = 0;
4427 rtx to_rtx = target;
4429 /* Just ignore missing fields.
4430 We cleared the whole structure, above,
4431 if any fields are missing. */
4435 if (cleared && is_zeros_p (value))
4438 if (host_integerp (DECL_SIZE (field), 1))
4439 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4443 unsignedp = TREE_UNSIGNED (field);
4444 mode = DECL_MODE (field);
4445 if (DECL_BIT_FIELD (field))
4448 offset = DECL_FIELD_OFFSET (field);
4449 if (host_integerp (offset, 0)
4450 && host_integerp (bit_position (field), 0))
4452 bitpos = int_bit_position (field);
4456 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4462 if (contains_placeholder_p (offset))
4463 offset = build (WITH_RECORD_EXPR, sizetype,
4464 offset, make_tree (TREE_TYPE (exp), target));
4466 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4467 if (GET_CODE (to_rtx) != MEM)
4470 if (GET_MODE (offset_rtx) != ptr_mode)
4471 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4473 #ifdef POINTERS_EXTEND_UNSIGNED
4474 if (GET_MODE (offset_rtx) != Pmode)
4475 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4478 to_rtx = offset_address (to_rtx, offset_rtx,
4479 highest_pow2_factor (offset));
4482 if (TREE_READONLY (field))
4484 if (GET_CODE (to_rtx) == MEM)
4485 to_rtx = copy_rtx (to_rtx);
4487 RTX_UNCHANGING_P (to_rtx) = 1;
4490 #ifdef WORD_REGISTER_OPERATIONS
4491 /* If this initializes a field that is smaller than a word, at the
4492 start of a word, try to widen it to a full word.
4493 This special case allows us to output C++ member function
4494 initializations in a form that the optimizers can understand. */
4495 if (GET_CODE (target) == REG
4496 && bitsize < BITS_PER_WORD
4497 && bitpos % BITS_PER_WORD == 0
4498 && GET_MODE_CLASS (mode) == MODE_INT
4499 && TREE_CODE (value) == INTEGER_CST
4501 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4503 tree type = TREE_TYPE (value);
4505 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4507 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4508 value = convert (type, value);
4511 if (BYTES_BIG_ENDIAN)
4513 = fold (build (LSHIFT_EXPR, type, value,
4514 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4515 bitsize = BITS_PER_WORD;
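/* Worked example (sketch): with BITS_PER_WORD == 32, an 8-bit field at
   bitpos 0 initialized with the constant 0x2a is widened to a full
   word; on a big-endian target the constant is first shifted left by
   32 - 8 == 24 bits, giving 0x2a000000, so that it lands in the
   high-order byte, and bitsize then becomes 32.  */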
4520 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4521 && DECL_NONADDRESSABLE_P (field))
4523 to_rtx = copy_rtx (to_rtx);
4524 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4527 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4528 value, type, cleared,
4529 get_alias_set (TREE_TYPE (field)));
4532 else if (TREE_CODE (type) == ARRAY_TYPE
4533 || TREE_CODE (type) == VECTOR_TYPE)
4538 tree domain = TYPE_DOMAIN (type);
4539 tree elttype = TREE_TYPE (type);
4541 HOST_WIDE_INT minelt = 0;
4542 HOST_WIDE_INT maxelt = 0;
4544 /* Vectors are like arrays, but the domain is stored via an array type indirectly. */
4546 if (TREE_CODE (type) == VECTOR_TYPE)
4548 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4549 the same field as TYPE_DOMAIN, we are not guaranteed that it always will. */
4551 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4552 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4555 const_bounds_p = (TYPE_MIN_VALUE (domain)
4556 && TYPE_MAX_VALUE (domain)
4557 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4558 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4560 /* If we have constant bounds for the range of the type, get them. */
4563 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4564 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4567 /* If the constructor has fewer elements than the array,
4568 clear the whole array first. Similarly if this is
4569 a static constructor of a non-BLKmode object. */
4570 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4574 HOST_WIDE_INT count = 0, zero_count = 0;
4575 need_to_clear = ! const_bounds_p;
4577 /* This loop is a more accurate version of the loop in
4578 mostly_zeros_p (it handles RANGE_EXPR in an index).
4579 It is also needed to check for missing elements. */
4580 for (elt = CONSTRUCTOR_ELTS (exp);
4581 elt != NULL_TREE && ! need_to_clear;
4582 elt = TREE_CHAIN (elt))
4584 tree index = TREE_PURPOSE (elt);
4585 HOST_WIDE_INT this_node_count;
4587 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4589 tree lo_index = TREE_OPERAND (index, 0);
4590 tree hi_index = TREE_OPERAND (index, 1);
4592 if (! host_integerp (lo_index, 1)
4593 || ! host_integerp (hi_index, 1))
4599 this_node_count = (tree_low_cst (hi_index, 1)
4600 - tree_low_cst (lo_index, 1) + 1);
4603 this_node_count = 1;
4605 count += this_node_count;
4606 if (mostly_zeros_p (TREE_VALUE (elt)))
4607 zero_count += this_node_count;
4610 /* Clear the entire array first if there are any missing elements,
4611 or if the incidence of zero elements is >= 75%. */
4613 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4617 if (need_to_clear && size > 0)
4620 clear_storage (target, GEN_INT (size));
4623 else if (REG_P (target))
4624 /* Inform later passes that the old value is dead. */
4625 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4627 /* Store each element of the constructor into
4628 the corresponding element of TARGET, determined
4629 by counting the elements. */
4630 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4632 elt = TREE_CHAIN (elt), i++)
4634 enum machine_mode mode;
4635 HOST_WIDE_INT bitsize;
4636 HOST_WIDE_INT bitpos;
4638 tree value = TREE_VALUE (elt);
4639 tree index = TREE_PURPOSE (elt);
4640 rtx xtarget = target;
4642 if (cleared && is_zeros_p (value))
4645 unsignedp = TREE_UNSIGNED (elttype);
4646 mode = TYPE_MODE (elttype);
4647 if (mode == BLKmode)
4648 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4649 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4652 bitsize = GET_MODE_BITSIZE (mode);
4654 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4656 tree lo_index = TREE_OPERAND (index, 0);
4657 tree hi_index = TREE_OPERAND (index, 1);
4658 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4659 struct nesting *loop;
4660 HOST_WIDE_INT lo, hi, count;
4663 /* If the range is constant and "small", unroll the loop. */
4665 && host_integerp (lo_index, 0)
4666 && host_integerp (hi_index, 0)
4667 && (lo = tree_low_cst (lo_index, 0),
4668 hi = tree_low_cst (hi_index, 0),
4669 count = hi - lo + 1,
4670 (GET_CODE (target) != MEM
4672 || (host_integerp (TYPE_SIZE (elttype), 1)
4673 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4676 lo -= minelt; hi -= minelt;
4677 for (; lo <= hi; lo++)
4679 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4681 if (GET_CODE (target) == MEM
4682 && !MEM_KEEP_ALIAS_SET_P (target)
4683 && TREE_CODE (type) == ARRAY_TYPE
4684 && TYPE_NONALIASED_COMPONENT (type))
4686 target = copy_rtx (target);
4687 MEM_KEEP_ALIAS_SET_P (target) = 1;
4690 store_constructor_field
4691 (target, bitsize, bitpos, mode, value, type, cleared,
4692 get_alias_set (elttype));
4697 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4698 loop_top = gen_label_rtx ();
4699 loop_end = gen_label_rtx ();
4701 unsignedp = TREE_UNSIGNED (domain);
4703 index = build_decl (VAR_DECL, NULL_TREE, domain);
4706 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4708 SET_DECL_RTL (index, index_r);
4709 if (TREE_CODE (value) == SAVE_EXPR
4710 && SAVE_EXPR_RTL (value) == 0)
4712 /* Make sure value gets expanded once before the
4714 expand_expr (value, const0_rtx, VOIDmode, 0);
4717 store_expr (lo_index, index_r, 0);
4718 loop = expand_start_loop (0);
4720 /* Assign value to element index. */
4722 = convert (ssizetype,
4723 fold (build (MINUS_EXPR, TREE_TYPE (index),
4724 index, TYPE_MIN_VALUE (domain))));
4725 position = size_binop (MULT_EXPR, position,
4727 TYPE_SIZE_UNIT (elttype)));
4729 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4730 xtarget = offset_address (target, pos_rtx,
4731 highest_pow2_factor (position));
4732 xtarget = adjust_address (xtarget, mode, 0);
4733 if (TREE_CODE (value) == CONSTRUCTOR)
4734 store_constructor (value, xtarget, cleared,
4735 bitsize / BITS_PER_UNIT);
4737 store_expr (value, xtarget, 0);
4739 expand_exit_loop_if_false (loop,
4740 build (LT_EXPR, integer_type_node,
4743 expand_increment (build (PREINCREMENT_EXPR,
4745 index, integer_one_node), 0, 0);
4747 emit_label (loop_end);
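/* Illustrative sketch (GNU C range designator, not from this file):

     int a[100] = { [3 ... 90] = 7 };

   A small constant range whose target is a register, or whose total
   size is modest, is unrolled into one store per element; otherwise
   the code above emits a loop roughly equivalent to

     for (i = 3; i <= 90; i++) a[i] = 7;

   using the index pseudo-register built with build_decl.  */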
4750 else if ((index != 0 && ! host_integerp (index, 0))
4751 || ! host_integerp (TYPE_SIZE (elttype), 1))
4756 index = ssize_int (i);
4759 index = convert (ssizetype,
4760 fold (build (MINUS_EXPR, TREE_TYPE (index),
4761 index, TYPE_MIN_VALUE (domain))));
4763 position = size_binop (MULT_EXPR, index,
4765 TYPE_SIZE_UNIT (elttype)));
4766 xtarget = offset_address (target,
4767 expand_expr (position, 0, VOIDmode, 0),
4768 highest_pow2_factor (position));
4769 xtarget = adjust_address (xtarget, mode, 0);
4770 store_expr (value, xtarget, 0);
4775 bitpos = ((tree_low_cst (index, 0) - minelt)
4776 * tree_low_cst (TYPE_SIZE (elttype), 1));
4778 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4780 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4781 && TREE_CODE (type) == ARRAY_TYPE
4782 && TYPE_NONALIASED_COMPONENT (type))
4784 target = copy_rtx (target);
4785 MEM_KEEP_ALIAS_SET_P (target) = 1;
4788 store_constructor_field (target, bitsize, bitpos, mode, value,
4789 type, cleared, get_alias_set (elttype));
4795 /* Set constructor assignments. */
4796 else if (TREE_CODE (type) == SET_TYPE)
4798 tree elt = CONSTRUCTOR_ELTS (exp);
4799 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4800 tree domain = TYPE_DOMAIN (type);
4801 tree domain_min, domain_max, bitlength;
4803 /* The default implementation strategy is to extract the constant
4804 parts of the constructor, use that to initialize the target,
4805 and then "or" in whatever non-constant ranges we need in addition.
4807 If a large set is all zero or all ones, it is
4808 probably better to set it using memset (if available) or bzero.
4809 Also, if a large set has just a single range, it may also be
4810 better to first clear the whole set (using
4811 bzero/memset) and then set the bits we want. */
4813 /* Check for all zeros. */
4814 if (elt == NULL_TREE && size > 0)
4817 clear_storage (target, GEN_INT (size));
4821 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4822 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4823 bitlength = size_binop (PLUS_EXPR,
4824 size_diffop (domain_max, domain_min),
4827 nbits = tree_low_cst (bitlength, 1);
4829 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4830 are "complicated" (more than one range), initialize (the
4831 constant parts) by copying from a constant. */
4832 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4833 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4835 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4836 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4837 char *bit_buffer = (char *) alloca (nbits);
4838 HOST_WIDE_INT word = 0;
4839 unsigned int bit_pos = 0;
4840 unsigned int ibit = 0;
4841 unsigned int offset = 0; /* In bytes from beginning of set. */
4843 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4846 if (bit_buffer[ibit])
4848 if (BYTES_BIG_ENDIAN)
4849 word |= (1 << (set_word_size - 1 - bit_pos));
4851 word |= 1 << bit_pos;
4855 if (bit_pos >= set_word_size || ibit == nbits)
4857 if (word != 0 || ! cleared)
4859 rtx datum = GEN_INT (word);
4862 /* The assumption here is that it is safe to use
4863 XEXP if the set is multi-word, but not if
4864 it's single-word. */
4865 if (GET_CODE (target) == MEM)
4866 to_rtx = adjust_address (target, mode, offset);
4867 else if (offset == 0)
4871 emit_move_insn (to_rtx, datum);
4878 offset += set_word_size / BITS_PER_UNIT;
4883 /* Don't bother clearing storage if the set is all ones. */
4884 if (TREE_CHAIN (elt) != NULL_TREE
4885 || (TREE_PURPOSE (elt) == NULL_TREE
4887 : ( ! host_integerp (TREE_VALUE (elt), 0)
4888 || ! host_integerp (TREE_PURPOSE (elt), 0)
4889 || (tree_low_cst (TREE_VALUE (elt), 0)
4890 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4891 != (HOST_WIDE_INT) nbits))))
4892 clear_storage (target, expr_size (exp));
4894 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4896 /* Start of range of element or NULL. */
4897 tree startbit = TREE_PURPOSE (elt);
4898 /* End of range of element, or element value. */
4899 tree endbit = TREE_VALUE (elt);
4900 #ifdef TARGET_MEM_FUNCTIONS
4901 HOST_WIDE_INT startb, endb;
4903 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4905 bitlength_rtx = expand_expr (bitlength,
4906 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4908 /* Handle non-range tuple element like [ expr ]. */
4909 if (startbit == NULL_TREE)
4911 startbit = save_expr (endbit);
4915 startbit = convert (sizetype, startbit);
4916 endbit = convert (sizetype, endbit);
4917 if (! integer_zerop (domain_min))
4919 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4920 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4922 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4923 EXPAND_CONST_ADDRESS);
4924 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4925 EXPAND_CONST_ADDRESS);
4931 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4934 emit_move_insn (targetx, target);
4937 else if (GET_CODE (target) == MEM)
4942 #ifdef TARGET_MEM_FUNCTIONS
4943 /* Optimization: If startbit and endbit are
4944 constants divisible by BITS_PER_UNIT,
4945 call memset instead. */
4946 if (TREE_CODE (startbit) == INTEGER_CST
4947 && TREE_CODE (endbit) == INTEGER_CST
4948 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4949 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4951 emit_library_call (memset_libfunc, LCT_NORMAL,
4953 plus_constant (XEXP (targetx, 0),
4954 startb / BITS_PER_UNIT),
4956 constm1_rtx, TYPE_MODE (integer_type_node),
4957 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4958 TYPE_MODE (sizetype));
4962 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4963 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4964 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4965 startbit_rtx, TYPE_MODE (sizetype),
4966 endbit_rtx, TYPE_MODE (sizetype));
4969 emit_move_insn (target, targetx);
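/* Worked example (sketch): for a Pascal-style set over [0..31] holding
   the members {1, 3, 5}, the constant part is packed one bit at a time
   by "word |= 1 << bit_pos" (or the mirrored big-endian shift), giving
   word == 0x2a == binary 101010, which is then stored with a single
   move; any non-constant ranges are "or"ed in afterward via memset or
   the __setbits library call.  */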
4977 /* Store the value of EXP (an expression tree)
4978 into a subfield of TARGET which has mode MODE and occupies
4979 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4980 If MODE is VOIDmode, it means that we are storing into a bit-field.
4982 If VALUE_MODE is VOIDmode, return nothing in particular.
4983 UNSIGNEDP is not used in this case.
4985 Otherwise, return an rtx for the value stored. This rtx
4986 has mode VALUE_MODE if that is convenient to do.
4987 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4989 TYPE is the type of the underlying object,
4991 ALIAS_SET is the alias set for the destination. This value will
4992 (in general) be different from that for TARGET, since TARGET is a
4993 reference to the containing structure. */
4996 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
4999 HOST_WIDE_INT bitsize;
5000 HOST_WIDE_INT bitpos;
5001 enum machine_mode mode;
5003 enum machine_mode value_mode;
5008 HOST_WIDE_INT width_mask = 0;
5010 if (TREE_CODE (exp) == ERROR_MARK)
5013 /* If we have nothing to store, do nothing unless the expression has
5014 side-effects. */
5015 if (bitsize == 0)
5016 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5017 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5018 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
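/* Worked example (sketch): for bitsize == 5, width_mask becomes
   ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f, the mask that keeps just the
   five low-order bits of the value being stored.  */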
5020 /* If we are storing into an unaligned field of an aligned union that is
5021 in a register, we may have the mode of TARGET being an integer mode but
5022 MODE == BLKmode. In that case, get an aligned object whose size and
5023 alignment are the same as TARGET and store TARGET into it (we can avoid
5024 the store if the field being stored is the entire width of TARGET). Then
5025 call ourselves recursively to store the field into a BLKmode version of
5026 that object. Finally, load from the object into TARGET. This is not
5027 very efficient in general, but should only be slightly more expensive
5028 than the otherwise-required unaligned accesses. Perhaps this can be
5029 cleaned up later. */
5032 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5036 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5038 rtx blk_object = adjust_address (object, BLKmode, 0);
5040 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5041 emit_move_insn (object, target);
5043 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5046 emit_move_insn (target, object);
5048 /* We want to return the BLKmode version of the data. */
5052 if (GET_CODE (target) == CONCAT)
5054 /* We're storing into a struct containing a single __complex. */
5058 return store_expr (exp, target, 0);
5061 /* If the structure is in a register or if the component
5062 is a bit field, we cannot use addressing to access it.
5063 Use bit-field techniques or SUBREG to store in it. */
5065 if (mode == VOIDmode
5066 || (mode != BLKmode && ! direct_store[(int) mode]
5067 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5068 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5069 || GET_CODE (target) == REG
5070 || GET_CODE (target) == SUBREG
5071 /* If the field isn't aligned enough to store as an ordinary memref,
5072 store it as a bit field. */
5073 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5074 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5075 || bitpos % GET_MODE_ALIGNMENT (mode)))
5076 /* If the RHS and field are a constant size and the size of the
5077 RHS isn't the same size as the bitfield, we must use bitfield
5080 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5081 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5083 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5085 /* If BITSIZE is narrower than the size of the type of EXP
5086 we will be narrowing TEMP. Normally, what's wanted are the
5087 low-order bits. However, if EXP's type is a record and this is
5088 a big-endian machine, we want the upper BITSIZE bits. */
5089 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5090 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5091 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5092 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5093 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5097 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5099 if (mode != VOIDmode && mode != BLKmode
5100 && mode != TYPE_MODE (TREE_TYPE (exp)))
5101 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5103 /* If the modes of TARGET and TEMP are both BLKmode, both
5104 must be in memory and BITPOS must be aligned on a byte
5105 boundary. If so, we simply do a block copy. */
5106 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5108 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5109 || bitpos % BITS_PER_UNIT != 0)
5112 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5113 emit_block_move (target, temp,
5114 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5117 return value_mode == VOIDmode ? const0_rtx : target;
5120 /* Store the value in the bitfield. */
5121 store_bit_field (target, bitsize, bitpos, mode, temp,
5122 int_size_in_bytes (type));
5124 if (value_mode != VOIDmode)
5126 /* The caller wants an rtx for the value.
5127 If possible, avoid refetching from the bitfield itself. */
5129 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5132 enum machine_mode tmode;
5135 return expand_and (temp,
5139 GET_MODE (temp) == VOIDmode
5141 : GET_MODE (temp))), NULL_RTX);
5143 tmode = GET_MODE (temp);
5144 if (tmode == VOIDmode)
5146 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5147 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5148 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
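/* Worked example (sketch): with tmode == SImode (32 bits) and
   bitsize == 5, count is 32 - 5 == 27, and the shift pair computes
   (temp << 27) >> 27 with an arithmetic right shift, sign-extending
   the 5-bit field value to the full word.  */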
5151 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5152 NULL_RTX, value_mode, VOIDmode,
5153 int_size_in_bytes (type));
5159 rtx addr = XEXP (target, 0);
5160 rtx to_rtx = target;
5162 /* If a value is wanted, it must be the lhs;
5163 so make the address stable for multiple use. */
5165 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5166 && ! CONSTANT_ADDRESS_P (addr)
5167 /* A frame-pointer reference is already stable. */
5168 && ! (GET_CODE (addr) == PLUS
5169 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5170 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5171 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5172 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5174 /* Now build a reference to just the desired component. */
5176 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5178 if (to_rtx == target)
5179 to_rtx = copy_rtx (to_rtx);
5181 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5182 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5183 set_mem_alias_set (to_rtx, alias_set);
5185 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5189 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5190 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5191 codes and find the ultimate containing object, which we return.
5193 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5194 bit position, and *PUNSIGNEDP to the signedness of the field.
5195 If the position of the field is variable, we store a tree
5196 giving the variable offset (in units) in *POFFSET.
5197 This offset is in addition to the bit position.
5198 If the position is not variable, we store 0 in *POFFSET.
5200 If any of the extraction expressions is volatile,
5201 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5203 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5204 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5207 If the field describes a variable-sized object, *PMODE is set to
5208 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5209 this case, but the address of the object can be found. */
5212 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5213 punsignedp, pvolatilep)
5215 HOST_WIDE_INT *pbitsize;
5216 HOST_WIDE_INT *pbitpos;
5218 enum machine_mode *pmode;
5223 enum machine_mode mode = VOIDmode;
5224 tree offset = size_zero_node;
5225 tree bit_offset = bitsize_zero_node;
5226 tree placeholder_ptr = 0;
5229 /* First get the mode, signedness, and size. We do this from just the
5230 outermost expression. */
5231 if (TREE_CODE (exp) == COMPONENT_REF)
5233 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5234 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5235 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5237 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5239 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5241 size_tree = TREE_OPERAND (exp, 1);
5242 *punsignedp = TREE_UNSIGNED (exp);
5246 mode = TYPE_MODE (TREE_TYPE (exp));
5247 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5249 if (mode == BLKmode)
5250 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5252 *pbitsize = GET_MODE_BITSIZE (mode);
5257 if (! host_integerp (size_tree, 1))
5258 mode = BLKmode, *pbitsize = -1;
5260 *pbitsize = tree_low_cst (size_tree, 1);
5263 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5264 and find the ultimate containing object. */
5267 if (TREE_CODE (exp) == BIT_FIELD_REF)
5268 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5269 else if (TREE_CODE (exp) == COMPONENT_REF)
5271 tree field = TREE_OPERAND (exp, 1);
5272 tree this_offset = DECL_FIELD_OFFSET (field);
5274 /* If this field hasn't been filled in yet, don't go
5275 past it. This should only happen when folding expressions
5276 made during type construction. */
5277 if (this_offset == 0)
5279 else if (! TREE_CONSTANT (this_offset)
5280 && contains_placeholder_p (this_offset))
5281 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5283 offset = size_binop (PLUS_EXPR, offset, this_offset);
5284 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5285 DECL_FIELD_BIT_OFFSET (field));
5287 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5290 else if (TREE_CODE (exp) == ARRAY_REF
5291 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5293 tree index = TREE_OPERAND (exp, 1);
5294 tree array = TREE_OPERAND (exp, 0);
5295 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5296 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5297 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5299 /* We assume all arrays have sizes that are a multiple of a byte.
5300 First subtract the lower bound, if any, in the type of the
5301 index, then convert to sizetype and multiply by the size of the array element. */
5303 if (low_bound != 0 && ! integer_zerop (low_bound))
5304 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5307 /* If the index has a self-referential type, pass it to a
5308 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5309 component to one. */
5310 if (! TREE_CONSTANT (index)
5311 && contains_placeholder_p (index))
5312 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5313 if (! TREE_CONSTANT (unit_size)
5314 && contains_placeholder_p (unit_size))
5315 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5317 offset = size_binop (PLUS_EXPR, offset,
5318 size_binop (MULT_EXPR,
5319 convert (sizetype, index),
5323 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5325 tree new = find_placeholder (exp, &placeholder_ptr);
5327 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5328 We might have been called from tree optimization where we
5329 haven't set up an object yet. */
5337 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5338 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5339 && ! ((TREE_CODE (exp) == NOP_EXPR
5340 || TREE_CODE (exp) == CONVERT_EXPR)
5341 && (TYPE_MODE (TREE_TYPE (exp))
5342 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5345 /* If any reference in the chain is volatile, the effect is volatile. */
5346 if (TREE_THIS_VOLATILE (exp))
5349 exp = TREE_OPERAND (exp, 0);
5352 /* If OFFSET is constant, see if we can return the whole thing as a
5353 constant bit position. Otherwise, split it up. */
5354 if (host_integerp (offset, 0)
5355 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5357 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5358 && host_integerp (tem, 0))
5359 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5361 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
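/* Illustrative sketch (hypothetical layout): for a reference S.F where
   F is a 3-bit bit-field starting 37 bits into S, this returns S and
   sets *PBITSIZE to 3, *PBITPOS to 37, *POFFSET to 0 and *PMODE to
   VOIDmode.  For S.A[I] with a variable index I, the variable part,
   I scaled by the element size, comes back in *POFFSET instead.  */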
5367 /* Return 1 if T is an expression that get_inner_reference handles. */
5370 handled_component_p (t)
5373 switch (TREE_CODE (t))
5378 case ARRAY_RANGE_REF:
5379 case NON_LVALUE_EXPR:
5380 case VIEW_CONVERT_EXPR:
5385 return (TYPE_MODE (TREE_TYPE (t))
5386 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5393 /* Given an rtx VALUE that may contain additions and multiplications, return
5394 an equivalent value that just refers to a register, memory, or constant.
5395 This is done by generating instructions to perform the arithmetic and
5396 returning a pseudo-register containing the value.
5398 The returned value may be a REG, SUBREG, MEM or constant. */
5401 force_operand (value, target)
5405 /* Use a temporary to force order of execution of calls to `force_operand'. */
5409 /* Use subtarget as the target for operand 0 of a binary operation. */
5410 rtx subtarget = get_subtarget (target);
5412 /* Check for a PIC address load. */
5413 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5414 && XEXP (value, 0) == pic_offset_table_rtx
5415 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5416 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5417 || GET_CODE (XEXP (value, 1)) == CONST))
5420 subtarget = gen_reg_rtx (GET_MODE (value));
5421 emit_move_insn (subtarget, value);
5425 if (GET_CODE (value) == PLUS)
5426 binoptab = add_optab;
5427 else if (GET_CODE (value) == MINUS)
5428 binoptab = sub_optab;
5429 else if (GET_CODE (value) == MULT)
5431 op2 = XEXP (value, 1);
5432 if (!CONSTANT_P (op2)
5433 && !(GET_CODE (op2) == REG && op2 != subtarget))
5435 tmp = force_operand (XEXP (value, 0), subtarget);
5436 return expand_mult (GET_MODE (value), tmp,
5437 force_operand (op2, NULL_RTX),
5443 op2 = XEXP (value, 1);
5444 if (!CONSTANT_P (op2)
5445 && !(GET_CODE (op2) == REG && op2 != subtarget))
5447 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5449 binoptab = add_optab;
5450 op2 = negate_rtx (GET_MODE (value), op2);
5453 /* Check for an addition with OP2 a constant integer and our first
5454 operand a PLUS of a virtual register and something else. In that
5455 case, we want to emit the sum of the virtual register and the
5456 constant first and then add the other value. This allows virtual
5457 register instantiation to simply modify the constant rather than
5458 creating another one around this addition. */
5459 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5460 && GET_CODE (XEXP (value, 0)) == PLUS
5461 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5462 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5463 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5465 rtx temp = expand_binop (GET_MODE (value), binoptab,
5466 XEXP (XEXP (value, 0), 0), op2,
5467 subtarget, 0, OPTAB_LIB_WIDEN);
5468 return expand_binop (GET_MODE (value), binoptab, temp,
5469 force_operand (XEXP (XEXP (value, 0), 1), 0),
5470 target, 0, OPTAB_LIB_WIDEN);
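/* Illustrative sketch: given
     (plus (plus (reg virtual-stack-vars) (reg R)) (const_int 4))
   we first emit virtual-stack-vars + 4, which instantiation can later
   rewrite by merely adjusting the constant, and only then add R.  */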
5473 tmp = force_operand (XEXP (value, 0), subtarget);
5474 return expand_binop (GET_MODE (value), binoptab, tmp,
5475 force_operand (op2, NULL_RTX),
5476 target, 0, OPTAB_LIB_WIDEN);
5477 /* We give UNSIGNEDP = 0 to expand_binop
5478 because the only operations we are expanding here are signed ones. */
5481 #ifdef INSN_SCHEDULING
5482 /* On machines that have insn scheduling, we want all memory references to be
5483 explicit, so we need to deal with such paradoxical SUBREGs. */
5484 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5485 && (GET_MODE_SIZE (GET_MODE (value))
5486 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5488 = simplify_gen_subreg (GET_MODE (value),
5489 force_reg (GET_MODE (SUBREG_REG (value)),
5490 force_operand (SUBREG_REG (value),
5492 GET_MODE (SUBREG_REG (value)),
5493 SUBREG_BYTE (value));
5499 /* Subroutine of expand_expr: return nonzero iff there is no way that
5500 EXP can reference X, which is being modified. TOP_P is nonzero if this
5501 call is going to be used to determine whether we need a temporary
5502 for EXP, as opposed to a recursive call to this function.
5504 It is always safe for this routine to return zero since it merely
5505 searches for optimization opportunities. */
5508 safe_from_p (x, exp, top_p)
5515 static tree save_expr_list;
5518 /* If EXP has varying size, we MUST use a target since we currently
5519 have no way of allocating temporaries of variable size
5520 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5521 So we assume here that something at a higher level has prevented a
5522 clash. This is somewhat bogus, but the best we can do. Only
5523 do this when X is BLKmode and when we are at the top level. */
5524 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5525 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5526 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5527 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5528 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5530 && GET_MODE (x) == BLKmode)
5531 /* If X is in the outgoing argument area, it is always safe. */
5532 || (GET_CODE (x) == MEM
5533 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5534 || (GET_CODE (XEXP (x, 0)) == PLUS
5535 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5538 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5539 find the underlying pseudo. */
5540 if (GET_CODE (x) == SUBREG)
5543 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5547 /* A SAVE_EXPR might appear many times in the expression passed to the
5548 top-level safe_from_p call, and if it has a complex subexpression,
5549 examining it multiple times could result in a combinatorial explosion.
5550 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5551 with optimization took about 28 minutes to compile -- even though it was
5552 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5553 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5554 we have processed. Note that the only test of top_p was above. */
5563 rtn = safe_from_p (x, exp, 0);
5565 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5566 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5571 /* Now look at our tree code and possibly recurse. */
5572 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5575 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5582 if (TREE_CODE (exp) == TREE_LIST)
5583 return ((TREE_VALUE (exp) == 0
5584 || safe_from_p (x, TREE_VALUE (exp), 0))
5585 && (TREE_CHAIN (exp) == 0
5586 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5587 else if (TREE_CODE (exp) == ERROR_MARK)
5588 return 1; /* An already-visited SAVE_EXPR? */
5593 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5597 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5598 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5602 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5603 the expression. If it is set, we conflict iff we are that rtx or
5604 both are in memory. Otherwise, we check all operands of the
5605 expression recursively. */
5607 switch (TREE_CODE (exp))
5610 /* If the operand is static or we are static, we can't conflict.
5611 Likewise if we don't conflict with the operand at all. */
5612 if (staticp (TREE_OPERAND (exp, 0))
5613 || TREE_STATIC (exp)
5614 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5617 /* Otherwise, the only way this can conflict is if we are taking
5618 the address of a DECL whose address is part of X, which is very rare. */
5620 exp = TREE_OPERAND (exp, 0);
5623 if (!DECL_RTL_SET_P (exp)
5624 || GET_CODE (DECL_RTL (exp)) != MEM)
5627 exp_rtl = XEXP (DECL_RTL (exp), 0);
5632 if (GET_CODE (x) == MEM
5633 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5634 get_alias_set (exp)))
5639 /* Assume that the call will clobber all hard registers and all of memory. */
5641 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5642 || GET_CODE (x) == MEM)
5647 /* If a sequence exists, we would have to scan every instruction
5648 in the sequence to see if it was safe. This is probably not worthwhile. */
5650 if (RTL_EXPR_SEQUENCE (exp))
5653 exp_rtl = RTL_EXPR_RTL (exp);
5656 case WITH_CLEANUP_EXPR:
5657 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5660 case CLEANUP_POINT_EXPR:
5661 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5664 exp_rtl = SAVE_EXPR_RTL (exp);
5668 /* If we've already scanned this, don't do it again. Otherwise,
5669 show we've scanned it and record it so the flag can be cleared when we're done. */
5671 if (TREE_PRIVATE (exp))
5674 TREE_PRIVATE (exp) = 1;
5675 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5677 TREE_PRIVATE (exp) = 0;
5681 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5685 /* The only operand we look at is operand 1. The rest aren't
5686 part of the expression. */
5687 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5689 case METHOD_CALL_EXPR:
5690 /* This takes an rtx argument, but shouldn't appear here. */
5697 /* If we have an rtx, we do not need to scan our operands. */
5701 nops = first_rtl_op (TREE_CODE (exp));
5702 for (i = 0; i < nops; i++)
5703 if (TREE_OPERAND (exp, i) != 0
5704 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5707 /* If this is a language-specific tree code, it may require
5708 special handling. */
5709 if ((unsigned int) TREE_CODE (exp)
5710 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5711 && !(*lang_hooks.safe_from_p) (x, exp))
5715 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5719 if (GET_CODE (exp_rtl) == SUBREG)
5721 exp_rtl = SUBREG_REG (exp_rtl);
5722 if (GET_CODE (exp_rtl) == REG
5723 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5727 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5728 are memory and they conflict. */
5729 return ! (rtx_equal_p (x, exp_rtl)
5730 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5731 && true_dependence (exp_rtl, GET_MODE (x), x,
5732 rtx_addr_varies_p)));
5735 /* If we reach here, it is safe. */
5736 return 1;
5739 /* Subroutine of expand_expr: return rtx if EXP is a
5740 variable or parameter; else return 0. */
5747 switch (TREE_CODE (exp))
5751 return DECL_RTL (exp);
5757 #ifdef MAX_INTEGER_COMPUTATION_MODE
5760 check_max_integer_computation_mode (exp)
5763 enum tree_code code;
5764 enum machine_mode mode;
5766 /* Strip any NOPs that don't change the mode. */
5767 STRIP_NOPS (exp);
5768 code = TREE_CODE (exp);
5770 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5771 if (code == NOP_EXPR
5772 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5775 /* First check the type of the overall operation. We need only look at
5776 unary, binary and relational operations. */
5777 if (TREE_CODE_CLASS (code) == '1'
5778 || TREE_CODE_CLASS (code) == '2'
5779 || TREE_CODE_CLASS (code) == '<')
5781 mode = TYPE_MODE (TREE_TYPE (exp));
5782 if (GET_MODE_CLASS (mode) == MODE_INT
5783 && mode > MAX_INTEGER_COMPUTATION_MODE)
5784 internal_error ("unsupported wide integer operation");
5787 /* Check operand of a unary op. */
5788 if (TREE_CODE_CLASS (code) == '1')
5790 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5791 if (GET_MODE_CLASS (mode) == MODE_INT
5792 && mode > MAX_INTEGER_COMPUTATION_MODE)
5793 internal_error ("unsupported wide integer operation");
5796 /* Check operands of a binary/comparison op. */
5797 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5799 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5800 if (GET_MODE_CLASS (mode) == MODE_INT
5801 && mode > MAX_INTEGER_COMPUTATION_MODE)
5802 internal_error ("unsupported wide integer operation");
5804 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5805 if (GET_MODE_CLASS (mode) == MODE_INT
5806 && mode > MAX_INTEGER_COMPUTATION_MODE)
5807 internal_error ("unsupported wide integer operation");
5812 /* Return the highest power of two that EXP is known to be a multiple of.
5813 This is used in updating alignment of MEMs in array references. */
5815 static HOST_WIDE_INT
5816 highest_pow2_factor (exp)
5819 HOST_WIDE_INT c0, c1;
5821 switch (TREE_CODE (exp))
5824 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5825 lowest bit that's a one. If the result is zero, return
5826 BIGGEST_ALIGNMENT. We need to handle this case since we can find it
5827 in a COND_EXPR, a MIN_EXPR, or a MAX_EXPR. If the constant overflows,
5828 we have an erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5829 later ICE. */
5830 if (TREE_CONSTANT_OVERFLOW (exp)
5831 || integer_zerop (exp))
5832 return BIGGEST_ALIGNMENT;
5833 else if (host_integerp (exp, 0))
5835 c0 = tree_low_cst (exp, 0);
5836 c0 = c0 < 0 ? - c0 : c0;
5841 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5842 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5843 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5844 return MIN (c0, c1);
5846 case MULT_EXPR:
5847 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5848 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5849 return c0 * c1;
5851 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5853 if (integer_pow2p (TREE_OPERAND (exp, 1))
5854 && host_integerp (TREE_OPERAND (exp, 1), 1))
5856 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5857 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5858 return MAX (1, c0 / c1);
5862 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5863 case SAVE_EXPR: case WITH_RECORD_EXPR:
5864 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5867 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5870 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5871 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5872 return MIN (c0, c1);
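/* Worked examples (sketch): for the constant 24 the result is 8, its
   lowest set bit; for N * 4 the MULT_EXPR case gives 4 * 1 == 4, since
   an operand the function cannot analyze contributes a factor of 1;
   and for N * 4 + 8 the PLUS_EXPR case returns MIN (4, 8) == 4.  The
   result is thus a conservative lower bound on the value's known
   alignment.  */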
5881 /* Return an object on the placeholder list that matches EXP, a
5882 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5883 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5884 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5885 points to a starting position in the placeholder list (zero means the
5886 start of the list); on return it is updated to point to the list entry
5887 at which the object was found. */
5890 find_placeholder (exp, plist)
5894 tree type = TREE_TYPE (exp);
5895 tree placeholder_expr;
5897 for (placeholder_expr
5898 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5899 placeholder_expr != 0;
5900 placeholder_expr = TREE_CHAIN (placeholder_expr))
5902 tree need_type = TYPE_MAIN_VARIANT (type);
5905 /* Find the outermost reference that is of the type we want. If none,
5906 see if any object has a type that is a pointer to the type we
5908 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5909 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5910 || TREE_CODE (elt) == COND_EXPR)
5911 ? TREE_OPERAND (elt, 1)
5912 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5913 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5914 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5915 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5916 ? TREE_OPERAND (elt, 0) : 0))
5917 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5920 *plist = placeholder_expr;
5924 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5926 = ((TREE_CODE (elt) == COMPOUND_EXPR
5927 || TREE_CODE (elt) == COND_EXPR)
5928 ? TREE_OPERAND (elt, 1)
5929 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5930 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5931 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5932 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5933 ? TREE_OPERAND (elt, 0) : 0))
5934 if (POINTER_TYPE_P (TREE_TYPE (elt))
5935 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5939 *plist = placeholder_expr;
5940 return build1 (INDIRECT_REF, need_type, elt);
5947 /* expand_expr: generate code for computing expression EXP.
5948 An rtx for the computed value is returned. The value is never null.
5949 In the case of a void EXP, const0_rtx is returned.
5951 The value may be stored in TARGET if TARGET is nonzero.
5952 TARGET is just a suggestion; callers must assume that
5953 the rtx returned may not be the same as TARGET.
5955 If TARGET is CONST0_RTX, it means that the value will be ignored.
5957 If TMODE is not VOIDmode, it suggests generating the
5958 result in mode TMODE. But this is done only when convenient.
5959 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5960 TMODE is just a suggestion; callers must assume that
5961 the rtx returned may not have mode TMODE.
5963 Note that TARGET may have neither TMODE nor MODE. In that case, it
5964 probably will not be used.
5966 If MODIFIER is EXPAND_SUM then when EXP is an addition
5967 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5968 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5969 products as above, or REG or MEM, or constant.
5970 Ordinarily in such cases we would output mul or add instructions
5971 and then return a pseudo reg containing the sum.
5973 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5974 it also marks a label as absolutely required (it can't be dead).
5975 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5976 This is used for outputting expressions used in initializers.
5978 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5979 with a constant address even if that address is not normally legitimate.
5980 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
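/* Illustrative sketch: under EXPAND_SUM, expanding the address
   expression P + I * 4 may return the unreduced rtx

     (plus (reg P) (mult (reg I) (const_int 4)))

   so that the caller can fold it into an addressing mode, instead of
   emitting the multiply and add and returning a pseudo-register.  */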
5983 expand_expr (exp, target, tmode, modifier)
5986 enum machine_mode tmode;
5987 enum expand_modifier modifier;
5990 tree type = TREE_TYPE (exp);
5991 int unsignedp = TREE_UNSIGNED (type);
5992 enum machine_mode mode;
5993 enum tree_code code = TREE_CODE (exp);
5995 rtx subtarget, original_target;
5999 /* Handle ERROR_MARK before anybody tries to access its type. */
6000 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6002 op0 = CONST0_RTX (tmode);
6008 mode = TYPE_MODE (type);
6009 /* Use subtarget as the target for operand 0 of a binary operation. */
6010 subtarget = get_subtarget (target);
6011 original_target = target;
6012 ignore = (target == const0_rtx
6013 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6014 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6015 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6016 && TREE_CODE (type) == VOID_TYPE));
6018 /* If we are going to ignore this result, we need only do something
6019 if there is a side-effect somewhere in the expression. If there
6020 is, short-circuit the most common cases here. Note that we must
6021 not call expand_expr with anything but const0_rtx in case this
6022 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6026 if (! TREE_SIDE_EFFECTS (exp))
6029 /* Ensure we reference a volatile object even if value is ignored, but
6030 don't do this if all we are doing is taking its address. */
6031 if (TREE_THIS_VOLATILE (exp)
6032 && TREE_CODE (exp) != FUNCTION_DECL
6033 && mode != VOIDmode && mode != BLKmode
6034 && modifier != EXPAND_CONST_ADDRESS)
6036 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6037 if (GET_CODE (temp) == MEM)
6038 temp = copy_to_reg (temp);
6042 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6043 || code == INDIRECT_REF || code == BUFFER_REF)
6044 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6047 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6048 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6050 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6051 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6054 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6055 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6056 /* If the second operand has no side effects, just evaluate the first. */
6058 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6060 else if (code == BIT_FIELD_REF)
6062 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6063 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6064 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6071 #ifdef MAX_INTEGER_COMPUTATION_MODE
6072 /* Only check stuff here if the mode we want is different from the mode
6073 of the expression; if it's the same, check_max_integer_computation_mode
6074 will handle it. Do we really need to check this stuff at all? */
6077 && GET_MODE (target) != mode
6078 && TREE_CODE (exp) != INTEGER_CST
6079 && TREE_CODE (exp) != PARM_DECL
6080 && TREE_CODE (exp) != ARRAY_REF
6081 && TREE_CODE (exp) != ARRAY_RANGE_REF
6082 && TREE_CODE (exp) != COMPONENT_REF
6083 && TREE_CODE (exp) != BIT_FIELD_REF
6084 && TREE_CODE (exp) != INDIRECT_REF
6085 && TREE_CODE (exp) != CALL_EXPR
6086 && TREE_CODE (exp) != VAR_DECL
6087 && TREE_CODE (exp) != RTL_EXPR)
6089 enum machine_mode mode = GET_MODE (target);
6091 if (GET_MODE_CLASS (mode) == MODE_INT
6092 && mode > MAX_INTEGER_COMPUTATION_MODE)
6093 internal_error ("unsupported wide integer operation");
6097 && TREE_CODE (exp) != INTEGER_CST
6098 && TREE_CODE (exp) != PARM_DECL
6099 && TREE_CODE (exp) != ARRAY_REF
6100 && TREE_CODE (exp) != ARRAY_RANGE_REF
6101 && TREE_CODE (exp) != COMPONENT_REF
6102 && TREE_CODE (exp) != BIT_FIELD_REF
6103 && TREE_CODE (exp) != INDIRECT_REF
6104 && TREE_CODE (exp) != VAR_DECL
6105 && TREE_CODE (exp) != CALL_EXPR
6106 && TREE_CODE (exp) != RTL_EXPR
6107 && GET_MODE_CLASS (tmode) == MODE_INT
6108 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6109 internal_error ("unsupported wide integer operation");
6111 check_max_integer_computation_mode (exp);
6114 /* If we will do cse, generate all results into pseudo registers
6115 since 1) that allows cse to find more things
6116 and 2) otherwise cse could produce an insn the machine
6117 cannot support. An exception is a CONSTRUCTOR into a multi-word
6118 MEM: that's much more likely to be most efficient into the MEM. */
6120 if (! cse_not_expected && mode != BLKmode && target
6121 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6122 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6129 tree function = decl_function_context (exp);
6130 /* Handle using a label in a containing function. */
6131 if (function != current_function_decl
6132 && function != inline_function_decl && function != 0)
6134 struct function *p = find_function_data (function);
6135 p->expr->x_forced_labels
6136 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6137 p->expr->x_forced_labels);
6141 if (modifier == EXPAND_INITIALIZER)
6142 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6147 temp = gen_rtx_MEM (FUNCTION_MODE,
6148 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6149 if (function != current_function_decl
6150 && function != inline_function_decl && function != 0)
6151 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6156 if (DECL_RTL (exp) == 0)
6158 error_with_decl (exp, "prior parameter's size depends on `%s'");
6159 return CONST0_RTX (mode);
6162 /* ... fall through ... */
6165 /* If a static var's type was incomplete when the decl was written,
6166 but the type is complete now, lay out the decl now. */
6167 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6168 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6170 rtx value = DECL_RTL_IF_SET (exp);
6172 layout_decl (exp, 0);
6174 /* If the RTL was already set, update its mode and memory attributes. */
6178 PUT_MODE (value, DECL_MODE (exp));
6179 SET_DECL_RTL (exp, 0);
6180 set_mem_attributes (value, exp, 1);
6181 SET_DECL_RTL (exp, value);
6185 /* ... fall through ... */
6189 if (DECL_RTL (exp) == 0)
6192 /* Ensure the variable is marked as used even if it doesn't go through
6193 a parser. If it hasn't been used yet, write out an external definition. */
6195 if (! TREE_USED (exp))
6197 assemble_external (exp);
6198 TREE_USED (exp) = 1;
6201 /* Show we haven't gotten RTL for this yet. */
6204 /* Handle variables inherited from containing functions. */
6205 context = decl_function_context (exp);
6207 /* We treat inline_function_decl as an alias for the current function
6208 because that is the inline function whose vars, types, etc.
6209 are being merged into the current function.
6210 See expand_inline_function. */
6212 if (context != 0 && context != current_function_decl
6213 && context != inline_function_decl
6214 /* If var is static, we don't need a static chain to access it. */
6215 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6216 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6220 /* Mark as non-local and addressable. */
6221 DECL_NONLOCAL (exp) = 1;
6222 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6224 mark_addressable (exp);
6225 if (GET_CODE (DECL_RTL (exp)) != MEM)
6227 addr = XEXP (DECL_RTL (exp), 0);
6228 if (GET_CODE (addr) == MEM)
6230 = replace_equiv_address (addr,
6231 fix_lexical_addr (XEXP (addr, 0), exp));
6233 addr = fix_lexical_addr (addr, exp);
6235 temp = replace_equiv_address (DECL_RTL (exp), addr);
6238 /* This is the case of an array whose size is to be determined
6239 from its initializer, while the initializer is still being parsed.
6242 else if (GET_CODE (DECL_RTL (exp)) == MEM
6243 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6244 temp = validize_mem (DECL_RTL (exp));
6246 /* If DECL_RTL is memory, we are in the normal case: if either the
6247 address is not valid, or it is not a register and -fforce-addr
6248 is specified, get the address into a register. */
6250 else if (GET_CODE (DECL_RTL (exp)) == MEM
6251 && modifier != EXPAND_CONST_ADDRESS
6252 && modifier != EXPAND_SUM
6253 && modifier != EXPAND_INITIALIZER
6254 && (! memory_address_p (DECL_MODE (exp),
6255 XEXP (DECL_RTL (exp), 0))
6257 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6258 temp = replace_equiv_address (DECL_RTL (exp),
6259 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6261 /* If we got something, return it. But first, set the alignment
6262 if the address is a register. */
6265 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6266 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6271 /* If the mode of DECL_RTL does not match that of the decl, it
6272 must be a promoted value. We return a SUBREG of the wanted mode,
6273 but mark it so that we know that it was already extended. */
6275 if (GET_CODE (DECL_RTL (exp)) == REG
6276 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6278 /* Get the signedness used for this variable. Ensure we get the
6279 same mode we got when the variable was declared. */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();
6284 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6285 SUBREG_PROMOTED_VAR_P (temp) = 1;
6286 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
      return DECL_RTL (exp);

    case INTEGER_CST:
6293 return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

    case REAL_CST:
6300 /* If optimized, generate immediate CONST_DOUBLE
6301 which will be turned into memory by reload if necessary.
6303 We used to force a register so that loop.c could see it. But
6304 this does not allow gen_* patterns to perform optimizations with
6305 the constants. It also produces two insns in cases like "x = 1.0;".
6306 On most machines, floating-point constants are not permitted in
6307 many insns, so we'd end up copying it to a register in any case.
6309 Now, we do the copying in expand_binop, if appropriate. */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
6314 if (! TREE_CST_RTL (exp))
6315 output_constant_def (exp, 1);
6317 /* TREE_CST_RTL probably contains a constant address.
6318 On RISC machines where a constant address isn't valid,
6319 make some insns to get that address into a register. */
6320 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6321 && modifier != EXPAND_CONST_ADDRESS
6322 && modifier != EXPAND_INITIALIZER
6323 && modifier != EXPAND_SUM
6324 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6326 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6327 return replace_equiv_address (TREE_CST_RTL (exp),
6328 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6329 return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
6334 const char *saved_input_filename = input_filename;
6335 int saved_lineno = lineno;
6336 input_filename = EXPR_WFL_FILENAME (exp);
6337 lineno = EXPR_WFL_LINENO (exp);
6338 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6339 emit_line_note (input_filename, lineno);
6340 /* Possibly avoid switching back and forth here. */
6341 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6342 input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
6348 context = decl_function_context (exp);
6350 /* If this SAVE_EXPR was at global context, assume we are an
6351 initialization function and move it into our context. */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6355 /* We treat inline_function_decl as an alias for the current function
6356 because that is the inline function whose vars, types, etc.
6357 are being merged into the current function.
6358 See expand_inline_function. */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;
6362 /* If this is non-local, handle it. */
6365 /* The following call just exists to abort if the context is
6366 not of a containing function. */
6367 find_function_data (context);
6369 temp = SAVE_EXPR_RTL (exp);
6370 if (temp && GET_CODE (temp) == REG)
6372 put_var_into_stack (exp);
6373 temp = SAVE_EXPR_RTL (exp);
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  SAVE_EXPR_RTL (exp)
	    = replace_equiv_address (temp,
				     fix_lexical_addr (XEXP (temp, 0), exp));
6381 if (SAVE_EXPR_RTL (exp) == 0)
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);
6391 SAVE_EXPR_RTL (exp) = temp;
6392 if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);
6396 /* If the mode of TEMP does not match that of the expression, it
6397 must be a promoted value. We pass store_expr a SUBREG of the
6398 wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */
6402 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6404 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6405 SUBREG_PROMOTED_VAR_P (temp) = 1;
6406 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6409 if (temp == const0_rtx)
6410 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
6414 TREE_USED (exp) = 1;
6417 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6418 must be a promoted value. We return a SUBREG of the wanted mode,
6419 but mark it so that we know that it was already extended. */
6421 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6422 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6424 /* Compute the signedness and make the proper SUBREG. */
6425 promote_mode (type, mode, &unsignedp, 0);
6426 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6427 SUBREG_PROMOTED_VAR_P (temp) = 1;
6428 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6432 return SAVE_EXPR_RTL (exp);
6437 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6438 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6442 case PLACEHOLDER_EXPR:
6444 tree old_list = placeholder_list;
6445 tree placeholder_expr = 0;
6447 exp = find_placeholder (exp, &placeholder_expr);
6451 placeholder_list = TREE_CHAIN (placeholder_expr);
6452 temp = expand_expr (exp, original_target, tmode, modifier);
6453 placeholder_list = old_list;
6457 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6460 case WITH_RECORD_EXPR:
6461 /* Put the object on the placeholder list, expand our first operand,
6462 and pop the list. */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case GOTO_EXPR:
6471 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6472 expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
6478 expand_exit_loop_if_false (NULL,
6479 invert_truthvalue (TREE_OPERAND (exp, 0)));
6482 case LABELED_BLOCK_EXPR:
6483 if (LABELED_BLOCK_BODY (exp))
6484 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6485 /* Should perhaps use expand_label, but this is simpler and safer. */
6486 do_pending_stack_adjust ();
6487 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6490 case EXIT_BLOCK_EXPR:
6491 if (EXIT_BLOCK_RETURN (exp))
6492 sorry ("returned value in block_exit_expr");
6493 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6498 expand_start_loop (1);
6499 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6507 tree vars = TREE_OPERAND (exp, 0);
6508 int vars_need_expansion = 0;
6510 /* Need to open a binding contour here because
6511 if there are any cleanups they must be contained here. */
6512 expand_start_bindings (2);
6514 /* Mark the corresponding BLOCK for output in its proper place. */
6515 if (TREE_OPERAND (exp, 2) != 0
6516 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6517 insert_block (TREE_OPERAND (exp, 2));
	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }
6531 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6533 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
6546 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6547 free_temps_for_rtl_expr (exp);
6548 return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

6557 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}
6563 /* All elts simple constants => refer to a constant in memory. But
6564 if this is a non-BLKmode mode, let it store a field at a time
6565 since that should make a CONST_INT or CONST_DOUBLE when we
6566 fold. Likewise, if we have a target we can use, it is best to
6567 store directly into the target unless the type is large enough
6568 that memcpy will be used. If we are making an initializer and
6569 all operands are constant, put it in memory as well. */
6570 else if ((TREE_STATIC (exp)
6571 && ((mode == BLKmode
6572 && ! (target != 0 && safe_from_p (target, exp, 1)))
6573 || TREE_ADDRESSABLE (exp)
6574 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6575 && (! MOVE_BY_PIECES_P
		       (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			TYPE_ALIGN (type)))
6578 && ! mostly_zeros_p (exp))))
6579 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6581 rtx constructor = output_constant_def (exp, 1);
6583 if (modifier != EXPAND_CONST_ADDRESS
6584 && modifier != EXPAND_INITIALIZER
6585 && modifier != EXPAND_SUM)
6586 constructor = validize_mem (constructor);
6592 /* Handle calls that pass values in multiple non-contiguous
6593 locations. The Irix 6 ABI has examples of this. */
6594 if (target == 0 || ! safe_from_p (target, exp, 1)
6595 || GET_CODE (target) == PARALLEL)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);
6603 store_constructor (exp, target, 0,
6604 int_size_in_bytes (TREE_TYPE (exp)));
6610 tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);
	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
6617 && TREE_CODE (index) == INTEGER_CST
6618 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6619 && GET_MODE_CLASS (mode) == MODE_INT
6620 && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_WRITE)
	  return
	    GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6625 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6626 op0 = memory_address (mode, op0);
6627 temp = gen_rtx_MEM (mode, op0);
6628 set_mem_attributes (temp, exp, 0);
6630 /* If we are writing to this object and its type is a record with
6631 readonly fields, we must mark it as readonly so it will
6632 conflict with readonly references to those fields. */
6633 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6634 RTX_UNCHANGING_P (temp) = 1;
6640 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6644 tree array = TREE_OPERAND (exp, 0);
6645 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6646 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6647 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6650 /* Optimize the special-case of a zero lower bound.
6652 We convert the low_bound to sizetype to avoid some problems
6653 with constant folding. (E.g. suppose the lower bound is 1,
6654 and its mode is QI. Without the conversion, (ARRAY
6655 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6656 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6658 if (! integer_zerop (low_bound))
6659 index = size_diffop (index, convert (sizetype, low_bound));
6661 /* Fold an expression like: "foo"[2].
6662 This is not done in fold so it won't happen inside &.
6663 Don't fold if this is for wide characters since it's too
6664 difficult to do correctly and this is a very rare case. */
6666 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6667 && TREE_CODE (array) == STRING_CST
6668 && TREE_CODE (index) == INTEGER_CST
6669 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6670 && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return
	    GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6675 /* If this is a constant index into a constant array,
6676 just get the value from the array. Handle both the cases when
6677 we have an explicit constructor and when our operand is a variable
6678 that was declared const. */
6680 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6681 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6682 && TREE_CODE (index) == INTEGER_CST
6683 && 0 > compare_tree_int (index,
6684 list_length (CONSTRUCTOR_ELTS
6685 (TREE_OPERAND (exp, 0)))))
6689 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6690 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }
6699 else if (optimize >= 1
6700 && modifier != EXPAND_CONST_ADDRESS
6701 && modifier != EXPAND_INITIALIZER
6702 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6703 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6704 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6706 if (TREE_CODE (index) == INTEGER_CST)
6708 tree init = DECL_INITIAL (array);
6710 if (TREE_CODE (init) == CONSTRUCTOR)
		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

6720 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
6724 else if (TREE_CODE (init) == STRING_CST
6725 && 0 > compare_tree_int (index,
6726 TREE_STRING_LENGTH (init)))
6728 tree type = TREE_TYPE (TREE_TYPE (init));
6729 enum machine_mode mode = TYPE_MODE (type);
6731 if (GET_MODE_CLASS (mode) == MODE_INT
6732 && GET_MODE_SIZE (mode) == 1)
		      return (GEN_INT
			      (TREE_STRING_POINTER
			       (init)[TREE_INT_CST_LOW (index)]));
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
6745 /* If the operand is a CONSTRUCTOR, we can just extract the
6746 appropriate field if it is present. Don't do this if we have
6747 already written the data since we want to refer to that copy
6748 and varasm.c assumes that's what we'll do. */
6749 if (code == COMPONENT_REF
6750 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6751 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6755 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6756 elt = TREE_CHAIN (elt))
6757 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6758 /* We can normally use the value of the field in the
6759 CONSTRUCTOR. However, if this is a bitfield in
6760 an integral mode that we can fit in a HOST_WIDE_INT,
6761 we must mask only the number of bits in the bitfield,
6762 since this is done implicitly by the constructor. If
6763 the bitfield does not meet either of those conditions,
6764 we can't do this optimization. */
6765 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		  || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
		       == MODE_INT)
		      && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6769 <= HOST_BITS_PER_WIDE_INT))))
6771 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6772 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6774 HOST_WIDE_INT bitsize
6775 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6777 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6779 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
6802 enum machine_mode mode1;
6803 HOST_WIDE_INT bitsize, bitpos;
6806 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6807 &mode1, &unsignedp, &volatilep);
6810 /* If we got back the original object, something is wrong. Perhaps
6811 we are evaluating an expression too early. In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();
6816 /* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to suffice.  This occurs in unchecked conversion in Ada.  */

	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  ? target : NULL_RTX),
			 VOIDmode,
6827 (modifier == EXPAND_INITIALIZER
6828 || modifier == EXPAND_CONST_ADDRESS)
6829 ? modifier : EXPAND_NORMAL);
6831 /* If this is a constant, put it into a register if it is a
6832 legitimate constant and OFFSET is 0 and memory if it isn't. */
6833 if (CONSTANT_P (op0))
6835 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
6845 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6847 /* If this object is in a register, put it into memory.
6848 This case can't occur in C, but can in Ada if we have
6849 unchecked conversion of an expression from a scalar type to
6850 an array or record type. */
6851 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6852 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6854 /* If the operand is a SAVE_EXPR, we can deal with this by
6855 forcing the SAVE_EXPR into memory. */
6856 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6858 put_var_into_stack (TREE_OPERAND (exp, 0));
		    op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		  }
		else
		  {
		    tree nt
		      = build_qualified_type (TREE_TYPE (tem),
6865 (TYPE_QUALS (TREE_TYPE (tem))
6866 | TYPE_QUAL_CONST));
6867 rtx memloc = assign_temp (nt, 1, 1, 1);
6869 emit_move_insn (memloc, op0);
6874 if (GET_CODE (op0) != MEM)
6877 if (GET_MODE (offset_rtx) != ptr_mode)
6878 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6880 #ifdef POINTERS_EXTEND_UNSIGNED
6881 if (GET_MODE (offset_rtx) != Pmode)
6882 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6885 /* A constant address in OP0 can have VOIDmode, we must not try
6886 to call force_reg for that case. Avoid that case. */
6887 if (GET_CODE (op0) == MEM
6888 && GET_MODE (op0) == BLKmode
6889 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6891 && (bitpos % bitsize) == 0
6892 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6893 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6895 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = (replace_equiv_address
			 (op0,
			  force_reg (GET_MODE (XEXP (temp, 0)),
				     XEXP (temp, 0))));
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
6908 highest_pow2_factor (offset));
6911 /* Don't forget about volatility even if this is a bitfield. */
6912 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6914 if (op0 == orig_op0)
6915 op0 = copy_rtx (op0);
6917 MEM_VOLATILE_P (op0) = 1;
6920 /* In cases where an aligned union has an unaligned object
6921 as a field, we might be extracting a BLKmode value from
6922 an integer-mode (e.g., SImode) object. Handle this case
6923 by doing the extract into an object as wide as the field
6924 (which we know to be the width of a basic mode), then
6925 storing into memory, and changing the mode to BLKmode. */
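	  /* An illustrative case (not from the original sources): a
	     BLKmode field packed inside an SImode union member is
	     extracted below into an integer-mode temporary, stored to
	     memory, and then re-viewed as a BLKmode MEM.  */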
6926 if (mode1 == VOIDmode
6927 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6928 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6929 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6930 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6931 && modifier != EXPAND_CONST_ADDRESS
6932 && modifier != EXPAND_INITIALIZER)
6933 /* If the field isn't aligned enough to fetch as a memref,
6934 fetch it as a bit field. */
6935 || (mode1 != BLKmode
6936 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6937 && ((TYPE_ALIGN (TREE_TYPE (tem))
6938 < GET_MODE_ALIGNMENT (mode))
6939 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6940 /* If the type and the field are a constant size and the
6941 size of the type isn't the same size as the bitfield,
6942 we must use bitfield operations. */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
6949 enum machine_mode ext_mode = mode;
6951 if (ext_mode == BLKmode
6952 && ! (target != 0 && GET_CODE (op0) == MEM
6953 && GET_CODE (target) == MEM
6954 && bitpos % BITS_PER_UNIT == 0))
6955 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6957 if (ext_mode == BLKmode)
6959 /* In this case, BITPOS must start at a byte boundary and
6960 TARGET, if specified, must be a MEM. */
6961 if (GET_CODE (op0) != MEM
6962 || (target != 0 && GET_CODE (target) != MEM)
6963 || bitpos % BITS_PER_UNIT != 0)
6966 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6968 target = assign_temp (type, 0, 1, 1);
6970 emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT));

		return target;
	      }

6977 op0 = validize_mem (op0);
6979 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6980 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6982 op0 = extract_bit_field (op0, bitsize, bitpos,
6983 unsignedp, target, ext_mode, ext_mode,
6984 int_size_in_bytes (TREE_TYPE (tem)));
6986 /* If the result is a record type and BITSIZE is narrower than
6987 the mode of OP0, an integral mode, and this is a big endian
6988 machine, we must put the field into the high-order bits. */
6989 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6990 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6991 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6992 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);
6997 if (mode == BLKmode)
6999 rtx new = assign_temp (build_qualified_type
7000 (type_for_mode (ext_mode, 0),
7001 TYPE_QUAL_CONST), 0, 1, 1);
7003 emit_move_insn (new, op0);
7004 op0 = copy_rtx (new);
7005 PUT_MODE (op0, BLKmode);
7006 set_mem_attributes (op0, exp, 1);
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;
7017 /* Get a reference to just this component. */
7018 if (modifier == EXPAND_CONST_ADDRESS
7019 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7020 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7024 if (op0 == orig_op0)
7025 op0 = copy_rtx (op0);
7027 set_mem_attributes (op0, exp, 0);
7028 if (GET_CODE (XEXP (op0, 0)) == REG)
7029 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7031 MEM_VOLATILE_P (op0) |= volatilep;
7032 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7033 || modifier == EXPAND_CONST_ADDRESS
7034 || modifier == EXPAND_INITIALIZER)
7036 else if (target == 0)
7037 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7039 convert_move (target, op0, unsignedp);
7045 rtx insn, before = get_last_insn (), vtbl_ref;
7047 /* Evaluate the interior expression. */
	subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
				 tmode, modifier);
7051 /* Get or create an instruction off which to hang a note. */
7052 if (REG_P (subtarget))
7055 insn = get_last_insn ();
7058 if (! INSN_P (insn))
7059 insn = prev_nonnote_insn (insn);
7063 target = gen_reg_rtx (GET_MODE (subtarget));
7064 insn = emit_move_insn (target, subtarget);
7067 /* Collect the data for the note. */
7068 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7069 vtbl_ref = plus_constant (vtbl_ref,
7070 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7071 /* Discard the initial CONST that was added. */
7072 vtbl_ref = XEXP (vtbl_ref, 0);
	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }

7080 /* Intended for a reference to a buffer of a file-object in Pascal.
7081 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
7091 rlo = set_low - (set_low%bits_per_word);
7092 the_word = set [ (index - rlo)/bits_per_word ];
7093 bit_index = index % bits_per_word;
7094 bitmask = 1 << bit_index;
7095 return !!(the_word & bitmask); */
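	/* A worked example of the algorithm above (illustrative only):
	   with bits_per_word == 8, set_low == 3 and index == 13,
	   rlo == 3 - 3 == 0, the_word == set[13/8] == set[1],
	   bit_index == 13 % 8 == 5, and bitmask == 1 << 5.  */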
7097 tree set = TREE_OPERAND (exp, 0);
7098 tree index = TREE_OPERAND (exp, 1);
7099 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7100 tree set_type = TREE_TYPE (set);
7101 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7102 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7103 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7104 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7105 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7106 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7107 rtx setaddr = XEXP (setval, 0);
7108 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
7112 /* If domain is empty, answer is no. Likewise if index is constant
7113 and out of bounds. */
7114 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7115 && TREE_CODE (set_low_bound) == INTEGER_CST
7116 && tree_int_cst_lt (set_high_bound, set_low_bound))
7117 || (TREE_CODE (index) == INTEGER_CST
7118 && TREE_CODE (set_low_bound) == INTEGER_CST
7119 && tree_int_cst_lt (index, set_low_bound))
7120 || (TREE_CODE (set_high_bound) == INTEGER_CST
7121 && TREE_CODE (index) == INTEGER_CST
7122 && tree_int_cst_lt (set_high_bound, index))))
7126 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7128 /* If we get here, we have to generate the code for both cases
7129 (in range and out of range). */
7131 op0 = gen_label_rtx ();
7132 op1 = gen_label_rtx ();
7134 if (! (GET_CODE (index_val) == CONST_INT
7135 && GET_CODE (lo_r) == CONST_INT))
7136 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7137 GET_MODE (index_val), iunsignedp, op1);
7139 if (! (GET_CODE (index_val) == CONST_INT
7140 && GET_CODE (hi_r) == CONST_INT))
7141 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7142 GET_MODE (index_val), iunsignedp, op1);
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
7146 if (GET_CODE (lo_r) == CONST_INT)
7147 rlow = GEN_INT (INTVAL (lo_r)
7148 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
7151 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7152 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7154 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7155 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7157 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7158 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7159 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7160 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7162 addr = memory_address (byte_mode,
7163 expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

7167 /* Extract the bit we want to examine. */
7168 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7169 gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
7172 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7173 GET_MODE (target) == byte_mode ? target : 0,
7174 1, OPTAB_LIB_WIDEN);
7176 if (result != target)
7177 convert_move (target, result, 1);
7179 /* Output the code to handle the out-of-range case. */
7182 emit_move_insn (target, const0_rtx);
7187 case WITH_CLEANUP_EXPR:
7188 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7190 WITH_CLEANUP_EXPR_RTL (exp)
7191 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7192 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7194 /* That's it for this cleanup. */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);
7199 case CLEANUP_POINT_EXPR:
7201 /* Start a new binding layer that will keep track of all cleanup
7202 actions to be performed. */
7203 expand_start_bindings (2);
7205 target_temp_slot_level = temp_slot_level;
7207 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7208 /* If we're going to use this value, load it up now. */
	if (! ignore)
	  op0 = force_not_mem (op0);
7211 preserve_temp_slots (op0);
7212 expand_end_bindings (NULL_TREE, 0, 0);
7217 /* Check for a built-in function. */
7218 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7223 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7224 == BUILT_IN_FRONTEND)
7225 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}
7230 return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
7235 case REFERENCE_EXPR:
7236 if (TREE_OPERAND (exp, 0) == error_mark_node)
7239 if (TREE_CODE (type) == UNION_TYPE)
7241 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7243 /* If both input and output are BLKmode, this conversion isn't doing
7244 anything except possibly changing memory attribute. */
7245 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

7250 result = copy_rtx (result);
7251 set_mem_attributes (result, exp, 0);
7256 target = assign_temp (type, 0, 1, 1);
7258 if (GET_CODE (target) == MEM)
7259 /* Store data into beginning of memory target. */
7260 store_expr (TREE_OPERAND (exp, 0),
7261 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7263 else if (GET_CODE (target) == REG)
7264 /* Store this field into a union of the proper type. */
7265 store_field (target,
7266 MIN ((int_size_in_bytes (TREE_TYPE
					      (TREE_OPERAND (exp, 0)))
				 * BITS_PER_UNIT),
			    (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7270 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7271 VOIDmode, 0, type, 0);
7275 /* Return the entire union. */
7279 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);
7284 /* If the signedness of the conversion differs and OP0 is
7285 a promoted SUBREG, clear that indication since we now
7286 have to do the proper extension. */
7287 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7288 && GET_CODE (op0) == SUBREG)
7289 SUBREG_PROMOTED_VAR_P (op0) = 0;
7294 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7295 if (GET_MODE (op0) == mode)
7298 /* If OP0 is a constant, just convert it into the proper mode. */
7299 if (CONSTANT_P (op0))
7301 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7302 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7304 if (modifier == EXPAND_INITIALIZER)
7305 return simplify_gen_subreg (mode, op0, inner_mode,
7306 subreg_lowpart_offset (mode,
7309 return convert_modes (mode, inner_mode, op0,
7310 TREE_UNSIGNED (inner_type));
7313 if (modifier == EXPAND_INITIALIZER)
7314 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
7325 case VIEW_CONVERT_EXPR:
7326 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7328 /* If the input and output modes are both the same, we are done.
7329 Otherwise, if neither mode is BLKmode and both are within a word, we
7330 can use gen_lowpart. If neither is true, make sure the operand is
7331 in memory and convert the MEM to the new mode. */
7332 if (TYPE_MODE (type) == GET_MODE (op0))
7334 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7335 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7336 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7337 op0 = gen_lowpart (TYPE_MODE (type), op0);
7338 else if (GET_CODE (op0) != MEM)
7340 /* If the operand is not a MEM, force it into memory. Since we
	     are going to be changing the mode of the MEM, don't call
7342 force_const_mem for constants because we don't allow pool
7343 constants to change mode. */
7344 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7346 if (TREE_ADDRESSABLE (exp))
	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
7352 (TYPE_MODE (inner_type),
7353 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7355 emit_move_insn (target, op0);
7359 /* At this point, OP0 is in the correct mode. If the output type is such
7360 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
7363 if (GET_CODE (op0) == MEM)
7365 op0 = copy_rtx (op0);
7367 if (TYPE_ALIGN_OK (type))
7368 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7369 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7370 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7372 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7373 HOST_WIDE_INT temp_size
7374 = MAX (int_size_in_bytes (inner_type),
7375 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7376 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7377 temp_size, 0, type);
7378 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7380 if (TREE_ADDRESSABLE (exp))
7383 if (GET_MODE (op0) == BLKmode)
7384 emit_block_move (new_with_op0_mode, op0,
7385 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
7401 this_optab = ! unsignedp && flag_trapv
7402 && (GET_MODE_CLASS (mode) == MODE_INT)
7403 ? addv_optab : add_optab;
7405 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7406 something else, make sure we add the register to the constant and
7407 then to the other thing. This case can occur during strength
7408 reduction and doing it this way will produce better code if the
7409 frame pointer or argument pointer is eliminated.
7411 fold-const.c will ensure that the constant is always in the inner
7412 PLUS_EXPR, so the only case we need to do anything about is if
7413 sp, ap, or fp is our second argument, in which case we must swap
7414 the innermost first argument and our second argument. */
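      /* For instance (an illustrative shape, not from the original
	 comments): (X + C) + FP is rearranged to (FP + C) + X, so the
	 constant and FP fold together once the frame pointer is
	 eliminated.  */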
7416 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7417 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7418 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7419 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7420 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7421 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7423 tree t = TREE_OPERAND (exp, 1);
7425 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7426 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7429 /* If the result is to be ptr_mode and we are adding an integer to
7430 something, we might be forming a constant. So try to use
7431 plus_constant. If it produces a sum and we can't accept it,
7432 use force_operand. This allows P = &ARR[const] to generate
7433 efficient code on machines where a SYMBOL_REF is not a valid
7436 If this is an EXPAND_SUM call, always return the sum. */
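      /* For instance (illustrative), expanding &arr[4] for an `int'
	 array can yield (plus (symbol_ref "arr") (const_int 16))
	 directly, rather than computing the address at run time.  */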
7437 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7438 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7440 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7441 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7442 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
7448 /* Use immed_double_const to ensure that the constant is
7449 truncated according to the mode of OP1, then sign extended
7450 to a HOST_WIDE_INT. Using the constant directly can result
7451 in non-canonical RTL in a 64x32 cross compile. */
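	      /* (Illustrative: on a 64-bit host targeting a 32-bit
		 machine, an SImode low part of 0xffffffff must become
		 the canonical (const_int -1), not (const_int
		 0xffffffff).)  */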
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7456 op1 = plus_constant (op1, INTVAL (constant_part));
7457 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
7462 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7463 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7464 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
7470 if (! CONSTANT_P (op0))
7472 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7473 VOIDmode, modifier);
7474 /* Don't go to both_summands if modifier
7475 says it's not right to return a PLUS. */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
7480 /* Use immed_double_const to ensure that the constant is
7481 truncated according to the mode of OP1, then sign extended
7482 to a HOST_WIDE_INT. Using the constant directly can result
7483 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7488 op0 = plus_constant (op0, INTVAL (constant_part));
7489 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

7495 /* No sense saving up arithmetic to be done
7496 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
7499 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
7506 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7507 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
7511 if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

7518 /* If adding to a sum including a constant,
7519 associate it to put the constant outside. */
7520 if (GET_CODE (op1) == PLUS
7521 && CONSTANT_P (XEXP (op1, 1)))
7523 rtx constant_term = const0_rtx;
	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
7528 /* Ensure that MULT comes first if there is one. */
7529 else if (GET_CODE (op0) == MULT)
7530 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7534 /* Let's also eliminate constants from op0 if possible. */
7535 op0 = eliminate_constant_term (op0, &constant_term);
7537 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7538 their sum should be a constant. Form it into OP1, since the
7539 result we want will then be OP0 + OP1. */
	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}
7549 /* Put a constant term last and put a multiplication first. */
7550 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7551 temp = op1, op1 = op0, op0 = temp;
7553 temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);

    case MINUS_EXPR:
7557 /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
7560 /* Handle difference of two symbolic constants,
7561 for the sake of an initializer. */
7562 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7563 && really_constant_p (TREE_OPERAND (exp, 0))
7564 && really_constant_p (TREE_OPERAND (exp, 1)))
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);
7571 /* If the last operand is a CONST_INT, use plus_constant of
7572 the negated constant. Else make the MINUS. */
7573 if (GET_CODE (op1) == CONST_INT)
7574 return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
7578 /* Convert A - const to A + (-const). */
7579 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7581 tree negated = fold (build1 (NEGATE_EXPR, type,
7582 TREE_OPERAND (exp, 1)));
7584 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7585 /* If we can't negate the constant in TYPE, leave it alone and
7586 expand_binop will negate it for us. We used to try to do it
7587 here in the signed version of TYPE, but that doesn't work
7588 on POINTER_TYPEs. */;
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
7595 this_optab = ! unsignedp && flag_trapv
7596 && (GET_MODE_CLASS(mode) == MODE_INT)
		   ? subv_optab : sub_optab;
      goto binop;

    case MULT_EXPR:
7601 /* If first operand is constant, swap them.
7602 Thus the following special case checks need only
7603 check the second operand. */
7604 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7606 tree t1 = TREE_OPERAND (exp, 0);
7607 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7608 TREE_OPERAND (exp, 1) = t1;
7611 /* Attempt to return something suitable for generating an
7612 indexed address, for machines that support that. */
7614 if (modifier == EXPAND_SUM && mode == ptr_mode
7615 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7616 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

7621 /* Apply distributive law if OP0 is x+c. */
7622 if (GET_CODE (op0) == PLUS
7623 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7628 (mode, XEXP (op0, 0),
7629 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7630 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7631 * INTVAL (XEXP (op0, 1))));
7633 if (GET_CODE (op0) != REG)
7634 op0 = force_operand (op0, NULL_RTX);
7635 if (GET_CODE (op0) != REG)
7636 op0 = copy_to_mode_reg (mode, op0);
	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
7646 /* Check for multiplying things that have been extended
7647 from a narrower type. If this machine supports multiplying
7648 in that narrower type with a result in the desired type,
7649 do it that way, and avoid the explicit type-conversion. */
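      /* For instance (illustrative), (int) s1 * (int) s2 with HImode
	 operands may map to a mulhisi3-style widening multiply when the
	 target provides one, instead of extending both operands to
	 SImode first.  */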
7650 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7651 && TREE_CODE (type) == INTEGER_TYPE
7652 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7653 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7654 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7655 && int_fits_type_p (TREE_OPERAND (exp, 1),
7656 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7657 /* Don't use a widening multiply if a shift will do. */
7658 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7659 > HOST_BITS_PER_WIDE_INT)
7660 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7662 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7663 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7665 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7666 /* If both operands are extended, they must either both
7667 be zero-extended or both be sign-extended. */
7668 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7670 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7672 enum machine_mode innermode
7673 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7674 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7675 ? smul_widen_optab : umul_widen_optab);
7676 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7677 ? umul_widen_optab : smul_widen_optab);
7678 if (mode == GET_MODE_WIDER_MODE (innermode))
7680 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7682 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7683 NULL_RTX, VOIDmode, 0);
7684 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7685 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7688 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7689 NULL_RTX, VOIDmode, 0);
7692 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7693 && innermode == word_mode)
7696 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7697 NULL_RTX, VOIDmode, 0);
7698 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7699 op1 = convert_modes (innermode, mode,
7700 expand_expr (TREE_OPERAND (exp, 1),
7701 NULL_RTX, VOIDmode, 0),
7704 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7705 NULL_RTX, VOIDmode, 0);
7706 temp = expand_binop (mode, other_optab, op0, op1, target,
7707 unsignedp, OPTAB_LIB_WIDEN);
7708 htem = expand_mult_highpart_adjust (innermode,
7709 gen_highpart (innermode, temp),
7711 gen_highpart (innermode, temp),
7713 emit_move_insn (gen_highpart (innermode, temp), htem);
7718 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7719 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7720 return expand_mult (mode, op0, op1, target, unsignedp);
7722 case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
7725 case ROUND_DIV_EXPR:
7726 case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
7729 /* Possible optimization: compute the dividend with EXPAND_SUM
7730 then if the divisor is constant can optimize the case
7731 where some terms of the dividend have coeffs divisible by it. */
7732 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7733 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7734 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
7740 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7741 && !real_onep (TREE_OPERAND (exp, 0)))
7742 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7743 build (RDIV_EXPR, type,
7744 build_real (type, dconst1),
7745 TREE_OPERAND (exp, 1))),
7746 target, tmode, unsignedp);
      this_optab = sdiv_optab;
      goto binop;
7750 case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
7753 case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
7756 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7757 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7758 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7760 case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
7763 abort (); /* Not used for C. */
7765 case FIX_TRUNC_EXPR:
7766 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7768 target = gen_reg_rtx (mode);
7769 expand_fix (target, op0, unsignedp);
7773 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7775 target = gen_reg_rtx (mode);
7776 /* expand_float can't figure out what to do if FROM has VOIDmode.
7777 So give it the correct mode. With -O, cse will optimize this. */
7778 if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
7781 expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
7786 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7787 temp = expand_unop (mode,
7788 ! unsignedp && flag_trapv
7789 && (GET_MODE_CLASS(mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
7796 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7798 /* Handle complex values specially. */
7799 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7800 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7801 return expand_complex_abs (mode, op0, target, unsignedp);
7803 /* Unsigned abs is simply the operand. Testing here means we don't
7804 risk generating incorrect code below. */
      if (TREE_UNSIGNED (type))
	return op0;
7808 return expand_abs (mode, op0, target, unsignedp,
7809 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
7814 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7815 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7816 || GET_MODE (target) != mode
7817 || (GET_CODE (target) == REG
7818 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7819 target = gen_reg_rtx (mode);
7820 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7821 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7823 /* First try to do it with a special MIN or MAX instruction.
7824 If that does not win, use a conditional jump to select the proper
7826 this_optab = (TREE_UNSIGNED (type)
7827 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7828 : (code == MIN_EXPR ? smin_optab : smax_optab));
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

7838 if (GET_CODE (target) == MEM)
7839 target = gen_reg_rtx (mode);
7842 emit_move_insn (target, op0);
7844 op0 = gen_label_rtx ();
7846 /* If this mode is an integer too wide to compare properly,
7847 compare word by word. Rely on cse to optimize constant cases. */
7848 if (GET_MODE_CLASS (mode) == MODE_INT
7849 && ! can_compare_p (GE, mode, ccp_jump))
7851 if (code == MAX_EXPR)
7852 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7853 target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7861 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
7870 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7871 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7877 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7878 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7883 /* ??? Can optimize bitwise operations with one arg constant.
7884 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7885 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */
7888 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7889 boolean values when we want in all cases to compute both of them. In
7890 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7891 as actual zero-or-1 values and then bitwise anding. In cases where
7892 there cannot be any side effects, better code would be made by
7893 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7894 how to recognize those cases. */
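      /* E.g. a front end may emit TRUTH_AND_EXPR for "a && b" when both
	 operands are known to be free of side effects; it is expanded
	 here as a plain bitwise AND of two 0-or-1 values.  */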
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
7917 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
7921 /* Could determine the answer when only additive constants differ. Also,
7922 the addition of one can be handled by changing the condition. */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
7940 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7941 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target != 0
	  && GET_CODE (original_target) == REG
7944 && (GET_MODE (original_target)
7945 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

7950 if (temp != original_target)
7951 temp = copy_to_reg (temp);
7953 op1 = gen_label_rtx ();
7954 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7955 GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
7961 /* If no set-flag instruction, must generate a conditional
7962 store into a temporary variable. Drop through
7963 and handle this like && and ||. */
7965 case TRUTH_ANDIF_EXPR:
7966 case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
7969 /* Make sure we don't have a hard reg (such as function's return
7970 value) live across basic blocks, if not optimizing. */
7971 || (!optimize && GET_CODE (target) == REG
7972 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7973 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
      if (target)
	emit_clr_insn (target);
7978 op1 = gen_label_rtx ();
7979 jumpifnot (exp, op1);
      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
7985 return ignore ? const0_rtx : target;
7987 case TRUTH_NOT_EXPR:
7988 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7989 /* The parser is careful to generate TRUTH_NOT_EXPR
7990 only with operands that are always zero or one. */
7991 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
7998 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, 0);

    case COND_EXPR:
8005 /* If we would have a "singleton" (see below) were it not for a
8006 conversion in each arm, bring that conversion back out. */
8007 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8008 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8009 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8010 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8012 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8013 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8015 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8016 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8017 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8018 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8019 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8020 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8021 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8022 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8023 return expand_expr (build1 (NOP_EXPR, type,
8024 build (COND_EXPR, TREE_TYPE (iftrue),
					   TREE_OPERAND (exp, 0),
					   iftrue, iffalse),
				    target, tmode, modifier);
8031 /* Note that COND_EXPRs whose type is a structure or union
8032 are required to be constructed to contain assignments of
8033 a temporary variable, so that we can evaluate them here
8034 for side effect only. If type is void, we must do likewise. */
8036 /* If an arm of the branch requires a cleanup,
8037 only that cleanup is performed. */
	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;
8042 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8043 convert it to our mode, if necessary. */
8044 if (integer_onep (TREE_OPERAND (exp, 1))
8045 && integer_zerop (TREE_OPERAND (exp, 2))
8046 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

8055 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8056 if (GET_MODE (op0) == mode)
8060 target = gen_reg_rtx (mode);
8061 convert_move (target, op0, unsignedp);
8065 /* Check for X ? A + B : A. If we have this, we can copy A to the
8066 output and conditionally add B. Similarly for unary operations.
8067 Don't do this if X has side-effects because those side effects
8068 might affect A or B and the "?" operation is a sequence point in
8069 ANSI. (operand_equal_p tests for side effects.) */
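	/* For instance (illustrative), "x ? a + 4 : a" lets us copy A to
	   the output and conditionally add 4; SINGLETON is then A and
	   BINARY_OP is A + 4.  */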
8071 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8072 && operand_equal_p (TREE_OPERAND (exp, 2),
8073 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8074 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8075 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8076 && operand_equal_p (TREE_OPERAND (exp, 1),
8077 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8078 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8079 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8080 && operand_equal_p (TREE_OPERAND (exp, 2),
8081 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8082 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8083 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8084 && operand_equal_p (TREE_OPERAND (exp, 1),
8085 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8086 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8088 /* If we are not to produce a result, we have no target. Otherwise,
8089 if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
8095 else if (original_target
8096 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8097 || (singleton && GET_CODE (original_target) == REG
8098 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8099 && original_target == var_rtx (singleton)))
8100 && GET_MODE (original_target) == mode
8101 #ifdef HAVE_conditional_move
8102 && (! can_conditionally_move_p (mode)
8103 || GET_CODE (original_target) == REG
8104 || TREE_ADDRESSABLE (type))
#endif
		 && (GET_CODE (original_target) != MEM
8107 || TREE_ADDRESSABLE (type)))
8108 temp = original_target;
8109 else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);
8114 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8115 do the test of X as a store-flag operation, do this as
8116 A + ((X != 0) << log C). Similarly for other simple binary
8117 operators. Only do for C == 1 if BRANCH_COST is low. */
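	/* For instance (illustrative), "x ? a + 4 : a" becomes
	   a + ((x != 0) << 2): the 0-or-1 store-flag result is shifted
	   into place and no branch is needed.  */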
8118 if (temp && singleton && binary_op
8119 && (TREE_CODE (binary_op) == PLUS_EXPR
8120 || TREE_CODE (binary_op) == MINUS_EXPR
8121 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8122 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8123 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8124 : integer_onep (TREE_OPERAND (binary_op, 1)))
8125 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8129 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8130 ? addv_optab : add_optab)
8131 : TREE_CODE (binary_op) == MINUS_EXPR
8132 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8133 ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

8137 /* If we had X ? A : A + 1, do this as A + (X == 0).
8139 We have to invert the truth value here and then put it
8140 back later if do_store_flag fails. We cannot simply copy
8141 TREE_OPERAND (exp, 0) to another variable and modify that
8142 because invert_truthvalue can modify the tree pointed to by its argument. */
8144 if (singleton == TREE_OPERAND (exp, 1))
8145 TREE_OPERAND (exp, 0)
8146 = invert_truthvalue (TREE_OPERAND (exp, 0));
8148 result = do_store_flag (TREE_OPERAND (exp, 0),
8149 (safe_from_p (temp, singleton, 1)
8151 mode, BRANCH_COST <= 1);
8153 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8154 result = expand_shift (LSHIFT_EXPR, mode, result,
8155 build_int_2 (tree_log2
8159 (safe_from_p (temp, singleton, 1)
8160 ? temp : NULL_RTX), 0);
8164 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8165 return expand_binop (mode, boptab, op1, result, temp,
8166 unsignedp, OPTAB_LIB_WIDEN);
8168 else if (singleton == TREE_OPERAND (exp, 1))
8169 TREE_OPERAND (exp, 0)
8170 = invert_truthvalue (TREE_OPERAND (exp, 0));
8173 do_pending_stack_adjust ();
8175 op0 = gen_label_rtx ();
8177 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8181 /* If the target conflicts with the other operand of the
8182 binary op, we can't use it. Also, we can't use the target
8183 if it is a hard register, because evaluating the condition
8184 might clobber it. */
8186 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8187 || (GET_CODE (temp) == REG
8188 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8189 temp = gen_reg_rtx (mode);
8190 store_expr (singleton, temp, 0);
8193 expand_expr (singleton,
8194 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8195 if (singleton == TREE_OPERAND (exp, 1))
8196 jumpif (TREE_OPERAND (exp, 0), op0);
8198 jumpifnot (TREE_OPERAND (exp, 0), op0);
8200 start_cleanup_deferral ();
8201 if (binary_op && temp == 0)
8202 /* Just touch the other operand. */
8203 expand_expr (TREE_OPERAND (binary_op, 1),
8204 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8206 store_expr (build (TREE_CODE (binary_op), type,
8207 make_tree (type, temp),
8208 TREE_OPERAND (binary_op, 1)),
8211 store_expr (build1 (TREE_CODE (unary_op), type,
8212 make_tree (type, temp)),
8216 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8217 comparison operator. If we have one of these cases, set the
8218 output to A, branch on A (cse will merge these two references),
8219 then set the output to FOO. */
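/* For instance, for "a > 0 ? a : b" we store A into TEMP, branch on
   the comparison against zero (cse merges the two uses of A), and
   store B into TEMP on the fall-through path.  */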
8221 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8222 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8223 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8224 TREE_OPERAND (exp, 1), 0)
8225 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8226 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8227 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8229 if (GET_CODE (temp) == REG
8230 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8231 temp = gen_reg_rtx (mode);
8232 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8233 jumpif (TREE_OPERAND (exp, 0), op0);
8235 start_cleanup_deferral ();
8236 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8240 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8241 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8242 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8243 TREE_OPERAND (exp, 2), 0)
8244 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8245 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8246 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8248 if (GET_CODE (temp) == REG
8249 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8250 temp = gen_reg_rtx (mode);
8251 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8252 jumpifnot (TREE_OPERAND (exp, 0), op0);
8254 start_cleanup_deferral ();
8255 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8260 op1 = gen_label_rtx ();
8261 jumpifnot (TREE_OPERAND (exp, 0), op0);
8263 start_cleanup_deferral ();
8265 /* One branch of the cond can be void, if it never returns. For
8266 example A ? throw : E */
8268 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8269 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8271 expand_expr (TREE_OPERAND (exp, 1),
8272 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8273 end_cleanup_deferral ();
8275 emit_jump_insn (gen_jump (op1));
8278 start_cleanup_deferral ();
8280 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8281 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8283 expand_expr (TREE_OPERAND (exp, 2),
8284 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8287 end_cleanup_deferral ();
8298 /* Something needs to be initialized, but we didn't know
8299 where that thing was when building the tree. For example,
8300 it could be the return value of a function, or a parameter
8301 to a function which is laid out on the stack, or a temporary
8302 variable which must be passed by reference.
8304 We guarantee that the expression will either be constructed
8305 or copied into our original target. */
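/* A typical source of such a node is a C++ declaration like
   "S s = f ();", where the call is expected to construct its result
   directly into the storage for S.  */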
8307 tree slot = TREE_OPERAND (exp, 0);
8308 tree cleanups = NULL_TREE;
8311 if (TREE_CODE (slot) != VAR_DECL)
8315 target = original_target;
8317 /* Set this here so that if we get a target that refers to a
8318 register variable that's already been used, put_reg_into_stack
8319 knows that it should fix up those uses. */
8320 TREE_USED (slot) = 1;
8324 if (DECL_RTL_SET_P (slot))
8326 target = DECL_RTL (slot);
8327 /* If we have already expanded the slot, don't do it again. */
8329 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8334 target = assign_temp (type, 2, 0, 1);
8335 /* All temp slots at this level must not conflict. */
8336 preserve_temp_slots (target);
8337 SET_DECL_RTL (slot, target);
8338 if (TREE_ADDRESSABLE (slot))
8339 put_var_into_stack (slot);
8341 /* Since SLOT is not known to the called function
8342 to belong to its stack frame, we must build an explicit
8343 cleanup. This case occurs when we must build up a reference
8344 to pass the reference as an argument. In this case,
8345 it is very likely that such a reference need not be built here. */
8348 if (TREE_OPERAND (exp, 2) == 0)
8349 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8350 cleanups = TREE_OPERAND (exp, 2);
8355 /* This case does occur when expanding a parameter which
8356 needs to be constructed on the stack. The target
8357 is the actual stack address that we want to initialize.
8358 The function we call will perform the cleanup in this case. */
8360 /* If we have already assigned it space, use that space,
8361 not the target that we were passed in, as our target
8362 parameter is only a hint. */
8363 if (DECL_RTL_SET_P (slot))
8365 target = DECL_RTL (slot);
8366 /* If we have already expanded the slot, don't do it again. */
8368 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8373 SET_DECL_RTL (slot, target);
8374 /* If we must have an addressable slot, then make sure that
8375 the RTL that we just stored in slot is OK. */
8376 if (TREE_ADDRESSABLE (slot))
8377 put_var_into_stack (slot);
8381 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8382 /* Mark it as expanded. */
8383 TREE_OPERAND (exp, 1) = NULL_TREE;
8385 store_expr (exp1, target, 0);
8387 expand_decl_cleanup (NULL_TREE, cleanups);
8394 tree lhs = TREE_OPERAND (exp, 0);
8395 tree rhs = TREE_OPERAND (exp, 1);
8397 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8403 /* If lhs is complex, expand calls in rhs before computing it.
8404 That's so we don't compute a pointer and save it over a
8405 call. If lhs is simple, compute it first so we can give it
8406 as a target if the rhs is just a call. This avoids an
8407 extra temp and copy, and prevents a partial subsumption
8408 which makes bad code. Actually we could treat
8409 component_ref's of vars like vars. */
8411 tree lhs = TREE_OPERAND (exp, 0);
8412 tree rhs = TREE_OPERAND (exp, 1);
8416 /* Check for |= or &= of a bitfield of size one into another bitfield
8417 of size one. In this case, (unless we need the result of the
8418 assignment) we can do this more efficiently with a
8419 test followed by an assignment, if necessary.
8421 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8422 things change so we do, this code should be enhanced to handle it. */
8425 && TREE_CODE (lhs) == COMPONENT_REF
8426 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8427 || TREE_CODE (rhs) == BIT_AND_EXPR)
8428 && TREE_OPERAND (rhs, 0) == lhs
8429 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8430 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8431 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8433 rtx label = gen_label_rtx ();
8435 do_jump (TREE_OPERAND (rhs, 1),
8436 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8437 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8438 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8439 (TREE_CODE (rhs) == BIT_IOR_EXPR
8441 : integer_zero_node)),
8443 do_pending_stack_adjust ();
8448 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8454 if (!TREE_OPERAND (exp, 0))
8455 expand_null_return ();
8457 expand_return (TREE_OPERAND (exp, 0));
8460 case PREINCREMENT_EXPR:
8461 case PREDECREMENT_EXPR:
8462 return expand_increment (exp, 0, ignore);
8464 case POSTINCREMENT_EXPR:
8465 case POSTDECREMENT_EXPR:
8466 /* Faster to treat as pre-increment if result is not used. */
8467 return expand_increment (exp, ! ignore, ignore);
8470 /* Are we taking the address of a nested function? */
8471 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8472 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8473 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8474 && ! TREE_STATIC (exp))
8476 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8477 op0 = force_operand (op0, target);
8479 /* If we are taking the address of something erroneous, just return a zero. */
8481 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8483 /* If we are taking the address of a constant and are at the
8484 top level, we have to use output_constant_def since we can't
8485 call force_const_mem at top level. */
8487 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8488 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8490 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8493 /* We make sure to pass const0_rtx down if we came in with
8494 ignore set, to avoid doing the cleanups twice for something. */
8495 op0 = expand_expr (TREE_OPERAND (exp, 0),
8496 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8497 (modifier == EXPAND_INITIALIZER
8498 ? modifier : EXPAND_CONST_ADDRESS));
8500 /* If we are going to ignore the result, OP0 will have been set
8501 to const0_rtx, so just return it. Don't get confused and
8502 think we are taking the address of the constant. */
8506 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8507 clever and return a REG when given a MEM. */
8508 op0 = protect_from_queue (op0, 1);
8510 /* We would like the object in memory. If it is a constant, we can
8511 have it be statically allocated into memory. For a non-constant,
8512 we need to allocate some memory and store the value into it. */
8514 if (CONSTANT_P (op0))
8515 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8517 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8518 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8519 || GET_CODE (op0) == PARALLEL)
8521 /* If the operand is a SAVE_EXPR, we can deal with this by
8522 forcing the SAVE_EXPR into memory. */
8523 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8525 put_var_into_stack (TREE_OPERAND (exp, 0));
8526 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8530 /* If this object is in a register, it can't be BLKmode. */
8531 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8532 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8534 if (GET_CODE (op0) == PARALLEL)
8535 /* Handle calls that pass values in multiple
8536 non-contiguous locations. The Irix 6 ABI has examples of this. */
8538 emit_group_store (memloc, op0,
8539 int_size_in_bytes (inner_type));
8541 emit_move_insn (memloc, op0);
8547 if (GET_CODE (op0) != MEM)
8550 mark_temp_addr_taken (op0);
8551 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8553 op0 = XEXP (op0, 0);
8554 #ifdef POINTERS_EXTEND_UNSIGNED
8555 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8556 && mode == ptr_mode)
8557 op0 = convert_memory_address (ptr_mode, op0);
8562 /* If OP0 is not aligned at least as much as the type requires, we
8563 need to make a temporary, copy OP0 to it, and take the address of
8564 the temporary. We want to use the alignment of the type, not of
8565 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8566 the test for BLKmode means that can't happen. The test for
8567 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
8570 We don't need to do this at all if the machine doesn't have
8571 strict alignment. */
8572 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8573 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8575 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8577 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8579 = assign_stack_temp_for_type
8580 (TYPE_MODE (inner_type),
8581 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8582 : int_size_in_bytes (inner_type),
8583 1, build_qualified_type (inner_type,
8584 (TYPE_QUALS (inner_type)
8585 | TYPE_QUAL_CONST)));
8587 if (TYPE_ALIGN_OK (inner_type))
8590 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8594 op0 = force_operand (XEXP (op0, 0), target);
8598 && GET_CODE (op0) != REG
8599 && modifier != EXPAND_CONST_ADDRESS
8600 && modifier != EXPAND_INITIALIZER
8601 && modifier != EXPAND_SUM)
8602 op0 = force_reg (Pmode, op0);
8604 if (GET_CODE (op0) == REG
8605 && ! REG_USERVAR_P (op0))
8606 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8608 #ifdef POINTERS_EXTEND_UNSIGNED
8609 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8610 && mode == ptr_mode)
8611 op0 = convert_memory_address (ptr_mode, op0);
8616 case ENTRY_VALUE_EXPR:
8619 /* COMPLEX type for Extended Pascal & Fortran */
8622 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8625 /* Get the rtx code of the operands. */
8626 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8627 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8630 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8634 /* Move the real (op0) and imaginary (op1) parts to their location. */
8635 emit_move_insn (gen_realpart (mode, target), op0);
8636 emit_move_insn (gen_imagpart (mode, target), op1);
8638 insns = get_insns ();
8641 /* Complex construction should appear as a single unit. */
8642 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8643 each with a separate pseudo as destination.
8644 It's not correct for flow to treat them as a unit. */
8645 if (GET_CODE (target) != CONCAT)
8646 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8654 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8655 return gen_realpart (mode, op0);
8658 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8659 return gen_imagpart (mode, op0);
8663 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8667 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8670 target = gen_reg_rtx (mode);
8674 /* Store the realpart and the negated imagpart to target. */
8675 emit_move_insn (gen_realpart (partmode, target),
8676 gen_realpart (partmode, op0));
8678 imag_t = gen_imagpart (partmode, target);
8679 temp = expand_unop (partmode,
8680 ! unsignedp && flag_trapv
8681 && (GET_MODE_CLASS(partmode) == MODE_INT)
8682 ? negv_optab : neg_optab,
8683 gen_imagpart (partmode, op0), imag_t, 0);
8685 emit_move_insn (imag_t, temp);
8687 insns = get_insns ();
8690 /* Conjugate should appear as a single unit.
8691 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8692 each with a separate pseudo as destination.
8693 It's not correct for flow to treat them as a unit. */
8694 if (GET_CODE (target) != CONCAT)
8695 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8702 case TRY_CATCH_EXPR:
8704 tree handler = TREE_OPERAND (exp, 1);
8706 expand_eh_region_start ();
8708 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8710 expand_eh_region_end_cleanup (handler);
8715 case TRY_FINALLY_EXPR:
8717 tree try_block = TREE_OPERAND (exp, 0);
8718 tree finally_block = TREE_OPERAND (exp, 1);
8719 rtx finally_label = gen_label_rtx ();
8720 rtx done_label = gen_label_rtx ();
8721 rtx return_link = gen_reg_rtx (Pmode);
8722 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8723 (tree) finally_label, (tree) return_link);
8724 TREE_SIDE_EFFECTS (cleanup) = 1;
8726 /* Start a new binding layer that will keep track of all cleanup
8727 actions to be performed. */
8728 expand_start_bindings (2);
8730 target_temp_slot_level = temp_slot_level;
8732 expand_decl_cleanup (NULL_TREE, cleanup);
8733 op0 = expand_expr (try_block, target, tmode, modifier);
8735 preserve_temp_slots (op0);
8736 expand_end_bindings (NULL_TREE, 0, 0);
8737 emit_jump (done_label);
8738 emit_label (finally_label);
8739 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8740 emit_indirect_jump (return_link);
8741 emit_label (done_label);
8745 case GOTO_SUBROUTINE_EXPR:
8747 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8748 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8749 rtx return_address = gen_label_rtx ();
8750 emit_move_insn (return_link,
8751 gen_rtx_LABEL_REF (Pmode, return_address));
8753 emit_label (return_address);
8758 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8761 return get_exception_pointer (cfun);
8764 /* Function descriptors are not valid except as
8765 initialization constants, and should not be expanded. */
8769 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8772 /* Here to do an ordinary binary operator, generating an instruction
8773 from the optab already placed in `this_optab'. */
8775 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8777 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8778 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8780 temp = expand_binop (mode, this_optab, op0, op1, target,
8781 unsignedp, OPTAB_LIB_WIDEN);
8787 /* Return the tree node if ARG corresponds to a string constant or zero
8788 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8789 in bytes within the string that ARG is accessing. The type of the
8790 offset will be `sizetype'. */
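/* For example, given the tree for "hello" + 2, a PLUS_EXPR of an
   ADDR_EXPR of the STRING_CST and the constant 2, we return the
   STRING_CST and set *PTR_OFFSET to 2.  */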
8793 string_constant (arg, ptr_offset)
8799 if (TREE_CODE (arg) == ADDR_EXPR
8800 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8802 *ptr_offset = size_zero_node;
8803 return TREE_OPERAND (arg, 0);
8805 else if (TREE_CODE (arg) == PLUS_EXPR)
8807 tree arg0 = TREE_OPERAND (arg, 0);
8808 tree arg1 = TREE_OPERAND (arg, 1);
8813 if (TREE_CODE (arg0) == ADDR_EXPR
8814 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8816 *ptr_offset = convert (sizetype, arg1);
8817 return TREE_OPERAND (arg0, 0);
8819 else if (TREE_CODE (arg1) == ADDR_EXPR
8820 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8822 *ptr_offset = convert (sizetype, arg0);
8823 return TREE_OPERAND (arg1, 0);
8830 /* Expand code for a post- or pre- increment or decrement
8831 and return the RTX for the result.
8832 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
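/* Thus for "i++" used for its value we return an rtx holding the old
   value of I, while for "++i" we return the incremented value.  */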
8835 expand_increment (exp, post, ignore)
8841 tree incremented = TREE_OPERAND (exp, 0);
8842 optab this_optab = add_optab;
8844 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8845 int op0_is_copy = 0;
8846 int single_insn = 0;
8847 /* 1 means we can't store into OP0 directly,
8848 because it is a subreg narrower than a word,
8849 and we don't dare clobber the rest of the word. */
8852 /* Stabilize any component ref that might need to be
8853 evaluated more than once below. */
8855 || TREE_CODE (incremented) == BIT_FIELD_REF
8856 || (TREE_CODE (incremented) == COMPONENT_REF
8857 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8858 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8859 incremented = stabilize_reference (incremented);
8860 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8861 ones into save exprs so that they don't accidentally get evaluated
8862 more than once by the code below. */
8863 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8864 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8865 incremented = save_expr (incremented);
8867 /* Compute the operands as RTX.
8868 Note whether OP0 is the actual lvalue or a copy of it:
8869 I believe it is a copy iff it is a register or subreg
8870 and insns were generated in computing it. */
8872 temp = get_last_insn ();
8873 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8875 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8876 in place but instead must do sign- or zero-extension during assignment,
8877 so we copy it into a new register and let the code below use it as a copy.
8880 Note that we can safely modify this SUBREG since it is known not to be
8881 shared (it was made by the expand_expr call above). */
8883 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8886 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8890 else if (GET_CODE (op0) == SUBREG
8891 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8893 /* We cannot increment this SUBREG in place. If we are
8894 post-incrementing, get a copy of the old value. Otherwise,
8895 just mark that we cannot increment in place. */
8897 op0 = copy_to_reg (op0);
8902 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8903 && temp != get_last_insn ());
8904 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8906 /* Decide whether incrementing or decrementing. */
8907 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8908 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8909 this_optab = sub_optab;
8911 /* Convert decrement by a constant into a negative increment. */
8912 if (this_optab == sub_optab
8913 && GET_CODE (op1) == CONST_INT)
8915 op1 = GEN_INT (-INTVAL (op1));
8916 this_optab = add_optab;
8919 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8920 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8922 /* For a preincrement, see if we can do this with a single instruction. */
8925 icode = (int) this_optab->handlers[(int) mode].insn_code;
8926 if (icode != (int) CODE_FOR_nothing
8927 /* Make sure that OP0 is valid for operands 0 and 1
8928 of the insn we want to queue. */
8929 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8930 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8931 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8935 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8936 then we cannot just increment OP0. We must therefore contrive to
8937 increment the original value. Then, for postincrement, we can return
8938 OP0 since it is a copy of the old value. For preincrement, expand here
8939 unless we can do it with a single insn.
8941 Likewise if storing directly into OP0 would clobber high bits
8942 we need to preserve (bad_subreg). */
8943 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8945 /* This is the easiest way to increment the value wherever it is.
8946 Problems with multiple evaluation of INCREMENTED are prevented
8947 because either (1) it is a component_ref or preincrement,
8948 in which case it was stabilized above, or (2) it is an array_ref
8949 with constant index in an array in a register, which is
8950 safe to reevaluate. */
8951 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8952 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8953 ? MINUS_EXPR : PLUS_EXPR),
8956 TREE_OPERAND (exp, 1));
8958 while (TREE_CODE (incremented) == NOP_EXPR
8959 || TREE_CODE (incremented) == CONVERT_EXPR)
8961 newexp = convert (TREE_TYPE (incremented), newexp);
8962 incremented = TREE_OPERAND (incremented, 0);
8965 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8966 return post ? op0 : temp;
8971 /* We have a true reference to the value in OP0.
8972 If there is an insn to add or subtract in this mode, queue it.
8973 Queueing the increment insn avoids the register shuffling
8974 that often results if we must increment now and first save
8975 the old value for subsequent use. */
8977 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8978 op0 = stabilize (op0);
8981 icode = (int) this_optab->handlers[(int) mode].insn_code;
8982 if (icode != (int) CODE_FOR_nothing
8983 /* Make sure that OP0 is valid for operands 0 and 1
8984 of the insn we want to queue. */
8985 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8986 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8988 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8989 op1 = force_reg (mode, op1);
8991 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8993 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8995 rtx addr = (general_operand (XEXP (op0, 0), mode)
8996 ? force_reg (Pmode, XEXP (op0, 0))
8997 : copy_to_reg (XEXP (op0, 0)));
9000 op0 = replace_equiv_address (op0, addr);
9001 temp = force_reg (GET_MODE (op0), op0);
9002 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9003 op1 = force_reg (mode, op1);
9005 /* The increment queue is LIFO, thus we have to `queue'
9006 the instructions in reverse order. */
9007 enqueue_insn (op0, gen_move_insn (op0, temp));
9008 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9013 /* Preincrement, or we can't increment with one simple insn. */
9015 /* Save a copy of the value before inc or dec, to return it later. */
9016 temp = value = copy_to_reg (op0);
9018 /* Arrange to return the incremented value. */
9019 /* Copy the rtx because expand_binop will protect from the queue,
9020 and the results of that would be invalid for us to return
9021 if our caller does emit_queue before using our result. */
9022 temp = copy_rtx (value = op0);
9024 /* Increment however we can. */
9025 op1 = expand_binop (mode, this_optab, value, op1, op0,
9026 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9028 /* Make sure the value is stored into OP0. */
9030 emit_move_insn (op0, op1);
9035 /* At the start of a function, record that we have no previously-pushed
9036 arguments waiting to be popped. */
9039 init_pending_stack_adjust ()
9041 pending_stack_adjust = 0;
9044 /* When exiting from a function, if safe, clear out any pending stack adjust
9045 so the adjustment won't get done.
9047 Note, if the current function calls alloca, then it must have a
9048 frame pointer regardless of the value of flag_omit_frame_pointer. */
9051 clear_pending_stack_adjust ()
9053 #ifdef EXIT_IGNORE_STACK
9055 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9056 && EXIT_IGNORE_STACK
9057 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9058 && ! flag_inline_functions)
9060 stack_pointer_delta -= pending_stack_adjust,
9061 pending_stack_adjust = 0;
9066 /* Pop any previously-pushed arguments that have not been popped yet. */
9069 do_pending_stack_adjust ()
9071 if (inhibit_defer_pop == 0)
9073 if (pending_stack_adjust != 0)
9074 adjust_stack (GEN_INT (pending_stack_adjust));
9075 pending_stack_adjust = 0;
9079 /* Expand conditional expressions. */
9081 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9082 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9086 jumpifnot (exp, label)
9090 do_jump (exp, label, NULL_RTX);
9093 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9100 do_jump (exp, NULL_RTX, label);
9103 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9104 the result is zero, or IF_TRUE_LABEL if the result is one.
9105 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9106 meaning fall through in that case.
9108 do_jump always does any pending stack adjust except when it does not
9109 actually perform a jump. An example where there is no jump
9110 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9112 This function is responsible for optimizing cases such as
9113 &&, || and comparison operators in EXP. */
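/* For instance, for "a && b" we branch to IF_FALSE_LABEL as soon as A
   is known to be zero and evaluate B only otherwise; the boolean
   value of the whole expression is never materialized.  */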
9116 do_jump (exp, if_false_label, if_true_label)
9118 rtx if_false_label, if_true_label;
9120 enum tree_code code = TREE_CODE (exp);
9121 /* Some cases need to create a label to jump to
9122 in order to properly fall through.
9123 These cases set DROP_THROUGH_LABEL nonzero. */
9124 rtx drop_through_label = 0;
9128 enum machine_mode mode;
9130 #ifdef MAX_INTEGER_COMPUTATION_MODE
9131 check_max_integer_computation_mode (exp);
9142 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9148 /* This is not true with #pragma weak */
9150 /* The address of something can never be zero. */
9152 emit_jump (if_true_label);
9157 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9158 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9159 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9160 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9163 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9165 if ((TYPE_PRECISION (TREE_TYPE (exp))
9166 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9168 case NON_LVALUE_EXPR:
9169 case REFERENCE_EXPR:
9174 /* These cannot change zero->non-zero or vice versa. */
9175 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9178 case WITH_RECORD_EXPR:
9179 /* Put the object on the placeholder list, recurse through our first
9180 operand, and pop the list. */
9181 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9183 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9184 placeholder_list = TREE_CHAIN (placeholder_list);
9188 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9189 a test and can be longer if the test is eliminated. */
9191 /* Reduce to minus. */
9192 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9193 TREE_OPERAND (exp, 0),
9194 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9195 TREE_OPERAND (exp, 1))));
9196 /* Process as MINUS. */
9200 /* Non-zero iff operands of minus differ. */
9201 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9202 TREE_OPERAND (exp, 0),
9203 TREE_OPERAND (exp, 1)),
9204 NE, NE, if_false_label, if_true_label);
9208 /* If we are AND'ing with a small constant, do this comparison in the
9209 smallest type that fits. If the machine doesn't have comparisons
9210 that small, it will be converted back to the wider comparison.
9211 This helps if we are testing the sign bit of a narrower object.
9212 combine can't do this for us because it can't know whether a
9213 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
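/* For example, a test of "x & 0x80" on a 32-bit int only needs the
   low byte: I is 7 here, so an 8-bit mode suffices and the test
   becomes a sign-bit comparison in QImode.  */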
9215 if (! SLOW_BYTE_ACCESS
9216 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9217 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9218 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9219 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9220 && (type = type_for_mode (mode, 1)) != 0
9221 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9222 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9223 != CODE_FOR_nothing))
9225 do_jump (convert (type, exp), if_false_label, if_true_label);
9230 case TRUTH_NOT_EXPR:
9231 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9234 case TRUTH_ANDIF_EXPR:
9235 if (if_false_label == 0)
9236 if_false_label = drop_through_label = gen_label_rtx ();
9237 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9238 start_cleanup_deferral ();
9239 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9240 end_cleanup_deferral ();
9243 case TRUTH_ORIF_EXPR:
9244 if (if_true_label == 0)
9245 if_true_label = drop_through_label = gen_label_rtx ();
9246 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9247 start_cleanup_deferral ();
9248 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9249 end_cleanup_deferral ();
9254 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9255 preserve_temp_slots (NULL_RTX);
9259 do_pending_stack_adjust ();
9260 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9266 case ARRAY_RANGE_REF:
9268 HOST_WIDE_INT bitsize, bitpos;
9270 enum machine_mode mode;
9275 /* Get description of this reference. We don't actually care
9276 about the underlying object here. */
9277 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9278 &unsignedp, &volatilep);
9280 type = type_for_size (bitsize, unsignedp);
9281 if (! SLOW_BYTE_ACCESS
9282 && type != 0 && bitsize >= 0
9283 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9284 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9285 != CODE_FOR_nothing))
9287 do_jump (convert (type, exp), if_false_label, if_true_label);
9294 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9295 if (integer_onep (TREE_OPERAND (exp, 1))
9296 && integer_zerop (TREE_OPERAND (exp, 2)))
9297 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9299 else if (integer_zerop (TREE_OPERAND (exp, 1))
9300 && integer_onep (TREE_OPERAND (exp, 2)))
9301 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9305 rtx label1 = gen_label_rtx ();
9306 drop_through_label = gen_label_rtx ();
9308 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9310 start_cleanup_deferral ();
9311 /* Now the THEN-expression. */
9312 do_jump (TREE_OPERAND (exp, 1),
9313 if_false_label ? if_false_label : drop_through_label,
9314 if_true_label ? if_true_label : drop_through_label);
9315 /* In case the do_jump just above never jumps. */
9316 do_pending_stack_adjust ();
9317 emit_label (label1);
9319 /* Now the ELSE-expression. */
9320 do_jump (TREE_OPERAND (exp, 2),
9321 if_false_label ? if_false_label : drop_through_label,
9322 if_true_label ? if_true_label : drop_through_label);
9323 end_cleanup_deferral ();
9329 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9331 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9332 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9334 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9335 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9338 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9339 fold (build (EQ_EXPR, TREE_TYPE (exp),
9340 fold (build1 (REALPART_EXPR,
9341 TREE_TYPE (inner_type),
9343 fold (build1 (REALPART_EXPR,
9344 TREE_TYPE (inner_type),
9346 fold (build (EQ_EXPR, TREE_TYPE (exp),
9347 fold (build1 (IMAGPART_EXPR,
9348 TREE_TYPE (inner_type),
9350 fold (build1 (IMAGPART_EXPR,
9351 TREE_TYPE (inner_type),
9353 if_false_label, if_true_label);
9356 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9357 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9359 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9360 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9361 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9363 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9369 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9371 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9372 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9374 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9375 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9378 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9379 fold (build (NE_EXPR, TREE_TYPE (exp),
9380 fold (build1 (REALPART_EXPR,
9381 TREE_TYPE (inner_type),
9383 fold (build1 (REALPART_EXPR,
9384 TREE_TYPE (inner_type),
9386 fold (build (NE_EXPR, TREE_TYPE (exp),
9387 fold (build1 (IMAGPART_EXPR,
9388 TREE_TYPE (inner_type),
9390 fold (build1 (IMAGPART_EXPR,
9391 TREE_TYPE (inner_type),
9393 if_false_label, if_true_label);
9396 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9397 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9399 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9400 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9401 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9403 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9408 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9409 if (GET_MODE_CLASS (mode) == MODE_INT
9410 && ! can_compare_p (LT, mode, ccp_jump))
9411 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9413 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9417 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9418 if (GET_MODE_CLASS (mode) == MODE_INT
9419 && ! can_compare_p (LE, mode, ccp_jump))
9420 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9422 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9426 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9427 if (GET_MODE_CLASS (mode) == MODE_INT
9428 && ! can_compare_p (GT, mode, ccp_jump))
9429 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9431 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9435 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9436 if (GET_MODE_CLASS (mode) == MODE_INT
9437 && ! can_compare_p (GE, mode, ccp_jump))
9438 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9440 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9443 case UNORDERED_EXPR:
9446 enum rtx_code cmp, rcmp;
9449 if (code == UNORDERED_EXPR)
9450 cmp = UNORDERED, rcmp = ORDERED;
9452 cmp = ORDERED, rcmp = UNORDERED;
9453 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9456 if (! can_compare_p (cmp, mode, ccp_jump)
9457 && (can_compare_p (rcmp, mode, ccp_jump)
9458 /* If the target doesn't provide either UNORDERED or ORDERED
9459 comparisons, canonicalize on UNORDERED for the library. */
9460 || rcmp == UNORDERED))
9464 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9466 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9471 enum rtx_code rcode1;
9472 enum tree_code tcode2;
9496 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9497 if (can_compare_p (rcode1, mode, ccp_jump))
9498 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9502 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9503 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9506 /* If the target doesn't support combined unordered
9507 compares, decompose into UNORDERED + comparison. */
9508 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9509 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9510 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9511 do_jump (exp, if_false_label, if_true_label);
9517 __builtin_expect (<test>, 0) and
9518 __builtin_expect (<test>, 1)
9520 We need to do this here, so that <test> is not converted to a SCC
9521 operation on machines that use condition code registers and COMPARE
9522 like the PowerPC, and then the jump is done based on whether the SCC
9523 operation produced a 1 or 0. */
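/* E.g. for "if (__builtin_expect (x > 0, 1))" the jump should test
   X > 0 directly; the expectation only annotates the branch and must
   not force the comparison through a store-flag first.  */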
9525 /* Check for a built-in function. */
9526 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9528 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9529 tree arglist = TREE_OPERAND (exp, 1);
9531 if (TREE_CODE (fndecl) == FUNCTION_DECL
9532 && DECL_BUILT_IN (fndecl)
9533 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9534 && arglist != NULL_TREE
9535 && TREE_CHAIN (arglist) != NULL_TREE)
9537 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9540 if (seq != NULL_RTX)
9547 /* fall through and generate the normal code. */
9551 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9553 /* This is not needed any more and causes poor code since it causes
9554 comparisons and tests from non-SI objects to have different code paths. */
9556 /* Copy to register to avoid generating bad insns by cse
9557 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9558 if (!cse_not_expected && GET_CODE (temp) == MEM)
9559 temp = copy_to_reg (temp);
9561 do_pending_stack_adjust ();
9562 /* Do any postincrements in the expression that was tested. */
9565 if (GET_CODE (temp) == CONST_INT
9566 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9567 || GET_CODE (temp) == LABEL_REF)
9569 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9573 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9574 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9575 /* Note swapping the labels gives us not-equal. */
9576 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9577 else if (GET_MODE (temp) != VOIDmode)
9578 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9579 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9580 GET_MODE (temp), NULL_RTX,
9581 if_false_label, if_true_label);
9586 if (drop_through_label)
9588 /* If do_jump produces code that might be jumped around,
9589 do any stack adjusts from that code, before the place
9590 where control merges in. */
9591 do_pending_stack_adjust ();
9592 emit_label (drop_through_label);
9596 /* Given a comparison expression EXP for values too wide to be compared
9597 with one insn, test the comparison and jump to the appropriate label.
9598 The code of EXP is ignored; we always test GT if SWAP is 0,
9599 and LT if SWAP is 1. */
9602 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9605 rtx if_false_label, if_true_label;
9607 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9608 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9609 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9610 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9612 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9615 /* Compare OP0 with OP1, word at a time, in mode MODE.
9616 UNSIGNEDP says to do unsigned comparison.
9617 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9620 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9621 enum machine_mode mode;
9624 rtx if_false_label, if_true_label;
9626 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9627 rtx drop_through_label = 0;
9630 if (! if_true_label || ! if_false_label)
9631 drop_through_label = gen_label_rtx ();
9632 if (! if_true_label)
9633 if_true_label = drop_through_label;
9634 if (! if_false_label)
9635 if_false_label = drop_through_label;
9637 /* Compare a word at a time, high order first. */
9638 for (i = 0; i < nwords; i++)
9640 rtx op0_word, op1_word;
9642 if (WORDS_BIG_ENDIAN)
9644 op0_word = operand_subword_force (op0, i, mode);
9645 op1_word = operand_subword_force (op1, i, mode);
9649 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9650 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9653 /* All but high-order word must be compared as unsigned. */
9654 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9655 (unsignedp || i > 0), word_mode, NULL_RTX,
9656 NULL_RTX, if_true_label);
9658 /* Consider lower words only if these are equal. */
9659 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9660 NULL_RTX, NULL_RTX, if_false_label);
9664 emit_jump (if_false_label);
9665 if (drop_through_label)
9666 emit_label (drop_through_label);
9669 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9670 with one insn, test the comparison and jump to the appropriate label. */
9673 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9675 rtx if_false_label, if_true_label;
9677 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9678 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9679 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9680 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9682 rtx drop_through_label = 0;
9684 if (! if_false_label)
9685 drop_through_label = if_false_label = gen_label_rtx ();
9687 for (i = 0; i < nwords; i++)
9688 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9689 operand_subword_force (op1, i, mode),
9690 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9691 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9694 emit_jump (if_true_label);
9695 if (drop_through_label)
9696 emit_label (drop_through_label);
9699 /* Jump according to whether OP0 is 0.
9700 We assume that OP0 has an integer mode that is too wide
9701 for the available compare insns. */
9704 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9706 rtx if_false_label, if_true_label;
9708 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9711 rtx drop_through_label = 0;
9713 /* The fastest way of doing this comparison on almost any machine is to
9714 "or" all the words and compare the result. If all have to be loaded
9715 from memory and this is a very wide item, it's possible this may
9716 be slower, but that's highly unlikely. */
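/* E.g. for a DImode value on a 32-bit target this IORs the two
   SImode words into one register and compares that single result
   against zero.  */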
9718 part = gen_reg_rtx (word_mode);
9719 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9720 for (i = 1; i < nwords && part != 0; i++)
9721 part = expand_binop (word_mode, ior_optab, part,
9722 operand_subword_force (op0, i, GET_MODE (op0)),
9723 part, 1, OPTAB_WIDEN);
9727 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9728 NULL_RTX, if_false_label, if_true_label);
9733 /* If we couldn't do the "or" simply, do this with a series of compares. */
9734 if (! if_false_label)
9735 drop_through_label = if_false_label = gen_label_rtx ();
9737 for (i = 0; i < nwords; i++)
9738 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9739 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9740 if_false_label, NULL_RTX);
9743 emit_jump (if_true_label);
9745 if (drop_through_label)
9746 emit_label (drop_through_label);
9749 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9750 (including code to compute the values to be compared)
9751 and set (CC0) according to the result.
9752 The decision as to signed or unsigned comparison must be made by the caller.
9754 We force a stack adjustment unless there are currently
9755 things pushed on the stack that aren't yet used.
9757 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
9761 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9765 enum machine_mode mode;
9770 /* If one operand is constant, make it the second one. Only do this
9771 if the other operand is not constant as well. */
9773 if (swap_commutative_operands_p (op0, op1))
9778 code = swap_condition (code);
9783 op0 = force_not_mem (op0);
9784 op1 = force_not_mem (op1);
9787 do_pending_stack_adjust ();
9789 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9790 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9794 /* There's no need to do this now that combine.c can eliminate lots of
9795 sign extensions. This can be less efficient in certain cases on other machines. */
9798 /* If this is a signed equality comparison, we can do it as an
9799 unsigned comparison since zero-extension is cheaper than sign
9800 extension and comparisons with zero are done as unsigned. This is
9801 the case even on machines that can do fast sign extension, since
9802 zero-extension is easier to combine with other operations than
9803 sign-extension is. If we are comparing against a constant, we must
9804 convert it to what it would look like unsigned. */
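/* E.g. a QImode equality comparison against -1 masks the constant
   with GET_MODE_MASK (QImode), turning it into an unsigned
   comparison against 0xff.  */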
9805 if ((code == EQ || code == NE) && ! unsignedp
9806 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9808 if (GET_CODE (op1) == CONST_INT
9809 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9810 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9815 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9817 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9820 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9821 The decision as to signed or unsigned comparison must be made by the caller.
9823 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
9827 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9828 if_false_label, if_true_label)
9832 enum machine_mode mode;
9834 rtx if_false_label, if_true_label;
9837 int dummy_true_label = 0;
9839 /* Reverse the comparison if that is safe and we want to jump if it is false. */
9841 if (! if_true_label && ! FLOAT_MODE_P (mode))
9843 if_true_label = if_false_label;
9845 code = reverse_condition (code);
9848 /* If one operand is constant, make it the second one. Only do this
9849 if the other operand is not constant as well. */
9851 if (swap_commutative_operands_p (op0, op1))
9856 code = swap_condition (code);
9861 op0 = force_not_mem (op0);
9862 op1 = force_not_mem (op1);
9865 do_pending_stack_adjust ();
9867 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9868 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9870 if (tem == const_true_rtx)
9873 emit_jump (if_true_label);
9878 emit_jump (if_false_label);
9884 /* There's no need to do this now that combine.c can eliminate lots of
9885 sign extensions. This can be less efficient in certain cases on other machines. */
9888 /* If this is a signed equality comparison, we can do it as an
9889 unsigned comparison since zero-extension is cheaper than sign
9890 extension and comparisons with zero are done as unsigned. This is
9891 the case even on machines that can do fast sign extension, since
9892 zero-extension is easier to combine with other operations than
9893 sign-extension is. If we are comparing against a constant, we must
9894 convert it to what it would look like unsigned. */
9895 if ((code == EQ || code == NE) && ! unsignedp
9896 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9898 if (GET_CODE (op1) == CONST_INT
9899 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9900 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9905 if (! if_true_label)
9907 dummy_true_label = 1;
9908 if_true_label = gen_label_rtx ();
9911 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9915 emit_jump (if_false_label);
9916 if (dummy_true_label)
9917 emit_label (if_true_label);
9920 /* Generate code for a comparison expression EXP (including code to compute
9921 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9922 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9923 generated code will drop through.
9924 SIGNED_CODE should be the rtx operation for this comparison for
9925 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9927 We force a stack adjustment unless there are currently
9928 things pushed on the stack that aren't yet used. */
9931 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9934 enum rtx_code signed_code, unsigned_code;
9935 rtx if_false_label, if_true_label;
9939 enum machine_mode mode;
9943 /* Don't crash if the comparison was erroneous. */
9944 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9945 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9948 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9949 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9952 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9953 mode = TYPE_MODE (type);
9954 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9955 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9956 || (GET_MODE_BITSIZE (mode)
9957 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9960 /* OP0 might have been replaced by a promoted constant, in which
9961 case the type of the second argument should be used. */
9962 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9963 mode = TYPE_MODE (type);
9965 unsignedp = TREE_UNSIGNED (type);
9966 code = unsignedp ? unsigned_code : signed_code;
9968 #ifdef HAVE_canonicalize_funcptr_for_compare
9969 /* If function pointers need to be "canonicalized" before they can
9970 be reliably compared, then canonicalize them. */
9971 if (HAVE_canonicalize_funcptr_for_compare
9972 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9973 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9976 rtx new_op0 = gen_reg_rtx (mode);
9978 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9982 if (HAVE_canonicalize_funcptr_for_compare
9983 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9984 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9987 rtx new_op1 = gen_reg_rtx (mode);
9989 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9994 /* Do any postincrements in the expression that was tested. */
9997 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9999 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10000 if_false_label, if_true_label);
10003 /* Generate code to calculate EXP using a store-flag instruction
10004 and return an rtx for the result. EXP is either a comparison
10005 or a TRUTH_NOT_EXPR whose operand is a comparison.
10007 If TARGET is nonzero, store the result there if convenient.
10009 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
10012 Return zero if there is no suitable set-flag instruction
10013 available on this machine.
10015 Once expand_expr has been called on the arguments of the comparison,
10016 we are committed to doing the store flag, since it is not safe to
10017 re-evaluate the expression. We emit the store-flag insn by calling
10018 emit_store_flag, but only expand the arguments if we have a reason
10019 to believe that emit_store_flag will be successful. If we think that
10020 it will, but it isn't, we have to simulate the store-flag with a
10021 set/jump/set sequence. */
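/* The simulated sequence emitted at the end of this function is
   roughly: store 1 in TARGET, conditionally jump past a store of 0
   (the two constants are swapped when the result must be inverted).  */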
10024 do_store_flag (exp, target, mode, only_cheap)
10027 enum machine_mode mode;
10030 enum rtx_code code;
10031 tree arg0, arg1, type;
10033 enum machine_mode operand_mode;
10037 enum insn_code icode;
10038 rtx subtarget = target;
10041 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10042 result at the end. We can't simply invert the test since it would
10043 have already been inverted if it were valid. This case occurs for
10044 some floating-point comparisons. */
10046 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10047 invert = 1, exp = TREE_OPERAND (exp, 0);
10049 arg0 = TREE_OPERAND (exp, 0);
10050 arg1 = TREE_OPERAND (exp, 1);
10052 /* Don't crash if the comparison was erroneous. */
10053 if (arg0 == error_mark_node || arg1 == error_mark_node)
10056 type = TREE_TYPE (arg0);
10057 operand_mode = TYPE_MODE (type);
10058 unsignedp = TREE_UNSIGNED (type);
10060 /* We won't bother with BLKmode store-flag operations because it would mean
10061 passing a lot of information to emit_store_flag. */
10062 if (operand_mode == BLKmode)
10065 /* We won't bother with store-flag operations involving function pointers
10066 when function pointers must be canonicalized before comparisons. */
10067 #ifdef HAVE_canonicalize_funcptr_for_compare
10068 if (HAVE_canonicalize_funcptr_for_compare
10069 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10070 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10072 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10073 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10074 == FUNCTION_TYPE))))
10081 /* Get the rtx comparison code to use. We know that EXP is a comparison
10082 operation of some type. Some comparisons against 1 and -1 can be
10083 converted to comparisons with zero. Do so here so that the tests
10084 below will be aware that we have a comparison with zero. These
10085 tests will not catch constants in the first operand, but constants
10086 are rarely passed as the first operand. */
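/* E.g. a signed "x < 1" becomes "x <= 0" and a signed "x > -1"
   becomes "x >= 0", so the zero-comparison special cases below apply
   to them as well.  */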
10088 switch (TREE_CODE (exp))
10097 if (integer_onep (arg1))
10098 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10100 code = unsignedp ? LTU : LT;
10103 if (! unsignedp && integer_all_onesp (arg1))
10104 arg1 = integer_zero_node, code = LT;
10106 code = unsignedp ? LEU : LE;
10109 if (! unsignedp && integer_all_onesp (arg1))
10110 arg1 = integer_zero_node, code = GE;
10112 code = unsignedp ? GTU : GT;
10115 if (integer_onep (arg1))
10116 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10118 code = unsignedp ? GEU : GE;
10121 case UNORDERED_EXPR:
10147 /* Put a constant second. */
10148 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10150 tem = arg0; arg0 = arg1; arg1 = tem;
10151 code = swap_condition (code);
10154 /* If this is an equality or inequality test of a single bit, we can
10155 do this by shifting the bit being tested to the low-order bit and
10156 masking the result with the constant 1. If the condition was EQ,
10157 we xor it with 1. This does not require an scc insn and is faster
10158 than an scc insn even if we have it. */
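/* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1"; for the EQ form the
   result is additionally XORed with 1.  No scc insn is needed.  */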

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
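      /* E.g. for `((x >> 2) & 0x10) != 0', which tests bit 6 of X,
         BITNUM becomes 6 and INNER becomes X.  */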

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
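
/* With HAVE_casesi defined to 0, the `if (! HAVE_casesi)' test in
   try_casesi below folds to a constant, so the stubbed-out gen_casesi
   call is never reached and compiles away.  */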

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
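
/* E.g. with a casesi insn a switch needs at least 4 distinct case
   values before a dispatch table is preferred over a tree of
   conditional branches; without one, at least 5.  */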

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
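  /* For example, if the case labels span [5, 12], RANGE is 7 and the
     lower bound 5 has already been subtracted: an original index of 4
     becomes (unsigned) -1 and 13 becomes 8, so both compare GTU against
     7 and branch to DEFAULT_LABEL, while 5..12 map to 0..7 and fall
     through to the dispatch below.  */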

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
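
/* Note: the switch-statement expander (expand_end_case in stmt.c) is
   the expected caller of the two entry points above; it attempts
   try_casesi first and falls back to try_tablejump.  */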