1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
31 #include "hard-reg-set.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
41 #include "typeclass.h"
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first */
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
65 #define STACK_PUSH_CODE PRE_INC
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
87 /* Don't check memory usage, since code is being emitted to check a memory
88 usage. Used when current_function_check_memory_usage is true, to avoid
89 infinite recursion. */
90 static int in_check_memory_usage;
92 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
93 static tree placeholder_list = 0;
95 /* This structure is used by move_by_pieces to describe the move to
108 int explicit_inc_from;
116 /* This structure is used by clear_by_pieces to describe the clear to
119 struct clear_by_pieces
131 extern struct obstack permanent_obstack;
133 static rtx get_push_address PARAMS ((int));
135 static rtx enqueue_insn PARAMS ((rtx, rtx));
136 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
140 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
142 struct clear_by_pieces *));
143 static int is_zeros_p PARAMS ((tree));
144 static int mostly_zeros_p PARAMS ((tree));
145 static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
146 tree, tree, unsigned int, int));
147 static void store_constructor PARAMS ((tree, rtx, unsigned int, int, int));
148 static rtx store_field PARAMS ((rtx, int, int, enum machine_mode,
149 tree, enum machine_mode, int,
150 unsigned int, int, int));
151 static enum memory_use_mode
152 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
153 static tree save_noncopied_parts PARAMS ((tree, tree));
154 static tree init_noncopied_parts PARAMS ((tree, tree));
155 static int safe_from_p PARAMS ((rtx, tree, int));
156 static int fixed_type_p PARAMS ((tree));
157 static rtx var_rtx PARAMS ((tree));
158 static int readonly_fields_p PARAMS ((tree));
159 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
160 static rtx expand_increment PARAMS ((tree, int, int));
161 static void preexpand_calls PARAMS ((tree));
162 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
163 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
164 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code, rtx, rtx));
165 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
167 /* Record for each mode whether we can move a register directly to or
168 from an object of that mode in memory. If we can't, we won't try
169 to use that mode directly when accessing a field of that mode. */
171 static char direct_load[NUM_MACHINE_MODES];
172 static char direct_store[NUM_MACHINE_MODES];
174 /* If a memory-to-memory move would take MOVE_RATIO or more simple
175 move-instruction sequences, we will do a movstr or libcall instead. */
178 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
181 /* If we are optimizing for space (-Os), cut down the default move ratio */
182 #define MOVE_RATIO (optimize_size ? 3 : 15)
186 /* This macro is used to determine whether move_by_pieces should be called
187 to perform a structure copy. */
188 #ifndef MOVE_BY_PIECES_P
189 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
190 (SIZE, ALIGN) < MOVE_RATIO)
193 /* This array records the insn_code of insns to perform block moves. */
194 enum insn_code movstr_optab[NUM_MACHINE_MODES];
196 /* This array records the insn_code of insns to perform block clears. */
197 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
199 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
201 #ifndef SLOW_UNALIGNED_ACCESS
202 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
205 /* This is run once per compilation to set up which modes can be used
206 directly in memory and to initialize the block move optab. */
212 enum machine_mode mode;
219 /* Since we are on the permanent obstack, we must be sure we save this
220 spot AFTER we call start_sequence, since it will reuse the rtl it
222 free_point = (char *) oballoc (0);
224 /* Try indexing by frame ptr and try by stack ptr.
225 It is known that on the Convex the stack ptr isn't a valid index.
226 With luck, one or the other is valid on any machine. */
227 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
228 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
230 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
231 pat = PATTERN (insn);
233 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
234 mode = (enum machine_mode) ((int) mode + 1))
239 direct_load[(int) mode] = direct_store[(int) mode] = 0;
240 PUT_MODE (mem, mode);
241 PUT_MODE (mem1, mode);
243 /* See if there is some register that can be used in this mode and
244 directly loaded or stored from memory. */
246 if (mode != VOIDmode && mode != BLKmode)
247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
248 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
251 if (! HARD_REGNO_MODE_OK (regno, mode))
254 reg = gen_rtx_REG (mode, regno);
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
261 SET_SRC (pat) = mem1;
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
267 SET_DEST (pat) = mem;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
272 SET_DEST (pat) = mem1;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
282 /* This is run at the start of compiling a function. */
287 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
290 pending_stack_adjust = 0;
291 arg_space_so_far = 0;
292 inhibit_defer_pop = 0;
294 apply_args_value = 0;
300 struct expr_status *p;
305 ggc_mark_rtx (p->x_saveregs_value);
306 ggc_mark_rtx (p->x_apply_args_value);
307 ggc_mark_rtx (p->x_forced_labels);
318 /* Small sanity check that the queue is empty at the end of a function. */
320 finish_expr_for_function ()
326 /* Manage the queue of increment instructions to be output
327 for POSTINCREMENT_EXPR expressions, etc. */
329 /* Queue up to increment (or change) VAR later. BODY says how:
330 BODY should be the same thing you would pass to emit_insn
331 to increment right away. It will go to emit_insn later on.
333 The value is a QUEUED expression to be used in place of VAR
334 where you want to guarantee the pre-incrementation value of VAR. */
337 enqueue_insn (var, body)
340 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
341 body, pending_chain);
342 return pending_chain;
345 /* Use protect_from_queue to convert a QUEUED expression
346 into something that you can put immediately into an instruction.
347 If the queued incrementation has not happened yet,
348 protect_from_queue returns the variable itself.
349 If the incrementation has happened, protect_from_queue returns a temp
350 that contains a copy of the old value of the variable.
352 Any time an rtx which might possibly be a QUEUED is to be put
353 into an instruction, it must be passed through protect_from_queue first.
354 QUEUED expressions are not meaningful in instructions.
356 Do not pass a value through protect_from_queue and then hold
357 on to it for a while before putting it in an instruction!
358 If the queue is flushed in between, incorrect code will result. */
361 protect_from_queue (x, modify)
365 register RTX_CODE code = GET_CODE (x);
367 #if 0 /* A QUEUED can hang around after the queue is forced out. */
368 /* Shortcut for most common case. */
369 if (pending_chain == 0)
375 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
376 use of autoincrement. Make a copy of the contents of the memory
377 location rather than a copy of the address, but not if the value is
378 of mode BLKmode. Don't modify X in place since it might be
380 if (code == MEM && GET_MODE (x) != BLKmode
381 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
383 register rtx y = XEXP (x, 0);
384 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
386 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
387 MEM_COPY_ATTRIBUTES (new, x);
388 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
392 register rtx temp = gen_reg_rtx (GET_MODE (new));
393 emit_insn_before (gen_move_insn (temp, new),
399 /* Otherwise, recursively protect the subexpressions of all
400 the kinds of rtx's that can contain a QUEUED. */
403 rtx tem = protect_from_queue (XEXP (x, 0), 0);
404 if (tem != XEXP (x, 0))
410 else if (code == PLUS || code == MULT)
412 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
413 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
414 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
423 /* If the increment has not happened, use the variable itself. */
424 if (QUEUED_INSN (x) == 0)
425 return QUEUED_VAR (x);
426 /* If the increment has happened and a pre-increment copy exists,
428 if (QUEUED_COPY (x) != 0)
429 return QUEUED_COPY (x);
430 /* The increment has happened but we haven't set up a pre-increment copy.
431 Set one up now, and use it. */
432 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
433 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
435 return QUEUED_COPY (x);
438 /* Return nonzero if X contains a QUEUED expression:
439 if it contains anything that will be altered by a queued increment.
440 We handle only combinations of MEM, PLUS, MINUS and MULT operators
441 since memory addresses generally contain only those. */
447 register enum rtx_code code = GET_CODE (x);
453 return queued_subexp_p (XEXP (x, 0));
457 return (queued_subexp_p (XEXP (x, 0))
458 || queued_subexp_p (XEXP (x, 1)));
464 /* Perform all the pending incrementations. */
470 while ((p = pending_chain))
472 rtx body = QUEUED_BODY (p);
474 if (GET_CODE (body) == SEQUENCE)
476 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
477 emit_insn (QUEUED_BODY (p));
480 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
481 pending_chain = QUEUED_NEXT (p);
485 /* Copy data from FROM to TO, where the machine modes are not the same.
486 Both modes may be integer, or both may be floating.
487 UNSIGNEDP should be nonzero if FROM is an unsigned type.
488 This causes zero-extension instead of sign-extension. */
491 convert_move (to, from, unsignedp)
492 register rtx to, from;
495 enum machine_mode to_mode = GET_MODE (to);
496 enum machine_mode from_mode = GET_MODE (from);
497 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
498 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
502 /* rtx code for making an equivalent value. */
503 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
505 to = protect_from_queue (to, 1);
506 from = protect_from_queue (from, 0);
508 if (to_real != from_real)
511 /* If FROM is a SUBREG that indicates that we have already done at least
512 the required extension, strip it. We don't handle such SUBREGs as
515 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
516 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
517 >= GET_MODE_SIZE (to_mode))
518 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
519 from = gen_lowpart (to_mode, from), from_mode = to_mode;
521 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
524 if (to_mode == from_mode
525 || (from_mode == VOIDmode && CONSTANT_P (from)))
527 emit_move_insn (to, from);
535 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
537 /* Try converting directly if the insn is supported. */
538 if ((code = can_extend_p (to_mode, from_mode, 0))
541 emit_unop_insn (code, to, from, UNKNOWN);
546 #ifdef HAVE_trunchfqf2
547 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
549 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
553 #ifdef HAVE_trunctqfqf2
554 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
556 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
560 #ifdef HAVE_truncsfqf2
561 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
563 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
567 #ifdef HAVE_truncdfqf2
568 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
570 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
574 #ifdef HAVE_truncxfqf2
575 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
577 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
581 #ifdef HAVE_trunctfqf2
582 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
584 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
589 #ifdef HAVE_trunctqfhf2
590 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
592 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
596 #ifdef HAVE_truncsfhf2
597 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
599 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
603 #ifdef HAVE_truncdfhf2
604 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
606 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
610 #ifdef HAVE_truncxfhf2
611 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
613 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
617 #ifdef HAVE_trunctfhf2
618 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
620 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
625 #ifdef HAVE_truncsftqf2
626 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
628 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
632 #ifdef HAVE_truncdftqf2
633 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
635 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
639 #ifdef HAVE_truncxftqf2
640 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
642 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
646 #ifdef HAVE_trunctftqf2
647 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
649 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
654 #ifdef HAVE_truncdfsf2
655 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
657 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
661 #ifdef HAVE_truncxfsf2
662 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
664 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
668 #ifdef HAVE_trunctfsf2
669 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
671 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
675 #ifdef HAVE_truncxfdf2
676 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
678 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
682 #ifdef HAVE_trunctfdf2
683 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
685 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
697 libcall = extendsfdf2_libfunc;
701 libcall = extendsfxf2_libfunc;
705 libcall = extendsftf2_libfunc;
717 libcall = truncdfsf2_libfunc;
721 libcall = extenddfxf2_libfunc;
725 libcall = extenddftf2_libfunc;
737 libcall = truncxfsf2_libfunc;
741 libcall = truncxfdf2_libfunc;
753 libcall = trunctfsf2_libfunc;
757 libcall = trunctfdf2_libfunc;
769 if (libcall == (rtx) 0)
770 /* This conversion is not implemented yet. */
773 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
775 emit_move_insn (to, value);
779 /* Now both modes are integers. */
781 /* Handle expanding beyond a word. */
782 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
783 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
790 enum machine_mode lowpart_mode;
791 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
793 /* Try converting directly if the insn is supported. */
794 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
797 /* If FROM is a SUBREG, put it into a register. Do this
798 so that we always generate the same set of insns for
799 better cse'ing; if an intermediate assignment occurred,
800 we won't be doing the operation directly on the SUBREG. */
801 if (optimize > 0 && GET_CODE (from) == SUBREG)
802 from = force_reg (from_mode, from);
803 emit_unop_insn (code, to, from, equiv_code);
806 /* Next, try converting via full word. */
807 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
808 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
809 != CODE_FOR_nothing))
811 if (GET_CODE (to) == REG)
812 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
813 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
814 emit_unop_insn (code, to,
815 gen_lowpart (word_mode, to), equiv_code);
819 /* No special multiword conversion insn; do it by hand. */
822 /* Since we will turn this into a no conflict block, we must ensure
823 that the source does not overlap the target. */
825 if (reg_overlap_mentioned_p (to, from))
826 from = force_reg (from_mode, from);
828 /* Get a copy of FROM widened to a word, if necessary. */
829 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
830 lowpart_mode = word_mode;
832 lowpart_mode = from_mode;
834 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
836 lowpart = gen_lowpart (lowpart_mode, to);
837 emit_move_insn (lowpart, lowfrom);
839 /* Compute the value to put in each remaining word. */
841 fill_value = const0_rtx;
846 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
847 && STORE_FLAG_VALUE == -1)
849 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
851 fill_value = gen_reg_rtx (word_mode);
852 emit_insn (gen_slt (fill_value));
858 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
859 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
861 fill_value = convert_to_mode (word_mode, fill_value, 1);
865 /* Fill the remaining words. */
866 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
868 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
869 rtx subword = operand_subword (to, index, 1, to_mode);
874 if (fill_value != subword)
875 emit_move_insn (subword, fill_value);
878 insns = get_insns ();
881 emit_no_conflict_block (insns, to, from, NULL_RTX,
882 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
886 /* Truncating multi-word to a word or less. */
887 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
888 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
890 if (!((GET_CODE (from) == MEM
891 && ! MEM_VOLATILE_P (from)
892 && direct_load[(int) to_mode]
893 && ! mode_dependent_address_p (XEXP (from, 0)))
894 || GET_CODE (from) == REG
895 || GET_CODE (from) == SUBREG))
896 from = force_reg (from_mode, from);
897 convert_move (to, gen_lowpart (word_mode, from), 0);
901 /* Handle pointer conversion */ /* SPEE 900220 */
902 if (to_mode == PQImode)
904 if (from_mode != QImode)
905 from = convert_to_mode (QImode, from, unsignedp);
907 #ifdef HAVE_truncqipqi2
908 if (HAVE_truncqipqi2)
910 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
913 #endif /* HAVE_truncqipqi2 */
917 if (from_mode == PQImode)
919 if (to_mode != QImode)
921 from = convert_to_mode (QImode, from, unsignedp);
926 #ifdef HAVE_extendpqiqi2
927 if (HAVE_extendpqiqi2)
929 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
932 #endif /* HAVE_extendpqiqi2 */
937 if (to_mode == PSImode)
939 if (from_mode != SImode)
940 from = convert_to_mode (SImode, from, unsignedp);
942 #ifdef HAVE_truncsipsi2
943 if (HAVE_truncsipsi2)
945 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
948 #endif /* HAVE_truncsipsi2 */
952 if (from_mode == PSImode)
954 if (to_mode != SImode)
956 from = convert_to_mode (SImode, from, unsignedp);
961 #ifdef HAVE_extendpsisi2
962 if (HAVE_extendpsisi2)
964 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
967 #endif /* HAVE_extendpsisi2 */
972 if (to_mode == PDImode)
974 if (from_mode != DImode)
975 from = convert_to_mode (DImode, from, unsignedp);
977 #ifdef HAVE_truncdipdi2
978 if (HAVE_truncdipdi2)
980 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
983 #endif /* HAVE_truncdipdi2 */
987 if (from_mode == PDImode)
989 if (to_mode != DImode)
991 from = convert_to_mode (DImode, from, unsignedp);
996 #ifdef HAVE_extendpdidi2
997 if (HAVE_extendpdidi2)
999 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1002 #endif /* HAVE_extendpdidi2 */
1007 /* Now follow all the conversions between integers
1008 no more than a word long. */
1010 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1011 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1012 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1013 GET_MODE_BITSIZE (from_mode)))
1015 if (!((GET_CODE (from) == MEM
1016 && ! MEM_VOLATILE_P (from)
1017 && direct_load[(int) to_mode]
1018 && ! mode_dependent_address_p (XEXP (from, 0)))
1019 || GET_CODE (from) == REG
1020 || GET_CODE (from) == SUBREG))
1021 from = force_reg (from_mode, from);
1022 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1023 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1024 from = copy_to_reg (from);
1025 emit_move_insn (to, gen_lowpart (to_mode, from));
1029 /* Handle extension. */
1030 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1032 /* Convert directly if that works. */
1033 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1034 != CODE_FOR_nothing)
1036 emit_unop_insn (code, to, from, equiv_code);
1041 enum machine_mode intermediate;
1045 /* Search for a mode to convert via. */
1046 for (intermediate = from_mode; intermediate != VOIDmode;
1047 intermediate = GET_MODE_WIDER_MODE (intermediate))
1048 if (((can_extend_p (to_mode, intermediate, unsignedp)
1049 != CODE_FOR_nothing)
1050 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1051 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1052 GET_MODE_BITSIZE (intermediate))))
1053 && (can_extend_p (intermediate, from_mode, unsignedp)
1054 != CODE_FOR_nothing))
1056 convert_move (to, convert_to_mode (intermediate, from,
1057 unsignedp), unsignedp);
1061 /* No suitable intermediate mode.
1062 Generate what we need with shifts. */
1063 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1064 - GET_MODE_BITSIZE (from_mode), 0);
1065 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1066 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1068 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1071 emit_move_insn (to, tmp);
1076 /* Support special truncate insns for certain modes. */
1078 if (from_mode == DImode && to_mode == SImode)
1080 #ifdef HAVE_truncdisi2
1081 if (HAVE_truncdisi2)
1083 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1087 convert_move (to, force_reg (from_mode, from), unsignedp);
1091 if (from_mode == DImode && to_mode == HImode)
1093 #ifdef HAVE_truncdihi2
1094 if (HAVE_truncdihi2)
1096 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1100 convert_move (to, force_reg (from_mode, from), unsignedp);
1104 if (from_mode == DImode && to_mode == QImode)
1106 #ifdef HAVE_truncdiqi2
1107 if (HAVE_truncdiqi2)
1109 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1113 convert_move (to, force_reg (from_mode, from), unsignedp);
1117 if (from_mode == SImode && to_mode == HImode)
1119 #ifdef HAVE_truncsihi2
1120 if (HAVE_truncsihi2)
1122 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1126 convert_move (to, force_reg (from_mode, from), unsignedp);
1130 if (from_mode == SImode && to_mode == QImode)
1132 #ifdef HAVE_truncsiqi2
1133 if (HAVE_truncsiqi2)
1135 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1139 convert_move (to, force_reg (from_mode, from), unsignedp);
1143 if (from_mode == HImode && to_mode == QImode)
1145 #ifdef HAVE_trunchiqi2
1146 if (HAVE_trunchiqi2)
1148 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1152 convert_move (to, force_reg (from_mode, from), unsignedp);
1156 if (from_mode == TImode && to_mode == DImode)
1158 #ifdef HAVE_trunctidi2
1159 if (HAVE_trunctidi2)
1161 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1165 convert_move (to, force_reg (from_mode, from), unsignedp);
1169 if (from_mode == TImode && to_mode == SImode)
1171 #ifdef HAVE_trunctisi2
1172 if (HAVE_trunctisi2)
1174 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1178 convert_move (to, force_reg (from_mode, from), unsignedp);
1182 if (from_mode == TImode && to_mode == HImode)
1184 #ifdef HAVE_trunctihi2
1185 if (HAVE_trunctihi2)
1187 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1191 convert_move (to, force_reg (from_mode, from), unsignedp);
1195 if (from_mode == TImode && to_mode == QImode)
1197 #ifdef HAVE_trunctiqi2
1198 if (HAVE_trunctiqi2)
1200 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1204 convert_move (to, force_reg (from_mode, from), unsignedp);
1208 /* Handle truncation of volatile memrefs, and so on;
1209 the things that couldn't be truncated directly,
1210 and for which there was no special instruction. */
1211 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1213 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1214 emit_move_insn (to, temp);
1218 /* Mode combination is not recognized. */
1222 /* Return an rtx for a value that would result
1223 from converting X to mode MODE.
1224 Both X and MODE may be floating, or both integer.
1225 UNSIGNEDP is nonzero if X is an unsigned value.
1226 This can be done by referring to a part of X in place
1227 or by copying to a new temporary with conversion.
1229 This function *must not* call protect_from_queue
1230 except when putting X into an insn (in which case convert_move does it). */
1233 convert_to_mode (mode, x, unsignedp)
1234 enum machine_mode mode;
1238 return convert_modes (mode, VOIDmode, x, unsignedp);
1241 /* Return an rtx for a value that would result
1242 from converting X from mode OLDMODE to mode MODE.
1243 Both modes may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1246 This can be done by referring to a part of X in place
1247 or by copying to a new temporary with conversion.
1249 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1255 convert_modes (mode, oldmode, x, unsignedp)
1256 enum machine_mode mode, oldmode;
1262 /* If FROM is a SUBREG that indicates that we have already done at least
1263 the required extension, strip it. */
1265 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1266 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1267 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1268 x = gen_lowpart (mode, x);
1270 if (GET_MODE (x) != VOIDmode)
1271 oldmode = GET_MODE (x);
1273 if (mode == oldmode)
1276 /* There is one case that we must handle specially: If we are converting
1277 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1278 we are to interpret the constant as unsigned, gen_lowpart will do
1279 the wrong if the constant appears negative. What we want to do is
1280 make the high-order word of the constant zero, not all ones. */
1282 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1283 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1284 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1286 HOST_WIDE_INT val = INTVAL (x);
1288 if (oldmode != VOIDmode
1289 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1291 int width = GET_MODE_BITSIZE (oldmode);
1293 /* We need to zero extend VAL. */
1294 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1297 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1300 /* We can do this with a gen_lowpart if both desired and current modes
1301 are integer, and this is either a constant integer, a register, or a
1302 non-volatile MEM. Except for the constant case where MODE is no
1303 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1305 if ((GET_CODE (x) == CONST_INT
1306 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1307 || (GET_MODE_CLASS (mode) == MODE_INT
1308 && GET_MODE_CLASS (oldmode) == MODE_INT
1309 && (GET_CODE (x) == CONST_DOUBLE
1310 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1311 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1312 && direct_load[(int) mode])
1313 || (GET_CODE (x) == REG
1314 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1315 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1317 /* ?? If we don't know OLDMODE, we have to assume here that
1318 X does not need sign- or zero-extension. This may not be
1319 the case, but it's the best we can do. */
1320 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1321 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1323 HOST_WIDE_INT val = INTVAL (x);
1324 int width = GET_MODE_BITSIZE (oldmode);
1326 /* We must sign or zero-extend in this case. Start by
1327 zero-extending, then sign extend if we need to. */
1328 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1330 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1331 val |= (HOST_WIDE_INT) (-1) << width;
1333 return GEN_INT (val);
1336 return gen_lowpart (mode, x);
1339 temp = gen_reg_rtx (mode);
1340 convert_move (temp, x, unsignedp);
1345 /* This macro is used to determine what the largest unit size that
1346 move_by_pieces can use is. */
1348 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1349 move efficiently, as opposed to MOVE_MAX which is the maximum
1350 number of bhytes we can move with a single instruction. */
1352 #ifndef MOVE_MAX_PIECES
1353 #define MOVE_MAX_PIECES MOVE_MAX
1356 /* Generate several move instructions to copy LEN bytes
1357 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1358 The caller must pass FROM and TO
1359 through protect_from_queue before calling.
1360 ALIGN (in bytes) is maximum alignment we can assume. */
1363 move_by_pieces (to, from, len, align)
1368 struct move_by_pieces data;
1369 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1370 int max_size = MOVE_MAX_PIECES + 1;
1371 enum machine_mode mode = VOIDmode, tmode;
1372 enum insn_code icode;
1375 data.to_addr = to_addr;
1376 data.from_addr = from_addr;
1380 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1381 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1383 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1384 || GET_CODE (from_addr) == POST_INC
1385 || GET_CODE (from_addr) == POST_DEC);
1387 data.explicit_inc_from = 0;
1388 data.explicit_inc_to = 0;
1390 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1391 if (data.reverse) data.offset = len;
1394 data.to_struct = MEM_IN_STRUCT_P (to);
1395 data.from_struct = MEM_IN_STRUCT_P (from);
1396 data.to_readonly = RTX_UNCHANGING_P (to);
1397 data.from_readonly = RTX_UNCHANGING_P (from);
1399 /* If copying requires more than two move insns,
1400 copy addresses to registers (to make displacements shorter)
1401 and use post-increment if available. */
1402 if (!(data.autinc_from && data.autinc_to)
1403 && move_by_pieces_ninsns (len, align) > 2)
1405 /* Find the mode of the largest move... */
1406 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1407 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1408 if (GET_MODE_SIZE (tmode) < max_size)
1411 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1413 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1414 data.autinc_from = 1;
1415 data.explicit_inc_from = -1;
1417 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1419 data.from_addr = copy_addr_to_reg (from_addr);
1420 data.autinc_from = 1;
1421 data.explicit_inc_from = 1;
1423 if (!data.autinc_from && CONSTANT_P (from_addr))
1424 data.from_addr = copy_addr_to_reg (from_addr);
1425 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1427 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1429 data.explicit_inc_to = -1;
1431 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1433 data.to_addr = copy_addr_to_reg (to_addr);
1435 data.explicit_inc_to = 1;
1437 if (!data.autinc_to && CONSTANT_P (to_addr))
1438 data.to_addr = copy_addr_to_reg (to_addr);
1441 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1442 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1445 /* First move what we can in the largest integer mode, then go to
1446 successively smaller modes. */
1448 while (max_size > 1)
1450 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1451 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1452 if (GET_MODE_SIZE (tmode) < max_size)
1455 if (mode == VOIDmode)
1458 icode = mov_optab->handlers[(int) mode].insn_code;
1459 if (icode != CODE_FOR_nothing
1460 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1461 (unsigned int) GET_MODE_SIZE (mode)))
1462 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1464 max_size = GET_MODE_SIZE (mode);
1467 /* The code above should have handled everything. */
1472 /* Return number of insns required to move L bytes by pieces.
1473 ALIGN (in bytes) is maximum alignment we can assume. */
1476 move_by_pieces_ninsns (l, align)
1480 register int n_insns = 0;
1481 int max_size = MOVE_MAX + 1;
1483 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1484 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1487 while (max_size > 1)
1489 enum machine_mode mode = VOIDmode, tmode;
1490 enum insn_code icode;
1492 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1493 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1494 if (GET_MODE_SIZE (tmode) < max_size)
1497 if (mode == VOIDmode)
1500 icode = mov_optab->handlers[(int) mode].insn_code;
1501 if (icode != CODE_FOR_nothing
1502 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1503 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1505 max_size = GET_MODE_SIZE (mode);
1511 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1512 with move instructions for mode MODE. GENFUN is the gen_... function
1513 to make a move insn for that mode. DATA has all the other info. */
1516 move_by_pieces_1 (genfun, mode, data)
1517 rtx (*genfun) PARAMS ((rtx, ...));
1518 enum machine_mode mode;
1519 struct move_by_pieces *data;
1521 register int size = GET_MODE_SIZE (mode);
1522 register rtx to1, from1;
1524 while (data->len >= size)
1526 if (data->reverse) data->offset -= size;
1528 to1 = (data->autinc_to
1529 ? gen_rtx_MEM (mode, data->to_addr)
1530 : copy_rtx (change_address (data->to, mode,
1531 plus_constant (data->to_addr,
1533 MEM_IN_STRUCT_P (to1) = data->to_struct;
1534 RTX_UNCHANGING_P (to1) = data->to_readonly;
1537 = (data->autinc_from
1538 ? gen_rtx_MEM (mode, data->from_addr)
1539 : copy_rtx (change_address (data->from, mode,
1540 plus_constant (data->from_addr,
1542 MEM_IN_STRUCT_P (from1) = data->from_struct;
1543 RTX_UNCHANGING_P (from1) = data->from_readonly;
1545 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1546 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1547 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1548 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1550 emit_insn ((*genfun) (to1, from1));
1551 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1552 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1553 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1554 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1556 if (! data->reverse) data->offset += size;
1562 /* Emit code to move a block Y to a block X.
1563 This may be done with string-move instructions,
1564 with multiple scalar move instructions, or with a library call.
1566 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1568 SIZE is an rtx that says how long they are.
1569 ALIGN is the maximum alignment we can assume they have,
1572 Return the address of the new block, if memcpy is called and returns it,
1576 emit_block_move (x, y, size, align)
1582 #ifdef TARGET_MEM_FUNCTIONS
1584 tree call_expr, arg_list;
1587 if (GET_MODE (x) != BLKmode)
1590 if (GET_MODE (y) != BLKmode)
1593 x = protect_from_queue (x, 1);
1594 y = protect_from_queue (y, 0);
1595 size = protect_from_queue (size, 0);
1597 if (GET_CODE (x) != MEM)
1599 if (GET_CODE (y) != MEM)
1604 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1605 move_by_pieces (x, y, INTVAL (size), align);
1608 /* Try the most limited insn first, because there's no point
1609 including more than one in the machine description unless
1610 the more limited one has some advantage. */
1612 rtx opalign = GEN_INT (align);
1613 enum machine_mode mode;
1615 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1616 mode = GET_MODE_WIDER_MODE (mode))
1618 enum insn_code code = movstr_optab[(int) mode];
1619 insn_operand_predicate_fn pred;
1621 if (code != CODE_FOR_nothing
1622 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1623 here because if SIZE is less than the mode mask, as it is
1624 returned by the macro, it will definitely be less than the
1625 actual mode mask. */
1626 && ((GET_CODE (size) == CONST_INT
1627 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1628 <= (GET_MODE_MASK (mode) >> 1)))
1629 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1630 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1631 || (*pred) (x, BLKmode))
1632 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1633 || (*pred) (y, BLKmode))
1634 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1635 || (*pred) (opalign, VOIDmode)))
1638 rtx last = get_last_insn ();
1641 op2 = convert_to_mode (mode, size, 1);
1642 pred = insn_data[(int) code].operand[2].predicate;
1643 if (pred != 0 && ! (*pred) (op2, mode))
1644 op2 = copy_to_mode_reg (mode, op2);
1646 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1653 delete_insns_since (last);
1657 /* X, Y, or SIZE may have been passed through protect_from_queue.
1659 It is unsafe to save the value generated by protect_from_queue
1660 and reuse it later. Consider what happens if emit_queue is
1661 called before the return value from protect_from_queue is used.
1663 Expansion of the CALL_EXPR below will call emit_queue before
1664 we are finished emitting RTL for argument setup. So if we are
1665 not careful we could get the wrong value for an argument.
1667 To avoid this problem we go ahead and emit code to copy X, Y &
1668 SIZE into new pseudos. We can then place those new pseudos
1669 into an RTL_EXPR and use them later, even after a call to
1672 Note this is not strictly needed for library calls since they
1673 do not call emit_queue before loading their arguments. However,
1674 we may need to have library calls call emit_queue in the future
1675 since failing to do so could cause problems for targets which
1676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1677 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1678 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1680 #ifdef TARGET_MEM_FUNCTIONS
1681 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1683 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1684 TREE_UNSIGNED (integer_type_node));
1685 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1688 #ifdef TARGET_MEM_FUNCTIONS
1689 /* It is incorrect to use the libcall calling conventions to call
1690 memcpy in this context.
1692 This could be a user call to memcpy and the user may wish to
1693 examine the return value from memcpy.
1695 For targets where libcalls and normal calls have different conventions
1696 for returning pointers, we could end up generating incorrect code.
1698 So instead of using a libcall sequence we build up a suitable
1699 CALL_EXPR and expand the call in the normal fashion. */
1700 if (fn == NULL_TREE)
1704 /* This was copied from except.c, I don't know if all this is
1705 necessary in this context or not. */
1706 fn = get_identifier ("memcpy");
1707 push_obstacks_nochange ();
1708 end_temporary_allocation ();
1709 fntype = build_pointer_type (void_type_node);
1710 fntype = build_function_type (fntype, NULL_TREE);
1711 fn = build_decl (FUNCTION_DECL, fn, fntype);
1712 ggc_add_tree_root (&fn, 1);
1713 DECL_EXTERNAL (fn) = 1;
1714 TREE_PUBLIC (fn) = 1;
1715 DECL_ARTIFICIAL (fn) = 1;
1716 make_decl_rtl (fn, NULL_PTR, 1);
1717 assemble_external (fn);
1721 /* We need to make an argument list for the function call.
1723 memcpy has three arguments, the first two are void * addresses and
1724 the last is a size_t byte count for the copy. */
1726 = build_tree_list (NULL_TREE,
1727 make_tree (build_pointer_type (void_type_node), x));
1728 TREE_CHAIN (arg_list)
1729 = build_tree_list (NULL_TREE,
1730 make_tree (build_pointer_type (void_type_node), y));
1731 TREE_CHAIN (TREE_CHAIN (arg_list))
1732 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1733 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1735 /* Now we have to build up the CALL_EXPR itself. */
1736 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1737 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1738 call_expr, arg_list, NULL_TREE);
1739 TREE_SIDE_EFFECTS (call_expr) = 1;
1741 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1743 emit_library_call (bcopy_libfunc, 0,
1744 VOIDmode, 3, y, Pmode, x, Pmode,
1745 convert_to_mode (TYPE_MODE (integer_type_node), size,
1746 TREE_UNSIGNED (integer_type_node)),
1747 TYPE_MODE (integer_type_node));
1754 /* Copy all or part of a value X into registers starting at REGNO.
1755 The number of registers to be filled is NREGS. */
1758 move_block_to_reg (regno, x, nregs, mode)
1762 enum machine_mode mode;
1765 #ifdef HAVE_load_multiple
1773 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1774 x = validize_mem (force_const_mem (mode, x));
1776 /* See if the machine can do this with a load multiple insn. */
1777 #ifdef HAVE_load_multiple
1778 if (HAVE_load_multiple)
1780 last = get_last_insn ();
1781 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1789 delete_insns_since (last);
1793 for (i = 0; i < nregs; i++)
1794 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1795 operand_subword_force (x, i, mode));
1798 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1799 The number of registers to be filled is NREGS. SIZE indicates the number
1800 of bytes in the object X. */
1804 move_block_from_reg (regno, x, nregs, size)
1811 #ifdef HAVE_store_multiple
1815 enum machine_mode mode;
1817 /* If SIZE is that of a mode no bigger than a word, just use that
1818 mode's store operation. */
1819 if (size <= UNITS_PER_WORD
1820 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1822 emit_move_insn (change_address (x, mode, NULL),
1823 gen_rtx_REG (mode, regno));
1827 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1828 to the left before storing to memory. Note that the previous test
1829 doesn't handle all cases (e.g. SIZE == 3). */
1830 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1832 rtx tem = operand_subword (x, 0, 1, BLKmode);
1838 shift = expand_shift (LSHIFT_EXPR, word_mode,
1839 gen_rtx_REG (word_mode, regno),
1840 build_int_2 ((UNITS_PER_WORD - size)
1841 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1842 emit_move_insn (tem, shift);
1846 /* See if the machine can do this with a store multiple insn. */
1847 #ifdef HAVE_store_multiple
1848 if (HAVE_store_multiple)
1850 last = get_last_insn ();
1851 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1859 delete_insns_since (last);
1863 for (i = 0; i < nregs; i++)
1865 rtx tem = operand_subword (x, i, 1, BLKmode);
1870 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1874 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1875 registers represented by a PARALLEL. SSIZE represents the total size of
1876 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1878 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1879 the balance will be in what would be the low-order memory addresses, i.e.
1880 left justified for big endian, right justified for little endian. This
1881 happens to be true for the targets currently using this support. If this
1882 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1886 emit_group_load (dst, orig_src, ssize, align)
1894 if (GET_CODE (dst) != PARALLEL)
1897 /* Check for a NULL entry, used to indicate that the parameter goes
1898 both on the stack and in registers. */
1899 if (XEXP (XVECEXP (dst, 0, 0), 0))
1904 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1906 /* If we won't be loading directly from memory, protect the real source
1907 from strange tricks we might play. */
1909 if (GET_CODE (src) != MEM)
1911 if (GET_CODE (src) == VOIDmode)
1912 src = gen_reg_rtx (GET_MODE (dst));
1914 src = gen_reg_rtx (GET_MODE (orig_src));
1915 emit_move_insn (src, orig_src);
1918 /* Process the pieces. */
1919 for (i = start; i < XVECLEN (dst, 0); i++)
1921 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1922 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1923 int bytelen = GET_MODE_SIZE (mode);
1926 /* Handle trailing fragments that run over the size of the struct. */
1927 if (ssize >= 0 && bytepos + bytelen > ssize)
1929 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1930 bytelen = ssize - bytepos;
1935 /* Optimize the access just a bit. */
1936 if (GET_CODE (src) == MEM
1937 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1938 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1939 && bytelen == GET_MODE_SIZE (mode))
1941 tmps[i] = gen_reg_rtx (mode);
1942 emit_move_insn (tmps[i],
1943 change_address (src, mode,
1944 plus_constant (XEXP (src, 0),
1947 else if (GET_CODE (src) == CONCAT)
1950 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1951 tmps[i] = XEXP (src, 0);
1952 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1953 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1954 tmps[i] = XEXP (src, 1);
1960 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1961 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1962 mode, mode, align, ssize);
1965 if (BYTES_BIG_ENDIAN && shift)
1967 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1968 tmps[i], 0, OPTAB_WIDEN);
1973 /* Copy the extracted pieces into the proper (probable) hard regs. */
1974 for (i = start; i < XVECLEN (dst, 0); i++)
1975 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1978 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1979 registers represented by a PARALLEL. SSIZE represents the total size of
1980 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1983 emit_group_store (orig_dst, src, ssize, align)
1991 if (GET_CODE (src) != PARALLEL)
1994 /* Check for a NULL entry, used to indicate that the parameter goes
1995 both on the stack and in registers. */
1996 if (XEXP (XVECEXP (src, 0, 0), 0))
2001 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2003 /* Copy the (probable) hard regs into pseudos. */
2004 for (i = start; i < XVECLEN (src, 0); i++)
2006 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2007 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2008 emit_move_insn (tmps[i], reg);
2012 /* If we won't be storing directly into memory, protect the real destination
2013 from strange tricks we might play. */
2015 if (GET_CODE (dst) == PARALLEL)
2019 /* We can get a PARALLEL dst if there is a conditional expression in
2020 a return statement. In that case, the dst and src are the same,
2021 so no action is necessary. */
2022 if (rtx_equal_p (dst, src))
2025 /* It is unclear if we can ever reach here, but we may as well handle
2026 it. Allocate a temporary, and split this into a store/load to/from
2029 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2030 emit_group_store (temp, src, ssize, align);
2031 emit_group_load (dst, temp, ssize, align);
2034 else if (GET_CODE (dst) != MEM)
2036 dst = gen_reg_rtx (GET_MODE (orig_dst));
2037 /* Make life a bit easier for combine. */
2038 emit_move_insn (dst, const0_rtx);
2040 else if (! MEM_IN_STRUCT_P (dst))
2042 /* store_bit_field requires that memory operations have
2043 mem_in_struct_p set; we might not. */
2045 dst = copy_rtx (orig_dst);
2046 MEM_SET_IN_STRUCT_P (dst, 1);
2049 /* Process the pieces. */
2050 for (i = start; i < XVECLEN (src, 0); i++)
2052 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2053 enum machine_mode mode = GET_MODE (tmps[i]);
2054 int bytelen = GET_MODE_SIZE (mode);
2056 /* Handle trailing fragments that run over the size of the struct. */
2057 if (ssize >= 0 && bytepos + bytelen > ssize)
2059 if (BYTES_BIG_ENDIAN)
2061 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2062 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2063 tmps[i], 0, OPTAB_WIDEN);
2065 bytelen = ssize - bytepos;
2068 /* Optimize the access just a bit. */
2069 if (GET_CODE (dst) == MEM
2070 && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2071 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2072 && bytelen == GET_MODE_SIZE (mode))
2073 emit_move_insn (change_address (dst, mode,
2074 plus_constant (XEXP (dst, 0),
2078 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2079 mode, tmps[i], align, ssize);
2084 /* Copy from the pseudo into the (probable) hard reg. */
2085 if (GET_CODE (dst) == REG)
2086 emit_move_insn (orig_dst, dst);
2089 /* Generate code to copy a BLKmode object of TYPE out of a
2090 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2091 is null, a stack temporary is created. TGTBLK is returned.
2093 The primary purpose of this routine is to handle functions
2094 that return BLKmode structures in registers. Some machines
2095 (the PA for example) want to return all small structures
2096 in registers regardless of the structure's alignment. */
2099 copy_blkmode_from_reg (tgtblk,srcreg,type)
2104 int bytes = int_size_in_bytes (type);
2105 rtx src = NULL, dst = NULL;
2106 int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2107 int bitpos, xbitpos, big_endian_correction = 0;
2111 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2112 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2113 preserve_temp_slots (tgtblk);
2116 /* This code assumes srcreg is at least a full word. If it isn't,
2117 copy it into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg,
2121 TREE_UNSIGNED (type));
2123 /* Structures whose size is not a multiple of a word are aligned
2124 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2125 machine, this means we must skip the empty high order bytes when
2126 calculating the bit offset. */
2127 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2128 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2131 /* Copy the structure BITSIZE bites at a time.
2133 We could probably emit more efficient code for machines
2134 which do not use strict alignment, but it doesn't seem
2135 worth the effort at the current time. */
2136 for (bitpos = 0, xbitpos = big_endian_correction;
2137 bitpos < bytes * BITS_PER_UNIT;
2138 bitpos += bitsize, xbitpos += bitsize)
2141 /* We need a new source operand each time xbitpos is on a
2142 word boundary and when xbitpos == big_endian_correction
2143 (the first time through). */
2144 if (xbitpos % BITS_PER_WORD == 0
2145 || xbitpos == big_endian_correction)
2146 src = operand_subword_force (srcreg,
2147 xbitpos / BITS_PER_WORD,
2150 /* We need a new destination operand each time bitpos is on
2152 if (bitpos % BITS_PER_WORD == 0)
2153 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2155 /* Use xbitpos for the source extraction (right justified) and
2156 xbitpos for the destination store (left justified). */
2157 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2158 extract_bit_field (src, bitsize,
2159 xbitpos % BITS_PER_WORD, 1,
2160 NULL_RTX, word_mode,
2162 bitsize / BITS_PER_UNIT,
2164 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2170 /* Add a USE expression for REG to the (possibly empty) list pointed
2171 to by CALL_FUSAGE. REG must denote a hard register. */
2174 use_reg (call_fusage, reg)
2175 rtx *call_fusage, reg;
2177 if (GET_CODE (reg) != REG
2178 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2182 = gen_rtx_EXPR_LIST (VOIDmode,
2183 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2186 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
2190 use_regs (call_fusage, regno, nregs)
2197 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2200 for (i = 0; i < nregs; i++)
2201 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2204 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2205 PARALLEL REGS. This is for calls that pass values in multiple
2206 non-contiguous locations. The Irix 6 ABI has examples of this. */
2209 use_group_regs (call_fusage, regs)
2215 for (i = 0; i < XVECLEN (regs, 0); i++)
2217 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2219 /* A NULL entry means the parameter goes both on the stack and in
2220 registers. This can also be a MEM for targets that pass values
2221 partially on the stack and partially in registers. */
2222 if (reg != 0 && GET_CODE (reg) == REG)
2223 use_reg (call_fusage, reg);
2227 /* Generate several move instructions to clear LEN bytes of block TO.
2228 (A MEM rtx with BLKmode). The caller must pass TO through
2229 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2233 clear_by_pieces (to, len, align)
2238 struct clear_by_pieces data;
2239 rtx to_addr = XEXP (to, 0);
2240 int max_size = MOVE_MAX_PIECES + 1;
2241 enum machine_mode mode = VOIDmode, tmode;
2242 enum insn_code icode;
2245 data.to_addr = to_addr;
2248 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2249 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2251 data.explicit_inc_to = 0;
2253 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2254 if (data.reverse) data.offset = len;
2257 data.to_struct = MEM_IN_STRUCT_P (to);
2259 /* If copying requires more than two move insns,
2260 copy addresses to registers (to make displacements shorter)
2261 and use post-increment if available. */
2263 && move_by_pieces_ninsns (len, align) > 2)
2265 /* Determine the main mode we'll be using */
2266 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2267 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2268 if (GET_MODE_SIZE (tmode) < max_size)
2271 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2273 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2275 data.explicit_inc_to = -1;
2277 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2279 data.to_addr = copy_addr_to_reg (to_addr);
2281 data.explicit_inc_to = 1;
2283 if (!data.autinc_to && CONSTANT_P (to_addr))
2284 data.to_addr = copy_addr_to_reg (to_addr);
2287 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2288 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2291 /* First move what we can in the largest integer mode, then go to
2292 successively smaller modes. */
2294 while (max_size > 1)
2296 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2297 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2298 if (GET_MODE_SIZE (tmode) < max_size)
2301 if (mode == VOIDmode)
2304 icode = mov_optab->handlers[(int) mode].insn_code;
2305 if (icode != CODE_FOR_nothing
2306 && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2307 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2309 max_size = GET_MODE_SIZE (mode);
2312 /* The code above should have handled everything. */
2317 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2318 with move instructions for mode MODE. GENFUN is the gen_... function
2319 to make a move insn for that mode. DATA has all the other info. */
2322 clear_by_pieces_1 (genfun, mode, data)
2323 rtx (*genfun) PARAMS ((rtx, ...));
2324 enum machine_mode mode;
2325 struct clear_by_pieces *data;
2327 register int size = GET_MODE_SIZE (mode);
2330 while (data->len >= size)
2332 if (data->reverse) data->offset -= size;
2334 to1 = (data->autinc_to
2335 ? gen_rtx_MEM (mode, data->to_addr)
2336 : copy_rtx (change_address (data->to, mode,
2337 plus_constant (data->to_addr,
2339 MEM_IN_STRUCT_P (to1) = data->to_struct;
2341 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2342 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2344 emit_insn ((*genfun) (to1, const0_rtx));
2345 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2346 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2348 if (! data->reverse) data->offset += size;
2354 /* Write zeros through the storage of OBJECT.
2355 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2356 the maximum alignment we can is has, measured in bytes.
2358 If we call a function that returns the length of the block, return it. */
2361 clear_storage (object, size, align)
2366 #ifdef TARGET_MEM_FUNCTIONS
2368 tree call_expr, arg_list;
2372 if (GET_MODE (object) == BLKmode)
2374 object = protect_from_queue (object, 1);
2375 size = protect_from_queue (size, 0);
2377 if (GET_CODE (size) == CONST_INT
2378 && MOVE_BY_PIECES_P (INTVAL (size), align))
2379 clear_by_pieces (object, INTVAL (size), align);
2383 /* Try the most limited insn first, because there's no point
2384 including more than one in the machine description unless
2385 the more limited one has some advantage. */
2387 rtx opalign = GEN_INT (align);
2388 enum machine_mode mode;
2390 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2391 mode = GET_MODE_WIDER_MODE (mode))
2393 enum insn_code code = clrstr_optab[(int) mode];
2394 insn_operand_predicate_fn pred;
2396 if (code != CODE_FOR_nothing
2397 /* We don't need MODE to be narrower than
2398 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2399 the mode mask, as it is returned by the macro, it will
2400 definitely be less than the actual mode mask. */
2401 && ((GET_CODE (size) == CONST_INT
2402 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2403 <= (GET_MODE_MASK (mode) >> 1)))
2404 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2405 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2406 || (*pred) (object, BLKmode))
2407 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2408 || (*pred) (opalign, VOIDmode)))
2411 rtx last = get_last_insn ();
2414 op1 = convert_to_mode (mode, size, 1);
2415 pred = insn_data[(int) code].operand[1].predicate;
2416 if (pred != 0 && ! (*pred) (op1, mode))
2417 op1 = copy_to_mode_reg (mode, op1);
2419 pat = GEN_FCN ((int) code) (object, op1, opalign);
2426 delete_insns_since (last);
2430 /* OBJECT or SIZE may have been passed through protect_from_queue.
2432 It is unsafe to save the value generated by protect_from_queue
2433 and reuse it later. Consider what happens if emit_queue is
2434 called before the return value from protect_from_queue is used.
2436 Expansion of the CALL_EXPR below will call emit_queue before
2437 we are finished emitting RTL for argument setup. So if we are
2438 not careful we could get the wrong value for an argument.
2440 To avoid this problem we go ahead and emit code to copy OBJECT
2441 and SIZE into new pseudos. We can then place those new pseudos
2442 into an RTL_EXPR and use them later, even after a call to
2445 Note this is not strictly needed for library calls since they
2446 do not call emit_queue before loading their arguments. However,
2447 we may need to have library calls call emit_queue in the future
2448 since failing to do so could cause problems for targets which
2449 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2450 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2452 #ifdef TARGET_MEM_FUNCTIONS
2453 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2455 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2456 TREE_UNSIGNED (integer_type_node));
2457 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2461 #ifdef TARGET_MEM_FUNCTIONS
2462 /* It is incorrect to use the libcall calling conventions to call
2463 memset in this context.
2465 This could be a user call to memset and the user may wish to
2466 examine the return value from memset.
2468 For targets where libcalls and normal calls have different
2469 conventions for returning pointers, we could end up generating
2472 So instead of using a libcall sequence we build up a suitable
2473 CALL_EXPR and expand the call in the normal fashion. */
2474 if (fn == NULL_TREE)
2478 /* This was copied from except.c, I don't know if all this is
2479 necessary in this context or not. */
2480 fn = get_identifier ("memset");
2481 push_obstacks_nochange ();
2482 end_temporary_allocation ();
2483 fntype = build_pointer_type (void_type_node);
2484 fntype = build_function_type (fntype, NULL_TREE);
2485 fn = build_decl (FUNCTION_DECL, fn, fntype);
2486 ggc_add_tree_root (&fn, 1);
2487 DECL_EXTERNAL (fn) = 1;
2488 TREE_PUBLIC (fn) = 1;
2489 DECL_ARTIFICIAL (fn) = 1;
2490 make_decl_rtl (fn, NULL_PTR, 1);
2491 assemble_external (fn);
2495 /* We need to make an argument list for the function call.
2497 memset has three arguments, the first is a void * addresses, the
2498 second a integer with the initialization value, the last is a
2499 size_t byte count for the copy. */
2501 = build_tree_list (NULL_TREE,
2502 make_tree (build_pointer_type (void_type_node),
2504 TREE_CHAIN (arg_list)
2505 = build_tree_list (NULL_TREE,
2506 make_tree (integer_type_node, const0_rtx));
2507 TREE_CHAIN (TREE_CHAIN (arg_list))
2508 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2509 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2511 /* Now we have to build up the CALL_EXPR itself. */
2512 call_expr = build1 (ADDR_EXPR,
2513 build_pointer_type (TREE_TYPE (fn)), fn);
2514 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2515 call_expr, arg_list, NULL_TREE);
2516 TREE_SIDE_EFFECTS (call_expr) = 1;
2518 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2520 emit_library_call (bzero_libfunc, 0,
2521 VOIDmode, 2, object, Pmode, size,
2522 TYPE_MODE (integer_type_node));
2527 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2532 /* Generate code to copy Y into X.
2533 Both Y and X must have the same mode, except that
2534 Y can be a constant with VOIDmode.
2535 This mode cannot be BLKmode; use emit_block_move for that.
2537 Return the last instruction emitted. */
2540 emit_move_insn (x, y)
2543 enum machine_mode mode = GET_MODE (x);
2545 x = protect_from_queue (x, 1);
2546 y = protect_from_queue (y, 0);
2548 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2551 /* Never force constant_p_rtx to memory. */
2552 if (GET_CODE (y) == CONSTANT_P_RTX)
2554 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2555 y = force_const_mem (mode, y);
2557 /* If X or Y are memory references, verify that their addresses are valid
2559 if (GET_CODE (x) == MEM
2560 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2561 && ! push_operand (x, GET_MODE (x)))
2563 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2564 x = change_address (x, VOIDmode, XEXP (x, 0));
2566 if (GET_CODE (y) == MEM
2567 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2569 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2570 y = change_address (y, VOIDmode, XEXP (y, 0));
2572 if (mode == BLKmode)
2575 return emit_move_insn_1 (x, y);
2578 /* Low level part of emit_move_insn.
2579 Called just like emit_move_insn, but assumes X and Y
2580 are basically valid. */
2583 emit_move_insn_1 (x, y)
2586 enum machine_mode mode = GET_MODE (x);
2587 enum machine_mode submode;
2588 enum mode_class class = GET_MODE_CLASS (mode);
2591 if (mode >= MAX_MACHINE_MODE)
2594 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2596 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2598 /* Expand complex moves by moving real part and imag part, if possible. */
2599 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2600 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2602 (class == MODE_COMPLEX_INT
2603 ? MODE_INT : MODE_FLOAT),
2605 && (mov_optab->handlers[(int) submode].insn_code
2606 != CODE_FOR_nothing))
2608 /* Don't split destination if it is a stack push. */
2609 int stack = push_operand (x, GET_MODE (x));
2611 /* If this is a stack, push the highpart first, so it
2612 will be in the argument order.
2614 In that case, change_address is used only to convert
2615 the mode, not to change the address. */
2618 /* Note that the real part always precedes the imag part in memory
2619 regardless of machine's endianness. */
2620 #ifdef STACK_GROWS_DOWNWARD
2621 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2622 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2623 gen_imagpart (submode, y)));
2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2625 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2626 gen_realpart (submode, y)));
2628 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2629 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2630 gen_realpart (submode, y)));
2631 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2632 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2633 gen_imagpart (submode, y)));
2638 rtx realpart_x, realpart_y;
2639 rtx imagpart_x, imagpart_y;
2641 /* If this is a complex value with each part being smaller than a
2642 word, the usual calling sequence will likely pack the pieces into
2643 a single register. Unfortunately, SUBREG of hard registers only
2644 deals in terms of words, so we have a problem converting input
2645 arguments to the CONCAT of two registers that is used elsewhere
2646 for complex values. If this is before reload, we can copy it into
2647 memory and reload. FIXME, we should see about using extract and
2648 insert on integer registers, but complex short and complex char
2649 variables should be rarely used. */
2650 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2651 && (reload_in_progress | reload_completed) == 0)
2653 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2654 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2656 if (packed_dest_p || packed_src_p)
2658 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2659 ? MODE_FLOAT : MODE_INT);
2661 enum machine_mode reg_mode =
2662 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2664 if (reg_mode != BLKmode)
2666 rtx mem = assign_stack_temp (reg_mode,
2667 GET_MODE_SIZE (mode), 0);
2669 rtx cmem = change_address (mem, mode, NULL_RTX);
2671 cfun->cannot_inline = "function uses short complex types";
2675 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2676 emit_move_insn_1 (cmem, y);
2677 return emit_move_insn_1 (sreg, mem);
2681 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2682 emit_move_insn_1 (mem, sreg);
2683 return emit_move_insn_1 (x, cmem);
2689 realpart_x = gen_realpart (submode, x);
2690 realpart_y = gen_realpart (submode, y);
2691 imagpart_x = gen_imagpart (submode, x);
2692 imagpart_y = gen_imagpart (submode, y);
2694 /* Show the output dies here. This is necessary for SUBREGs
2695 of pseudos since we cannot track their lifetimes correctly;
2696 hard regs shouldn't appear here except as return values.
2697 We never want to emit such a clobber after reload. */
2699 && ! (reload_in_progress || reload_completed)
2700 && (GET_CODE (realpart_x) == SUBREG
2701 || GET_CODE (imagpart_x) == SUBREG))
2703 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2706 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2707 (realpart_x, realpart_y));
2708 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2709 (imagpart_x, imagpart_y));
2712 return get_last_insn ();
2715 /* This will handle any multi-word mode that lacks a move_insn pattern.
2716 However, you will get better code if you define such patterns,
2717 even if they must turn into multiple assembler instructions. */
2718 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2724 #ifdef PUSH_ROUNDING
2726 /* If X is a push on the stack, do the push now and replace
2727 X with a reference to the stack pointer. */
2728 if (push_operand (x, GET_MODE (x)))
2730 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2731 x = change_address (x, VOIDmode, stack_pointer_rtx);
2739 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2742 rtx xpart = operand_subword (x, i, 1, mode);
2743 rtx ypart = operand_subword (y, i, 1, mode);
2745 /* If we can't get a part of Y, put Y into memory if it is a
2746 constant. Otherwise, force it into a register. If we still
2747 can't get a part of Y, abort. */
2748 if (ypart == 0 && CONSTANT_P (y))
2750 y = force_const_mem (mode, y);
2751 ypart = operand_subword (y, i, 1, mode);
2753 else if (ypart == 0)
2754 ypart = operand_subword_force (y, i, mode);
2756 if (xpart == 0 || ypart == 0)
2759 need_clobber |= (GET_CODE (xpart) == SUBREG);
2761 last_insn = emit_move_insn (xpart, ypart);
2764 seq = gen_sequence ();
2767 /* Show the output dies here. This is necessary for SUBREGs
2768 of pseudos since we cannot track their lifetimes correctly;
2769 hard regs shouldn't appear here except as return values.
2770 We never want to emit such a clobber after reload. */
2772 && ! (reload_in_progress || reload_completed)
2773 && need_clobber != 0)
2775 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2786 /* Pushing data onto the stack. */
2788 /* Push a block of length SIZE (perhaps variable)
2789 and return an rtx to address the beginning of the block.
2790 Note that it is not possible for the value returned to be a QUEUED.
2791 The value may be virtual_outgoing_args_rtx.
2793 EXTRA is the number of bytes of padding to push in addition to SIZE.
2794 BELOW nonzero means this padding comes at low addresses;
2795 otherwise, the padding comes at high addresses. */
2798 push_block (size, extra, below)
2804 size = convert_modes (Pmode, ptr_mode, size, 1);
2805 if (CONSTANT_P (size))
2806 anti_adjust_stack (plus_constant (size, extra));
2807 else if (GET_CODE (size) == REG && extra == 0)
2808 anti_adjust_stack (size);
2811 rtx temp = copy_to_mode_reg (Pmode, size);
2813 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2814 temp, 0, OPTAB_LIB_WIDEN);
2815 anti_adjust_stack (temp);
2818 #if defined (STACK_GROWS_DOWNWARD) \
2819 || (defined (ARGS_GROW_DOWNWARD) \
2820 && !defined (ACCUMULATE_OUTGOING_ARGS))
2822 /* Return the lowest stack address when STACK or ARGS grow downward and
2823 we are not aaccumulating outgoing arguments (the c4x port uses such
2825 temp = virtual_outgoing_args_rtx;
2826 if (extra != 0 && below)
2827 temp = plus_constant (temp, extra);
2829 if (GET_CODE (size) == CONST_INT)
2830 temp = plus_constant (virtual_outgoing_args_rtx,
2831 - INTVAL (size) - (below ? 0 : extra));
2832 else if (extra != 0 && !below)
2833 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2834 negate_rtx (Pmode, plus_constant (size, extra)));
2836 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2837 negate_rtx (Pmode, size));
2840 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2846 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2849 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2850 block of SIZE bytes. */
2853 get_push_address (size)
2858 if (STACK_PUSH_CODE == POST_DEC)
2859 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2860 else if (STACK_PUSH_CODE == POST_INC)
2861 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2863 temp = stack_pointer_rtx;
2865 return copy_to_reg (temp);
2868 /* Generate code to push X onto the stack, assuming it has mode MODE and
2870 MODE is redundant except when X is a CONST_INT (since they don't
2872 SIZE is an rtx for the size of data to be copied (in bytes),
2873 needed only if X is BLKmode.
2875 ALIGN (in bytes) is maximum alignment we can assume.
2877 If PARTIAL and REG are both nonzero, then copy that many of the first
2878 words of X into registers starting with REG, and push the rest of X.
2879 The amount of space pushed is decreased by PARTIAL words,
2880 rounded *down* to a multiple of PARM_BOUNDARY.
2881 REG must be a hard register in this case.
2882 If REG is zero but PARTIAL is not, take any all others actions for an
2883 argument partially in registers, but do not actually load any
2886 EXTRA is the amount in bytes of extra space to leave next to this arg.
2887 This is ignored if an argument block has already been allocated.
2889 On a machine that lacks real push insns, ARGS_ADDR is the address of
2890 the bottom of the argument block for this call. We use indexing off there
2891 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2892 argument block has not been preallocated.
2894 ARGS_SO_FAR is the size of args previously pushed for this call.
2896 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2897 for arguments passed in registers. If nonzero, it will be the number
2898 of bytes required. */
2901 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2902 args_addr, args_so_far, reg_parm_stack_space,
2905 enum machine_mode mode;
2914 int reg_parm_stack_space;
2918 enum direction stack_direction
2919 #ifdef STACK_GROWS_DOWNWARD
2925 /* Decide where to pad the argument: `downward' for below,
2926 `upward' for above, or `none' for don't pad it.
2927 Default is below for small data on big-endian machines; else above. */
2928 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2930 /* Invert direction if stack is post-update. */
2931 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2932 if (where_pad != none)
2933 where_pad = (where_pad == downward ? upward : downward);
2935 xinner = x = protect_from_queue (x, 0);
2937 if (mode == BLKmode)
2939 /* Copy a block into the stack, entirely or partially. */
2942 int used = partial * UNITS_PER_WORD;
2943 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2951 /* USED is now the # of bytes we need not copy to the stack
2952 because registers will take care of them. */
2955 xinner = change_address (xinner, BLKmode,
2956 plus_constant (XEXP (xinner, 0), used));
2958 /* If the partial register-part of the arg counts in its stack size,
2959 skip the part of stack space corresponding to the registers.
2960 Otherwise, start copying to the beginning of the stack space,
2961 by setting SKIP to 0. */
2962 skip = (reg_parm_stack_space == 0) ? 0 : used;
2964 #ifdef PUSH_ROUNDING
2965 /* Do it with several push insns if that doesn't take lots of insns
2966 and if there is no difficulty with push insns that skip bytes
2967 on the stack for alignment purposes. */
2969 && GET_CODE (size) == CONST_INT
2971 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2972 /* Here we avoid the case of a structure whose weak alignment
2973 forces many pushes of a small amount of data,
2974 and such small pushes do rounding that causes trouble. */
2975 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
2976 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2977 || PUSH_ROUNDING (align) == align)
2978 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2980 /* Push padding now if padding above and stack grows down,
2981 or if padding below and stack grows up.
2982 But if space already allocated, this has already been done. */
2983 if (extra && args_addr == 0
2984 && where_pad != none && where_pad != stack_direction)
2985 anti_adjust_stack (GEN_INT (extra));
2987 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2988 INTVAL (size) - used, align);
2990 if (current_function_check_memory_usage && ! in_check_memory_usage)
2994 in_check_memory_usage = 1;
2995 temp = get_push_address (INTVAL(size) - used);
2996 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2997 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2999 XEXP (xinner, 0), Pmode,
3000 GEN_INT (INTVAL(size) - used),
3001 TYPE_MODE (sizetype));
3003 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3005 GEN_INT (INTVAL(size) - used),
3006 TYPE_MODE (sizetype),
3007 GEN_INT (MEMORY_USE_RW),
3008 TYPE_MODE (integer_type_node));
3009 in_check_memory_usage = 0;
3013 #endif /* PUSH_ROUNDING */
3015 /* Otherwise make space on the stack and copy the data
3016 to the address of that space. */
3018 /* Deduct words put into registers from the size we must copy. */
3021 if (GET_CODE (size) == CONST_INT)
3022 size = GEN_INT (INTVAL (size) - used);
3024 size = expand_binop (GET_MODE (size), sub_optab, size,
3025 GEN_INT (used), NULL_RTX, 0,
3029 /* Get the address of the stack space.
3030 In this case, we do not deal with EXTRA separately.
3031 A single stack adjust will do. */
3034 temp = push_block (size, extra, where_pad == downward);
3037 else if (GET_CODE (args_so_far) == CONST_INT)
3038 temp = memory_address (BLKmode,
3039 plus_constant (args_addr,
3040 skip + INTVAL (args_so_far)));
3042 temp = memory_address (BLKmode,
3043 plus_constant (gen_rtx_PLUS (Pmode,
3047 if (current_function_check_memory_usage && ! in_check_memory_usage)
3051 in_check_memory_usage = 1;
3052 target = copy_to_reg (temp);
3053 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3054 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3056 XEXP (xinner, 0), Pmode,
3057 size, TYPE_MODE (sizetype));
3059 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3061 size, TYPE_MODE (sizetype),
3062 GEN_INT (MEMORY_USE_RW),
3063 TYPE_MODE (integer_type_node));
3064 in_check_memory_usage = 0;
3067 /* TEMP is the address of the block. Copy the data there. */
3068 if (GET_CODE (size) == CONST_INT
3069 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3071 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3072 INTVAL (size), align);
3077 rtx opalign = GEN_INT (align);
3078 enum machine_mode mode;
3079 rtx target = gen_rtx_MEM (BLKmode, temp);
3081 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3083 mode = GET_MODE_WIDER_MODE (mode))
3085 enum insn_code code = movstr_optab[(int) mode];
3086 insn_operand_predicate_fn pred;
3088 if (code != CODE_FOR_nothing
3089 && ((GET_CODE (size) == CONST_INT
3090 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3091 <= (GET_MODE_MASK (mode) >> 1)))
3092 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3093 && (!(pred = insn_data[(int) code].operand[0].predicate)
3094 || ((*pred) (target, BLKmode)))
3095 && (!(pred = insn_data[(int) code].operand[1].predicate)
3096 || ((*pred) (xinner, BLKmode)))
3097 && (!(pred = insn_data[(int) code].operand[3].predicate)
3098 || ((*pred) (opalign, VOIDmode))))
3100 rtx op2 = convert_to_mode (mode, size, 1);
3101 rtx last = get_last_insn ();
3104 pred = insn_data[(int) code].operand[2].predicate;
3105 if (pred != 0 && ! (*pred) (op2, mode))
3106 op2 = copy_to_mode_reg (mode, op2);
3108 pat = GEN_FCN ((int) code) (target, xinner,
3116 delete_insns_since (last);
3121 #ifndef ACCUMULATE_OUTGOING_ARGS
3122 /* If the source is referenced relative to the stack pointer,
3123 copy it to another register to stabilize it. We do not need
3124 to do this if we know that we won't be changing sp. */
3126 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3127 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3128 temp = copy_to_reg (temp);
3131 /* Make inhibit_defer_pop nonzero around the library call
3132 to force it to pop the bcopy-arguments right away. */
3134 #ifdef TARGET_MEM_FUNCTIONS
3135 emit_library_call (memcpy_libfunc, 0,
3136 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3137 convert_to_mode (TYPE_MODE (sizetype),
3138 size, TREE_UNSIGNED (sizetype)),
3139 TYPE_MODE (sizetype));
3141 emit_library_call (bcopy_libfunc, 0,
3142 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3143 convert_to_mode (TYPE_MODE (integer_type_node),
3145 TREE_UNSIGNED (integer_type_node)),
3146 TYPE_MODE (integer_type_node));
3151 else if (partial > 0)
3153 /* Scalar partly in registers. */
3155 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3158 /* # words of start of argument
3159 that we must make space for but need not store. */
3160 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3161 int args_offset = INTVAL (args_so_far);
3164 /* Push padding now if padding above and stack grows down,
3165 or if padding below and stack grows up.
3166 But if space already allocated, this has already been done. */
3167 if (extra && args_addr == 0
3168 && where_pad != none && where_pad != stack_direction)
3169 anti_adjust_stack (GEN_INT (extra));
3171 /* If we make space by pushing it, we might as well push
3172 the real data. Otherwise, we can leave OFFSET nonzero
3173 and leave the space uninitialized. */
3177 /* Now NOT_STACK gets the number of words that we don't need to
3178 allocate on the stack. */
3179 not_stack = partial - offset;
3181 /* If the partial register-part of the arg counts in its stack size,
3182 skip the part of stack space corresponding to the registers.
3183 Otherwise, start copying to the beginning of the stack space,
3184 by setting SKIP to 0. */
3185 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3187 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3188 x = validize_mem (force_const_mem (mode, x));
3190 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3191 SUBREGs of such registers are not allowed. */
3192 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3193 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3194 x = copy_to_reg (x);
3196 /* Loop over all the words allocated on the stack for this arg. */
3197 /* We can do it by words, because any scalar bigger than a word
3198 has a size a multiple of a word. */
3199 #ifndef PUSH_ARGS_REVERSED
3200 for (i = not_stack; i < size; i++)
3202 for (i = size - 1; i >= not_stack; i--)
3204 if (i >= not_stack + offset)
3205 emit_push_insn (operand_subword_force (x, i, mode),
3206 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3208 GEN_INT (args_offset + ((i - not_stack + skip)
3210 reg_parm_stack_space, alignment_pad);
3215 rtx target = NULL_RTX;
3217 /* Push padding now if padding above and stack grows down,
3218 or if padding below and stack grows up.
3219 But if space already allocated, this has already been done. */
3220 if (extra && args_addr == 0
3221 && where_pad != none && where_pad != stack_direction)
3222 anti_adjust_stack (GEN_INT (extra));
3224 #ifdef PUSH_ROUNDING
3226 addr = gen_push_operand ();
3230 if (GET_CODE (args_so_far) == CONST_INT)
3232 = memory_address (mode,
3233 plus_constant (args_addr,
3234 INTVAL (args_so_far)));
3236 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3241 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3243 if (current_function_check_memory_usage && ! in_check_memory_usage)
3245 in_check_memory_usage = 1;
3247 target = get_push_address (GET_MODE_SIZE (mode));
3249 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3250 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3253 GEN_INT (GET_MODE_SIZE (mode)),
3254 TYPE_MODE (sizetype));
3256 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3258 GEN_INT (GET_MODE_SIZE (mode)),
3259 TYPE_MODE (sizetype),
3260 GEN_INT (MEMORY_USE_RW),
3261 TYPE_MODE (integer_type_node));
3262 in_check_memory_usage = 0;
3267 /* If part should go in registers, copy that part
3268 into the appropriate registers. Do this now, at the end,
3269 since mem-to-mem copies above may do function calls. */
3270 if (partial > 0 && reg != 0)
3272 /* Handle calls that pass values in multiple non-contiguous locations.
3273 The Irix 6 ABI has examples of this. */
3274 if (GET_CODE (reg) == PARALLEL)
3275 emit_group_load (reg, x, -1, align); /* ??? size? */
3277 move_block_to_reg (REGNO (reg), x, partial, mode);
3280 if (extra && args_addr == 0 && where_pad == stack_direction)
3281 anti_adjust_stack (GEN_INT (extra));
3284 anti_adjust_stack (alignment_pad);
3287 /* Expand an assignment that stores the value of FROM into TO.
3288 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3289 (This may contain a QUEUED rtx;
3290 if the value is constant, this rtx is a constant.)
3291 Otherwise, the returned value is NULL_RTX.
3293 SUGGEST_REG is no longer actually used.
3294 It used to mean, copy the value through a register
3295 and return that register, if that is possible.
3296 We now use WANT_VALUE to decide whether to do this. */
3299 expand_assignment (to, from, want_value, suggest_reg)
3302 int suggest_reg ATTRIBUTE_UNUSED;
3304 register rtx to_rtx = 0;
3307 /* Don't crash if the lhs of the assignment was erroneous. */
3309 if (TREE_CODE (to) == ERROR_MARK)
3311 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3312 return want_value ? result : NULL_RTX;
3315 /* Assignment of a structure component needs special treatment
3316 if the structure component's rtx is not simply a MEM.
3317 Assignment of an array element at a constant index, and assignment of
3318 an array element in an unaligned packed structure field, has the same
3321 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3322 || TREE_CODE (to) == ARRAY_REF)
3324 enum machine_mode mode1;
3331 unsigned int alignment;
3334 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3335 &unsignedp, &volatilep, &alignment);
3337 /* If we are going to use store_bit_field and extract_bit_field,
3338 make sure to_rtx will be safe for multiple use. */
3340 if (mode1 == VOIDmode && want_value)
3341 tem = stabilize_reference (tem);
3343 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3346 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3348 if (GET_CODE (to_rtx) != MEM)
3351 if (GET_MODE (offset_rtx) != ptr_mode)
3353 #ifdef POINTERS_EXTEND_UNSIGNED
3354 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3356 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3360 /* A constant address in TO_RTX can have VOIDmode, we must not try
3361 to call force_reg for that case. Avoid that case. */
3362 if (GET_CODE (to_rtx) == MEM
3363 && GET_MODE (to_rtx) == BLKmode
3364 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3366 && (bitpos % bitsize) == 0
3367 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3368 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3370 rtx temp = change_address (to_rtx, mode1,
3371 plus_constant (XEXP (to_rtx, 0),
3374 if (GET_CODE (XEXP (temp, 0)) == REG)
3377 to_rtx = change_address (to_rtx, mode1,
3378 force_reg (GET_MODE (XEXP (temp, 0)),
3383 to_rtx = change_address (to_rtx, VOIDmode,
3384 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3385 force_reg (ptr_mode,
3391 if (GET_CODE (to_rtx) == MEM)
3393 /* When the offset is zero, to_rtx is the address of the
3394 structure we are storing into, and hence may be shared.
3395 We must make a new MEM before setting the volatile bit. */
3397 to_rtx = copy_rtx (to_rtx);
3399 MEM_VOLATILE_P (to_rtx) = 1;
3401 #if 0 /* This was turned off because, when a field is volatile
3402 in an object which is not volatile, the object may be in a register,
3403 and then we would abort over here. */
3409 if (TREE_CODE (to) == COMPONENT_REF
3410 && TREE_READONLY (TREE_OPERAND (to, 1)))
3413 to_rtx = copy_rtx (to_rtx);
3415 RTX_UNCHANGING_P (to_rtx) = 1;
3418 /* Check the access. */
3419 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3424 enum machine_mode best_mode;
3426 best_mode = get_best_mode (bitsize, bitpos,
3427 TYPE_ALIGN (TREE_TYPE (tem)),
3429 if (best_mode == VOIDmode)
3432 best_mode_size = GET_MODE_BITSIZE (best_mode);
3433 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3434 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3435 size *= GET_MODE_SIZE (best_mode);
3437 /* Check the access right of the pointer. */
3439 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3441 GEN_INT (size), TYPE_MODE (sizetype),
3442 GEN_INT (MEMORY_USE_WO),
3443 TYPE_MODE (integer_type_node));
3446 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3448 /* Spurious cast makes HPUX compiler happy. */
3449 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3452 /* Required alignment of containing datum. */
3454 int_size_in_bytes (TREE_TYPE (tem)),
3455 get_alias_set (to));
3456 preserve_temp_slots (result);
3460 /* If the value is meaningful, convert RESULT to the proper mode.
3461 Otherwise, return nothing. */
3462 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3463 TYPE_MODE (TREE_TYPE (from)),
3465 TREE_UNSIGNED (TREE_TYPE (to)))
3469 /* If the rhs is a function call and its value is not an aggregate,
3470 call the function before we start to compute the lhs.
3471 This is needed for correct code for cases such as
3472 val = setjmp (buf) on machines where reference to val
3473 requires loading up part of an address in a separate insn.
3475 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3476 since it might be a promoted variable where the zero- or sign- extension
3477 needs to be done. Handling this in the normal way is safe because no
3478 computation is done before the call. */
3479 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3480 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3481 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3482 && GET_CODE (DECL_RTL (to)) == REG))
3487 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3489 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3491 /* Handle calls that return values in multiple non-contiguous locations.
3492 The Irix 6 ABI has examples of this. */
3493 if (GET_CODE (to_rtx) == PARALLEL)
3494 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3495 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3496 else if (GET_MODE (to_rtx) == BLKmode)
3497 emit_block_move (to_rtx, value, expr_size (from),
3498 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3501 #ifdef POINTERS_EXTEND_UNSIGNED
3502 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3503 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3504 value = convert_memory_address (GET_MODE (to_rtx), value);
3506 emit_move_insn (to_rtx, value);
3508 preserve_temp_slots (to_rtx);
3511 return want_value ? to_rtx : NULL_RTX;
3514 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3515 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3519 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3520 if (GET_CODE (to_rtx) == MEM)
3521 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3524 /* Don't move directly into a return register. */
3525 if (TREE_CODE (to) == RESULT_DECL
3526 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3531 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3533 if (GET_CODE (to_rtx) == PARALLEL)
3534 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3535 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3537 emit_move_insn (to_rtx, temp);
3539 preserve_temp_slots (to_rtx);
3542 return want_value ? to_rtx : NULL_RTX;
3545 /* In case we are returning the contents of an object which overlaps
3546 the place the value is being stored, use a safe function when copying
3547 a value through a pointer into a structure value return block. */
3548 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3549 && current_function_returns_struct
3550 && !current_function_returns_pcc_struct)
3555 size = expr_size (from);
3556 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3557 EXPAND_MEMORY_USE_DONT);
3559 /* Copy the rights of the bitmap. */
3560 if (current_function_check_memory_usage)
3561 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3562 XEXP (to_rtx, 0), Pmode,
3563 XEXP (from_rtx, 0), Pmode,
3564 convert_to_mode (TYPE_MODE (sizetype),
3565 size, TREE_UNSIGNED (sizetype)),
3566 TYPE_MODE (sizetype));
3568 #ifdef TARGET_MEM_FUNCTIONS
3569 emit_library_call (memcpy_libfunc, 0,
3570 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3571 XEXP (from_rtx, 0), Pmode,
3572 convert_to_mode (TYPE_MODE (sizetype),
3573 size, TREE_UNSIGNED (sizetype)),
3574 TYPE_MODE (sizetype));
3576 emit_library_call (bcopy_libfunc, 0,
3577 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3578 XEXP (to_rtx, 0), Pmode,
3579 convert_to_mode (TYPE_MODE (integer_type_node),
3580 size, TREE_UNSIGNED (integer_type_node)),
3581 TYPE_MODE (integer_type_node));
3584 preserve_temp_slots (to_rtx);
3587 return want_value ? to_rtx : NULL_RTX;
3590 /* Compute FROM and store the value in the rtx we got. */
3593 result = store_expr (from, to_rtx, want_value);
3594 preserve_temp_slots (result);
3597 return want_value ? result : NULL_RTX;
3600 /* Generate code for computing expression EXP,
3601 and storing the value into TARGET.
3602 TARGET may contain a QUEUED rtx.
3604 If WANT_VALUE is nonzero, return a copy of the value
3605 not in TARGET, so that we can be sure to use the proper
3606 value in a containing expression even if TARGET has something
3607 else stored in it. If possible, we copy the value through a pseudo
3608 and return that pseudo. Or, if the value is constant, we try to
3609 return the constant. In some cases, we return a pseudo
3610 copied *from* TARGET.
3612 If the mode is BLKmode then we may return TARGET itself.
3613 It turns out that in BLKmode it doesn't cause a problem.
3614 because C has no operators that could combine two different
3615 assignments into the same BLKmode object with different values
3616 with no sequence point. Will other languages need this to
3619 If WANT_VALUE is 0, we return NULL, to make sure
3620 to catch quickly any cases where the caller uses the value
3621 and fails to set WANT_VALUE. */
3624 store_expr (exp, target, want_value)
3626 register rtx target;
3630 int dont_return_target = 0;
3632 if (TREE_CODE (exp) == COMPOUND_EXPR)
3634 /* Perform first part of compound expression, then assign from second
3636 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3638 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3640 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3642 /* For conditional expression, get safe form of the target. Then
3643 test the condition, doing the appropriate assignment on either
3644 side. This avoids the creation of unnecessary temporaries.
3645 For non-BLKmode, it is more efficient not to do this. */
3647 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3650 target = protect_from_queue (target, 1);
3652 do_pending_stack_adjust ();
3654 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3655 start_cleanup_deferral ();
3656 store_expr (TREE_OPERAND (exp, 1), target, 0);
3657 end_cleanup_deferral ();
3659 emit_jump_insn (gen_jump (lab2));
3662 start_cleanup_deferral ();
3663 store_expr (TREE_OPERAND (exp, 2), target, 0);
3664 end_cleanup_deferral ();
3669 return want_value ? target : NULL_RTX;
3671 else if (queued_subexp_p (target))
3672 /* If target contains a postincrement, let's not risk
3673 using it as the place to generate the rhs. */
3675 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3677 /* Expand EXP into a new pseudo. */
3678 temp = gen_reg_rtx (GET_MODE (target));
3679 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3682 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3684 /* If target is volatile, ANSI requires accessing the value
3685 *from* the target, if it is accessed. So make that happen.
3686 In no case return the target itself. */
3687 if (! MEM_VOLATILE_P (target) && want_value)
3688 dont_return_target = 1;
3690 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3691 && GET_MODE (target) != BLKmode)
3692 /* If target is in memory and caller wants value in a register instead,
3693 arrange that. Pass TARGET as target for expand_expr so that,
3694 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3695 We know expand_expr will not use the target in that case.
3696 Don't do this if TARGET is volatile because we are supposed
3697 to write it and then read it. */
3699 temp = expand_expr (exp, target, GET_MODE (target), 0);
3700 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3701 temp = copy_to_reg (temp);
3702 dont_return_target = 1;
3704 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3705 /* If this is an scalar in a register that is stored in a wider mode
3706 than the declared mode, compute the result into its declared mode
3707 and then convert to the wider mode. Our value is the computed
3710 /* If we don't want a value, we can do the conversion inside EXP,
3711 which will often result in some optimizations. Do the conversion
3712 in two steps: first change the signedness, if needed, then
3713 the extend. But don't do this if the type of EXP is a subtype
3714 of something else since then the conversion might involve
3715 more than just converting modes. */
3716 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3717 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3719 if (TREE_UNSIGNED (TREE_TYPE (exp))
3720 != SUBREG_PROMOTED_UNSIGNED_P (target))
3723 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3727 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3728 SUBREG_PROMOTED_UNSIGNED_P (target)),
3732 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3734 /* If TEMP is a volatile MEM and we want a result value, make
3735 the access now so it gets done only once. Likewise if
3736 it contains TARGET. */
3737 if (GET_CODE (temp) == MEM && want_value
3738 && (MEM_VOLATILE_P (temp)
3739 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3740 temp = copy_to_reg (temp);
3742 /* If TEMP is a VOIDmode constant, use convert_modes to make
3743 sure that we properly convert it. */
3744 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3745 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3746 TYPE_MODE (TREE_TYPE (exp)), temp,
3747 SUBREG_PROMOTED_UNSIGNED_P (target));
3749 convert_move (SUBREG_REG (target), temp,
3750 SUBREG_PROMOTED_UNSIGNED_P (target));
3752 /* If we promoted a constant, change the mode back down to match
3753 target. Otherwise, the caller might get confused by a result whose
3754 mode is larger than expected. */
3756 if (want_value && GET_MODE (temp) != GET_MODE (target)
3757 && GET_MODE (temp) != VOIDmode)
3759 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3760 SUBREG_PROMOTED_VAR_P (temp) = 1;
3761 SUBREG_PROMOTED_UNSIGNED_P (temp)
3762 = SUBREG_PROMOTED_UNSIGNED_P (target);
3765 return want_value ? temp : NULL_RTX;
3769 temp = expand_expr (exp, target, GET_MODE (target), 0);
3770 /* Return TARGET if it's a specified hardware register.
3771 If TARGET is a volatile mem ref, either return TARGET
3772 or return a reg copied *from* TARGET; ANSI requires this.
3774 Otherwise, if TEMP is not TARGET, return TEMP
3775 if it is constant (for efficiency),
3776 or if we really want the correct value. */
3777 if (!(target && GET_CODE (target) == REG
3778 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3779 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3780 && ! rtx_equal_p (temp, target)
3781 && (CONSTANT_P (temp) || want_value))
3782 dont_return_target = 1;
3785 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3786 the same as that of TARGET, adjust the constant. This is needed, for
3787 example, in case it is a CONST_DOUBLE and we want only a word-sized
3789 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3790 && TREE_CODE (exp) != ERROR_MARK
3791 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3792 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3793 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3795 if (current_function_check_memory_usage
3796 && GET_CODE (target) == MEM
3797 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3799 if (GET_CODE (temp) == MEM)
3800 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3801 XEXP (target, 0), Pmode,
3802 XEXP (temp, 0), Pmode,
3803 expr_size (exp), TYPE_MODE (sizetype));
3805 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3806 XEXP (target, 0), Pmode,
3807 expr_size (exp), TYPE_MODE (sizetype),
3808 GEN_INT (MEMORY_USE_WO),
3809 TYPE_MODE (integer_type_node));
3812 /* If value was not generated in the target, store it there.
3813 Convert the value to TARGET's type first if nec. */
3814 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3815 one or both of them are volatile memory refs, we have to distinguish
3817 - expand_expr has used TARGET. In this case, we must not generate
3818 another copy. This can be detected by TARGET being equal according
3820 - expand_expr has not used TARGET - that means that the source just
3821 happens to have the same RTX form. Since temp will have been created
3822 by expand_expr, it will compare unequal according to == .
3823 We must generate a copy in this case, to reach the correct number
3824 of volatile memory references. */
3826 if ((! rtx_equal_p (temp, target)
3827 || (temp != target && (side_effects_p (temp)
3828 || side_effects_p (target))))
3829 && TREE_CODE (exp) != ERROR_MARK)
3831 target = protect_from_queue (target, 1);
3832 if (GET_MODE (temp) != GET_MODE (target)
3833 && GET_MODE (temp) != VOIDmode)
3835 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3836 if (dont_return_target)
3838 /* In this case, we will return TEMP,
3839 so make sure it has the proper mode.
3840 But don't forget to store the value into TARGET. */
3841 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3842 emit_move_insn (target, temp);
3845 convert_move (target, temp, unsignedp);
3848 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3850 /* Handle copying a string constant into an array.
3851 The string constant may be shorter than the array.
3852 So copy just the string's actual length, and clear the rest. */
3856 /* Get the size of the data type of the string,
3857 which is actually the size of the target. */
3858 size = expr_size (exp);
3859 if (GET_CODE (size) == CONST_INT
3860 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3861 emit_block_move (target, temp, size,
3862 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3865 /* Compute the size of the data to copy from the string. */
3867 = size_binop (MIN_EXPR,
3868 make_tree (sizetype, size),
3869 size_int (TREE_STRING_LENGTH (exp)));
3870 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3874 /* Copy that much. */
3875 emit_block_move (target, temp, copy_size_rtx,
3876 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3878 /* Figure out how much is left in TARGET that we have to clear.
3879 Do all calculations in ptr_mode. */
3881 addr = XEXP (target, 0);
3882 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3884 if (GET_CODE (copy_size_rtx) == CONST_INT)
3886 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3887 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3891 addr = force_reg (ptr_mode, addr);
3892 addr = expand_binop (ptr_mode, add_optab, addr,
3893 copy_size_rtx, NULL_RTX, 0,
3896 size = expand_binop (ptr_mode, sub_optab, size,
3897 copy_size_rtx, NULL_RTX, 0,
3900 label = gen_label_rtx ();
3901 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3902 GET_MODE (size), 0, 0, label);
3905 if (size != const0_rtx)
3907 /* Be sure we can write on ADDR. */
3908 if (current_function_check_memory_usage)
3909 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3911 size, TYPE_MODE (sizetype),
3912 GEN_INT (MEMORY_USE_WO),
3913 TYPE_MODE (integer_type_node));
3914 #ifdef TARGET_MEM_FUNCTIONS
3915 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3917 const0_rtx, TYPE_MODE (integer_type_node),
3918 convert_to_mode (TYPE_MODE (sizetype),
3920 TREE_UNSIGNED (sizetype)),
3921 TYPE_MODE (sizetype));
3923 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3925 convert_to_mode (TYPE_MODE (integer_type_node),
3927 TREE_UNSIGNED (integer_type_node)),
3928 TYPE_MODE (integer_type_node));
3936 /* Handle calls that return values in multiple non-contiguous locations.
3937 The Irix 6 ABI has examples of this. */
3938 else if (GET_CODE (target) == PARALLEL)
3939 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3940 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3941 else if (GET_MODE (temp) == BLKmode)
3942 emit_block_move (target, temp, expr_size (exp),
3943 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3945 emit_move_insn (target, temp);
3948 /* If we don't want a value, return NULL_RTX. */
3952 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3953 ??? The latter test doesn't seem to make sense. */
3954 else if (dont_return_target && GET_CODE (temp) != MEM)
3957 /* Return TARGET itself if it is a hard register. */
3958 else if (want_value && GET_MODE (target) != BLKmode
3959 && ! (GET_CODE (target) == REG
3960 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3961 return copy_to_reg (target);
3967 /* Return 1 if EXP just contains zeros. */
3975 switch (TREE_CODE (exp))
3979 case NON_LVALUE_EXPR:
3980 return is_zeros_p (TREE_OPERAND (exp, 0));
3983 return integer_zerop (exp);
3987 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3990 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3993 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3994 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3995 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3996 if (! is_zeros_p (TREE_VALUE (elt)))
4006 /* Return 1 if EXP contains mostly (3/4) zeros. */
4009 mostly_zeros_p (exp)
4012 if (TREE_CODE (exp) == CONSTRUCTOR)
4014 int elts = 0, zeros = 0;
4015 tree elt = CONSTRUCTOR_ELTS (exp);
4016 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4018 /* If there are no ranges of true bits, it is all zero. */
4019 return elt == NULL_TREE;
4021 for (; elt; elt = TREE_CHAIN (elt))
4023 /* We do not handle the case where the index is a RANGE_EXPR,
4024 so the statistic will be somewhat inaccurate.
4025 We do make a more accurate count in store_constructor itself,
4026 so since this function is only used for nested array elements,
4027 this should be close enough. */
4028 if (mostly_zeros_p (TREE_VALUE (elt)))
4033 return 4 * zeros >= 3 * elts;
4036 return is_zeros_p (exp);
4039 /* Helper function for store_constructor.
4040 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4041 TYPE is the type of the CONSTRUCTOR, not the element type.
4042 ALIGN and CLEARED are as for store_constructor.
4044 This provides a recursive shortcut back to store_constructor when it isn't
4045 necessary to go through store_field. This is so that we can pass through
4046 the cleared field to let store_constructor know that we may not have to
4047 clear a substructure if the outer structure has already been cleared. */
4050 store_constructor_field (target, bitsize, bitpos,
4051 mode, exp, type, align, cleared)
4053 int bitsize, bitpos;
4054 enum machine_mode mode;
4059 if (TREE_CODE (exp) == CONSTRUCTOR
4060 && bitpos % BITS_PER_UNIT == 0
4061 /* If we have a non-zero bitpos for a register target, then we just
4062 let store_field do the bitfield handling. This is unlikely to
4063 generate unnecessary clear instructions anyways. */
4064 && (bitpos == 0 || GET_CODE (target) == MEM))
4068 = change_address (target,
4069 GET_MODE (target) == BLKmode
4071 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4072 ? BLKmode : VOIDmode,
4073 plus_constant (XEXP (target, 0),
4074 bitpos / BITS_PER_UNIT));
4075 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4078 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4079 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4080 int_size_in_bytes (type), 0);
4083 /* Store the value of constructor EXP into the rtx TARGET.
4084 TARGET is either a REG or a MEM.
4085 ALIGN is the maximum known alignment for TARGET, in bits.
4086 CLEARED is true if TARGET is known to have been zero'd.
4087 SIZE is the number of bytes of TARGET we are allowed to modify: this
4088 may not be the same as the size of EXP if we are assigning to a field
4089 which has been packed to exclude padding bits. */
4092 store_constructor (exp, target, align, cleared, size)
4099 tree type = TREE_TYPE (exp);
4100 #ifdef WORD_REGISTER_OPERATIONS
4101 rtx exp_size = expr_size (exp);
4104 /* We know our target cannot conflict, since safe_from_p has been called. */
4106 /* Don't try copying piece by piece into a hard register
4107 since that is vulnerable to being clobbered by EXP.
4108 Instead, construct in a pseudo register and then copy it all. */
4109 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4111 rtx temp = gen_reg_rtx (GET_MODE (target));
4112 store_constructor (exp, temp, align, cleared, size);
4113 emit_move_insn (target, temp);
4118 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4119 || TREE_CODE (type) == QUAL_UNION_TYPE)
4123 /* Inform later passes that the whole union value is dead. */
4124 if ((TREE_CODE (type) == UNION_TYPE
4125 || TREE_CODE (type) == QUAL_UNION_TYPE)
4128 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4130 /* If the constructor is empty, clear the union. */
4131 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4132 clear_storage (target, expr_size (exp),
4133 TYPE_ALIGN (type) / BITS_PER_UNIT);
4136 /* If we are building a static constructor into a register,
4137 set the initial value as zero so we can fold the value into
4138 a constant. But if more than one register is involved,
4139 this probably loses. */
4140 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4141 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4144 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4149 /* If the constructor has fewer fields than the structure
4150 or if we are initializing the structure to mostly zeros,
4151 clear the whole structure first. */
4153 && ((list_length (CONSTRUCTOR_ELTS (exp))
4154 != list_length (TYPE_FIELDS (type)))
4155 || mostly_zeros_p (exp)))
4158 clear_storage (target, GEN_INT (size),
4159 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4164 /* Inform later passes that the old value is dead. */
4165 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4167 /* Store each element of the constructor into
4168 the corresponding field of TARGET. */
4170 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4172 register tree field = TREE_PURPOSE (elt);
4173 #ifdef WORD_REGISTER_OPERATIONS
4174 tree value = TREE_VALUE (elt);
4176 register enum machine_mode mode;
4180 tree pos, constant = 0, offset = 0;
4181 rtx to_rtx = target;
4183 /* Just ignore missing fields.
4184 We cleared the whole structure, above,
4185 if any fields are missing. */
4189 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4192 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4193 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4197 unsignedp = TREE_UNSIGNED (field);
4198 mode = DECL_MODE (field);
4199 if (DECL_BIT_FIELD (field))
4202 pos = DECL_FIELD_BITPOS (field);
4203 if (TREE_CODE (pos) == INTEGER_CST)
4205 else if (TREE_CODE (pos) == PLUS_EXPR
4206 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4207 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4212 bitpos = TREE_INT_CST_LOW (constant);
4218 if (contains_placeholder_p (offset))
4219 offset = build (WITH_RECORD_EXPR, bitsizetype,
4220 offset, make_tree (TREE_TYPE (exp), target));
4222 offset = size_binop (EXACT_DIV_EXPR, offset,
4223 bitsize_int (BITS_PER_UNIT));
4224 offset = convert (sizetype, offset);
4226 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4227 if (GET_CODE (to_rtx) != MEM)
4230 if (GET_MODE (offset_rtx) != ptr_mode)
4232 #ifdef POINTERS_EXTEND_UNSIGNED
4233 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4235 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4240 = change_address (to_rtx, VOIDmode,
4241 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4242 force_reg (ptr_mode,
4246 if (TREE_READONLY (field))
4248 if (GET_CODE (to_rtx) == MEM)
4249 to_rtx = copy_rtx (to_rtx);
4251 RTX_UNCHANGING_P (to_rtx) = 1;
4254 #ifdef WORD_REGISTER_OPERATIONS
4255 /* If this initializes a field that is smaller than a word, at the
4256 start of a word, try to widen it to a full word.
4257 This special case allows us to output C++ member function
4258 initializations in a form that the optimizers can understand. */
4260 && GET_CODE (target) == REG
4261 && bitsize < BITS_PER_WORD
4262 && bitpos % BITS_PER_WORD == 0
4263 && GET_MODE_CLASS (mode) == MODE_INT
4264 && TREE_CODE (value) == INTEGER_CST
4265 && GET_CODE (exp_size) == CONST_INT
4266 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4268 tree type = TREE_TYPE (value);
4269 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4271 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4272 value = convert (type, value);
4274 if (BYTES_BIG_ENDIAN)
4276 = fold (build (LSHIFT_EXPR, type, value,
4277 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4278 bitsize = BITS_PER_WORD;
4282 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4283 TREE_VALUE (elt), type,
4285 DECL_ALIGN (TREE_PURPOSE (elt))),
4289 else if (TREE_CODE (type) == ARRAY_TYPE)
4294 tree domain = TYPE_DOMAIN (type);
4295 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4296 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4297 tree elttype = TREE_TYPE (type);
4299 /* If the constructor has fewer elements than the array,
4300 clear the whole array first. Similarly if this is
4301 static constructor of a non-BLKmode object. */
4302 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4306 HOST_WIDE_INT count = 0, zero_count = 0;
4308 /* This loop is a more accurate version of the loop in
4309 mostly_zeros_p (it handles RANGE_EXPR in an index).
4310 It is also needed to check for missing elements. */
4311 for (elt = CONSTRUCTOR_ELTS (exp);
4313 elt = TREE_CHAIN (elt))
4315 tree index = TREE_PURPOSE (elt);
4316 HOST_WIDE_INT this_node_count;
4317 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4319 tree lo_index = TREE_OPERAND (index, 0);
4320 tree hi_index = TREE_OPERAND (index, 1);
4322 if (TREE_CODE (lo_index) != INTEGER_CST
4323 || TREE_CODE (hi_index) != INTEGER_CST)
4328 this_node_count = (TREE_INT_CST_LOW (hi_index)
4329 - TREE_INT_CST_LOW (lo_index) + 1);
4332 this_node_count = 1;
4333 count += this_node_count;
4334 if (mostly_zeros_p (TREE_VALUE (elt)))
4335 zero_count += this_node_count;
4337 /* Clear the entire array first if there are any missing elements,
4338 or if the incidence of zero elements is >= 75%. */
4339 if (count < maxelt - minelt + 1
4340 || 4 * zero_count >= 3 * count)
4343 if (need_to_clear && size > 0)
4346 clear_storage (target, GEN_INT (size),
4347 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4351 /* Inform later passes that the old value is dead. */
4352 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4354 /* Store each element of the constructor into
4355 the corresponding element of TARGET, determined
4356 by counting the elements. */
4357 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4359 elt = TREE_CHAIN (elt), i++)
4361 register enum machine_mode mode;
4365 tree value = TREE_VALUE (elt);
4366 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4367 tree index = TREE_PURPOSE (elt);
4368 rtx xtarget = target;
4370 if (cleared && is_zeros_p (value))
4373 unsignedp = TREE_UNSIGNED (elttype);
4374 mode = TYPE_MODE (elttype);
4375 if (mode == BLKmode)
4377 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4378 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4379 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4384 bitsize = GET_MODE_BITSIZE (mode);
4386 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4388 tree lo_index = TREE_OPERAND (index, 0);
4389 tree hi_index = TREE_OPERAND (index, 1);
4390 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4391 struct nesting *loop;
4392 HOST_WIDE_INT lo, hi, count;
4395 /* If the range is constant and "small", unroll the loop. */
4396 if (TREE_CODE (lo_index) == INTEGER_CST
4397 && TREE_CODE (hi_index) == INTEGER_CST
4398 && (lo = TREE_INT_CST_LOW (lo_index),
4399 hi = TREE_INT_CST_LOW (hi_index),
4400 count = hi - lo + 1,
4401 (GET_CODE (target) != MEM
4403 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4404 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4407 lo -= minelt; hi -= minelt;
4408 for (; lo <= hi; lo++)
4410 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4411 store_constructor_field (target, bitsize, bitpos, mode,
4412 value, type, align, cleared);
4417 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4418 loop_top = gen_label_rtx ();
4419 loop_end = gen_label_rtx ();
4421 unsignedp = TREE_UNSIGNED (domain);
4423 index = build_decl (VAR_DECL, NULL_TREE, domain);
4425 DECL_RTL (index) = index_r
4426 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4429 if (TREE_CODE (value) == SAVE_EXPR
4430 && SAVE_EXPR_RTL (value) == 0)
4432 /* Make sure value gets expanded once before the
4434 expand_expr (value, const0_rtx, VOIDmode, 0);
4437 store_expr (lo_index, index_r, 0);
4438 loop = expand_start_loop (0);
4440 /* Assign value to element index. */
4442 = convert (ssizetype,
4443 fold (build (MINUS_EXPR, TREE_TYPE (index),
4444 index, TYPE_MIN_VALUE (domain))));
4445 position = size_binop (MULT_EXPR, position,
4447 TYPE_SIZE_UNIT (elttype)));
4449 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4450 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4451 xtarget = change_address (target, mode, addr);
4452 if (TREE_CODE (value) == CONSTRUCTOR)
4453 store_constructor (value, xtarget, align, cleared,
4454 bitsize / BITS_PER_UNIT);
4456 store_expr (value, xtarget, 0);
4458 expand_exit_loop_if_false (loop,
4459 build (LT_EXPR, integer_type_node,
4462 expand_increment (build (PREINCREMENT_EXPR,
4464 index, integer_one_node), 0, 0);
4466 emit_label (loop_end);
4469 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4470 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4476 index = ssize_int (1);
4479 index = convert (ssizetype,
4480 fold (build (MINUS_EXPR, index,
4481 TYPE_MIN_VALUE (domain))));
4482 position = size_binop (MULT_EXPR, index,
4484 TYPE_SIZE_UNIT (elttype)));
4485 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4486 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4487 xtarget = change_address (target, mode, addr);
4488 store_expr (value, xtarget, 0);
4493 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4494 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4496 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4497 store_constructor_field (target, bitsize, bitpos, mode, value,
4498 type, align, cleared);
4502 /* set constructor assignments */
4503 else if (TREE_CODE (type) == SET_TYPE)
4505 tree elt = CONSTRUCTOR_ELTS (exp);
4506 int nbytes = int_size_in_bytes (type), nbits;
4507 tree domain = TYPE_DOMAIN (type);
4508 tree domain_min, domain_max, bitlength;
4510 /* The default implementation strategy is to extract the constant
4511 parts of the constructor, use that to initialize the target,
4512 and then "or" in whatever non-constant ranges we need in addition.
4514 If a large set is all zero or all ones, it is
4515 probably better to set it using memset (if available) or bzero.
4516 Also, if a large set has just a single range, it may also be
4517 better to first clear all the first clear the set (using
4518 bzero/memset), and set the bits we want. */
4520 /* Check for all zeros. */
4521 if (elt == NULL_TREE && size > 0)
4524 clear_storage (target, GEN_INT (size),
4525 TYPE_ALIGN (type) / BITS_PER_UNIT);
4529 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4530 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4531 bitlength = size_binop (PLUS_EXPR,
4532 size_diffop (domain_max, domain_min),
4535 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4537 nbits = TREE_INT_CST_LOW (bitlength);
4539 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4540 are "complicated" (more than one range), initialize (the
4541 constant parts) by copying from a constant. */
4542 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4543 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4545 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4546 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4547 char *bit_buffer = (char *) alloca (nbits);
4548 HOST_WIDE_INT word = 0;
4551 int offset = 0; /* In bytes from beginning of set. */
4552 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4555 if (bit_buffer[ibit])
4557 if (BYTES_BIG_ENDIAN)
4558 word |= (1 << (set_word_size - 1 - bit_pos));
4560 word |= 1 << bit_pos;
4563 if (bit_pos >= set_word_size || ibit == nbits)
4565 if (word != 0 || ! cleared)
4567 rtx datum = GEN_INT (word);
4569 /* The assumption here is that it is safe to use
4570 XEXP if the set is multi-word, but not if
4571 it's single-word. */
4572 if (GET_CODE (target) == MEM)
4574 to_rtx = plus_constant (XEXP (target, 0), offset);
4575 to_rtx = change_address (target, mode, to_rtx);
4577 else if (offset == 0)
4581 emit_move_insn (to_rtx, datum);
4587 offset += set_word_size / BITS_PER_UNIT;
4593 /* Don't bother clearing storage if the set is all ones. */
4594 if (TREE_CHAIN (elt) != NULL_TREE
4595 || (TREE_PURPOSE (elt) == NULL_TREE
4597 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4598 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4599 || ((HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_VALUE (elt))
4600 - (HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4602 clear_storage (target, expr_size (exp),
4603 TYPE_ALIGN (type) / BITS_PER_UNIT);
4606 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4608 /* start of range of element or NULL */
4609 tree startbit = TREE_PURPOSE (elt);
4610 /* end of range of element, or element value */
4611 tree endbit = TREE_VALUE (elt);
4612 #ifdef TARGET_MEM_FUNCTIONS
4613 HOST_WIDE_INT startb, endb;
4615 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4617 bitlength_rtx = expand_expr (bitlength,
4618 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4620 /* handle non-range tuple element like [ expr ] */
4621 if (startbit == NULL_TREE)
4623 startbit = save_expr (endbit);
4626 startbit = convert (sizetype, startbit);
4627 endbit = convert (sizetype, endbit);
4628 if (! integer_zerop (domain_min))
4630 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4631 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4633 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4634 EXPAND_CONST_ADDRESS);
4635 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4636 EXPAND_CONST_ADDRESS);
4640 targetx = assign_stack_temp (GET_MODE (target),
4641 GET_MODE_SIZE (GET_MODE (target)),
4643 emit_move_insn (targetx, target);
4645 else if (GET_CODE (target) == MEM)
4650 #ifdef TARGET_MEM_FUNCTIONS
4651 /* Optimization: If startbit and endbit are
4652 constants divisible by BITS_PER_UNIT,
4653 call memset instead. */
4654 if (TREE_CODE (startbit) == INTEGER_CST
4655 && TREE_CODE (endbit) == INTEGER_CST
4656 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4657 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4659 emit_library_call (memset_libfunc, 0,
4661 plus_constant (XEXP (targetx, 0),
4662 startb / BITS_PER_UNIT),
4664 constm1_rtx, TYPE_MODE (integer_type_node),
4665 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4666 TYPE_MODE (sizetype));
4671 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4672 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4673 bitlength_rtx, TYPE_MODE (sizetype),
4674 startbit_rtx, TYPE_MODE (sizetype),
4675 endbit_rtx, TYPE_MODE (sizetype));
4678 emit_move_insn (target, targetx);
4686 /* Store the value of EXP (an expression tree)
4687 into a subfield of TARGET which has mode MODE and occupies
4688 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4689 If MODE is VOIDmode, it means that we are storing into a bit-field.
4691 If VALUE_MODE is VOIDmode, return nothing in particular.
4692 UNSIGNEDP is not used in this case.
4694 Otherwise, return an rtx for the value stored. This rtx
4695 has mode VALUE_MODE if that is convenient to do.
4696 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4698 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4699 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4701 ALIAS_SET is the alias set for the destination. This value will
4702 (in general) be different from that for TARGET, since TARGET is a
4703 reference to the containing structure. */
4706 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4707 unsignedp, align, total_size, alias_set)
4709 int bitsize, bitpos;
4710 enum machine_mode mode;
4712 enum machine_mode value_mode;
4718 HOST_WIDE_INT width_mask = 0;
4720 if (TREE_CODE (exp) == ERROR_MARK)
4723 if (bitsize < HOST_BITS_PER_WIDE_INT)
4724 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4726 /* If we are storing into an unaligned field of an aligned union that is
4727 in a register, we may have the mode of TARGET being an integer mode but
4728 MODE == BLKmode. In that case, get an aligned object whose size and
4729 alignment are the same as TARGET and store TARGET into it (we can avoid
4730 the store if the field being stored is the entire width of TARGET). Then
4731 call ourselves recursively to store the field into a BLKmode version of
4732 that object. Finally, load from the object into TARGET. This is not
4733 very efficient in general, but should only be slightly more expensive
4734 than the otherwise-required unaligned accesses. Perhaps this can be
4735 cleaned up later. */
4738 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4740 rtx object = assign_stack_temp (GET_MODE (target),
4741 GET_MODE_SIZE (GET_MODE (target)), 0);
4742 rtx blk_object = copy_rtx (object);
4744 MEM_SET_IN_STRUCT_P (object, 1);
4745 MEM_SET_IN_STRUCT_P (blk_object, 1);
4746 PUT_MODE (blk_object, BLKmode);
4748 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4749 emit_move_insn (object, target);
4751 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4752 align, total_size, alias_set);
4754 /* Even though we aren't returning target, we need to
4755 give it the updated value. */
4756 emit_move_insn (target, object);
4761 /* If the structure is in a register or if the component
4762 is a bit field, we cannot use addressing to access it.
4763 Use bit-field techniques or SUBREG to store in it. */
4765 if (mode == VOIDmode
4766 || (mode != BLKmode && ! direct_store[(int) mode]
4767 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4768 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4769 || GET_CODE (target) == REG
4770 || GET_CODE (target) == SUBREG
4771 /* If the field isn't aligned enough to store as an ordinary memref,
4772 store it as a bit field. */
4773 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4774 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4775 || bitpos % GET_MODE_ALIGNMENT (mode)))
4776 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4777 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4778 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4779 /* If the RHS and field are a constant size and the size of the
4780 RHS isn't the same size as the bitfield, we must use bitfield
4783 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4784 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4786 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4788 /* If BITSIZE is narrower than the size of the type of EXP
4789 we will be narrowing TEMP. Normally, what's wanted are the
4790 low-order bits. However, if EXP's type is a record and this is
4791 big-endian machine, we want the upper BITSIZE bits. */
4792 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4793 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4794 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4795 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4796 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4800 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4802 if (mode != VOIDmode && mode != BLKmode
4803 && mode != TYPE_MODE (TREE_TYPE (exp)))
4804 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4806 /* If the modes of TARGET and TEMP are both BLKmode, both
4807 must be in memory and BITPOS must be aligned on a byte
4808 boundary. If so, we simply do a block copy. */
4809 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4811 unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
4813 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4814 || bitpos % BITS_PER_UNIT != 0)
4817 target = change_address (target, VOIDmode,
4818 plus_constant (XEXP (target, 0),
4819 bitpos / BITS_PER_UNIT));
4821 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4822 align = MIN (exp_align, align);
4824 /* Find an alignment that is consistent with the bit position. */
4825 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4828 emit_block_move (target, temp,
4829 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4833 return value_mode == VOIDmode ? const0_rtx : target;
4836 /* Store the value in the bitfield. */
4837 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4838 if (value_mode != VOIDmode)
4840 /* The caller wants an rtx for the value. */
4841 /* If possible, avoid refetching from the bitfield itself. */
4843 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4846 enum machine_mode tmode;
4849 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4850 tmode = GET_MODE (temp);
4851 if (tmode == VOIDmode)
4853 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4854 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4855 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4857 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4858 NULL_RTX, value_mode, 0, align,
4865 rtx addr = XEXP (target, 0);
4868 /* If a value is wanted, it must be the lhs;
4869 so make the address stable for multiple use. */
4871 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4872 && ! CONSTANT_ADDRESS_P (addr)
4873 /* A frame-pointer reference is already stable. */
4874 && ! (GET_CODE (addr) == PLUS
4875 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4876 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4877 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4878 addr = copy_to_reg (addr);
4880 /* Now build a reference to just the desired component. */
4882 to_rtx = copy_rtx (change_address (target, mode,
4883 plus_constant (addr,
4885 / BITS_PER_UNIT))));
4886 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4887 MEM_ALIAS_SET (to_rtx) = alias_set;
4889 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4893 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4894 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4895 ARRAY_REFs and find the ultimate containing object, which we return.
4897 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4898 bit position, and *PUNSIGNEDP to the signedness of the field.
4899 If the position of the field is variable, we store a tree
4900 giving the variable offset (in units) in *POFFSET.
4901 This offset is in addition to the bit position.
4902 If the position is not variable, we store 0 in *POFFSET.
4903 We set *PALIGNMENT to the alignment in bytes of the address that will be
4904 computed. This is the alignment of the thing we return if *POFFSET
4905 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4907 If any of the extraction expressions is volatile,
4908 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4910 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4911 is a mode that can be used to access the field. In that case, *PBITSIZE
4914 If the field describes a variable-sized object, *PMODE is set to
4915 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4916 this case, but the address of the object can be found. */
4919 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4920 punsignedp, pvolatilep, palignment)
4925 enum machine_mode *pmode;
4928 unsigned int *palignment;
4930 tree orig_exp = exp;
4932 enum machine_mode mode = VOIDmode;
4933 tree offset = size_zero_node;
4934 unsigned int alignment = BIGGEST_ALIGNMENT;
4936 if (TREE_CODE (exp) == COMPONENT_REF)
4938 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4939 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4940 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4941 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4943 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4945 size_tree = TREE_OPERAND (exp, 1);
4946 *punsignedp = TREE_UNSIGNED (exp);
4950 mode = TYPE_MODE (TREE_TYPE (exp));
4951 if (mode == BLKmode)
4952 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4954 *pbitsize = GET_MODE_BITSIZE (mode);
4955 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4960 if (TREE_CODE (size_tree) != INTEGER_CST)
4961 mode = BLKmode, *pbitsize = -1;
4963 *pbitsize = TREE_INT_CST_LOW (size_tree);
4966 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4967 and find the ultimate containing object. */
4973 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4975 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4976 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4977 : TREE_OPERAND (exp, 2));
4978 tree constant = bitsize_int (0), var = pos;
4980 /* If this field hasn't been filled in yet, don't go
4981 past it. This should only happen when folding expressions
4982 made during type construction. */
4986 /* Assume here that the offset is a multiple of a unit.
4987 If not, there should be an explicitly added constant. */
4988 if (TREE_CODE (pos) == PLUS_EXPR
4989 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4990 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4991 else if (TREE_CODE (pos) == INTEGER_CST)
4992 constant = pos, var = bitsize_int (0);
4994 *pbitpos += TREE_INT_CST_LOW (constant);
4996 = size_binop (PLUS_EXPR, offset,
4998 size_binop (EXACT_DIV_EXPR, var,
4999 bitsize_int (BITS_PER_UNIT))));
5002 else if (TREE_CODE (exp) == ARRAY_REF)
5004 /* This code is based on the code in case ARRAY_REF in expand_expr
5005 below. We assume here that the size of an array element is
5006 always an integral multiple of BITS_PER_UNIT. */
5008 tree index = TREE_OPERAND (exp, 1);
5009 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5011 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5012 tree index_type = TREE_TYPE (index);
5015 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5017 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5019 index_type = TREE_TYPE (index);
5022 /* Optimize the special-case of a zero lower bound.
5024 We convert the low_bound to sizetype to avoid some problems
5025 with constant folding. (E.g. suppose the lower bound is 1,
5026 and its mode is QI. Without the conversion, (ARRAY
5027 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5028 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5030 But sizetype isn't quite right either (especially if
5031 the lowbound is negative). FIXME */
5033 if (! integer_zerop (low_bound))
5034 index = fold (build (MINUS_EXPR, index_type, index,
5035 convert (sizetype, low_bound)));
5037 if (TREE_CODE (index) == INTEGER_CST)
5039 index = convert (sbitsizetype, index);
5040 index_type = TREE_TYPE (index);
5043 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5044 convert (sbitsizetype,
5045 TYPE_SIZE (TREE_TYPE (exp)))));
5047 if (TREE_CODE (xindex) == INTEGER_CST
5048 && TREE_INT_CST_HIGH (xindex) == 0)
5049 *pbitpos += TREE_INT_CST_LOW (xindex);
5052 /* Either the bit offset calculated above is not constant, or
5053 it overflowed. In either case, redo the multiplication
5054 against the size in units. This is especially important
5055 in the non-constant case to avoid a division at runtime. */
5057 = fold (build (MULT_EXPR, ssizetype, index,
5059 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
5061 if (contains_placeholder_p (xindex))
5062 xindex = build (WITH_RECORD_EXPR, ssizetype, xindex, exp);
5065 = size_binop (PLUS_EXPR, offset, convert (sizetype, xindex));
5068 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5069 && ! ((TREE_CODE (exp) == NOP_EXPR
5070 || TREE_CODE (exp) == CONVERT_EXPR)
5071 && (TYPE_MODE (TREE_TYPE (exp))
5072 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5075 /* If any reference in the chain is volatile, the effect is volatile. */
5076 if (TREE_THIS_VOLATILE (exp))
5079 /* If the offset is non-constant already, then we can't assume any
5080 alignment more than the alignment here. */
5081 if (! integer_zerop (offset))
5082 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5084 exp = TREE_OPERAND (exp, 0);
5087 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5088 alignment = MIN (alignment, DECL_ALIGN (exp));
5089 else if (TREE_TYPE (exp) != 0)
5090 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5092 if (integer_zerop (offset))
5095 if (offset != 0 && contains_placeholder_p (offset))
5096 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5100 *palignment = alignment / BITS_PER_UNIT;
5104 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5105 static enum memory_use_mode
5106 get_memory_usage_from_modifier (modifier)
5107 enum expand_modifier modifier;
5113 return MEMORY_USE_RO;
5115 case EXPAND_MEMORY_USE_WO:
5116 return MEMORY_USE_WO;
5118 case EXPAND_MEMORY_USE_RW:
5119 return MEMORY_USE_RW;
5121 case EXPAND_MEMORY_USE_DONT:
5122 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5123 MEMORY_USE_DONT, because they are modifiers to a call of
5124 expand_expr in the ADDR_EXPR case of expand_expr. */
5125 case EXPAND_CONST_ADDRESS:
5126 case EXPAND_INITIALIZER:
5127 return MEMORY_USE_DONT;
5128 case EXPAND_MEMORY_USE_BAD:
5134 /* Given an rtx VALUE that may contain additions and multiplications,
5135 return an equivalent value that just refers to a register or memory.
5136 This is done by generating instructions to perform the arithmetic
5137 and returning a pseudo-register containing the value.
5139 The returned value may be a REG, SUBREG, MEM or constant. */
5142 force_operand (value, target)
5145 register optab binoptab = 0;
5146 /* Use a temporary to force order of execution of calls to
5150 /* Use subtarget as the target for operand 0 of a binary operation. */
5151 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5153 /* Check for a PIC address load. */
5155 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5156 && XEXP (value, 0) == pic_offset_table_rtx
5157 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5158 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5159 || GET_CODE (XEXP (value, 1)) == CONST))
5162 subtarget = gen_reg_rtx (GET_MODE (value));
5163 emit_move_insn (subtarget, value);
5167 if (GET_CODE (value) == PLUS)
5168 binoptab = add_optab;
5169 else if (GET_CODE (value) == MINUS)
5170 binoptab = sub_optab;
5171 else if (GET_CODE (value) == MULT)
5173 op2 = XEXP (value, 1);
5174 if (!CONSTANT_P (op2)
5175 && !(GET_CODE (op2) == REG && op2 != subtarget))
5177 tmp = force_operand (XEXP (value, 0), subtarget);
5178 return expand_mult (GET_MODE (value), tmp,
5179 force_operand (op2, NULL_RTX),
5185 op2 = XEXP (value, 1);
5186 if (!CONSTANT_P (op2)
5187 && !(GET_CODE (op2) == REG && op2 != subtarget))
5189 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5191 binoptab = add_optab;
5192 op2 = negate_rtx (GET_MODE (value), op2);
5195 /* Check for an addition with OP2 a constant integer and our first
5196 operand a PLUS of a virtual register and something else. In that
5197 case, we want to emit the sum of the virtual register and the
5198 constant first and then add the other value. This allows virtual
5199 register instantiation to simply modify the constant rather than
5200 creating another one around this addition. */
5201 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5202 && GET_CODE (XEXP (value, 0)) == PLUS
5203 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5204 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5205 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5207 rtx temp = expand_binop (GET_MODE (value), binoptab,
5208 XEXP (XEXP (value, 0), 0), op2,
5209 subtarget, 0, OPTAB_LIB_WIDEN);
5210 return expand_binop (GET_MODE (value), binoptab, temp,
5211 force_operand (XEXP (XEXP (value, 0), 1), 0),
5212 target, 0, OPTAB_LIB_WIDEN);
5215 tmp = force_operand (XEXP (value, 0), subtarget);
5216 return expand_binop (GET_MODE (value), binoptab, tmp,
5217 force_operand (op2, NULL_RTX),
5218 target, 0, OPTAB_LIB_WIDEN);
5219 /* We give UNSIGNEDP = 0 to expand_binop
5220 because the only operations we are expanding here are signed ones. */
5225 /* Subroutine of expand_expr:
5226 save the non-copied parts (LIST) of an expr (LHS), and return a list
5227 which can restore these values to their previous values,
5228 should something modify their storage. */
5231 save_noncopied_parts (lhs, list)
5238 for (tail = list; tail; tail = TREE_CHAIN (tail))
5239 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5240 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5243 tree part = TREE_VALUE (tail);
5244 tree part_type = TREE_TYPE (part);
5245 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5246 rtx target = assign_temp (part_type, 0, 1, 1);
5247 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5248 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5249 parts = tree_cons (to_be_saved,
5250 build (RTL_EXPR, part_type, NULL_TREE,
5253 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5258 /* Subroutine of expand_expr:
5259 record the non-copied parts (LIST) of an expr (LHS), and return a list
5260 which specifies the initial values of these parts. */
5263 init_noncopied_parts (lhs, list)
5270 for (tail = list; tail; tail = TREE_CHAIN (tail))
5271 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5272 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5273 else if (TREE_PURPOSE (tail))
5275 tree part = TREE_VALUE (tail);
5276 tree part_type = TREE_TYPE (part);
5277 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5278 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5283 /* Subroutine of expand_expr: return nonzero iff there is no way that
5284 EXP can reference X, which is being modified. TOP_P is nonzero if this
5285 call is going to be used to determine whether we need a temporary
5286 for EXP, as opposed to a recursive call to this function.
5288 It is always safe for this routine to return zero since it merely
5289 searches for optimization opportunities. */
5292 safe_from_p (x, exp, top_p)
5299 static int save_expr_count;
5300 static int save_expr_size = 0;
5301 static tree *save_expr_rewritten;
5302 static tree save_expr_trees[256];
5305 /* If EXP has varying size, we MUST use a target since we currently
5306 have no way of allocating temporaries of variable size
5307 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5308 So we assume here that something at a higher level has prevented a
5309 clash. This is somewhat bogus, but the best we can do. Only
5310 do this when X is BLKmode and when we are at the top level. */
5311 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5312 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5313 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5314 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5315 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5317 && GET_MODE (x) == BLKmode))
5320 if (top_p && save_expr_size == 0)
5324 save_expr_count = 0;
5325 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5326 save_expr_rewritten = &save_expr_trees[0];
5328 rtn = safe_from_p (x, exp, 1);
5330 for (i = 0; i < save_expr_count; ++i)
5332 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5334 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5342 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5343 find the underlying pseudo. */
5344 if (GET_CODE (x) == SUBREG)
5347 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5351 /* If X is a location in the outgoing argument area, it is always safe. */
5352 if (GET_CODE (x) == MEM
5353 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5354 || (GET_CODE (XEXP (x, 0)) == PLUS
5355 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5358 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5361 exp_rtl = DECL_RTL (exp);
5368 if (TREE_CODE (exp) == TREE_LIST)
5369 return ((TREE_VALUE (exp) == 0
5370 || safe_from_p (x, TREE_VALUE (exp), 0))
5371 && (TREE_CHAIN (exp) == 0
5372 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5373 else if (TREE_CODE (exp) == ERROR_MARK)
5374 return 1; /* An already-visited SAVE_EXPR? */
5379 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5383 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5384 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5388 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5389 the expression. If it is set, we conflict iff we are that rtx or
5390 both are in memory. Otherwise, we check all operands of the
5391 expression recursively. */
5393 switch (TREE_CODE (exp))
5396 return (staticp (TREE_OPERAND (exp, 0))
5397 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5398 || TREE_STATIC (exp));
5401 if (GET_CODE (x) == MEM)
5406 exp_rtl = CALL_EXPR_RTL (exp);
5409 /* Assume that the call will clobber all hard registers and
5411 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5412 || GET_CODE (x) == MEM)
5419 /* If a sequence exists, we would have to scan every instruction
5420 in the sequence to see if it was safe. This is probably not
5422 if (RTL_EXPR_SEQUENCE (exp))
5425 exp_rtl = RTL_EXPR_RTL (exp);
5428 case WITH_CLEANUP_EXPR:
5429 exp_rtl = RTL_EXPR_RTL (exp);
5432 case CLEANUP_POINT_EXPR:
5433 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5436 exp_rtl = SAVE_EXPR_RTL (exp);
5440 /* This SAVE_EXPR might appear many times in the top-level
5441 safe_from_p() expression, and if it has a complex
5442 subexpression, examining it multiple times could result
5443 in a combinatorial explosion. E.g. on an Alpha
5444 running at least 200MHz, a Fortran test case compiled with
5445 optimization took about 28 minutes to compile -- even though
5446 it was only a few lines long, and the complicated line causing
5447 so much time to be spent in the earlier version of safe_from_p()
5448 had only 293 or so unique nodes.
5450 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5451 where it is so we can turn it back in the top-level safe_from_p()
5454 /* For now, don't bother re-sizing the array. */
5455 if (save_expr_count >= save_expr_size)
5457 save_expr_rewritten[save_expr_count++] = exp;
5459 nops = tree_code_length[(int) SAVE_EXPR];
5460 for (i = 0; i < nops; i++)
5462 tree operand = TREE_OPERAND (exp, i);
5463 if (operand == NULL_TREE)
5465 TREE_SET_CODE (exp, ERROR_MARK);
5466 if (!safe_from_p (x, operand, 0))
5468 TREE_SET_CODE (exp, SAVE_EXPR);
5470 TREE_SET_CODE (exp, ERROR_MARK);
5474 /* The only operand we look at is operand 1. The rest aren't
5475 part of the expression. */
5476 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5478 case METHOD_CALL_EXPR:
5479 /* This takes a rtx argument, but shouldn't appear here. */
5486 /* If we have an rtx, we do not need to scan our operands. */
5490 nops = tree_code_length[(int) TREE_CODE (exp)];
5491 for (i = 0; i < nops; i++)
5492 if (TREE_OPERAND (exp, i) != 0
5493 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5497 /* If we have an rtl, find any enclosed object. Then see if we conflict
5501 if (GET_CODE (exp_rtl) == SUBREG)
5503 exp_rtl = SUBREG_REG (exp_rtl);
5504 if (GET_CODE (exp_rtl) == REG
5505 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5509 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5510 are memory and EXP is not readonly. */
5511 return ! (rtx_equal_p (x, exp_rtl)
5512 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5513 && ! TREE_READONLY (exp)));
5516 /* If we reach here, it is safe. */
5520 /* Subroutine of expand_expr: return nonzero iff EXP is an
5521 expression whose type is statically determinable. */
5527 if (TREE_CODE (exp) == PARM_DECL
5528 || TREE_CODE (exp) == VAR_DECL
5529 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5530 || TREE_CODE (exp) == COMPONENT_REF
5531 || TREE_CODE (exp) == ARRAY_REF)
5536 /* Subroutine of expand_expr: return rtx if EXP is a
5537 variable or parameter; else return 0. */
5544 switch (TREE_CODE (exp))
5548 return DECL_RTL (exp);
5554 #ifdef MAX_INTEGER_COMPUTATION_MODE
5556 check_max_integer_computation_mode (exp)
5559 enum tree_code code;
5560 enum machine_mode mode;
5562 /* Strip any NOPs that don't change the mode. */
5564 code = TREE_CODE (exp);
5566 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5567 if (code == NOP_EXPR
5568 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5571 /* First check the type of the overall operation. We need only look at
5572 unary, binary and relational operations. */
5573 if (TREE_CODE_CLASS (code) == '1'
5574 || TREE_CODE_CLASS (code) == '2'
5575 || TREE_CODE_CLASS (code) == '<')
5577 mode = TYPE_MODE (TREE_TYPE (exp));
5578 if (GET_MODE_CLASS (mode) == MODE_INT
5579 && mode > MAX_INTEGER_COMPUTATION_MODE)
5580 fatal ("unsupported wide integer operation");
5583 /* Check operand of a unary op. */
5584 if (TREE_CODE_CLASS (code) == '1')
5586 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5587 if (GET_MODE_CLASS (mode) == MODE_INT
5588 && mode > MAX_INTEGER_COMPUTATION_MODE)
5589 fatal ("unsupported wide integer operation");
5592 /* Check operands of a binary/comparison op. */
5593 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5595 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5596 if (GET_MODE_CLASS (mode) == MODE_INT
5597 && mode > MAX_INTEGER_COMPUTATION_MODE)
5598 fatal ("unsupported wide integer operation");
5600 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5601 if (GET_MODE_CLASS (mode) == MODE_INT
5602 && mode > MAX_INTEGER_COMPUTATION_MODE)
5603 fatal ("unsupported wide integer operation");
5609 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5610 has any readonly fields. If any of the fields have types that
5611 contain readonly fields, return true as well. */
5614 readonly_fields_p (type)
5619 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5620 if (TREE_CODE (field) == FIELD_DECL
5621 && (TREE_READONLY (field)
5622 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5623 && readonly_fields_p (TREE_TYPE (field)))))
5629 /* expand_expr: generate code for computing expression EXP.
5630 An rtx for the computed value is returned. The value is never null.
5631 In the case of a void EXP, const0_rtx is returned.
5633 The value may be stored in TARGET if TARGET is nonzero.
5634 TARGET is just a suggestion; callers must assume that
5635 the rtx returned may not be the same as TARGET.
5637 If TARGET is CONST0_RTX, it means that the value will be ignored.
5639 If TMODE is not VOIDmode, it suggests generating the
5640 result in mode TMODE. But this is done only when convenient.
5641 Otherwise, TMODE is ignored and the value generated in its natural mode.
5642 TMODE is just a suggestion; callers must assume that
5643 the rtx returned may not have mode TMODE.
5645 Note that TARGET may have neither TMODE nor MODE. In that case, it
5646 probably will not be used.
5648 If MODIFIER is EXPAND_SUM then when EXP is an addition
5649 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5650 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5651 products as above, or REG or MEM, or constant.
5652 Ordinarily in such cases we would output mul or add instructions
5653 and then return a pseudo reg containing the sum.
5655 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5656 it also marks a label as absolutely required (it can't be dead).
5657 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5658 This is used for outputting expressions used in initializers.
5660 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5661 with a constant address even if that address is not normally legitimate.
5662 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5665 expand_expr (exp, target, tmode, modifier)
5668 enum machine_mode tmode;
5669 enum expand_modifier modifier;
5671 register rtx op0, op1, temp;
5672 tree type = TREE_TYPE (exp);
5673 int unsignedp = TREE_UNSIGNED (type);
5674 register enum machine_mode mode;
5675 register enum tree_code code = TREE_CODE (exp);
5677 rtx subtarget, original_target;
5680 /* Used by check-memory-usage to make modifier read only. */
5681 enum expand_modifier ro_modifier;
5683 /* Handle ERROR_MARK before anybody tries to access its type. */
5684 if (TREE_CODE (exp) == ERROR_MARK)
5686 op0 = CONST0_RTX (tmode);
5692 mode = TYPE_MODE (type);
5693 /* Use subtarget as the target for operand 0 of a binary operation. */
5694 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5695 original_target = target;
5696 ignore = (target == const0_rtx
5697 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5698 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5699 || code == COND_EXPR)
5700 && TREE_CODE (type) == VOID_TYPE));
5702 /* Make a read-only version of the modifier. */
5703 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5704 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5705 ro_modifier = modifier;
5707 ro_modifier = EXPAND_NORMAL;
5709 /* Don't use hard regs as subtargets, because the combiner
5710 can only handle pseudo regs. */
5711 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5713 /* Avoid subtargets inside loops,
5714 since they hide some invariant expressions. */
5715 if (preserve_subexpressions_p ())
5718 /* If we are going to ignore this result, we need only do something
5719 if there is a side-effect somewhere in the expression. If there
5720 is, short-circuit the most common cases here. Note that we must
5721 not call expand_expr with anything but const0_rtx in case this
5722 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5726 if (! TREE_SIDE_EFFECTS (exp))
5729 /* Ensure we reference a volatile object even if value is ignored, but
5730 don't do this if all we are doing is taking its address. */
5731 if (TREE_THIS_VOLATILE (exp)
5732 && TREE_CODE (exp) != FUNCTION_DECL
5733 && mode != VOIDmode && mode != BLKmode
5734 && modifier != EXPAND_CONST_ADDRESS)
5736 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5737 if (GET_CODE (temp) == MEM)
5738 temp = copy_to_reg (temp);
5742 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5743 || code == INDIRECT_REF || code == BUFFER_REF)
5744 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5745 VOIDmode, ro_modifier);
5746 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5747 || code == ARRAY_REF)
5749 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5750 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5753 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5754 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5755 /* If the second operand has no side effects, just evaluate
5757 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5758 VOIDmode, ro_modifier);
5759 else if (code == BIT_FIELD_REF)
5761 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5762 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5763 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5770 #ifdef MAX_INTEGER_COMPUTATION_MODE
5771 /* Only check stuff here if the mode we want is different from the mode
5772 of the expression; if it's the same, check_max_integer_computiation_mode
5773 will handle it. Do we really need to check this stuff at all? */
5776 && GET_MODE (target) != mode
5777 && TREE_CODE (exp) != INTEGER_CST
5778 && TREE_CODE (exp) != PARM_DECL
5779 && TREE_CODE (exp) != ARRAY_REF
5780 && TREE_CODE (exp) != COMPONENT_REF
5781 && TREE_CODE (exp) != BIT_FIELD_REF
5782 && TREE_CODE (exp) != INDIRECT_REF
5783 && TREE_CODE (exp) != CALL_EXPR
5784 && TREE_CODE (exp) != VAR_DECL
5785 && TREE_CODE (exp) != RTL_EXPR)
5787 enum machine_mode mode = GET_MODE (target);
5789 if (GET_MODE_CLASS (mode) == MODE_INT
5790 && mode > MAX_INTEGER_COMPUTATION_MODE)
5791 fatal ("unsupported wide integer operation");
5795 && TREE_CODE (exp) != INTEGER_CST
5796 && TREE_CODE (exp) != PARM_DECL
5797 && TREE_CODE (exp) != ARRAY_REF
5798 && TREE_CODE (exp) != COMPONENT_REF
5799 && TREE_CODE (exp) != BIT_FIELD_REF
5800 && TREE_CODE (exp) != INDIRECT_REF
5801 && TREE_CODE (exp) != VAR_DECL
5802 && TREE_CODE (exp) != CALL_EXPR
5803 && TREE_CODE (exp) != RTL_EXPR
5804 && GET_MODE_CLASS (tmode) == MODE_INT
5805 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5806 fatal ("unsupported wide integer operation");
5808 check_max_integer_computation_mode (exp);
5811 /* If will do cse, generate all results into pseudo registers
5812 since 1) that allows cse to find more things
5813 and 2) otherwise cse could produce an insn the machine
5816 if (! cse_not_expected && mode != BLKmode && target
5817 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5824 tree function = decl_function_context (exp);
5825 /* Handle using a label in a containing function. */
5826 if (function != current_function_decl
5827 && function != inline_function_decl && function != 0)
5829 struct function *p = find_function_data (function);
5830 /* Allocate in the memory associated with the function
5831 that the label is in. */
5832 push_obstacks (p->function_obstack,
5833 p->function_maybepermanent_obstack);
5835 p->expr->x_forced_labels
5836 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5837 p->expr->x_forced_labels);
5842 if (modifier == EXPAND_INITIALIZER)
5843 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5848 temp = gen_rtx_MEM (FUNCTION_MODE,
5849 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5850 if (function != current_function_decl
5851 && function != inline_function_decl && function != 0)
5852 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5857 if (DECL_RTL (exp) == 0)
5859 error_with_decl (exp, "prior parameter's size depends on `%s'");
5860 return CONST0_RTX (mode);
5863 /* ... fall through ... */
5866 /* If a static var's type was incomplete when the decl was written,
5867 but the type is complete now, lay out the decl now. */
5868 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5869 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5871 push_obstacks_nochange ();
5872 end_temporary_allocation ();
5873 layout_decl (exp, 0);
5874 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5878 /* Although static-storage variables start off initialized, according to
5879 ANSI C, a memcpy could overwrite them with uninitialized values. So
5880 we check them too. This also lets us check for read-only variables
5881 accessed via a non-const declaration, in case it won't be detected
5882 any other way (e.g., in an embedded system or OS kernel without
5885 Aggregates are not checked here; they're handled elsewhere. */
5886 if (cfun && current_function_check_memory_usage
5888 && GET_CODE (DECL_RTL (exp)) == MEM
5889 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5891 enum memory_use_mode memory_usage;
5892 memory_usage = get_memory_usage_from_modifier (modifier);
5894 if (memory_usage != MEMORY_USE_DONT)
5895 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5896 XEXP (DECL_RTL (exp), 0), Pmode,
5897 GEN_INT (int_size_in_bytes (type)),
5898 TYPE_MODE (sizetype),
5899 GEN_INT (memory_usage),
5900 TYPE_MODE (integer_type_node));
5903 /* ... fall through ... */
5907 if (DECL_RTL (exp) == 0)
5910 /* Ensure variable marked as used even if it doesn't go through
5911 a parser. If it hasn't be used yet, write out an external
5913 if (! TREE_USED (exp))
5915 assemble_external (exp);
5916 TREE_USED (exp) = 1;
5919 /* Show we haven't gotten RTL for this yet. */
5922 /* Handle variables inherited from containing functions. */
5923 context = decl_function_context (exp);
5925 /* We treat inline_function_decl as an alias for the current function
5926 because that is the inline function whose vars, types, etc.
5927 are being merged into the current function.
5928 See expand_inline_function. */
5930 if (context != 0 && context != current_function_decl
5931 && context != inline_function_decl
5932 /* If var is static, we don't need a static chain to access it. */
5933 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5934 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5938 /* Mark as non-local and addressable. */
5939 DECL_NONLOCAL (exp) = 1;
5940 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5942 mark_addressable (exp);
5943 if (GET_CODE (DECL_RTL (exp)) != MEM)
5945 addr = XEXP (DECL_RTL (exp), 0);
5946 if (GET_CODE (addr) == MEM)
5947 addr = gen_rtx_MEM (Pmode,
5948 fix_lexical_addr (XEXP (addr, 0), exp));
5950 addr = fix_lexical_addr (addr, exp);
5951 temp = change_address (DECL_RTL (exp), mode, addr);
5954 /* This is the case of an array whose size is to be determined
5955 from its initializer, while the initializer is still being parsed.
5958 else if (GET_CODE (DECL_RTL (exp)) == MEM
5959 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5960 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5961 XEXP (DECL_RTL (exp), 0));
5963 /* If DECL_RTL is memory, we are in the normal case and either
5964 the address is not valid or it is not a register and -fforce-addr
5965 is specified, get the address into a register. */
5967 else if (GET_CODE (DECL_RTL (exp)) == MEM
5968 && modifier != EXPAND_CONST_ADDRESS
5969 && modifier != EXPAND_SUM
5970 && modifier != EXPAND_INITIALIZER
5971 && (! memory_address_p (DECL_MODE (exp),
5972 XEXP (DECL_RTL (exp), 0))
5974 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5975 temp = change_address (DECL_RTL (exp), VOIDmode,
5976 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5978 /* If we got something, return it. But first, set the alignment
5979 the address is a register. */
5982 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5983 mark_reg_pointer (XEXP (temp, 0),
5984 DECL_ALIGN (exp) / BITS_PER_UNIT);
5989 /* If the mode of DECL_RTL does not match that of the decl, it
5990 must be a promoted value. We return a SUBREG of the wanted mode,
5991 but mark it so that we know that it was already extended. */
5993 if (GET_CODE (DECL_RTL (exp)) == REG
5994 && GET_MODE (DECL_RTL (exp)) != mode)
5996 /* Get the signedness used for this variable. Ensure we get the
5997 same mode we got when the variable was declared. */
5998 if (GET_MODE (DECL_RTL (exp))
5999 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6002 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6003 SUBREG_PROMOTED_VAR_P (temp) = 1;
6004 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6008 return DECL_RTL (exp);
6011 return immed_double_const (TREE_INT_CST_LOW (exp),
6012 TREE_INT_CST_HIGH (exp), mode);
6015 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6016 EXPAND_MEMORY_USE_BAD);
6019 /* If optimized, generate immediate CONST_DOUBLE
6020 which will be turned into memory by reload if necessary.
6022 We used to force a register so that loop.c could see it. But
6023 this does not allow gen_* patterns to perform optimizations with
6024 the constants. It also produces two insns in cases like "x = 1.0;".
6025 On most machines, floating-point constants are not permitted in
6026 many insns, so we'd end up copying it to a register in any case.
6028 Now, we do the copying in expand_binop, if appropriate. */
6029 return immed_real_const (exp);
6033 if (! TREE_CST_RTL (exp))
6034 output_constant_def (exp);
6036 /* TREE_CST_RTL probably contains a constant address.
6037 On RISC machines where a constant address isn't valid,
6038 make some insns to get that address into a register. */
6039 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6040 && modifier != EXPAND_CONST_ADDRESS
6041 && modifier != EXPAND_INITIALIZER
6042 && modifier != EXPAND_SUM
6043 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6045 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6046 return change_address (TREE_CST_RTL (exp), VOIDmode,
6047 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6048 return TREE_CST_RTL (exp);
6050 case EXPR_WITH_FILE_LOCATION:
6053 char *saved_input_filename = input_filename;
6054 int saved_lineno = lineno;
6055 input_filename = EXPR_WFL_FILENAME (exp);
6056 lineno = EXPR_WFL_LINENO (exp);
6057 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6058 emit_line_note (input_filename, lineno);
6059 /* Possibly avoid switching back and force here */
6060 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6061 input_filename = saved_input_filename;
6062 lineno = saved_lineno;
6067 context = decl_function_context (exp);
6069 /* If this SAVE_EXPR was at global context, assume we are an
6070 initialization function and move it into our context. */
6072 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6074 /* We treat inline_function_decl as an alias for the current function
6075 because that is the inline function whose vars, types, etc.
6076 are being merged into the current function.
6077 See expand_inline_function. */
6078 if (context == current_function_decl || context == inline_function_decl)
6081 /* If this is non-local, handle it. */
6084 /* The following call just exists to abort if the context is
6085 not of a containing function. */
6086 find_function_data (context);
6088 temp = SAVE_EXPR_RTL (exp);
6089 if (temp && GET_CODE (temp) == REG)
6091 put_var_into_stack (exp);
6092 temp = SAVE_EXPR_RTL (exp);
6094 if (temp == 0 || GET_CODE (temp) != MEM)
6096 return change_address (temp, mode,
6097 fix_lexical_addr (XEXP (temp, 0), exp));
6099 if (SAVE_EXPR_RTL (exp) == 0)
6101 if (mode == VOIDmode)
6104 temp = assign_temp (type, 3, 0, 0);
6106 SAVE_EXPR_RTL (exp) = temp;
6107 if (!optimize && GET_CODE (temp) == REG)
6108 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6111 /* If the mode of TEMP does not match that of the expression, it
6112 must be a promoted value. We pass store_expr a SUBREG of the
6113 wanted mode but mark it so that we know that it was already
6114 extended. Note that `unsignedp' was modified above in
6117 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6119 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6120 SUBREG_PROMOTED_VAR_P (temp) = 1;
6121 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6124 if (temp == const0_rtx)
6125 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6126 EXPAND_MEMORY_USE_BAD);
6128 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6130 TREE_USED (exp) = 1;
6133 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6134 must be a promoted value. We return a SUBREG of the wanted mode,
6135 but mark it so that we know that it was already extended. */
6137 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6138 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6140 /* Compute the signedness and make the proper SUBREG. */
6141 promote_mode (type, mode, &unsignedp, 0);
6142 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6143 SUBREG_PROMOTED_VAR_P (temp) = 1;
6144 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6148 return SAVE_EXPR_RTL (exp);
6153 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6154 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6158 case PLACEHOLDER_EXPR:
6160 tree placeholder_expr;
6162 /* If there is an object on the head of the placeholder list,
6163 see if some object in it of type TYPE or a pointer to it. For
6164 further information, see tree.def. */
6165 for (placeholder_expr = placeholder_list;
6166 placeholder_expr != 0;
6167 placeholder_expr = TREE_CHAIN (placeholder_expr))
6169 tree need_type = TYPE_MAIN_VARIANT (type);
6171 tree old_list = placeholder_list;
6174 /* Find the outermost reference that is of the type we want.
6175 If none, see if any object has a type that is a pointer to
6176 the type we want. */
6177 for (elt = TREE_PURPOSE (placeholder_expr);
6178 elt != 0 && object == 0;
6180 = ((TREE_CODE (elt) == COMPOUND_EXPR
6181 || TREE_CODE (elt) == COND_EXPR)
6182 ? TREE_OPERAND (elt, 1)
6183 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6184 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6185 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6186 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6187 ? TREE_OPERAND (elt, 0) : 0))
6188 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6191 for (elt = TREE_PURPOSE (placeholder_expr);
6192 elt != 0 && object == 0;
6194 = ((TREE_CODE (elt) == COMPOUND_EXPR
6195 || TREE_CODE (elt) == COND_EXPR)
6196 ? TREE_OPERAND (elt, 1)
6197 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6198 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6199 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6200 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6201 ? TREE_OPERAND (elt, 0) : 0))
6202 if (POINTER_TYPE_P (TREE_TYPE (elt))
6203 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6205 object = build1 (INDIRECT_REF, need_type, elt);
6209 /* Expand this object skipping the list entries before
6210 it was found in case it is also a PLACEHOLDER_EXPR.
6211 In that case, we want to translate it using subsequent
6213 placeholder_list = TREE_CHAIN (placeholder_expr);
6214 temp = expand_expr (object, original_target, tmode,
6216 placeholder_list = old_list;
6222 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6225 case WITH_RECORD_EXPR:
6226 /* Put the object on the placeholder list, expand our first operand,
6227 and pop the list. */
6228 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6230 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6231 tmode, ro_modifier);
6232 placeholder_list = TREE_CHAIN (placeholder_list);
6236 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6237 expand_goto (TREE_OPERAND (exp, 0));
6239 expand_computed_goto (TREE_OPERAND (exp, 0));
6243 expand_exit_loop_if_false (NULL_PTR,
6244 invert_truthvalue (TREE_OPERAND (exp, 0)));
6247 case LABELED_BLOCK_EXPR:
6248 if (LABELED_BLOCK_BODY (exp))
6249 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6250 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6253 case EXIT_BLOCK_EXPR:
6254 if (EXIT_BLOCK_RETURN (exp))
6255 sorry ("returned value in block_exit_expr");
6256 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6261 expand_start_loop (1);
6262 expand_expr_stmt (TREE_OPERAND (exp, 0));
6270 tree vars = TREE_OPERAND (exp, 0);
6271 int vars_need_expansion = 0;
6273 /* Need to open a binding contour here because
6274 if there are any cleanups they must be contained here. */
6275 expand_start_bindings (2);
6277 /* Mark the corresponding BLOCK for output in its proper place. */
6278 if (TREE_OPERAND (exp, 2) != 0
6279 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6280 insert_block (TREE_OPERAND (exp, 2));
6282 /* If VARS have not yet been expanded, expand them now. */
6285 if (DECL_RTL (vars) == 0)
6287 vars_need_expansion = 1;
6290 expand_decl_init (vars);
6291 vars = TREE_CHAIN (vars);
6294 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6296 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6302 if (RTL_EXPR_SEQUENCE (exp))
6304 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6306 emit_insns (RTL_EXPR_SEQUENCE (exp));
6307 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6309 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6310 free_temps_for_rtl_expr (exp);
6311 return RTL_EXPR_RTL (exp);
6314 /* If we don't need the result, just ensure we evaluate any
6319 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6320 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6321 EXPAND_MEMORY_USE_BAD);
6325 /* All elts simple constants => refer to a constant in memory. But
6326 if this is a non-BLKmode mode, let it store a field at a time
6327 since that should make a CONST_INT or CONST_DOUBLE when we
6328 fold. Likewise, if we have a target we can use, it is best to
6329 store directly into the target unless the type is large enough
6330 that memcpy will be used. If we are making an initializer and
6331 all operands are constant, put it in memory as well. */
6332 else if ((TREE_STATIC (exp)
6333 && ((mode == BLKmode
6334 && ! (target != 0 && safe_from_p (target, exp, 1)))
6335 || TREE_ADDRESSABLE (exp)
6336 || (TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST
6337 && TREE_INT_CST_HIGH (TYPE_SIZE_UNIT (type)) == 0
6338 && (! MOVE_BY_PIECES_P
6339 (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type)),
6340 TYPE_ALIGN (type) / BITS_PER_UNIT))
6341 && ! mostly_zeros_p (exp))))
6342 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6344 rtx constructor = output_constant_def (exp);
6345 if (modifier != EXPAND_CONST_ADDRESS
6346 && modifier != EXPAND_INITIALIZER
6347 && modifier != EXPAND_SUM
6348 && (! memory_address_p (GET_MODE (constructor),
6349 XEXP (constructor, 0))
6351 && GET_CODE (XEXP (constructor, 0)) != REG)))
6352 constructor = change_address (constructor, VOIDmode,
6353 XEXP (constructor, 0));
6359 /* Handle calls that pass values in multiple non-contiguous
6360 locations. The Irix 6 ABI has examples of this. */
6361 if (target == 0 || ! safe_from_p (target, exp, 1)
6362 || GET_CODE (target) == PARALLEL)
6364 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6365 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6367 target = assign_temp (type, 0, 1, 1);
6370 if (TREE_READONLY (exp))
6372 if (GET_CODE (target) == MEM)
6373 target = copy_rtx (target);
6375 RTX_UNCHANGING_P (target) = 1;
6378 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6379 int_size_in_bytes (TREE_TYPE (exp)));
6385 tree exp1 = TREE_OPERAND (exp, 0);
6388 tree string = string_constant (exp1, &index);
6390 /* Try to optimize reads from const strings. */
6392 && TREE_CODE (string) == STRING_CST
6393 && TREE_CODE (index) == INTEGER_CST
6394 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6395 && GET_MODE_CLASS (mode) == MODE_INT
6396 && GET_MODE_SIZE (mode) == 1
6397 && modifier != EXPAND_MEMORY_USE_WO)
6399 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6401 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6402 op0 = memory_address (mode, op0);
6404 if (cfun && current_function_check_memory_usage
6405 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6407 enum memory_use_mode memory_usage;
6408 memory_usage = get_memory_usage_from_modifier (modifier);
6410 if (memory_usage != MEMORY_USE_DONT)
6412 in_check_memory_usage = 1;
6413 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6415 GEN_INT (int_size_in_bytes (type)),
6416 TYPE_MODE (sizetype),
6417 GEN_INT (memory_usage),
6418 TYPE_MODE (integer_type_node));
6419 in_check_memory_usage = 0;
6423 temp = gen_rtx_MEM (mode, op0);
6424 /* If address was computed by addition,
6425 mark this as an element of an aggregate. */
6426 if (TREE_CODE (exp1) == PLUS_EXPR
6427 || (TREE_CODE (exp1) == SAVE_EXPR
6428 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6429 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6430 || (TREE_CODE (exp1) == ADDR_EXPR
6431 && (exp2 = TREE_OPERAND (exp1, 0))
6432 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6433 MEM_SET_IN_STRUCT_P (temp, 1);
6435 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6436 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6438 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6439 here, because, in C and C++, the fact that a location is accessed
6440 through a pointer to const does not mean that the value there can
6441 never change. Languages where it can never change should
6442 also set TREE_STATIC. */
6443 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6445 /* If we are writing to this object and its type is a record with
6446 readonly fields, we must mark it as readonly so it will
6447 conflict with readonly references to those fields. */
6448 if (modifier == EXPAND_MEMORY_USE_WO
6449 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6450 RTX_UNCHANGING_P (temp) = 1;
6456 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6460 tree array = TREE_OPERAND (exp, 0);
6461 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6462 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6463 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6466 /* Optimize the special-case of a zero lower bound.
6468 We convert the low_bound to sizetype to avoid some problems
6469 with constant folding. (E.g. suppose the lower bound is 1,
6470 and its mode is QI. Without the conversion, (ARRAY
6471 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6472 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6474 if (! integer_zerop (low_bound))
6475 index = size_diffop (index, convert (sizetype, low_bound));
6477 /* Fold an expression like: "foo"[2].
6478 This is not done in fold so it won't happen inside &.
6479 Don't fold if this is for wide characters since it's too
6480 difficult to do correctly and this is a very rare case. */
6482 if (TREE_CODE (array) == STRING_CST
6483 && TREE_CODE (index) == INTEGER_CST
6484 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6485 && GET_MODE_CLASS (mode) == MODE_INT
6486 && GET_MODE_SIZE (mode) == 1)
6488 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6490 /* If this is a constant index into a constant array,
6491 just get the value from the array. Handle both the cases when
6492 we have an explicit constructor and when our operand is a variable
6493 that was declared const. */
6495 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6496 && TREE_CODE (index) == INTEGER_CST
6497 && 0 > compare_tree_int (index,
6498 list_length (CONSTRUCTOR_ELTS
6499 (TREE_OPERAND (exp, 0)))))
6503 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6504 i = TREE_INT_CST_LOW (index);
6505 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6509 return expand_expr (fold (TREE_VALUE (elem)), target,
6510 tmode, ro_modifier);
6513 else if (optimize >= 1
6514 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6515 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6516 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6518 if (TREE_CODE (index) == INTEGER_CST)
6520 tree init = DECL_INITIAL (array);
6522 if (TREE_CODE (init) == CONSTRUCTOR)
6524 tree elem = CONSTRUCTOR_ELTS (init);
6526 for (elem = CONSTRUCTOR_ELTS (init);
6528 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6529 elem = TREE_CHAIN (elem))
6533 return expand_expr (fold (TREE_VALUE (elem)), target,
6534 tmode, ro_modifier);
6536 else if (TREE_CODE (init) == STRING_CST
6537 && 0 > compare_tree_int (index,
6538 TREE_STRING_LENGTH (init)))
6540 (TREE_STRING_POINTER
6541 (init)[TREE_INT_CST_LOW (index)]));
6546 /* ... fall through ... */
6550 /* If the operand is a CONSTRUCTOR, we can just extract the
6551 appropriate field if it is present. Don't do this if we have
6552 already written the data since we want to refer to that copy
6553 and varasm.c assumes that's what we'll do. */
6554 if (code != ARRAY_REF
6555 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6556 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6560 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6561 elt = TREE_CHAIN (elt))
6562 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6563 /* We can normally use the value of the field in the
6564 CONSTRUCTOR. However, if this is a bitfield in
6565 an integral mode that we can fit in a HOST_WIDE_INT,
6566 we must mask only the number of bits in the bitfield,
6567 since this is done implicitly by the constructor. If
6568 the bitfield does not meet either of those conditions,
6569 we can't do this optimization. */
6570 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6571 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6573 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6574 <= HOST_BITS_PER_WIDE_INT))))
6576 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6577 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6579 HOST_WIDE_INT bitsize
6580 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6582 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6584 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6585 op0 = expand_and (op0, op1, target);
6589 enum machine_mode imode
6590 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6592 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6595 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6597 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6607 enum machine_mode mode1;
6612 unsigned int alignment;
6613 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6614 &mode1, &unsignedp, &volatilep,
6617 /* If we got back the original object, something is wrong. Perhaps
6618 we are evaluating an expression too early. In any event, don't
6619 infinitely recurse. */
6623 /* If TEM's type is a union of variable size, pass TARGET to the inner
6624 computation, since it will need a temporary and TARGET is known
6625 to have to do. This occurs in unchecked conversion in Ada. */
6627 op0 = expand_expr (tem,
6628 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6629 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6631 ? target : NULL_RTX),
6633 (modifier == EXPAND_INITIALIZER
6634 || modifier == EXPAND_CONST_ADDRESS)
6635 ? modifier : EXPAND_NORMAL);
6637 /* If this is a constant, put it into a register if it is a
6638 legitimate constant and OFFSET is 0 and memory if it isn't. */
6639 if (CONSTANT_P (op0))
6641 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6642 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6644 op0 = force_reg (mode, op0);
6646 op0 = validize_mem (force_const_mem (mode, op0));
6651 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6653 /* If this object is in memory, put it into a register.
6654 This case can't occur in C, but can in Ada if we have
6655 unchecked conversion of an expression from a scalar type to
6656 an array or record type. */
6657 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6658 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6660 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6662 mark_temp_addr_taken (memloc);
6663 emit_move_insn (memloc, op0);
6667 if (GET_CODE (op0) != MEM)
6670 if (GET_MODE (offset_rtx) != ptr_mode)
6672 #ifdef POINTERS_EXTEND_UNSIGNED
6673 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6675 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6679 /* A constant address in OP0 can have VOIDmode, we must not try
6680 to call force_reg for that case. Avoid that case. */
6681 if (GET_CODE (op0) == MEM
6682 && GET_MODE (op0) == BLKmode
6683 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6685 && (bitpos % bitsize) == 0
6686 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6687 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6689 rtx temp = change_address (op0, mode1,
6690 plus_constant (XEXP (op0, 0),
6693 if (GET_CODE (XEXP (temp, 0)) == REG)
6696 op0 = change_address (op0, mode1,
6697 force_reg (GET_MODE (XEXP (temp, 0)),
6703 op0 = change_address (op0, VOIDmode,
6704 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6705 force_reg (ptr_mode,
6709 /* Don't forget about volatility even if this is a bitfield. */
6710 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6712 op0 = copy_rtx (op0);
6713 MEM_VOLATILE_P (op0) = 1;
6716 /* Check the access. */
6717 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6719 enum memory_use_mode memory_usage;
6720 memory_usage = get_memory_usage_from_modifier (modifier);
6722 if (memory_usage != MEMORY_USE_DONT)
6727 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6728 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6730 /* Check the access right of the pointer. */
6731 if (size > BITS_PER_UNIT)
6732 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6734 GEN_INT (size / BITS_PER_UNIT),
6735 TYPE_MODE (sizetype),
6736 GEN_INT (memory_usage),
6737 TYPE_MODE (integer_type_node));
6741 /* In cases where an aligned union has an unaligned object
6742 as a field, we might be extracting a BLKmode value from
6743 an integer-mode (e.g., SImode) object. Handle this case
6744 by doing the extract into an object as wide as the field
6745 (which we know to be the width of a basic mode), then
6746 storing into memory, and changing the mode to BLKmode.
6747 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6748 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6749 if (mode1 == VOIDmode
6750 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6751 || (modifier != EXPAND_CONST_ADDRESS
6752 && modifier != EXPAND_INITIALIZER
6753 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6754 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6755 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6756 /* If the field isn't aligned enough to fetch as a memref,
6757 fetch it as a bit field. */
6758 || (mode1 != BLKmode
6759 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6760 && ((TYPE_ALIGN (TREE_TYPE (tem))
6761 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6762 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6763 /* If the type and the field are a constant size and the
6764 size of the type isn't the same size as the bitfield,
6765 we must use bitfield operations. */
6767 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6769 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6771 || (modifier != EXPAND_CONST_ADDRESS
6772 && modifier != EXPAND_INITIALIZER
6774 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6775 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6776 || bitpos % TYPE_ALIGN (type) != 0)))
6778 enum machine_mode ext_mode = mode;
6780 if (ext_mode == BLKmode
6781 && ! (target != 0 && GET_CODE (op0) == MEM
6782 && GET_CODE (target) == MEM
6783 && bitpos % BITS_PER_UNIT == 0))
6784 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6786 if (ext_mode == BLKmode)
6788 /* In this case, BITPOS must start at a byte boundary and
6789 TARGET, if specified, must be a MEM. */
6790 if (GET_CODE (op0) != MEM
6791 || (target != 0 && GET_CODE (target) != MEM)
6792 || bitpos % BITS_PER_UNIT != 0)
6795 op0 = change_address (op0, VOIDmode,
6796 plus_constant (XEXP (op0, 0),
6797 bitpos / BITS_PER_UNIT));
6799 target = assign_temp (type, 0, 1, 1);
6801 emit_block_move (target, op0,
6802 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6809 op0 = validize_mem (op0);
6811 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6812 mark_reg_pointer (XEXP (op0, 0), alignment);
6814 op0 = extract_bit_field (op0, bitsize, bitpos,
6815 unsignedp, target, ext_mode, ext_mode,
6817 int_size_in_bytes (TREE_TYPE (tem)));
6819 /* If the result is a record type and BITSIZE is narrower than
6820 the mode of OP0, an integral mode, and this is a big endian
6821 machine, we must put the field into the high-order bits. */
6822 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6823 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6824 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6825 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6826 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6830 if (mode == BLKmode)
6832 rtx new = assign_stack_temp (ext_mode,
6833 bitsize / BITS_PER_UNIT, 0);
6835 emit_move_insn (new, op0);
6836 op0 = copy_rtx (new);
6837 PUT_MODE (op0, BLKmode);
6838 MEM_SET_IN_STRUCT_P (op0, 1);
6844 /* If the result is BLKmode, use that to access the object
6846 if (mode == BLKmode)
6849 /* Get a reference to just this component. */
6850 if (modifier == EXPAND_CONST_ADDRESS
6851 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6852 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6853 (bitpos / BITS_PER_UNIT)));
6855 op0 = change_address (op0, mode1,
6856 plus_constant (XEXP (op0, 0),
6857 (bitpos / BITS_PER_UNIT)));
6859 if (GET_CODE (op0) == MEM)
6860 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6862 if (GET_CODE (XEXP (op0, 0)) == REG)
6863 mark_reg_pointer (XEXP (op0, 0), alignment);
6865 MEM_SET_IN_STRUCT_P (op0, 1);
6866 MEM_VOLATILE_P (op0) |= volatilep;
6867 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6868 || modifier == EXPAND_CONST_ADDRESS
6869 || modifier == EXPAND_INITIALIZER)
6871 else if (target == 0)
6872 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6874 convert_move (target, op0, unsignedp);
6878 /* Intended for a reference to a buffer of a file-object in Pascal.
6879 But it's not certain that a special tree code will really be
6880 necessary for these. INDIRECT_REF might work for them. */
6886 /* Pascal set IN expression.
6889 rlo = set_low - (set_low%bits_per_word);
6890 the_word = set [ (index - rlo)/bits_per_word ];
6891 bit_index = index % bits_per_word;
6892 bitmask = 1 << bit_index;
6893 return !!(the_word & bitmask); */
6895 tree set = TREE_OPERAND (exp, 0);
6896 tree index = TREE_OPERAND (exp, 1);
6897 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6898 tree set_type = TREE_TYPE (set);
6899 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6900 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6901 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6902 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6903 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6904 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6905 rtx setaddr = XEXP (setval, 0);
6906 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6908 rtx diff, quo, rem, addr, bit, result;
6910 preexpand_calls (exp);
6912 /* If domain is empty, answer is no. Likewise if index is constant
6913 and out of bounds. */
6914 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6915 && TREE_CODE (set_low_bound) == INTEGER_CST
6916 && tree_int_cst_lt (set_high_bound, set_low_bound))
6917 || (TREE_CODE (index) == INTEGER_CST
6918 && TREE_CODE (set_low_bound) == INTEGER_CST
6919 && tree_int_cst_lt (index, set_low_bound))
6920 || (TREE_CODE (set_high_bound) == INTEGER_CST
6921 && TREE_CODE (index) == INTEGER_CST
6922 && tree_int_cst_lt (set_high_bound, index))))
6926 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6928 /* If we get here, we have to generate the code for both cases
6929 (in range and out of range). */
6931 op0 = gen_label_rtx ();
6932 op1 = gen_label_rtx ();
6934 if (! (GET_CODE (index_val) == CONST_INT
6935 && GET_CODE (lo_r) == CONST_INT))
6937 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6938 GET_MODE (index_val), iunsignedp, 0, op1);
6941 if (! (GET_CODE (index_val) == CONST_INT
6942 && GET_CODE (hi_r) == CONST_INT))
6944 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6945 GET_MODE (index_val), iunsignedp, 0, op1);
6948 /* Calculate the element number of bit zero in the first word
6950 if (GET_CODE (lo_r) == CONST_INT)
6951 rlow = GEN_INT (INTVAL (lo_r)
6952 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6954 rlow = expand_binop (index_mode, and_optab, lo_r,
6955 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6956 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6958 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6959 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6961 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6962 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6963 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6964 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6966 addr = memory_address (byte_mode,
6967 expand_binop (index_mode, add_optab, diff,
6968 setaddr, NULL_RTX, iunsignedp,
6971 /* Extract the bit we want to examine */
6972 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6973 gen_rtx_MEM (byte_mode, addr),
6974 make_tree (TREE_TYPE (index), rem),
6976 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6977 GET_MODE (target) == byte_mode ? target : 0,
6978 1, OPTAB_LIB_WIDEN);
6980 if (result != target)
6981 convert_move (target, result, 1);
6983 /* Output the code to handle the out-of-range case. */
6986 emit_move_insn (target, const0_rtx);
6991 case WITH_CLEANUP_EXPR:
6992 if (RTL_EXPR_RTL (exp) == 0)
6995 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6996 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6998 /* That's it for this cleanup. */
6999 TREE_OPERAND (exp, 2) = 0;
7001 return RTL_EXPR_RTL (exp);
7003 case CLEANUP_POINT_EXPR:
7005 /* Start a new binding layer that will keep track of all cleanup
7006 actions to be performed. */
7007 expand_start_bindings (2);
7009 target_temp_slot_level = temp_slot_level;
7011 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7012 /* If we're going to use this value, load it up now. */
7014 op0 = force_not_mem (op0);
7015 preserve_temp_slots (op0);
7016 expand_end_bindings (NULL_TREE, 0, 0);
7021 /* Check for a built-in function. */
7022 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7023 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7025 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7026 return expand_builtin (exp, target, subtarget, tmode, ignore);
7028 /* If this call was expanded already by preexpand_calls,
7029 just return the result we got. */
7030 if (CALL_EXPR_RTL (exp) != 0)
7031 return CALL_EXPR_RTL (exp);
7033 return expand_call (exp, target, ignore);
7035 case NON_LVALUE_EXPR:
7038 case REFERENCE_EXPR:
7039 if (TREE_CODE (type) == UNION_TYPE)
7041 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7043 /* If both input and output are BLKmode, this conversion
7044 isn't actually doing anything unless we need to make the
7045 alignment stricter. */
7046 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7047 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7048 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7049 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7054 if (mode != BLKmode)
7055 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7057 target = assign_temp (type, 0, 1, 1);
7060 if (GET_CODE (target) == MEM)
7061 /* Store data into beginning of memory target. */
7062 store_expr (TREE_OPERAND (exp, 0),
7063 change_address (target, TYPE_MODE (valtype), 0), 0);
7065 else if (GET_CODE (target) == REG)
7066 /* Store this field into a union of the proper type. */
7067 store_field (target,
7068 MIN ((int_size_in_bytes (TREE_TYPE
7069 (TREE_OPERAND (exp, 0)))
7071 GET_MODE_BITSIZE (mode)),
7072 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7073 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
7077 /* Return the entire union. */
7081 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7083 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7086 /* If the signedness of the conversion differs and OP0 is
7087 a promoted SUBREG, clear that indication since we now
7088 have to do the proper extension. */
7089 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7090 && GET_CODE (op0) == SUBREG)
7091 SUBREG_PROMOTED_VAR_P (op0) = 0;
7096 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7097 if (GET_MODE (op0) == mode)
7100 /* If OP0 is a constant, just convert it into the proper mode. */
7101 if (CONSTANT_P (op0))
7103 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7104 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7106 if (modifier == EXPAND_INITIALIZER)
7107 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7111 convert_to_mode (mode, op0,
7112 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7114 convert_move (target, op0,
7115 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7119 /* We come here from MINUS_EXPR when the second operand is a
7122 this_optab = add_optab;
7124 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7125 something else, make sure we add the register to the constant and
7126 then to the other thing. This case can occur during strength
7127 reduction and doing it this way will produce better code if the
7128 frame pointer or argument pointer is eliminated.
7130 fold-const.c will ensure that the constant is always in the inner
7131 PLUS_EXPR, so the only case we need to do anything about is if
7132 sp, ap, or fp is our second argument, in which case we must swap
7133 the innermost first argument and our second argument. */
7135 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7136 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7137 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7138 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7139 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7140 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7142 tree t = TREE_OPERAND (exp, 1);
7144 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7145 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7148 /* If the result is to be ptr_mode and we are adding an integer to
7149 something, we might be forming a constant. So try to use
7150 plus_constant. If it produces a sum and we can't accept it,
7151 use force_operand. This allows P = &ARR[const] to generate
7152 efficient code on machines where a SYMBOL_REF is not a valid
7155 If this is an EXPAND_SUM call, always return the sum. */
7156 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7157 || mode == ptr_mode)
7159 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7160 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7161 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7165 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7167 /* Use immed_double_const to ensure that the constant is
7168 truncated according to the mode of OP1, then sign extended
7169 to a HOST_WIDE_INT. Using the constant directly can result
7170 in non-canonical RTL in a 64x32 cross compile. */
7172 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7174 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7175 op1 = plus_constant (op1, INTVAL (constant_part));
7176 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7177 op1 = force_operand (op1, target);
7181 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7182 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7183 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7187 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7189 if (! CONSTANT_P (op0))
7191 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7192 VOIDmode, modifier);
7193 /* Don't go to both_summands if modifier
7194 says it's not right to return a PLUS. */
7195 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7199 /* Use immed_double_const to ensure that the constant is
7200 truncated according to the mode of OP1, then sign extended
7201 to a HOST_WIDE_INT. Using the constant directly can result
7202 in non-canonical RTL in a 64x32 cross compile. */
7204 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7206 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7207 op0 = plus_constant (op0, INTVAL (constant_part));
7208 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7209 op0 = force_operand (op0, target);
7214 /* No sense saving up arithmetic to be done
7215 if it's all in the wrong mode to form part of an address.
7216 And force_operand won't know whether to sign-extend or
7218 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7219 || mode != ptr_mode)
7222 preexpand_calls (exp);
7223 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7226 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7227 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7230 /* Make sure any term that's a sum with a constant comes last. */
7231 if (GET_CODE (op0) == PLUS
7232 && CONSTANT_P (XEXP (op0, 1)))
7238 /* If adding to a sum including a constant,
7239 associate it to put the constant outside. */
7240 if (GET_CODE (op1) == PLUS
7241 && CONSTANT_P (XEXP (op1, 1)))
7243 rtx constant_term = const0_rtx;
7245 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7248 /* Ensure that MULT comes first if there is one. */
7249 else if (GET_CODE (op0) == MULT)
7250 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7252 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7254 /* Let's also eliminate constants from op0 if possible. */
7255 op0 = eliminate_constant_term (op0, &constant_term);
7257 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7258 their sum should be a constant. Form it into OP1, since the
7259 result we want will then be OP0 + OP1. */
7261 temp = simplify_binary_operation (PLUS, mode, constant_term,
7266 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7269 /* Put a constant term last and put a multiplication first. */
7270 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7271 temp = op1, op1 = op0, op0 = temp;
7273 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7274 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7277 /* For initializers, we are allowed to return a MINUS of two
7278 symbolic constants. Here we handle all cases when both operands
7280 /* Handle difference of two symbolic constants,
7281 for the sake of an initializer. */
7282 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7283 && really_constant_p (TREE_OPERAND (exp, 0))
7284 && really_constant_p (TREE_OPERAND (exp, 1)))
7286 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7287 VOIDmode, ro_modifier);
7288 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7289 VOIDmode, ro_modifier);
7291 /* If the last operand is a CONST_INT, use plus_constant of
7292 the negated constant. Else make the MINUS. */
7293 if (GET_CODE (op1) == CONST_INT)
7294 return plus_constant (op0, - INTVAL (op1));
7296 return gen_rtx_MINUS (mode, op0, op1);
7298 /* Convert A - const to A + (-const). */
7299 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7301 tree negated = fold (build1 (NEGATE_EXPR, type,
7302 TREE_OPERAND (exp, 1)));
7304 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7305 /* If we can't negate the constant in TYPE, leave it alone and
7306 expand_binop will negate it for us. We used to try to do it
7307 here in the signed version of TYPE, but that doesn't work
7308 on POINTER_TYPEs. */;
7311 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7315 this_optab = sub_optab;
7319 preexpand_calls (exp);
7320 /* If first operand is constant, swap them.
7321 Thus the following special case checks need only
7322 check the second operand. */
7323 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7325 register tree t1 = TREE_OPERAND (exp, 0);
7326 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7327 TREE_OPERAND (exp, 1) = t1;
7330 /* Attempt to return something suitable for generating an
7331 indexed address, for machines that support that. */
7333 if (modifier == EXPAND_SUM && mode == ptr_mode
7334 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7335 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7337 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7340 /* Apply distributive law if OP0 is x+c. */
7341 if (GET_CODE (op0) == PLUS
7342 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7347 (mode, XEXP (op0, 0),
7348 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7349 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7350 * INTVAL (XEXP (op0, 1))));
7352 if (GET_CODE (op0) != REG)
7353 op0 = force_operand (op0, NULL_RTX);
7354 if (GET_CODE (op0) != REG)
7355 op0 = copy_to_mode_reg (mode, op0);
7358 gen_rtx_MULT (mode, op0,
7359 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7362 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7365 /* Check for multiplying things that have been extended
7366 from a narrower type. If this machine supports multiplying
7367 in that narrower type with a result in the desired type,
7368 do it that way, and avoid the explicit type-conversion. */
7369 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7370 && TREE_CODE (type) == INTEGER_TYPE
7371 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7372 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7373 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7374 && int_fits_type_p (TREE_OPERAND (exp, 1),
7375 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7376 /* Don't use a widening multiply if a shift will do. */
7377 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7378 > HOST_BITS_PER_WIDE_INT)
7379 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7381 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7382 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7384 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7385 /* If both operands are extended, they must either both
7386 be zero-extended or both be sign-extended. */
7387 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7389 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7391 enum machine_mode innermode
7392 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7393 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7394 ? smul_widen_optab : umul_widen_optab);
7395 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7396 ? umul_widen_optab : smul_widen_optab);
7397 if (mode == GET_MODE_WIDER_MODE (innermode))
7399 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7401 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7402 NULL_RTX, VOIDmode, 0);
7403 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7404 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7407 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7408 NULL_RTX, VOIDmode, 0);
7411 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7412 && innermode == word_mode)
7415 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7416 NULL_RTX, VOIDmode, 0);
7417 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7418 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7421 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7422 NULL_RTX, VOIDmode, 0);
7423 temp = expand_binop (mode, other_optab, op0, op1, target,
7424 unsignedp, OPTAB_LIB_WIDEN);
7425 htem = expand_mult_highpart_adjust (innermode,
7426 gen_highpart (innermode, temp),
7428 gen_highpart (innermode, temp),
7430 emit_move_insn (gen_highpart (innermode, temp), htem);
7435 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7436 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7437 return expand_mult (mode, op0, op1, target, unsignedp);
7439 case TRUNC_DIV_EXPR:
7440 case FLOOR_DIV_EXPR:
7442 case ROUND_DIV_EXPR:
7443 case EXACT_DIV_EXPR:
7444 preexpand_calls (exp);
7445 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7447 /* Possible optimization: compute the dividend with EXPAND_SUM
7448 then if the divisor is constant can optimize the case
7449 where some terms of the dividend have coeffs divisible by it. */
7450 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7451 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7452 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7455 this_optab = flodiv_optab;
7458 case TRUNC_MOD_EXPR:
7459 case FLOOR_MOD_EXPR:
7461 case ROUND_MOD_EXPR:
7462 preexpand_calls (exp);
7463 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7465 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7466 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7467 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7469 case FIX_ROUND_EXPR:
7470 case FIX_FLOOR_EXPR:
7472 abort (); /* Not used for C. */
7474 case FIX_TRUNC_EXPR:
7475 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7477 target = gen_reg_rtx (mode);
7478 expand_fix (target, op0, unsignedp);
7482 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7484 target = gen_reg_rtx (mode);
7485 /* expand_float can't figure out what to do if FROM has VOIDmode.
7486 So give it the correct mode. With -O, cse will optimize this. */
7487 if (GET_MODE (op0) == VOIDmode)
7488 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7490 expand_float (target, op0,
7491 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7495 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7496 temp = expand_unop (mode, neg_optab, op0, target, 0);
7502 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7504 /* Handle complex values specially. */
7505 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7506 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7507 return expand_complex_abs (mode, op0, target, unsignedp);
7509 /* Unsigned abs is simply the operand. Testing here means we don't
7510 risk generating incorrect code below. */
7511 if (TREE_UNSIGNED (type))
7514 return expand_abs (mode, op0, target,
7515 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7519 target = original_target;
7520 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7521 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7522 || GET_MODE (target) != mode
7523 || (GET_CODE (target) == REG
7524 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7525 target = gen_reg_rtx (mode);
7526 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7527 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7529 /* First try to do it with a special MIN or MAX instruction.
7530 If that does not win, use a conditional jump to select the proper
7532 this_optab = (TREE_UNSIGNED (type)
7533 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7534 : (code == MIN_EXPR ? smin_optab : smax_optab));
7536 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7541 /* At this point, a MEM target is no longer useful; we will get better
7544 if (GET_CODE (target) == MEM)
7545 target = gen_reg_rtx (mode);
7548 emit_move_insn (target, op0);
7550 op0 = gen_label_rtx ();
7552 /* If this mode is an integer too wide to compare properly,
7553 compare word by word. Rely on cse to optimize constant cases. */
7554 if (GET_MODE_CLASS (mode) == MODE_INT
7555 && ! can_compare_p (GE, mode, ccp_jump))
7557 if (code == MAX_EXPR)
7558 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7559 target, op1, NULL_RTX, op0);
7561 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7562 op1, target, NULL_RTX, op0);
7566 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7567 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7568 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7571 emit_move_insn (target, op1);
7576 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7577 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7583 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7584 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7589 /* ??? Can optimize bitwise operations with one arg constant.
7590 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7591 and (a bitwise1 b) bitwise2 b (etc)
7592 but that is probably not worth while. */
7594 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7595 boolean values when we want in all cases to compute both of them. In
7596 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7597 as actual zero-or-1 values and then bitwise anding. In cases where
7598 there cannot be any side effects, better code would be made by
7599 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7600 how to recognize those cases. */
7602 case TRUTH_AND_EXPR:
7604 this_optab = and_optab;
7609 this_optab = ior_optab;
7612 case TRUTH_XOR_EXPR:
7614 this_optab = xor_optab;
7621 preexpand_calls (exp);
7622 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7624 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7625 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7628 /* Could determine the answer when only additive constants differ. Also,
7629 the addition of one can be handled by changing the condition. */
7636 case UNORDERED_EXPR:
7643 preexpand_calls (exp);
7644 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7648 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7649 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7651 && GET_CODE (original_target) == REG
7652 && (GET_MODE (original_target)
7653 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7655 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7658 if (temp != original_target)
7659 temp = copy_to_reg (temp);
7661 op1 = gen_label_rtx ();
7662 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7663 GET_MODE (temp), unsignedp, 0, op1);
7664 emit_move_insn (temp, const1_rtx);
7669 /* If no set-flag instruction, must generate a conditional
7670 store into a temporary variable. Drop through
7671 and handle this like && and ||. */
7673 case TRUTH_ANDIF_EXPR:
7674 case TRUTH_ORIF_EXPR:
7676 && (target == 0 || ! safe_from_p (target, exp, 1)
7677 /* Make sure we don't have a hard reg (such as function's return
7678 value) live across basic blocks, if not optimizing. */
7679 || (!optimize && GET_CODE (target) == REG
7680 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7681 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7684 emit_clr_insn (target);
7686 op1 = gen_label_rtx ();
7687 jumpifnot (exp, op1);
7690 emit_0_to_1_insn (target);
7693 return ignore ? const0_rtx : target;
7695 case TRUTH_NOT_EXPR:
7696 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7697 /* The parser is careful to generate TRUTH_NOT_EXPR
7698 only with operands that are always zero or one. */
7699 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7700 target, 1, OPTAB_LIB_WIDEN);
7706 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7708 return expand_expr (TREE_OPERAND (exp, 1),
7709 (ignore ? const0_rtx : target),
7713 /* If we would have a "singleton" (see below) were it not for a
7714 conversion in each arm, bring that conversion back out. */
7715 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7716 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7717 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7718 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7720 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7721 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7723 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7724 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7725 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7726 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7727 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7728 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7729 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7730 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7731 return expand_expr (build1 (NOP_EXPR, type,
7732 build (COND_EXPR, TREE_TYPE (true),
7733 TREE_OPERAND (exp, 0),
7735 target, tmode, modifier);
7739 /* Note that COND_EXPRs whose type is a structure or union
7740 are required to be constructed to contain assignments of
7741 a temporary variable, so that we can evaluate them here
7742 for side effect only. If type is void, we must do likewise. */
7744 /* If an arm of the branch requires a cleanup,
7745 only that cleanup is performed. */
7748 tree binary_op = 0, unary_op = 0;
7750 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7751 convert it to our mode, if necessary. */
7752 if (integer_onep (TREE_OPERAND (exp, 1))
7753 && integer_zerop (TREE_OPERAND (exp, 2))
7754 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7758 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7763 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7764 if (GET_MODE (op0) == mode)
7768 target = gen_reg_rtx (mode);
7769 convert_move (target, op0, unsignedp);
7773 /* Check for X ? A + B : A. If we have this, we can copy A to the
7774 output and conditionally add B. Similarly for unary operations.
7775 Don't do this if X has side-effects because those side effects
7776 might affect A or B and the "?" operation is a sequence point in
7777 ANSI. (operand_equal_p tests for side effects.) */
7779 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7780 && operand_equal_p (TREE_OPERAND (exp, 2),
7781 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7782 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7783 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7784 && operand_equal_p (TREE_OPERAND (exp, 1),
7785 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7786 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7787 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7788 && operand_equal_p (TREE_OPERAND (exp, 2),
7789 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7790 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7791 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7792 && operand_equal_p (TREE_OPERAND (exp, 1),
7793 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7794 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7796 /* If we are not to produce a result, we have no target. Otherwise,
7797 if a target was specified use it; it will not be used as an
7798 intermediate target unless it is safe. If no target, use a
7803 else if (original_target
7804 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7805 || (singleton && GET_CODE (original_target) == REG
7806 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7807 && original_target == var_rtx (singleton)))
7808 && GET_MODE (original_target) == mode
7809 #ifdef HAVE_conditional_move
7810 && (! can_conditionally_move_p (mode)
7811 || GET_CODE (original_target) == REG
7812 || TREE_ADDRESSABLE (type))
7814 && ! (GET_CODE (original_target) == MEM
7815 && MEM_VOLATILE_P (original_target)))
7816 temp = original_target;
7817 else if (TREE_ADDRESSABLE (type))
7820 temp = assign_temp (type, 0, 0, 1);
7822 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7823 do the test of X as a store-flag operation, do this as
7824 A + ((X != 0) << log C). Similarly for other simple binary
7825 operators. Only do for C == 1 if BRANCH_COST is low. */
7826 if (temp && singleton && binary_op
7827 && (TREE_CODE (binary_op) == PLUS_EXPR
7828 || TREE_CODE (binary_op) == MINUS_EXPR
7829 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7830 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7831 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7832 : integer_onep (TREE_OPERAND (binary_op, 1)))
7833 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7836 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7837 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7838 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7841 /* If we had X ? A : A + 1, do this as A + (X == 0).
7843 We have to invert the truth value here and then put it
7844 back later if do_store_flag fails. We cannot simply copy
7845 TREE_OPERAND (exp, 0) to another variable and modify that
7846 because invert_truthvalue can modify the tree pointed to
7848 if (singleton == TREE_OPERAND (exp, 1))
7849 TREE_OPERAND (exp, 0)
7850 = invert_truthvalue (TREE_OPERAND (exp, 0));
7852 result = do_store_flag (TREE_OPERAND (exp, 0),
7853 (safe_from_p (temp, singleton, 1)
7855 mode, BRANCH_COST <= 1);
7857 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7858 result = expand_shift (LSHIFT_EXPR, mode, result,
7859 build_int_2 (tree_log2
7863 (safe_from_p (temp, singleton, 1)
7864 ? temp : NULL_RTX), 0);
7868 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7869 return expand_binop (mode, boptab, op1, result, temp,
7870 unsignedp, OPTAB_LIB_WIDEN);
7872 else if (singleton == TREE_OPERAND (exp, 1))
7873 TREE_OPERAND (exp, 0)
7874 = invert_truthvalue (TREE_OPERAND (exp, 0));
7877 do_pending_stack_adjust ();
7879 op0 = gen_label_rtx ();
7881 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7885 /* If the target conflicts with the other operand of the
7886 binary op, we can't use it. Also, we can't use the target
7887 if it is a hard register, because evaluating the condition
7888 might clobber it. */
7890 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7891 || (GET_CODE (temp) == REG
7892 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7893 temp = gen_reg_rtx (mode);
7894 store_expr (singleton, temp, 0);
7897 expand_expr (singleton,
7898 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7899 if (singleton == TREE_OPERAND (exp, 1))
7900 jumpif (TREE_OPERAND (exp, 0), op0);
7902 jumpifnot (TREE_OPERAND (exp, 0), op0);
7904 start_cleanup_deferral ();
7905 if (binary_op && temp == 0)
7906 /* Just touch the other operand. */
7907 expand_expr (TREE_OPERAND (binary_op, 1),
7908 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7910 store_expr (build (TREE_CODE (binary_op), type,
7911 make_tree (type, temp),
7912 TREE_OPERAND (binary_op, 1)),
7915 store_expr (build1 (TREE_CODE (unary_op), type,
7916 make_tree (type, temp)),
7920 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7921 comparison operator. If we have one of these cases, set the
7922 output to A, branch on A (cse will merge these two references),
7923 then set the output to FOO. */
7925 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7926 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7927 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7928 TREE_OPERAND (exp, 1), 0)
7929 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7930 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7931 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7933 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7934 temp = gen_reg_rtx (mode);
7935 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7936 jumpif (TREE_OPERAND (exp, 0), op0);
7938 start_cleanup_deferral ();
7939 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7943 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7944 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7945 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7946 TREE_OPERAND (exp, 2), 0)
7947 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7948 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7949 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7951 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7952 temp = gen_reg_rtx (mode);
7953 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7954 jumpifnot (TREE_OPERAND (exp, 0), op0);
7956 start_cleanup_deferral ();
7957 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7962 op1 = gen_label_rtx ();
7963 jumpifnot (TREE_OPERAND (exp, 0), op0);
7965 start_cleanup_deferral ();
7967 /* One branch of the cond can be void, if it never returns. For
7968 example A ? throw : E */
7970 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7971 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7973 expand_expr (TREE_OPERAND (exp, 1),
7974 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7975 end_cleanup_deferral ();
7977 emit_jump_insn (gen_jump (op1));
7980 start_cleanup_deferral ();
7982 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7983 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7985 expand_expr (TREE_OPERAND (exp, 2),
7986 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7989 end_cleanup_deferral ();
8000 /* Something needs to be initialized, but we didn't know
8001 where that thing was when building the tree. For example,
8002 it could be the return value of a function, or a parameter
8003 to a function which lays down in the stack, or a temporary
8004 variable which must be passed by reference.
8006 We guarantee that the expression will either be constructed
8007 or copied into our original target. */
8009 tree slot = TREE_OPERAND (exp, 0);
8010 tree cleanups = NULL_TREE;
8013 if (TREE_CODE (slot) != VAR_DECL)
8017 target = original_target;
8019 /* Set this here so that if we get a target that refers to a
8020 register variable that's already been used, put_reg_into_stack
8021 knows that it should fix up those uses. */
8022 TREE_USED (slot) = 1;
8026 if (DECL_RTL (slot) != 0)
8028 target = DECL_RTL (slot);
8029 /* If we have already expanded the slot, so don't do
8031 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8036 target = assign_temp (type, 2, 0, 1);
8037 /* All temp slots at this level must not conflict. */
8038 preserve_temp_slots (target);
8039 DECL_RTL (slot) = target;
8040 if (TREE_ADDRESSABLE (slot))
8042 TREE_ADDRESSABLE (slot) = 0;
8043 mark_addressable (slot);
8046 /* Since SLOT is not known to the called function
8047 to belong to its stack frame, we must build an explicit
8048 cleanup. This case occurs when we must build up a reference
8049 to pass the reference as an argument. In this case,
8050 it is very likely that such a reference need not be
8053 if (TREE_OPERAND (exp, 2) == 0)
8054 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8055 cleanups = TREE_OPERAND (exp, 2);
8060 /* This case does occur, when expanding a parameter which
8061 needs to be constructed on the stack. The target
8062 is the actual stack address that we want to initialize.
8063 The function we call will perform the cleanup in this case. */
8065 /* If we have already assigned it space, use that space,
8066 not target that we were passed in, as our target
8067 parameter is only a hint. */
8068 if (DECL_RTL (slot) != 0)
8070 target = DECL_RTL (slot);
8071 /* If we have already expanded the slot, so don't do
8073 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8078 DECL_RTL (slot) = target;
8079 /* If we must have an addressable slot, then make sure that
8080 the RTL that we just stored in slot is OK. */
8081 if (TREE_ADDRESSABLE (slot))
8083 TREE_ADDRESSABLE (slot) = 0;
8084 mark_addressable (slot);
8089 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8090 /* Mark it as expanded. */
8091 TREE_OPERAND (exp, 1) = NULL_TREE;
8093 store_expr (exp1, target, 0);
8095 expand_decl_cleanup (NULL_TREE, cleanups);
8102 tree lhs = TREE_OPERAND (exp, 0);
8103 tree rhs = TREE_OPERAND (exp, 1);
8104 tree noncopied_parts = 0;
8105 tree lhs_type = TREE_TYPE (lhs);
8107 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8108 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8109 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8110 TYPE_NONCOPIED_PARTS (lhs_type));
8111 while (noncopied_parts != 0)
8113 expand_assignment (TREE_VALUE (noncopied_parts),
8114 TREE_PURPOSE (noncopied_parts), 0, 0);
8115 noncopied_parts = TREE_CHAIN (noncopied_parts);
8122 /* If lhs is complex, expand calls in rhs before computing it.
8123 That's so we don't compute a pointer and save it over a call.
8124 If lhs is simple, compute it first so we can give it as a
8125 target if the rhs is just a call. This avoids an extra temp and copy
8126 and that prevents a partial-subsumption which makes bad code.
8127 Actually we could treat component_ref's of vars like vars. */
8129 tree lhs = TREE_OPERAND (exp, 0);
8130 tree rhs = TREE_OPERAND (exp, 1);
8131 tree noncopied_parts = 0;
8132 tree lhs_type = TREE_TYPE (lhs);
8136 if (TREE_CODE (lhs) != VAR_DECL
8137 && TREE_CODE (lhs) != RESULT_DECL
8138 && TREE_CODE (lhs) != PARM_DECL
8139 && ! (TREE_CODE (lhs) == INDIRECT_REF
8140 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8141 preexpand_calls (exp);
8143 /* Check for |= or &= of a bitfield of size one into another bitfield
8144 of size 1. In this case, (unless we need the result of the
8145 assignment) we can do this more efficiently with a
8146 test followed by an assignment, if necessary.
8148 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8149 things change so we do, this code should be enhanced to
8152 && TREE_CODE (lhs) == COMPONENT_REF
8153 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8154 || TREE_CODE (rhs) == BIT_AND_EXPR)
8155 && TREE_OPERAND (rhs, 0) == lhs
8156 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8157 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8158 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8160 rtx label = gen_label_rtx ();
8162 do_jump (TREE_OPERAND (rhs, 1),
8163 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8164 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8165 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8166 (TREE_CODE (rhs) == BIT_IOR_EXPR
8168 : integer_zero_node)),
8170 do_pending_stack_adjust ();
8175 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8176 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8177 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8178 TYPE_NONCOPIED_PARTS (lhs_type));
8180 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8181 while (noncopied_parts != 0)
8183 expand_assignment (TREE_PURPOSE (noncopied_parts),
8184 TREE_VALUE (noncopied_parts), 0, 0);
8185 noncopied_parts = TREE_CHAIN (noncopied_parts);
8191 if (!TREE_OPERAND (exp, 0))
8192 expand_null_return ();
8194 expand_return (TREE_OPERAND (exp, 0));
8197 case PREINCREMENT_EXPR:
8198 case PREDECREMENT_EXPR:
8199 return expand_increment (exp, 0, ignore);
8201 case POSTINCREMENT_EXPR:
8202 case POSTDECREMENT_EXPR:
8203 /* Faster to treat as pre-increment if result is not used. */
8204 return expand_increment (exp, ! ignore, ignore);
8207 /* If nonzero, TEMP will be set to the address of something that might
8208 be a MEM corresponding to a stack slot. */
8211 /* Are we taking the address of a nested function? */
8212 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8213 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8214 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8215 && ! TREE_STATIC (exp))
8217 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8218 op0 = force_operand (op0, target);
8220 /* If we are taking the address of something erroneous, just
8222 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8226 /* We make sure to pass const0_rtx down if we came in with
8227 ignore set, to avoid doing the cleanups twice for something. */
8228 op0 = expand_expr (TREE_OPERAND (exp, 0),
8229 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8230 (modifier == EXPAND_INITIALIZER
8231 ? modifier : EXPAND_CONST_ADDRESS));
8233 /* If we are going to ignore the result, OP0 will have been set
8234 to const0_rtx, so just return it. Don't get confused and
8235 think we are taking the address of the constant. */
8239 op0 = protect_from_queue (op0, 0);
8241 /* We would like the object in memory. If it is a constant, we can
8242 have it be statically allocated into memory. For a non-constant,
8243 we need to allocate some memory and store the value into it. */
8245 if (CONSTANT_P (op0))
8246 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8248 else if (GET_CODE (op0) == MEM)
8250 mark_temp_addr_taken (op0);
8251 temp = XEXP (op0, 0);
8254 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8255 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8257 /* If this object is in a register, it must be not
8259 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8260 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8262 mark_temp_addr_taken (memloc);
8263 emit_move_insn (memloc, op0);
8267 if (GET_CODE (op0) != MEM)
8270 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8272 temp = XEXP (op0, 0);
8273 #ifdef POINTERS_EXTEND_UNSIGNED
8274 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8275 && mode == ptr_mode)
8276 temp = convert_memory_address (ptr_mode, temp);
8281 op0 = force_operand (XEXP (op0, 0), target);
8284 if (flag_force_addr && GET_CODE (op0) != REG)
8285 op0 = force_reg (Pmode, op0);
8287 if (GET_CODE (op0) == REG
8288 && ! REG_USERVAR_P (op0))
8289 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8291 /* If we might have had a temp slot, add an equivalent address
8294 update_temp_slot_address (temp, op0);
8296 #ifdef POINTERS_EXTEND_UNSIGNED
8297 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8298 && mode == ptr_mode)
8299 op0 = convert_memory_address (ptr_mode, op0);
8304 case ENTRY_VALUE_EXPR:
8307 /* COMPLEX type for Extended Pascal & Fortran */
8310 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8313 /* Get the rtx code of the operands. */
8314 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8315 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8318 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8322 /* Move the real (op0) and imaginary (op1) parts to their location. */
8323 emit_move_insn (gen_realpart (mode, target), op0);
8324 emit_move_insn (gen_imagpart (mode, target), op1);
8326 insns = get_insns ();
8329 /* Complex construction should appear as a single unit. */
8330 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8331 each with a separate pseudo as destination.
8332 It's not correct for flow to treat them as a unit. */
8333 if (GET_CODE (target) != CONCAT)
8334 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8342 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8343 return gen_realpart (mode, op0);
8346 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8347 return gen_imagpart (mode, op0);
8351 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8355 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8358 target = gen_reg_rtx (mode);
8362 /* Store the realpart and the negated imagpart to target. */
8363 emit_move_insn (gen_realpart (partmode, target),
8364 gen_realpart (partmode, op0));
8366 imag_t = gen_imagpart (partmode, target);
8367 temp = expand_unop (partmode, neg_optab,
8368 gen_imagpart (partmode, op0), imag_t, 0);
8370 emit_move_insn (imag_t, temp);
8372 insns = get_insns ();
8375 /* Conjugate should appear as a single unit
8376 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8377 each with a separate pseudo as destination.
8378 It's not correct for flow to treat them as a unit. */
8379 if (GET_CODE (target) != CONCAT)
8380 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8387 case TRY_CATCH_EXPR:
8389 tree handler = TREE_OPERAND (exp, 1);
8391 expand_eh_region_start ();
8393 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8395 expand_eh_region_end (handler);
8400 case TRY_FINALLY_EXPR:
8402 tree try_block = TREE_OPERAND (exp, 0);
8403 tree finally_block = TREE_OPERAND (exp, 1);
8404 rtx finally_label = gen_label_rtx ();
8405 rtx done_label = gen_label_rtx ();
8406 rtx return_link = gen_reg_rtx (Pmode);
8407 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8408 (tree) finally_label, (tree) return_link);
8409 TREE_SIDE_EFFECTS (cleanup) = 1;
8411 /* Start a new binding layer that will keep track of all cleanup
8412 actions to be performed. */
8413 expand_start_bindings (2);
8415 target_temp_slot_level = temp_slot_level;
8417 expand_decl_cleanup (NULL_TREE, cleanup);
8418 op0 = expand_expr (try_block, target, tmode, modifier);
8420 preserve_temp_slots (op0);
8421 expand_end_bindings (NULL_TREE, 0, 0);
8422 emit_jump (done_label);
8423 emit_label (finally_label);
8424 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8425 emit_indirect_jump (return_link);
8426 emit_label (done_label);
8430 case GOTO_SUBROUTINE_EXPR:
8432 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8433 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8434 rtx return_address = gen_label_rtx ();
8435 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8437 emit_label (return_address);
8443 rtx dcc = get_dynamic_cleanup_chain ();
8444 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8450 rtx dhc = get_dynamic_handler_chain ();
8451 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8456 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8459 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8462 /* Here to do an ordinary binary operator, generating an instruction
8463 from the optab already placed in `this_optab'. */
8465 preexpand_calls (exp);
8466 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8468 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8469 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8471 temp = expand_binop (mode, this_optab, op0, op1, target,
8472 unsignedp, OPTAB_LIB_WIDEN);
8478 /* Similar to expand_expr, except that we don't specify a target, target
8479 mode, or modifier and we return the alignment of the inner type. This is
8480 used in cases where it is not necessary to align the result to the
8481 alignment of its type as long as we know the alignment of the result, for
8482 example for comparisons of BLKmode values. */
8485 expand_expr_unaligned (exp, palign)
8487 unsigned int *palign;
8490 tree type = TREE_TYPE (exp);
8491 register enum machine_mode mode = TYPE_MODE (type);
8493 /* Default the alignment we return to that of the type. */
8494 *palign = TYPE_ALIGN (type);
8496 /* The only cases in which we do anything special is if the resulting mode
8498 if (mode != BLKmode)
8499 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8501 switch (TREE_CODE (exp))
8505 case NON_LVALUE_EXPR:
8506 /* Conversions between BLKmode values don't change the underlying
8507 alignment or value. */
8508 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8509 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8513 /* Much of the code for this case is copied directly from expand_expr.
8514 We need to duplicate it here because we will do something different
8515 in the fall-through case, so we need to handle the same exceptions
8518 tree array = TREE_OPERAND (exp, 0);
8519 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8520 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8521 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8524 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8527 /* Optimize the special-case of a zero lower bound.
8529 We convert the low_bound to sizetype to avoid some problems
8530 with constant folding. (E.g. suppose the lower bound is 1,
8531 and its mode is QI. Without the conversion, (ARRAY
8532 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8533 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8535 if (! integer_zerop (low_bound))
8536 index = size_diffop (index, convert (sizetype, low_bound));
8538 /* If this is a constant index into a constant array,
8539 just get the value from the array. Handle both the cases when
8540 we have an explicit constructor and when our operand is a variable
8541 that was declared const. */
8543 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8544 && 0 > compare_tree_int (index,
8545 list_length (CONSTRUCTOR_ELTS
8546 (TREE_OPERAND (exp, 0)))))
8550 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8551 i = TREE_INT_CST_LOW (index);
8552 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8556 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8559 else if (optimize >= 1
8560 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8561 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8562 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8564 if (TREE_CODE (index) == INTEGER_CST)
8566 tree init = DECL_INITIAL (array);
8568 if (TREE_CODE (init) == CONSTRUCTOR)
8572 for (elem = CONSTRUCTOR_ELTS (init);
8573 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8574 elem = TREE_CHAIN (elem))
8578 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8585 /* ... fall through ... */
8589 /* If the operand is a CONSTRUCTOR, we can just extract the
8590 appropriate field if it is present. Don't do this if we have
8591 already written the data since we want to refer to that copy
8592 and varasm.c assumes that's what we'll do. */
8593 if (TREE_CODE (exp) != ARRAY_REF
8594 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8595 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8599 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8600 elt = TREE_CHAIN (elt))
8601 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8602 /* Note that unlike the case in expand_expr, we know this is
8603 BLKmode and hence not an integer. */
8604 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8608 enum machine_mode mode1;
8613 unsigned int alignment;
8615 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8616 &mode1, &unsignedp, &volatilep,
8619 /* If we got back the original object, something is wrong. Perhaps
8620 we are evaluating an expression too early. In any event, don't
8621 infinitely recurse. */
8625 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8627 /* If this is a constant, put it into a register if it is a
8628 legitimate constant and OFFSET is 0 and memory if it isn't. */
8629 if (CONSTANT_P (op0))
8631 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8633 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8635 op0 = force_reg (inner_mode, op0);
8637 op0 = validize_mem (force_const_mem (inner_mode, op0));
8642 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8644 /* If this object is in a register, put it into memory.
8645 This case can't occur in C, but can in Ada if we have
8646 unchecked conversion of an expression from a scalar type to
8647 an array or record type. */
8648 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8649 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8651 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8653 mark_temp_addr_taken (memloc);
8654 emit_move_insn (memloc, op0);
8658 if (GET_CODE (op0) != MEM)
8661 if (GET_MODE (offset_rtx) != ptr_mode)
8663 #ifdef POINTERS_EXTEND_UNSIGNED
8664 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8666 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8670 op0 = change_address (op0, VOIDmode,
8671 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8672 force_reg (ptr_mode,
8676 /* Don't forget about volatility even if this is a bitfield. */
8677 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8679 op0 = copy_rtx (op0);
8680 MEM_VOLATILE_P (op0) = 1;
8683 /* Check the access. */
8684 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8689 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8690 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8692 /* Check the access right of the pointer. */
8693 if (size > BITS_PER_UNIT)
8694 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8695 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8696 TYPE_MODE (sizetype),
8697 GEN_INT (MEMORY_USE_RO),
8698 TYPE_MODE (integer_type_node));
8701 /* In cases where an aligned union has an unaligned object
8702 as a field, we might be extracting a BLKmode value from
8703 an integer-mode (e.g., SImode) object. Handle this case
8704 by doing the extract into an object as wide as the field
8705 (which we know to be the width of a basic mode), then
8706 storing into memory, and changing the mode to BLKmode.
8707 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8708 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8709 if (mode1 == VOIDmode
8710 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8711 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8712 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8713 || bitpos % TYPE_ALIGN (type) != 0)))
8715 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8717 if (ext_mode == BLKmode)
8719 /* In this case, BITPOS must start at a byte boundary. */
8720 if (GET_CODE (op0) != MEM
8721 || bitpos % BITS_PER_UNIT != 0)
8724 op0 = change_address (op0, VOIDmode,
8725 plus_constant (XEXP (op0, 0),
8726 bitpos / BITS_PER_UNIT));
8730 rtx new = assign_stack_temp (ext_mode,
8731 bitsize / BITS_PER_UNIT, 0);
8733 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8734 unsignedp, NULL_RTX, ext_mode,
8735 ext_mode, alignment,
8736 int_size_in_bytes (TREE_TYPE (tem)));
8738 /* If the result is a record type and BITSIZE is narrower than
8739 the mode of OP0, an integral mode, and this is a big endian
8740 machine, we must put the field into the high-order bits. */
8741 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8742 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8743 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8744 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8745 size_int (GET_MODE_BITSIZE
8751 emit_move_insn (new, op0);
8752 op0 = copy_rtx (new);
8753 PUT_MODE (op0, BLKmode);
8757 /* Get a reference to just this component. */
8758 op0 = change_address (op0, mode1,
8759 plus_constant (XEXP (op0, 0),
8760 (bitpos / BITS_PER_UNIT)));
8762 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8764 /* Adjust the alignment in case the bit position is not
8765 a multiple of the alignment of the inner object. */
8766 while (bitpos % alignment != 0)
8769 if (GET_CODE (XEXP (op0, 0)) == REG)
8770 mark_reg_pointer (XEXP (op0, 0), alignment);
8772 MEM_IN_STRUCT_P (op0) = 1;
8773 MEM_VOLATILE_P (op0) |= volatilep;
8775 *palign = alignment;
8784 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8787 /* Return the tree node if a ARG corresponds to a string constant or zero
8788 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8789 in bytes within the string that ARG is accessing. The type of the
8790 offset will be `sizetype'. */
8793 string_constant (arg, ptr_offset)
8799 if (TREE_CODE (arg) == ADDR_EXPR
8800 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8802 *ptr_offset = size_zero_node;
8803 return TREE_OPERAND (arg, 0);
8805 else if (TREE_CODE (arg) == PLUS_EXPR)
8807 tree arg0 = TREE_OPERAND (arg, 0);
8808 tree arg1 = TREE_OPERAND (arg, 1);
8813 if (TREE_CODE (arg0) == ADDR_EXPR
8814 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8816 *ptr_offset = convert (sizetype, arg1);
8817 return TREE_OPERAND (arg0, 0);
8819 else if (TREE_CODE (arg1) == ADDR_EXPR
8820 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8822 *ptr_offset = convert (sizetype, arg0);
8823 return TREE_OPERAND (arg1, 0);
8830 /* Expand code for a post- or pre- increment or decrement
8831 and return the RTX for the result.
8832 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8835 expand_increment (exp, post, ignore)
8839 register rtx op0, op1;
8840 register rtx temp, value;
8841 register tree incremented = TREE_OPERAND (exp, 0);
8842 optab this_optab = add_optab;
8844 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8845 int op0_is_copy = 0;
8846 int single_insn = 0;
8847 /* 1 means we can't store into OP0 directly,
8848 because it is a subreg narrower than a word,
8849 and we don't dare clobber the rest of the word. */
8852 /* Stabilize any component ref that might need to be
8853 evaluated more than once below. */
8855 || TREE_CODE (incremented) == BIT_FIELD_REF
8856 || (TREE_CODE (incremented) == COMPONENT_REF
8857 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8858 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8859 incremented = stabilize_reference (incremented);
8860 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8861 ones into save exprs so that they don't accidentally get evaluated
8862 more than once by the code below. */
8863 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8864 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8865 incremented = save_expr (incremented);
8867 /* Compute the operands as RTX.
8868 Note whether OP0 is the actual lvalue or a copy of it:
8869 I believe it is a copy iff it is a register or subreg
8870 and insns were generated in computing it. */
8872 temp = get_last_insn ();
8873 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8875 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8876 in place but instead must do sign- or zero-extension during assignment,
8877 so we copy it into a new register and let the code below use it as
8880 Note that we can safely modify this SUBREG since it is know not to be
8881 shared (it was made by the expand_expr call above). */
8883 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8886 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8890 else if (GET_CODE (op0) == SUBREG
8891 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8893 /* We cannot increment this SUBREG in place. If we are
8894 post-incrementing, get a copy of the old value. Otherwise,
8895 just mark that we cannot increment in place. */
8897 op0 = copy_to_reg (op0);
8902 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8903 && temp != get_last_insn ());
8904 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8905 EXPAND_MEMORY_USE_BAD);
8907 /* Decide whether incrementing or decrementing. */
8908 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8909 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8910 this_optab = sub_optab;
8912 /* Convert decrement by a constant into a negative increment. */
8913 if (this_optab == sub_optab
8914 && GET_CODE (op1) == CONST_INT)
8916 op1 = GEN_INT (- INTVAL (op1));
8917 this_optab = add_optab;
8920 /* For a preincrement, see if we can do this with a single instruction. */
8923 icode = (int) this_optab->handlers[(int) mode].insn_code;
8924 if (icode != (int) CODE_FOR_nothing
8925 /* Make sure that OP0 is valid for operands 0 and 1
8926 of the insn we want to queue. */
8927 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8928 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8929 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8933 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8934 then we cannot just increment OP0. We must therefore contrive to
8935 increment the original value. Then, for postincrement, we can return
8936 OP0 since it is a copy of the old value. For preincrement, expand here
8937 unless we can do it with a single insn.
8939 Likewise if storing directly into OP0 would clobber high bits
8940 we need to preserve (bad_subreg). */
8941 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8943 /* This is the easiest way to increment the value wherever it is.
8944 Problems with multiple evaluation of INCREMENTED are prevented
8945 because either (1) it is a component_ref or preincrement,
8946 in which case it was stabilized above, or (2) it is an array_ref
8947 with constant index in an array in a register, which is
8948 safe to reevaluate. */
8949 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8950 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8951 ? MINUS_EXPR : PLUS_EXPR),
8954 TREE_OPERAND (exp, 1));
8956 while (TREE_CODE (incremented) == NOP_EXPR
8957 || TREE_CODE (incremented) == CONVERT_EXPR)
8959 newexp = convert (TREE_TYPE (incremented), newexp);
8960 incremented = TREE_OPERAND (incremented, 0);
8963 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8964 return post ? op0 : temp;
8969 /* We have a true reference to the value in OP0.
8970 If there is an insn to add or subtract in this mode, queue it.
8971 Queueing the increment insn avoids the register shuffling
8972 that often results if we must increment now and first save
8973 the old value for subsequent use. */
8975 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8976 op0 = stabilize (op0);
8979 icode = (int) this_optab->handlers[(int) mode].insn_code;
8980 if (icode != (int) CODE_FOR_nothing
8981 /* Make sure that OP0 is valid for operands 0 and 1
8982 of the insn we want to queue. */
8983 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8984 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8986 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8987 op1 = force_reg (mode, op1);
8989 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8991 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8993 rtx addr = (general_operand (XEXP (op0, 0), mode)
8994 ? force_reg (Pmode, XEXP (op0, 0))
8995 : copy_to_reg (XEXP (op0, 0)));
8998 op0 = change_address (op0, VOIDmode, addr);
8999 temp = force_reg (GET_MODE (op0), op0);
9000 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9001 op1 = force_reg (mode, op1);
9003 /* The increment queue is LIFO, thus we have to `queue'
9004 the instructions in reverse order. */
9005 enqueue_insn (op0, gen_move_insn (op0, temp));
9006 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9011 /* Preincrement, or we can't increment with one simple insn. */
9013 /* Save a copy of the value before inc or dec, to return it later. */
9014 temp = value = copy_to_reg (op0);
9016 /* Arrange to return the incremented value. */
9017 /* Copy the rtx because expand_binop will protect from the queue,
9018 and the results of that would be invalid for us to return
9019 if our caller does emit_queue before using our result. */
9020 temp = copy_rtx (value = op0);
9022 /* Increment however we can. */
9023 op1 = expand_binop (mode, this_optab, value, op1,
9024 current_function_check_memory_usage ? NULL_RTX : op0,
9025 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9026 /* Make sure the value is stored into OP0. */
9028 emit_move_insn (op0, op1);
9033 /* Expand all function calls contained within EXP, innermost ones first.
9034 But don't look within expressions that have sequence points.
9035 For each CALL_EXPR, record the rtx for its value
9036 in the CALL_EXPR_RTL field. */
9039 preexpand_calls (exp)
9042 register int nops, i;
9043 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9045 if (! do_preexpand_calls)
9048 /* Only expressions and references can contain calls. */
9050 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9053 switch (TREE_CODE (exp))
9056 /* Do nothing if already expanded. */
9057 if (CALL_EXPR_RTL (exp) != 0
9058 /* Do nothing if the call returns a variable-sized object. */
9059 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9060 /* Do nothing to built-in functions. */
9061 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9062 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9064 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9067 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9072 case TRUTH_ANDIF_EXPR:
9073 case TRUTH_ORIF_EXPR:
9074 /* If we find one of these, then we can be sure
9075 the adjust will be done for it (since it makes jumps).
9076 Do it now, so that if this is inside an argument
9077 of a function, we don't get the stack adjustment
9078 after some other args have already been pushed. */
9079 do_pending_stack_adjust ();
9084 case WITH_CLEANUP_EXPR:
9085 case CLEANUP_POINT_EXPR:
9086 case TRY_CATCH_EXPR:
9090 if (SAVE_EXPR_RTL (exp) != 0)
9097 nops = tree_code_length[(int) TREE_CODE (exp)];
9098 for (i = 0; i < nops; i++)
9099 if (TREE_OPERAND (exp, i) != 0)
9101 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9102 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9103 It doesn't happen before the call is made. */
9107 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9108 if (type == 'e' || type == '<' || type == '1' || type == '2'
9110 preexpand_calls (TREE_OPERAND (exp, i));
9115 /* At the start of a function, record that we have no previously-pushed
9116 arguments waiting to be popped. */
9119 init_pending_stack_adjust ()
9121 pending_stack_adjust = 0;
9124 /* When exiting from function, if safe, clear out any pending stack adjust
9125 so the adjustment won't get done.
9127 Note, if the current function calls alloca, then it must have a
9128 frame pointer regardless of the value of flag_omit_frame_pointer. */
9131 clear_pending_stack_adjust ()
9133 #ifdef EXIT_IGNORE_STACK
9135 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9136 && EXIT_IGNORE_STACK
9137 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9138 && ! flag_inline_functions)
9139 pending_stack_adjust = 0;
9143 /* Pop any previously-pushed arguments that have not been popped yet. */
9146 do_pending_stack_adjust ()
9148 if (inhibit_defer_pop == 0)
9150 if (pending_stack_adjust != 0)
9151 adjust_stack (GEN_INT (pending_stack_adjust));
9152 pending_stack_adjust = 0;
9156 /* Expand conditional expressions. */
9158 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9159 LABEL is an rtx of code CODE_LABEL, in this function and all the
9163 jumpifnot (exp, label)
9167 do_jump (exp, label, NULL_RTX);
9170 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9177 do_jump (exp, NULL_RTX, label);
9180 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9181 the result is zero, or IF_TRUE_LABEL if the result is one.
9182 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9183 meaning fall through in that case.
9185 do_jump always does any pending stack adjust except when it does not
9186 actually perform a jump. An example where there is no jump
9187 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9189 This function is responsible for optimizing cases such as
9190 &&, || and comparison operators in EXP. */
9193 do_jump (exp, if_false_label, if_true_label)
9195 rtx if_false_label, if_true_label;
9197 register enum tree_code code = TREE_CODE (exp);
9198 /* Some cases need to create a label to jump to
9199 in order to properly fall through.
9200 These cases set DROP_THROUGH_LABEL nonzero. */
9201 rtx drop_through_label = 0;
9205 enum machine_mode mode;
9207 #ifdef MAX_INTEGER_COMPUTATION_MODE
9208 check_max_integer_computation_mode (exp);
9219 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9225 /* This is not true with #pragma weak */
9227 /* The address of something can never be zero. */
9229 emit_jump (if_true_label);
9234 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9235 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9236 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9239 /* If we are narrowing the operand, we have to do the compare in the
9241 if ((TYPE_PRECISION (TREE_TYPE (exp))
9242 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9244 case NON_LVALUE_EXPR:
9245 case REFERENCE_EXPR:
9250 /* These cannot change zero->non-zero or vice versa. */
9251 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9254 case WITH_RECORD_EXPR:
9255 /* Put the object on the placeholder list, recurse through our first
9256 operand, and pop the list. */
9257 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9259 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9260 placeholder_list = TREE_CHAIN (placeholder_list);
9264 /* This is never less insns than evaluating the PLUS_EXPR followed by
9265 a test and can be longer if the test is eliminated. */
9267 /* Reduce to minus. */
9268 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9269 TREE_OPERAND (exp, 0),
9270 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9271 TREE_OPERAND (exp, 1))));
9272 /* Process as MINUS. */
9276 /* Non-zero iff operands of minus differ. */
9277 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9278 TREE_OPERAND (exp, 0),
9279 TREE_OPERAND (exp, 1)),
9280 NE, NE, if_false_label, if_true_label);
9284 /* If we are AND'ing with a small constant, do this comparison in the
9285 smallest type that fits. If the machine doesn't have comparisons
9286 that small, it will be converted back to the wider comparison.
9287 This helps if we are testing the sign bit of a narrower object.
9288 combine can't do this for us because it can't know whether a
9289 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9291 if (! SLOW_BYTE_ACCESS
9292 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9293 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9294 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9295 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9296 && (type = type_for_mode (mode, 1)) != 0
9297 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9298 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9299 != CODE_FOR_nothing))
9301 do_jump (convert (type, exp), if_false_label, if_true_label);
9306 case TRUTH_NOT_EXPR:
9307 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9310 case TRUTH_ANDIF_EXPR:
9311 if (if_false_label == 0)
9312 if_false_label = drop_through_label = gen_label_rtx ();
9313 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9314 start_cleanup_deferral ();
9315 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9316 end_cleanup_deferral ();
9319 case TRUTH_ORIF_EXPR:
9320 if (if_true_label == 0)
9321 if_true_label = drop_through_label = gen_label_rtx ();
9322 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9323 start_cleanup_deferral ();
9324 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9325 end_cleanup_deferral ();
9330 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9331 preserve_temp_slots (NULL_RTX);
9335 do_pending_stack_adjust ();
9336 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9343 int bitsize, bitpos, unsignedp;
9344 enum machine_mode mode;
9348 unsigned int alignment;
9350 /* Get description of this reference. We don't actually care
9351 about the underlying object here. */
9352 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9353 &mode, &unsignedp, &volatilep,
9356 type = type_for_size (bitsize, unsignedp);
9357 if (! SLOW_BYTE_ACCESS
9358 && type != 0 && bitsize >= 0
9359 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9360 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9361 != CODE_FOR_nothing))
9363 do_jump (convert (type, exp), if_false_label, if_true_label);
9370 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9371 if (integer_onep (TREE_OPERAND (exp, 1))
9372 && integer_zerop (TREE_OPERAND (exp, 2)))
9373 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9375 else if (integer_zerop (TREE_OPERAND (exp, 1))
9376 && integer_onep (TREE_OPERAND (exp, 2)))
9377 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9381 register rtx label1 = gen_label_rtx ();
9382 drop_through_label = gen_label_rtx ();
9384 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9386 start_cleanup_deferral ();
9387 /* Now the THEN-expression. */
9388 do_jump (TREE_OPERAND (exp, 1),
9389 if_false_label ? if_false_label : drop_through_label,
9390 if_true_label ? if_true_label : drop_through_label);
9391 /* In case the do_jump just above never jumps. */
9392 do_pending_stack_adjust ();
9393 emit_label (label1);
9395 /* Now the ELSE-expression. */
9396 do_jump (TREE_OPERAND (exp, 2),
9397 if_false_label ? if_false_label : drop_through_label,
9398 if_true_label ? if_true_label : drop_through_label);
9399 end_cleanup_deferral ();
9405 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9407 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9408 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9410 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9411 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9414 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9415 fold (build (EQ_EXPR, TREE_TYPE (exp),
9416 fold (build1 (REALPART_EXPR,
9417 TREE_TYPE (inner_type),
9419 fold (build1 (REALPART_EXPR,
9420 TREE_TYPE (inner_type),
9422 fold (build (EQ_EXPR, TREE_TYPE (exp),
9423 fold (build1 (IMAGPART_EXPR,
9424 TREE_TYPE (inner_type),
9426 fold (build1 (IMAGPART_EXPR,
9427 TREE_TYPE (inner_type),
9429 if_false_label, if_true_label);
9432 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9433 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9435 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9436 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9437 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9439 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9445 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9447 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9448 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9450 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9451 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9454 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9455 fold (build (NE_EXPR, TREE_TYPE (exp),
9456 fold (build1 (REALPART_EXPR,
9457 TREE_TYPE (inner_type),
9459 fold (build1 (REALPART_EXPR,
9460 TREE_TYPE (inner_type),
9462 fold (build (NE_EXPR, TREE_TYPE (exp),
9463 fold (build1 (IMAGPART_EXPR,
9464 TREE_TYPE (inner_type),
9466 fold (build1 (IMAGPART_EXPR,
9467 TREE_TYPE (inner_type),
9469 if_false_label, if_true_label);
9472 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9473 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9475 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9476 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9477 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9479 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9484 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9485 if (GET_MODE_CLASS (mode) == MODE_INT
9486 && ! can_compare_p (LT, mode, ccp_jump))
9487 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9489 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9493 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9494 if (GET_MODE_CLASS (mode) == MODE_INT
9495 && ! can_compare_p (LE, mode, ccp_jump))
9496 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9498 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9502 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9503 if (GET_MODE_CLASS (mode) == MODE_INT
9504 && ! can_compare_p (GT, mode, ccp_jump))
9505 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9507 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9511 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9512 if (GET_MODE_CLASS (mode) == MODE_INT
9513 && ! can_compare_p (GE, mode, ccp_jump))
9514 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9516 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9519 case UNORDERED_EXPR:
9522 enum rtx_code cmp, rcmp;
9525 if (code == UNORDERED_EXPR)
9526 cmp = UNORDERED, rcmp = ORDERED;
9528 cmp = ORDERED, rcmp = UNORDERED;
9529 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9532 if (! can_compare_p (cmp, mode, ccp_jump)
9533 && (can_compare_p (rcmp, mode, ccp_jump)
9534 /* If the target doesn't provide either UNORDERED or ORDERED
9535 comparisons, canonicalize on UNORDERED for the library. */
9536 || rcmp == UNORDERED))
9540 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9542 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9547 enum rtx_code rcode1;
9548 enum tree_code tcode2;
9572 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9573 if (can_compare_p (rcode1, mode, ccp_jump))
9574 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9578 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9579 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9582 /* If the target doesn't support combined unordered
9583 compares, decompose into UNORDERED + comparison. */
9584 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9585 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9586 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9587 do_jump (exp, if_false_label, if_true_label);
9594 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9596 /* This is not needed any more and causes poor code since it causes
9597 comparisons and tests from non-SI objects to have different code
9599 /* Copy to register to avoid generating bad insns by cse
9600 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9601 if (!cse_not_expected && GET_CODE (temp) == MEM)
9602 temp = copy_to_reg (temp);
9604 do_pending_stack_adjust ();
9605 /* Do any postincrements in the expression that was tested. */
9608 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9610 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9614 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9615 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9616 /* Note swapping the labels gives us not-equal. */
9617 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9618 else if (GET_MODE (temp) != VOIDmode)
9619 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9620 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9621 GET_MODE (temp), NULL_RTX, 0,
9622 if_false_label, if_true_label);
9627 if (drop_through_label)
9629 /* If do_jump produces code that might be jumped around,
9630 do any stack adjusts from that code, before the place
9631 where control merges in. */
9632 do_pending_stack_adjust ();
9633 emit_label (drop_through_label);
9637 /* Given a comparison expression EXP for values too wide to be compared
9638 with one insn, test the comparison and jump to the appropriate label.
9639 The code of EXP is ignored; we always test GT if SWAP is 0,
9640 and LT if SWAP is 1. */
9643 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9646 rtx if_false_label, if_true_label;
9648 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9649 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9650 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9651 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9653 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9656 /* Compare OP0 with OP1, word at a time, in mode MODE.
9657 UNSIGNEDP says to do unsigned comparison.
9658 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9661 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9662 enum machine_mode mode;
9665 rtx if_false_label, if_true_label;
9667 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9668 rtx drop_through_label = 0;
9671 if (! if_true_label || ! if_false_label)
9672 drop_through_label = gen_label_rtx ();
9673 if (! if_true_label)
9674 if_true_label = drop_through_label;
9675 if (! if_false_label)
9676 if_false_label = drop_through_label;
9678 /* Compare a word at a time, high order first. */
9679 for (i = 0; i < nwords; i++)
9681 rtx op0_word, op1_word;
9683 if (WORDS_BIG_ENDIAN)
9685 op0_word = operand_subword_force (op0, i, mode);
9686 op1_word = operand_subword_force (op1, i, mode);
9690 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9691 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9694 /* All but high-order word must be compared as unsigned. */
9695 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9696 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9697 NULL_RTX, if_true_label);
9699 /* Consider lower words only if these are equal. */
9700 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9701 NULL_RTX, 0, NULL_RTX, if_false_label);
9705 emit_jump (if_false_label);
9706 if (drop_through_label)
9707 emit_label (drop_through_label);
9710 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9711 with one insn, test the comparison and jump to the appropriate label. */
9714 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9716 rtx if_false_label, if_true_label;
9718 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9719 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9720 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9721 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9723 rtx drop_through_label = 0;
9725 if (! if_false_label)
9726 drop_through_label = if_false_label = gen_label_rtx ();
9728 for (i = 0; i < nwords; i++)
9729 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9730 operand_subword_force (op1, i, mode),
9731 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9732 word_mode, NULL_RTX, 0, if_false_label,
9736 emit_jump (if_true_label);
9737 if (drop_through_label)
9738 emit_label (drop_through_label);
9741 /* Jump according to whether OP0 is 0.
9742 We assume that OP0 has an integer mode that is too wide
9743 for the available compare insns. */
9746 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9748 rtx if_false_label, if_true_label;
9750 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9753 rtx drop_through_label = 0;
9755 /* The fastest way of doing this comparison on almost any machine is to
9756 "or" all the words and compare the result. If all have to be loaded
9757 from memory and this is a very wide item, it's possible this may
9758 be slower, but that's highly unlikely. */
9760 part = gen_reg_rtx (word_mode);
9761 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9762 for (i = 1; i < nwords && part != 0; i++)
9763 part = expand_binop (word_mode, ior_optab, part,
9764 operand_subword_force (op0, i, GET_MODE (op0)),
9765 part, 1, OPTAB_WIDEN);
9769 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9770 NULL_RTX, 0, if_false_label, if_true_label);
9775 /* If we couldn't do the "or" simply, do this with a series of compares. */
9776 if (! if_false_label)
9777 drop_through_label = if_false_label = gen_label_rtx ();
9779 for (i = 0; i < nwords; i++)
9780 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9781 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9782 if_false_label, NULL_RTX);
9785 emit_jump (if_true_label);
9787 if (drop_through_label)
9788 emit_label (drop_through_label);
9791 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9792 (including code to compute the values to be compared)
9793 and set (CC0) according to the result.
9794 The decision as to signed or unsigned comparison must be made by the caller.
9796 We force a stack adjustment unless there are currently
9797 things pushed on the stack that aren't yet used.
9799 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9802 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9803 size of MODE should be used. */
9806 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9807 register rtx op0, op1;
9810 enum machine_mode mode;
9816 /* If one operand is constant, make it the second one. Only do this
9817 if the other operand is not constant as well. */
9819 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9820 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9825 code = swap_condition (code);
9830 op0 = force_not_mem (op0);
9831 op1 = force_not_mem (op1);
9834 do_pending_stack_adjust ();
9836 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9837 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9841 /* There's no need to do this now that combine.c can eliminate lots of
9842 sign extensions. This can be less efficient in certain cases on other
9845 /* If this is a signed equality comparison, we can do it as an
9846 unsigned comparison since zero-extension is cheaper than sign
9847 extension and comparisons with zero are done as unsigned. This is
9848 the case even on machines that can do fast sign extension, since
9849 zero-extension is easier to combine with other operations than
9850 sign-extension is. If we are comparing against a constant, we must
9851 convert it to what it would look like unsigned. */
9852 if ((code == EQ || code == NE) && ! unsignedp
9853 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9855 if (GET_CODE (op1) == CONST_INT
9856 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9857 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9862 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9864 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9867 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9868 The decision as to signed or unsigned comparison must be made by the caller.
9870 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9873 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9874 size of MODE should be used. */
9877 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9878 if_false_label, if_true_label)
9879 register rtx op0, op1;
9882 enum machine_mode mode;
9885 rtx if_false_label, if_true_label;
9888 int dummy_true_label = 0;
9890 /* Reverse the comparison if that is safe and we want to jump if it is
9892 if (! if_true_label && ! FLOAT_MODE_P (mode))
9894 if_true_label = if_false_label;
9896 code = reverse_condition (code);
9899 /* If one operand is constant, make it the second one. Only do this
9900 if the other operand is not constant as well. */
9902 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9903 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9908 code = swap_condition (code);
9913 op0 = force_not_mem (op0);
9914 op1 = force_not_mem (op1);
9917 do_pending_stack_adjust ();
9919 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9920 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9922 if (tem == const_true_rtx)
9925 emit_jump (if_true_label);
9930 emit_jump (if_false_label);
9936 /* There's no need to do this now that combine.c can eliminate lots of
9937 sign extensions. This can be less efficient in certain cases on other
9940 /* If this is a signed equality comparison, we can do it as an
9941 unsigned comparison since zero-extension is cheaper than sign
9942 extension and comparisons with zero are done as unsigned. This is
9943 the case even on machines that can do fast sign extension, since
9944 zero-extension is easier to combine with other operations than
9945 sign-extension is. If we are comparing against a constant, we must
9946 convert it to what it would look like unsigned. */
9947 if ((code == EQ || code == NE) && ! unsignedp
9948 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9950 if (GET_CODE (op1) == CONST_INT
9951 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9952 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9957 if (! if_true_label)
9959 dummy_true_label = 1;
9960 if_true_label = gen_label_rtx ();
9963 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9967 emit_jump (if_false_label);
9968 if (dummy_true_label)
9969 emit_label (if_true_label);
9972 /* Generate code for a comparison expression EXP (including code to compute
9973 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9974 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9975 generated code will drop through.
9976 SIGNED_CODE should be the rtx operation for this comparison for
9977 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9979 We force a stack adjustment unless there are currently
9980 things pushed on the stack that aren't yet used. */
9983 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9986 enum rtx_code signed_code, unsigned_code;
9987 rtx if_false_label, if_true_label;
9989 unsigned int align0, align1;
9990 register rtx op0, op1;
9992 register enum machine_mode mode;
9996 /* Don't crash if the comparison was erroneous. */
9997 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
9998 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10001 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10002 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10003 mode = TYPE_MODE (type);
10004 unsignedp = TREE_UNSIGNED (type);
10005 code = unsignedp ? unsigned_code : signed_code;
10007 #ifdef HAVE_canonicalize_funcptr_for_compare
10008 /* If function pointers need to be "canonicalized" before they can
10009 be reliably compared, then canonicalize them. */
10010 if (HAVE_canonicalize_funcptr_for_compare
10011 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10012 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10015 rtx new_op0 = gen_reg_rtx (mode);
10017 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10021 if (HAVE_canonicalize_funcptr_for_compare
10022 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10023 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10026 rtx new_op1 = gen_reg_rtx (mode);
10028 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10033 /* Do any postincrements in the expression that was tested. */
10036 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10038 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10039 MIN (align0, align1) / BITS_PER_UNIT,
10040 if_false_label, if_true_label);
10043 /* Generate code to calculate EXP using a store-flag instruction
10044 and return an rtx for the result. EXP is either a comparison
10045 or a TRUTH_NOT_EXPR whose operand is a comparison.
10047 If TARGET is nonzero, store the result there if convenient.
10049 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10052 Return zero if there is no suitable set-flag instruction
10053 available on this machine.
10055 Once expand_expr has been called on the arguments of the comparison,
10056 we are committed to doing the store flag, since it is not safe to
10057 re-evaluate the expression. We emit the store-flag insn by calling
10058 emit_store_flag, but only expand the arguments if we have a reason
10059 to believe that emit_store_flag will be successful. If we think that
10060 it will, but it isn't, we have to simulate the store-flag with a
10061 set/jump/set sequence. */
10064 do_store_flag (exp, target, mode, only_cheap)
10067 enum machine_mode mode;
10070 enum rtx_code code;
10071 tree arg0, arg1, type;
10073 enum machine_mode operand_mode;
10077 enum insn_code icode;
10078 rtx subtarget = target;
10081 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10082 result at the end. We can't simply invert the test since it would
10083 have already been inverted if it were valid. This case occurs for
10084 some floating-point comparisons. */
10086 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10087 invert = 1, exp = TREE_OPERAND (exp, 0);
10089 arg0 = TREE_OPERAND (exp, 0);
10090 arg1 = TREE_OPERAND (exp, 1);
10091 type = TREE_TYPE (arg0);
10092 operand_mode = TYPE_MODE (type);
10093 unsignedp = TREE_UNSIGNED (type);
10095 /* We won't bother with BLKmode store-flag operations because it would mean
10096 passing a lot of information to emit_store_flag. */
10097 if (operand_mode == BLKmode)
10100 /* We won't bother with store-flag operations involving function pointers
10101 when function pointers must be canonicalized before comparisons. */
10102 #ifdef HAVE_canonicalize_funcptr_for_compare
10103 if (HAVE_canonicalize_funcptr_for_compare
10104 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10105 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10107 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10108 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10109 == FUNCTION_TYPE))))
10116 /* Get the rtx comparison code to use. We know that EXP is a comparison
10117 operation of some type. Some comparisons against 1 and -1 can be
10118 converted to comparisons with zero. Do so here so that the tests
10119 below will be aware that we have a comparison with zero. These
10120 tests will not catch constants in the first operand, but constants
10121 are rarely passed as the first operand. */
10123 switch (TREE_CODE (exp))
10132 if (integer_onep (arg1))
10133 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10135 code = unsignedp ? LTU : LT;
10138 if (! unsignedp && integer_all_onesp (arg1))
10139 arg1 = integer_zero_node, code = LT;
10141 code = unsignedp ? LEU : LE;
10144 if (! unsignedp && integer_all_onesp (arg1))
10145 arg1 = integer_zero_node, code = GE;
10147 code = unsignedp ? GTU : GT;
10150 if (integer_onep (arg1))
10151 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10153 code = unsignedp ? GEU : GE;
10156 case UNORDERED_EXPR:
10182 /* Put a constant second. */
10183 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10185 tem = arg0; arg0 = arg1; arg1 = tem;
10186 code = swap_condition (code);
10189 /* If this is an equality or inequality test of a single bit, we can
10190 do this by shifting the bit being tested to the low-order bit and
10191 masking the result with the constant 1. If the condition was EQ,
10192 we xor it with 1. This does not require an scc insn and is faster
10193 than an scc insn even if we have it. */
10195 if ((code == NE || code == EQ)
10196 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10197 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10199 tree inner = TREE_OPERAND (arg0, 0);
10200 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10203 /* If INNER is a right shift of a constant and it plus BITNUM does
10204 not overflow, adjust BITNUM and INNER. */
10206 if (TREE_CODE (inner) == RSHIFT_EXPR
10207 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10208 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10209 && bitnum < TYPE_PRECISION (type)
10210 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10211 bitnum - TYPE_PRECISION (type)))
10213 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10214 inner = TREE_OPERAND (inner, 0);
10217 /* If we are going to be able to omit the AND below, we must do our
10218 operations as unsigned. If we must use the AND, we have a choice.
10219 Normally unsigned is faster, but for some machines signed is. */
10220 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10221 #ifdef LOAD_EXTEND_OP
10222 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10228 if (subtarget == 0 || GET_CODE (subtarget) != REG
10229 || GET_MODE (subtarget) != operand_mode
10230 || ! safe_from_p (subtarget, inner, 1))
10233 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10236 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10237 size_int (bitnum), subtarget, ops_unsignedp);
10239 if (GET_MODE (op0) != mode)
10240 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10242 if ((code == EQ && ! invert) || (code == NE && invert))
10243 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10244 ops_unsignedp, OPTAB_LIB_WIDEN);
10246 /* Put the AND last so it can combine with more things. */
10247 if (bitnum != TYPE_PRECISION (type) - 1)
10248 op0 = expand_and (op0, const1_rtx, subtarget);
10253 /* Now see if we are likely to be able to do this. Return if not. */
10254 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10257 icode = setcc_gen_code[(int) code];
10258 if (icode == CODE_FOR_nothing
10259 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10261 /* We can only do this if it is one of the special cases that
10262 can be handled without an scc insn. */
10263 if ((code == LT && integer_zerop (arg1))
10264 || (! only_cheap && code == GE && integer_zerop (arg1)))
10266 else if (BRANCH_COST >= 0
10267 && ! only_cheap && (code == NE || code == EQ)
10268 && TREE_CODE (type) != REAL_TYPE
10269 && ((abs_optab->handlers[(int) operand_mode].insn_code
10270 != CODE_FOR_nothing)
10271 || (ffs_optab->handlers[(int) operand_mode].insn_code
10272 != CODE_FOR_nothing)))
10278 preexpand_calls (exp);
10279 if (subtarget == 0 || GET_CODE (subtarget) != REG
10280 || GET_MODE (subtarget) != operand_mode
10281 || ! safe_from_p (subtarget, arg1, 1))
10284 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10285 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10288 target = gen_reg_rtx (mode);
10290 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10291 because, if the emit_store_flag does anything it will succeed and
10292 OP0 and OP1 will not be used subsequently. */
10294 result = emit_store_flag (target, code,
10295 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10296 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10297 operand_mode, unsignedp, 1);
10302 result = expand_binop (mode, xor_optab, result, const1_rtx,
10303 result, 0, OPTAB_LIB_WIDEN);
10307 /* If this failed, we have to do this with set/compare/jump/set code. */
10308 if (GET_CODE (target) != REG
10309 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10310 target = gen_reg_rtx (GET_MODE (target));
10312 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10313 result = compare_from_rtx (op0, op1, code, unsignedp,
10314 operand_mode, NULL_RTX, 0);
10315 if (GET_CODE (result) == CONST_INT)
10316 return (((result == const0_rtx && ! invert)
10317 || (result != const0_rtx && invert))
10318 ? const0_rtx : const1_rtx);
10320 label = gen_label_rtx ();
10321 if (bcc_gen_fctn[(int) code] == 0)
10324 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10325 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10326 emit_label (label);
10331 /* Generate a tablejump instruction (used for switch statements). */
10333 #ifdef HAVE_tablejump
10335 /* INDEX is the value being switched on, with the lowest value
10336 in the table already subtracted.
10337 MODE is its expected mode (needed if INDEX is constant).
10338 RANGE is the length of the jump table.
10339 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10341 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10342 index value is out of range. */
10345 do_tablejump (index, mode, range, table_label, default_label)
10346 rtx index, range, table_label, default_label;
10347 enum machine_mode mode;
10349 register rtx temp, vector;
10351 /* Do an unsigned comparison (in the proper mode) between the index
10352 expression and the value which represents the length of the range.
10353 Since we just finished subtracting the lower bound of the range
10354 from the index expression, this comparison allows us to simultaneously
10355 check that the original index expression value is both greater than
10356 or equal to the minimum value of the range and less than or equal to
10357 the maximum value of the range. */
10359 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10362 /* If index is in range, it must fit in Pmode.
10363 Convert to Pmode so we can index with it. */
10365 index = convert_to_mode (Pmode, index, 1);
10367 /* Don't let a MEM slip thru, because then INDEX that comes
10368 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10369 and break_out_memory_refs will go to work on it and mess it up. */
10370 #ifdef PIC_CASE_VECTOR_ADDRESS
10371 if (flag_pic && GET_CODE (index) != REG)
10372 index = copy_to_mode_reg (Pmode, index);
10375 /* If flag_force_addr were to affect this address
10376 it could interfere with the tricky assumptions made
10377 about addresses that contain label-refs,
10378 which may be valid only very near the tablejump itself. */
10379 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10380 GET_MODE_SIZE, because this indicates how large insns are. The other
10381 uses should all be Pmode, because they are addresses. This code
10382 could fail if addresses and insns are not the same size. */
10383 index = gen_rtx_PLUS (Pmode,
10384 gen_rtx_MULT (Pmode, index,
10385 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10386 gen_rtx_LABEL_REF (Pmode, table_label));
10387 #ifdef PIC_CASE_VECTOR_ADDRESS
10389 index = PIC_CASE_VECTOR_ADDRESS (index);
10392 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10393 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10394 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10395 RTX_UNCHANGING_P (vector) = 1;
10396 convert_move (temp, vector, 0);
10398 emit_jump_insn (gen_tablejump (temp, table_label));
10400 /* If we are generating PIC code or if the table is PC-relative, the
10401 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10402 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10406 #endif /* HAVE_tablejump */