/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn	PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
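
/* As a worked illustration (not from the original source): with the
   default MOVE_RATIO of 15, a 16-byte copy known to be 4-byte aligned
   costs four SImode moves by move_by_pieces_ninsns, so
   MOVE_BY_PIECES_P holds and the copy is expanded inline.  A 256-byte
   copy would cost 64 moves and instead falls through to a movstr
   pattern or a library call.  */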
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
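
/* For illustration (assumed, not part of the original file): the
   probing above amounts to asking recog whether the target's move
   patterns recognize, say,

	(set (reg:SI 0) (mem:SI (reg:SI sp)))

   for each mode/register pair.  A non-negative recog result marks the
   mode as directly loadable (direct_load) or directly storable
   (direct_store) for later use when accessing fields of that mode.  */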
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
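
/* An illustrative sketch, not from the original source: how the queue
   entry points compose for a post-increment.  enqueue_insn and
   gen_add2_insn are used elsewhere in this file; VAR and TARGET here
   are hypothetical pseudos.  */
#if 0
  rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));

  /* Uses of VAR that must see the pre-increment value go through
     protect_from_queue before being placed in an insn.  */
  emit_move_insn (target, protect_from_queue (q, 0));

  /* Flushing the queue emits the deferred increment itself.  */
  emit_queue ();
#endif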
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = gen_rtx_SUBREG (to_mode, from, 0);
      else
	to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = extendsfdf2_libfunc;
      libcall = extendsfxf2_libfunc;
      libcall = extendsftf2_libfunc;
      libcall = truncdfsf2_libfunc;
      libcall = extenddfxf2_libfunc;
      libcall = extenddftf2_libfunc;
      libcall = truncxfsf2_libfunc;
      libcall = truncxfdf2_libfunc;
      libcall = trunctfsf2_libfunc;
      libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();

      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
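
/* A minimal usage sketch (an assumption, not original text):
   sign-extend a SImode pseudo into a DImode pseudo.  convert_move
   picks a direct extend pattern, a multiword expansion, or a libcall,
   whichever the target provides.  */
#if 0
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);

  convert_move (dst, src, 0);	/* UNSIGNEDP == 0, so SIGN_EXTEND.  */
#endif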
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
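
  /* Worked example (illustrative): with a 32-bit HOST_WIDE_INT,
     converting (const_int -1) taken from unsigned HImode into a
     64-bit mode must yield 0xffff, not all ones.  The masking above
     leaves VAL == 0xffff, and immed_double_const supplies a zero
     high word, where gen_lowpart alone would have produced an
     all-ones constant.  */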
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro determines the largest unit size that move_by_pieces
   can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES	MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
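
/* Worked example (illustrative): for L == 11 bytes with 4-byte
   alignment on a target whose widest cheap move is SImode, the loop
   above counts 2 SImode moves (8 bytes), 1 HImode move (2 bytes) and
   1 QImode move (1 byte), returning 4.  */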
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    {
	      to1 = replace_equiv_address (data->to, data->to_addr);
	      to1 = adjust_address (to1, mode, 0);
	    }
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	{
	  from1 = replace_equiv_address (data->from, data->from_addr);
	  from1 = adjust_address (from1, mode, 0);
	}
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size)
     rtx x, y, size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
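
/* A minimal usage sketch (an assumption, not original text): copy
   SIZE bytes between two BLKmode MEMs.  DST_ADDR and SRC_ADDR are
   hypothetical Pmode address registers.  Small constant sizes go
   through move_by_pieces; larger ones use a movstr pattern or a
   memcpy/bcopy call, as above.  */
#if 0
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);

  emit_block_move (dst, src, GEN_INT (size));
#endif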
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would then be necessary.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
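
/* Illustrative example (not from the original source): an 8-byte
   structure returned in two SImode registers might be described as

	(parallel [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 4))])

   With SSIZE == 8, the loop above extracts bytes 0-3 of SRC into one
   pseudo and bytes 4-7 into another; the final loop then moves the
   pseudos into r3 and r4, using the byte offset stored in each
   EXPR_LIST.  */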
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     BLKmode);

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  bitsize, BITS_PER_WORD),
		       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
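
/* Worked example (illustrative): a 3-byte structure returned in a
   32-bit register on a BYTES_BIG_ENDIAN target gets
   big_endian_correction == 32 - 3 * 8 == 8, so extraction starts
   8 bits into the register while the store starts at bit 0 of the
   block, right-justifying the 24 valid bits into the low-order bytes
   of TGTBLK.  */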
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
2258 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2259 PARALLEL REGS. This is for calls that pass values in multiple
2260 non-contiguous locations. The Irix 6 ABI has examples of this. */
2263 use_group_regs (call_fusage, regs)
2269 for (i = 0; i < XVECLEN (regs, 0); i++)
2271 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2273 /* A NULL entry means the parameter goes both on the stack and in
2274 registers. This can also be a MEM for targets that pass values
2275 partially on the stack and partially in registers. */
2276 if (reg != 0 && GET_CODE (reg) == REG)
2277 use_reg (call_fusage, reg);
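/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call;
   ALIGN is the maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */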
2283 can_store_by_pieces (len, constfun, constfundata, align)
2284 unsigned HOST_WIDE_INT len;
2285 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2289 unsigned HOST_WIDE_INT max_size, l;
2290 HOST_WIDE_INT offset = 0;
2291 enum machine_mode mode, tmode;
2292 enum insn_code icode;
2296 if (! MOVE_BY_PIECES_P (len, align))
2299 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2300 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2301 align = MOVE_MAX * BITS_PER_UNIT;
/* We would first store what we can in the largest integer mode, then go to
   successively smaller modes.  */

for (reverse = 0;
     reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
     reverse++)
2312 max_size = MOVE_MAX_PIECES + 1;
2313 while (max_size > 1)
2315 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2316 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2317 if (GET_MODE_SIZE (tmode) < max_size)
2320 if (mode == VOIDmode)
2323 icode = mov_optab->handlers[(int) mode].insn_code;
2324 if (icode != CODE_FOR_nothing
2325 && align >= GET_MODE_ALIGNMENT (mode))
2327 unsigned int size = GET_MODE_SIZE (mode);
2334 cst = (*constfun) (constfundata, offset, mode);
2335 if (!LEGITIMATE_CONSTANT_P (cst))
2345 max_size = GET_MODE_SIZE (mode);
2348 /* The code above should have handled everything. */
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO (a MEM rtx with BLKmode).  CONSTFUNDATA is a
2358 pointer which will be passed as argument in every CONSTFUN call.
2359 ALIGN is maximum alignment we can assume. */
2362 store_by_pieces (to, len, constfun, constfundata, align)
2364 unsigned HOST_WIDE_INT len;
2365 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2369 struct store_by_pieces data;
2371 if (! MOVE_BY_PIECES_P (len, align))
2373 to = protect_from_queue (to, 1);
2374 data.constfun = constfun;
2375 data.constfundata = constfundata;
2378 store_by_pieces_1 (&data, align);
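/* A minimal usage sketch, assuming LEN constant bytes in a host
   buffer STR.  READ_STR_PIECE is a hypothetical callback of the
   CONSTFUN shape that hands back each MODE-sized chunk, here using a
   helper like builtins.c's c_readstr to form the constant; builtins.c
   follows the same pattern (builtin_memcpy_read_str) when expanding
   memcpy of a string constant:

     static rtx
     read_str_piece (data, offset, mode)
          PTR data;
          HOST_WIDE_INT offset;
          enum machine_mode mode;
     {
       return c_readstr ((const char *) data + offset, mode);
     }

     if (can_store_by_pieces (len, read_str_piece, (PTR) str, align))
       store_by_pieces (dest, len, read_str_piece, (PTR) str, align);
*/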
/* Generate several move instructions to clear LEN bytes of block TO (a MEM
   rtx with BLKmode).  The caller must pass TO through protect_from_queue
2383 before calling. ALIGN is maximum alignment we can assume. */
2386 clear_by_pieces (to, len, align)
2388 unsigned HOST_WIDE_INT len;
2391 struct store_by_pieces data;
2393 data.constfun = clear_by_pieces_1;
2394 data.constfundata = NULL;
2397 store_by_pieces_1 (&data, align);
2400 /* Callback routine for clear_by_pieces.
2401 Return const0_rtx unconditionally. */
2404 clear_by_pieces_1 (data, offset, mode)
2405 PTR data ATTRIBUTE_UNUSED;
2406 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2407 enum machine_mode mode ATTRIBUTE_UNUSED;
2412 /* Subroutine of clear_by_pieces and store_by_pieces.
Generate several move instructions to store LEN bytes of block TO (a MEM
rtx with BLKmode).  The caller must pass TO through protect_from_queue
2415 before calling. ALIGN is maximum alignment we can assume. */
2418 store_by_pieces_1 (data, align)
2419 struct store_by_pieces *data;
2422 rtx to_addr = XEXP (data->to, 0);
2423 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2424 enum machine_mode mode = VOIDmode, tmode;
2425 enum insn_code icode;
2428 data->to_addr = to_addr;
2430 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2431 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2433 data->explicit_inc_to = 0;
2435 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2437 data->offset = data->len;
2439 /* If storing requires more than two move insns,
2440 copy addresses to registers (to make displacements shorter)
2441 and use post-increment if available. */
2442 if (!data->autinc_to
2443 && move_by_pieces_ninsns (data->len, align) > 2)
2445 /* Determine the main mode we'll be using. */
2446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2448 if (GET_MODE_SIZE (tmode) < max_size)
2451 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2453 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = -1;
2458 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2459 && ! data->autinc_to)
2461 data->to_addr = copy_addr_to_reg (to_addr);
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = 1;
2466 if ( !data->autinc_to && CONSTANT_P (to_addr))
2467 data->to_addr = copy_addr_to_reg (to_addr);
2470 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2471 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2472 align = MOVE_MAX * BITS_PER_UNIT;
2474 /* First store what we can in the largest integer mode, then go to
2475 successively smaller modes. */
2477 while (max_size > 1)
2479 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2480 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2481 if (GET_MODE_SIZE (tmode) < max_size)
2484 if (mode == VOIDmode)
2487 icode = mov_optab->handlers[(int) mode].insn_code;
2488 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2489 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2491 max_size = GET_MODE_SIZE (mode);
2494 /* The code above should have handled everything. */
2499 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2500 with move instructions for mode MODE. GENFUN is the gen_... function
2501 to make a move insn for that mode. DATA has all the other info. */
2504 store_by_pieces_2 (genfun, mode, data)
2505 rtx (*genfun) PARAMS ((rtx, ...));
2506 enum machine_mode mode;
2507 struct store_by_pieces *data;
2509 unsigned int size = GET_MODE_SIZE (mode);
2512 while (data->len >= size)
2515 data->offset -= size;
2517 if (data->autinc_to)
2519 to1 = replace_equiv_address (data->to, data->to_addr);
2520 to1 = adjust_address (to1, mode, 0);
2523 to1 = adjust_address (data->to, mode, data->offset);
2525 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2526 emit_insn (gen_add2_insn (data->to_addr,
2527 GEN_INT (-(HOST_WIDE_INT) size)));
2529 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2530 emit_insn ((*genfun) (to1, cst));
2532 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2533 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2535 if (! data->reverse)
2536 data->offset += size;
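/* Schematically, clearing 8 bytes with SImode stores on a machine
   where the explicit-increment path above is taken produces:

     (set (reg to_addr) (plus (reg to_addr) (const_int -4)))
     (set (mem:SI (reg to_addr)) (const_int 0))
     (set (reg to_addr) (plus (reg to_addr) (const_int -4)))
     (set (mem:SI (reg to_addr)) (const_int 0))

   while the autoinc path instead embeds the PRE_DEC/POST_INC
   directly in the address used for TO1.  */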
2542 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2543 its length in bytes. */
2546 clear_storage (object, size)
2550 #ifdef TARGET_MEM_FUNCTIONS
2552 tree call_expr, arg_list;
2555 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2556 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2558 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2559 just move a zero. Otherwise, do this a piece at a time. */
2560 if (GET_MODE (object) != BLKmode
2561 && GET_CODE (size) == CONST_INT
2562 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2563 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2566 object = protect_from_queue (object, 1);
2567 size = protect_from_queue (size, 0);
2569 if (GET_CODE (size) == CONST_INT
2570 && MOVE_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2602 rtx last = get_last_insn ();
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2617 delete_insns_since (last);
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos. We can then place those new pseudos
2633 into an RTL_EXPR and use them later, even after a call to
2636 Note this is not strictly needed for library calls since they
2637 do not call emit_queue before loading their arguments. However,
2638 we may need to have library calls call emit_queue in the future
2639 since failing to do so could cause problems for targets which
2640 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2641 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2643 #ifdef TARGET_MEM_FUNCTIONS
2644 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2646 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2647 TREE_UNSIGNED (integer_type_node));
2648 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 /* It is incorrect to use the libcall calling conventions to call
2653 memset in this context.
2655 This could be a user call to memset and the user may wish to
2656 examine the return value from memset.
2658 For targets where libcalls and normal calls have different
conventions for returning pointers, we could end up generating
incorrect code.
2662 So instead of using a libcall sequence we build up a suitable
2663 CALL_EXPR and expand the call in the normal fashion. */
2664 if (fn == NULL_TREE)
/* This was copied from except.c; I don't know whether all of it is
   necessary in this context or not.  */
2670 fn = get_identifier ("memset");
2671 fntype = build_pointer_type (void_type_node);
2672 fntype = build_function_type (fntype, NULL_TREE);
2673 fn = build_decl (FUNCTION_DECL, fn, fntype);
2674 ggc_add_tree_root (&fn, 1);
2675 DECL_EXTERNAL (fn) = 1;
2676 TREE_PUBLIC (fn) = 1;
2677 DECL_ARTIFICIAL (fn) = 1;
2678 TREE_NOTHROW (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2683 /* We need to make an argument list for the function call.
memset has three arguments: the first is a void * address, the
second an integer with the initialization value, and the last a
size_t byte count.  */
2689 = build_tree_list (NULL_TREE,
2690 make_tree (build_pointer_type (void_type_node),
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709 VOIDmode, 2, object, Pmode, size,
2710 TYPE_MODE (integer_type_node));
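/* A minimal usage sketch, assuming BUF is a BLKmode MEM for a
   32-byte, word-aligned buffer:

     clear_storage (buf, GEN_INT (32));

   Depending on the target and size this becomes a clear_by_pieces
   loop, a clrstrM pattern, or the memset/bzero call emitted above.  */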
2718 /* Generate code to copy Y into X.
2719 Both Y and X must have the same mode, except that
2720 Y can be a constant with VOIDmode.
2721 This mode cannot be BLKmode; use emit_block_move for that.
2723 Return the last instruction emitted. */
2726 emit_move_insn (x, y)
2729 enum machine_mode mode = GET_MODE (x);
2730 rtx y_cst = NULL_RTX;
2733 x = protect_from_queue (x, 1);
2734 y = protect_from_queue (y, 0);
2736 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2739 /* Never force constant_p_rtx to memory. */
2740 if (GET_CODE (y) == CONSTANT_P_RTX)
2742 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2745 y = force_const_mem (mode, y);
/* If X or Y are memory references, verify that their addresses are valid
   for the machine.  */
2750 if (GET_CODE (x) == MEM
2751 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2752 && ! push_operand (x, GET_MODE (x)))
2754 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2755 x = validize_mem (x);
2757 if (GET_CODE (y) == MEM
2758 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2760 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2761 y = validize_mem (y);
2763 if (mode == BLKmode)
2766 last_insn = emit_move_insn_1 (x, y);
2768 if (y_cst && GET_CODE (x) == REG)
2769 REG_NOTES (last_insn)
2770 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
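/* A minimal usage sketch: moving a constant into a fresh pseudo,

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   emits a single (set (reg) (const_int 42)).  A constant that is not
   LEGITIMATE_CONSTANT_P is instead forced into the constant pool, and
   the code above then records the original value in a REG_EQUAL
   note.  */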
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2780 emit_move_insn_1 (x, y)
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT : MODE_FLOAT),
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2808 #ifdef PUSH_ROUNDING
/* In case we output to the stack, but the size is smaller than the machine
   can push exactly, we need to use move instructions.  */
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2815 int offset1, offset2;
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
2833 #ifdef STACK_GROWS_DOWNWARD
2835 offset2 = GET_MODE_SIZE (submode);
2837 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839 + GET_MODE_SIZE (submode));
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2844 GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2849 GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
/* If this is a stack push, push the highpart first, so it
   will be in the argument order.
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2899 if (packed_dest_p || packed_src_p)
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2907 if (reg_mode != BLKmode)
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = adjust_address (mem, mode, 0);
2914 = N_("function using short complex types cannot be inline");
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
2924 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2942 && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2955 return get_last_insn ();
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2967 #ifdef PUSH_ROUNDING
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp = expand_binop (Pmode,
2979 #ifdef STACK_GROWS_DOWNWARD
2986 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2990 if (temp != stack_pointer_rtx)
2991 emit_move_insn (stack_pointer_rtx, temp);
2993 code = GET_CODE (XEXP (x, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code == POST_INC)
2996 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2997 GEN_INT (-(HOST_WIDE_INT)
2998 GET_MODE_SIZE (GET_MODE (x))));
2999 else if (code == POST_DEC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3003 temp = stack_pointer_rtx;
3005 x = change_address (x, VOIDmode, temp);
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress && GET_CODE (x) == MEM
3012 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3013 x = replace_equiv_address_nv (x, inner);
3014 if (reload_in_progress && GET_CODE (y) == MEM
3015 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3016 y = replace_equiv_address_nv (y, inner);
3022 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3025 rtx xpart = operand_subword (x, i, 1, mode);
3026 rtx ypart = operand_subword (y, i, 1, mode);
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart == 0 && CONSTANT_P (y))
3033 y = force_const_mem (mode, y);
3034 ypart = operand_subword (y, i, 1, mode);
3036 else if (ypart == 0)
3037 ypart = operand_subword_force (y, i, mode);
3039 if (xpart == 0 || ypart == 0)
3042 need_clobber |= (GET_CODE (xpart) == SUBREG);
3044 last_insn = emit_move_insn (xpart, ypart);
3047 seq = gen_sequence ();
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
3055 && ! (reload_in_progress || reload_completed)
3056 && need_clobber != 0)
3058 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
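/* Schematically, a DImode move on a 32-bit target with no movdi
   pattern comes out of the word loop above as a sequence like:

     (clobber (reg:DI x))
     (set (subreg:SI (reg:DI x) 0) (subreg:SI (reg:DI y) 0))
     (set (subreg:SI (reg:DI x) 4) (subreg:SI (reg:DI y) 4))

   with the CLOBBER present only before reload and only when some
   XPART is a SUBREG.  */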
3069 /* Pushing data onto the stack. */
3071 /* Push a block of length SIZE (perhaps variable)
3072 and return an rtx to address the beginning of the block.
3073 Note that it is not possible for the value returned to be a QUEUED.
3074 The value may be virtual_outgoing_args_rtx.
3076 EXTRA is the number of bytes of padding to push in addition to SIZE.
3077 BELOW nonzero means this padding comes at low addresses;
3078 otherwise, the padding comes at high addresses. */
3081 push_block (size, extra, below)
3087 size = convert_modes (Pmode, ptr_mode, size, 1);
3088 if (CONSTANT_P (size))
3089 anti_adjust_stack (plus_constant (size, extra));
3090 else if (GET_CODE (size) == REG && extra == 0)
3091 anti_adjust_stack (size);
3094 temp = copy_to_mode_reg (Pmode, size);
3096 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3097 temp, 0, OPTAB_LIB_WIDEN);
3098 anti_adjust_stack (temp);
3101 #ifndef STACK_GROWS_DOWNWARD
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
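/* For example, on a downward-growing stack, push_block (GEN_INT (16),
   0, 0) emits anti_adjust_stack (GEN_INT (16)) and returns an address
   16 bytes below the incoming virtual_outgoing_args_rtx, i.e.
   plus_constant (virtual_outgoing_args_rtx, -16).  */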
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3132 get_push_address (size)
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3142 temp = stack_pointer_rtx;
3144 return copy_to_reg (temp);
3147 #ifdef PUSH_ROUNDING
3149 /* Emit single push insn. */
3152 emit_single_push_insn (mode, x, type)
3154 enum machine_mode mode;
3158 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3160 enum insn_code icode;
3161 insn_operand_predicate_fn pred;
3163 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If there is a push pattern, use it.  Otherwise try the old way of
   throwing a MEM representing the push operation to the move expander.  */
3166 icode = push_optab->handlers[(int) mode].insn_code;
3167 if (icode != CODE_FOR_nothing)
3169 if (((pred = insn_data[(int) icode].operand[0].predicate)
3170 && !((*pred) (x, mode))))
3171 x = force_reg (mode, x);
3172 emit_insn (GEN_FCN (icode) (x));
3175 if (GET_MODE_SIZE (mode) == rounded_size)
3176 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3179 #ifdef STACK_GROWS_DOWNWARD
3180 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3181 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3183 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184 GEN_INT (rounded_size));
3186 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3189 dest = gen_rtx_MEM (mode, dest_addr);
3193 set_mem_attributes (dest, type, 1);
3194 /* Function incoming arguments may overlap with sibling call
3195 outgoing arguments and we cannot allow reordering of reads
3196 from function arguments with stores to outgoing arguments
3197 of sibling calls. */
3198 set_mem_alias_set (dest, 0);
3200 emit_move_insn (dest, x);
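/* A minimal usage sketch, assuming a word-sized X on a typical
   STACK_GROWS_DOWNWARD target with PUSH_ROUNDING:

     emit_single_push_insn (SImode, x, NULL_TREE);

   uses the target's pushsi1 pattern if one exists, and otherwise
   emits a move into (mem:SI (pre_dec (reg sp))) (or into a
   PRE_MODIFY address when the push is rounded up), as built above.  */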
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
3208 SIZE is an rtx for the size of data to be copied (in bytes),
3209 needed only if X is BLKmode.
3211 ALIGN (in bits) is maximum alignment we can assume.
3213 If PARTIAL and REG are both nonzero, then copy that many of the first
3214 words of X into registers starting with REG, and push the rest of X.
3215 The amount of space pushed is decreased by PARTIAL words,
3216 rounded *down* to a multiple of PARM_BOUNDARY.
3217 REG must be a hard register in this case.
If REG is zero but PARTIAL is not, take all other actions for an
argument passed partially in registers, but do not actually load any
registers.
3222 EXTRA is the amount in bytes of extra space to leave next to this arg.
3223 This is ignored if an argument block has already been allocated.
3225 On a machine that lacks real push insns, ARGS_ADDR is the address of
3226 the bottom of the argument block for this call. We use indexing off there
to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
argument block has not been preallocated.
3230 ARGS_SO_FAR is the size of args previously pushed for this call.
3232 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3233 for arguments passed in registers. If nonzero, it will be the number
3234 of bytes required. */
3237 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3238 args_addr, args_so_far, reg_parm_stack_space,
3241 enum machine_mode mode;
3250 int reg_parm_stack_space;
3254 enum direction stack_direction
3255 #ifdef STACK_GROWS_DOWNWARD
3261 /* Decide where to pad the argument: `downward' for below,
3262 `upward' for above, or `none' for don't pad it.
3263 Default is below for small data on big-endian machines; else above. */
3264 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
/* Invert direction if stack is post-decrement.  */
3268 if (STACK_PUSH_CODE == POST_DEC)
3269 if (where_pad != none)
3270 where_pad = (where_pad == downward ? upward : downward);
3272 xinner = x = protect_from_queue (x, 0);
3274 if (mode == BLKmode)
3276 /* Copy a block into the stack, entirely or partially. */
3279 int used = partial * UNITS_PER_WORD;
3280 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3288 /* USED is now the # of bytes we need not copy to the stack
3289 because registers will take care of them. */
3292 xinner = adjust_address (xinner, BLKmode, used);
3294 /* If the partial register-part of the arg counts in its stack size,
3295 skip the part of stack space corresponding to the registers.
3296 Otherwise, start copying to the beginning of the stack space,
3297 by setting SKIP to 0. */
3298 skip = (reg_parm_stack_space == 0) ? 0 : used;
3300 #ifdef PUSH_ROUNDING
3301 /* Do it with several push insns if that doesn't take lots of insns
3302 and if there is no difficulty with push insns that skip bytes
3303 on the stack for alignment purposes. */
3306 && GET_CODE (size) == CONST_INT
3308 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3309 /* Here we avoid the case of a structure whose weak alignment
3310 forces many pushes of a small amount of data,
3311 and such small pushes do rounding that causes trouble. */
3312 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3313 || align >= BIGGEST_ALIGNMENT
3314 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3315 == (align / BITS_PER_UNIT)))
3316 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3318 /* Push padding now if padding above and stack grows down,
3319 or if padding below and stack grows up.
3320 But if space already allocated, this has already been done. */
3321 if (extra && args_addr == 0
3322 && where_pad != none && where_pad != stack_direction)
3323 anti_adjust_stack (GEN_INT (extra));
3325 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3327 if (current_function_check_memory_usage && ! in_check_memory_usage)
3331 in_check_memory_usage = 1;
3332 temp = get_push_address (INTVAL (size) - used);
3333 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3334 emit_library_call (chkr_copy_bitmap_libfunc,
3335 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3336 Pmode, XEXP (xinner, 0), Pmode,
3337 GEN_INT (INTVAL (size) - used),
3338 TYPE_MODE (sizetype));
3340 emit_library_call (chkr_set_right_libfunc,
3341 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3342 Pmode, GEN_INT (INTVAL (size) - used),
3343 TYPE_MODE (sizetype),
3344 GEN_INT (MEMORY_USE_RW),
3345 TYPE_MODE (integer_type_node));
3346 in_check_memory_usage = 0;
3350 #endif /* PUSH_ROUNDING */
3354 /* Otherwise make space on the stack and copy the data
3355 to the address of that space. */
3357 /* Deduct words put into registers from the size we must copy. */
3360 if (GET_CODE (size) == CONST_INT)
3361 size = GEN_INT (INTVAL (size) - used);
3363 size = expand_binop (GET_MODE (size), sub_optab, size,
3364 GEN_INT (used), NULL_RTX, 0,
3368 /* Get the address of the stack space.
3369 In this case, we do not deal with EXTRA separately.
3370 A single stack adjust will do. */
3373 temp = push_block (size, extra, where_pad == downward);
3376 else if (GET_CODE (args_so_far) == CONST_INT)
3377 temp = memory_address (BLKmode,
3378 plus_constant (args_addr,
3379 skip + INTVAL (args_so_far)));
3381 temp = memory_address (BLKmode,
3382 plus_constant (gen_rtx_PLUS (Pmode,
3386 if (current_function_check_memory_usage && ! in_check_memory_usage)
3388 in_check_memory_usage = 1;
3389 target = copy_to_reg (temp);
3390 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3391 emit_library_call (chkr_copy_bitmap_libfunc,
3392 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3394 XEXP (xinner, 0), Pmode,
3395 size, TYPE_MODE (sizetype));
3397 emit_library_call (chkr_set_right_libfunc,
3398 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3400 size, TYPE_MODE (sizetype),
3401 GEN_INT (MEMORY_USE_RW),
3402 TYPE_MODE (integer_type_node));
3403 in_check_memory_usage = 0;
3406 target = gen_rtx_MEM (BLKmode, temp);
3410 set_mem_attributes (target, type, 1);
3411 /* Function incoming arguments may overlap with sibling call
3412 outgoing arguments and we cannot allow reordering of reads
3413 from function arguments with stores to outgoing arguments
3414 of sibling calls. */
3415 set_mem_alias_set (target, 0);
3418 set_mem_align (target, align);
3420 /* TEMP is the address of the block. Copy the data there. */
3421 if (GET_CODE (size) == CONST_INT
3422 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3424 move_by_pieces (target, xinner, INTVAL (size), align);
3429 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3430 enum machine_mode mode;
3432 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3434 mode = GET_MODE_WIDER_MODE (mode))
3436 enum insn_code code = movstr_optab[(int) mode];
3437 insn_operand_predicate_fn pred;
3439 if (code != CODE_FOR_nothing
3440 && ((GET_CODE (size) == CONST_INT
3441 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3442 <= (GET_MODE_MASK (mode) >> 1)))
3443 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3444 && (!(pred = insn_data[(int) code].operand[0].predicate)
3445 || ((*pred) (target, BLKmode)))
3446 && (!(pred = insn_data[(int) code].operand[1].predicate)
3447 || ((*pred) (xinner, BLKmode)))
3448 && (!(pred = insn_data[(int) code].operand[3].predicate)
3449 || ((*pred) (opalign, VOIDmode))))
3451 rtx op2 = convert_to_mode (mode, size, 1);
3452 rtx last = get_last_insn ();
3455 pred = insn_data[(int) code].operand[2].predicate;
3456 if (pred != 0 && ! (*pred) (op2, mode))
3457 op2 = copy_to_mode_reg (mode, op2);
3459 pat = GEN_FCN ((int) code) (target, xinner,
3467 delete_insns_since (last);
3472 if (!ACCUMULATE_OUTGOING_ARGS)
3474 /* If the source is referenced relative to the stack pointer,
3475 copy it to another register to stabilize it. We do not need
3476 to do this if we know that we won't be changing sp. */
3478 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3479 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3480 temp = copy_to_reg (temp);
3483 /* Make inhibit_defer_pop nonzero around the library call
3484 to force it to pop the bcopy-arguments right away. */
3486 #ifdef TARGET_MEM_FUNCTIONS
3487 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3488 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3489 convert_to_mode (TYPE_MODE (sizetype),
3490 size, TREE_UNSIGNED (sizetype)),
3491 TYPE_MODE (sizetype));
3493 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3494 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3495 convert_to_mode (TYPE_MODE (integer_type_node),
3497 TREE_UNSIGNED (integer_type_node)),
3498 TYPE_MODE (integer_type_node));
3503 else if (partial > 0)
3505 /* Scalar partly in registers. */
3507 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3510 /* # words of start of argument
3511 that we must make space for but need not store. */
3512 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3513 int args_offset = INTVAL (args_so_far);
3516 /* Push padding now if padding above and stack grows down,
3517 or if padding below and stack grows up.
3518 But if space already allocated, this has already been done. */
3519 if (extra && args_addr == 0
3520 && where_pad != none && where_pad != stack_direction)
3521 anti_adjust_stack (GEN_INT (extra));
3523 /* If we make space by pushing it, we might as well push
3524 the real data. Otherwise, we can leave OFFSET nonzero
3525 and leave the space uninitialized. */
3529 /* Now NOT_STACK gets the number of words that we don't need to
3530 allocate on the stack. */
3531 not_stack = partial - offset;
3533 /* If the partial register-part of the arg counts in its stack size,
3534 skip the part of stack space corresponding to the registers.
3535 Otherwise, start copying to the beginning of the stack space,
3536 by setting SKIP to 0. */
3537 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3539 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3540 x = validize_mem (force_const_mem (mode, x));
3542 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3543 SUBREGs of such registers are not allowed. */
3544 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3545 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3546 x = copy_to_reg (x);
3548 /* Loop over all the words allocated on the stack for this arg. */
3549 /* We can do it by words, because any scalar bigger than a word
3550 has a size a multiple of a word. */
#ifndef PUSH_ARGS_REVERSED
for (i = not_stack; i < size; i++)
#else
for (i = size - 1; i >= not_stack; i--)
#endif
3556 if (i >= not_stack + offset)
3557 emit_push_insn (operand_subword_force (x, i, mode),
3558 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3560 GEN_INT (args_offset + ((i - not_stack + skip)
3562 reg_parm_stack_space, alignment_pad);
3567 rtx target = NULL_RTX;
3570 /* Push padding now if padding above and stack grows down,
3571 or if padding below and stack grows up.
3572 But if space already allocated, this has already been done. */
3573 if (extra && args_addr == 0
3574 && where_pad != none && where_pad != stack_direction)
3575 anti_adjust_stack (GEN_INT (extra));
3577 #ifdef PUSH_ROUNDING
3578 if (args_addr == 0 && PUSH_ARGS)
3579 emit_single_push_insn (mode, x, type);
3583 if (GET_CODE (args_so_far) == CONST_INT)
3585 = memory_address (mode,
3586 plus_constant (args_addr,
3587 INTVAL (args_so_far)));
3589 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3592 dest = gen_rtx_MEM (mode, addr);
3595 set_mem_attributes (dest, type, 1);
3596 /* Function incoming arguments may overlap with sibling call
3597 outgoing arguments and we cannot allow reordering of reads
3598 from function arguments with stores to outgoing arguments
3599 of sibling calls. */
3600 set_mem_alias_set (dest, 0);
3603 emit_move_insn (dest, x);
3607 if (current_function_check_memory_usage && ! in_check_memory_usage)
3609 in_check_memory_usage = 1;
3611 target = get_push_address (GET_MODE_SIZE (mode));
3613 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3614 emit_library_call (chkr_copy_bitmap_libfunc,
3615 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3616 Pmode, XEXP (x, 0), Pmode,
3617 GEN_INT (GET_MODE_SIZE (mode)),
3618 TYPE_MODE (sizetype));
3620 emit_library_call (chkr_set_right_libfunc,
3621 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3622 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3623 TYPE_MODE (sizetype),
3624 GEN_INT (MEMORY_USE_RW),
3625 TYPE_MODE (integer_type_node));
3626 in_check_memory_usage = 0;
3631 /* If part should go in registers, copy that part
3632 into the appropriate registers. Do this now, at the end,
3633 since mem-to-mem copies above may do function calls. */
3634 if (partial > 0 && reg != 0)
3636 /* Handle calls that pass values in multiple non-contiguous locations.
3637 The Irix 6 ABI has examples of this. */
3638 if (GET_CODE (reg) == PARALLEL)
3639 emit_group_load (reg, x, -1, align); /* ??? size? */
3641 move_block_to_reg (REGNO (reg), x, partial, mode);
3644 if (extra && args_addr == 0 && where_pad == stack_direction)
3645 anti_adjust_stack (GEN_INT (extra));
3647 if (alignment_pad && args_addr == 0)
3648 anti_adjust_stack (alignment_pad);
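/* A minimal usage sketch: pushing a word-sized scalar with no
   preallocated argument block, no partial-register portion and no
   padding,

     emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);

   reaches the emit_single_push_insn call above when the target
   defines PUSH_ROUNDING.  */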
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */
3659 /* Only registers can be subtargets. */
3660 || GET_CODE (x) != REG
3661 /* If the register is readonly, it can't be set more than once. */
3662 || RTX_UNCHANGING_P (x)
3663 /* Don't use hard regs to avoid extending their life. */
3664 || REGNO (x) < FIRST_PSEUDO_REGISTER
3665 /* Avoid subtargets inside loops,
3666 since they hide some invariant expressions. */
3667 || preserve_subexpressions_p ())
3671 /* Expand an assignment that stores the value of FROM into TO.
3672 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3673 (This may contain a QUEUED rtx;
3674 if the value is constant, this rtx is a constant.)
3675 Otherwise, the returned value is NULL_RTX.
3677 SUGGEST_REG is no longer actually used.
3678 It used to mean, copy the value through a register
3679 and return that register, if that is possible.
3680 We now use WANT_VALUE to decide whether to do this. */
3683 expand_assignment (to, from, want_value, suggest_reg)
3686 int suggest_reg ATTRIBUTE_UNUSED;
3691 /* Don't crash if the lhs of the assignment was erroneous. */
3693 if (TREE_CODE (to) == ERROR_MARK)
3695 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3696 return want_value ? result : NULL_RTX;
3699 /* Assignment of a structure component needs special treatment
3700 if the structure component's rtx is not simply a MEM.
Assignment of an array element at a constant index, and assignment of
an array element in an unaligned packed structure field, has the same
problem.  */
3705 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3706 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3708 enum machine_mode mode1;
3709 HOST_WIDE_INT bitsize, bitpos;
3714 unsigned int alignment;
3717 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3718 &unsignedp, &volatilep, &alignment);
3720 /* If we are going to use store_bit_field and extract_bit_field,
3721 make sure to_rtx will be safe for multiple use. */
3723 if (mode1 == VOIDmode && want_value)
3724 tem = stabilize_reference (tem);
3726 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3729 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3731 if (GET_CODE (to_rtx) != MEM)
3734 if (GET_MODE (offset_rtx) != ptr_mode)
3735 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3737 #ifdef POINTERS_EXTEND_UNSIGNED
3738 if (GET_MODE (offset_rtx) != Pmode)
3739 offset_rtx = convert_memory_address (Pmode, offset_rtx);
/* A constant address in TO_RTX can have VOIDmode; we must not try
   to call force_reg for that case, so avoid it.  */
3744 if (GET_CODE (to_rtx) == MEM
3745 && GET_MODE (to_rtx) == BLKmode
3746 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3748 && (bitpos % bitsize) == 0
3749 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3750 && alignment == GET_MODE_ALIGNMENT (mode1))
3753 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3755 if (GET_CODE (XEXP (temp, 0)) == REG)
3758 to_rtx = (replace_equiv_address
3759 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3764 to_rtx = offset_address (to_rtx, offset_rtx,
3765 highest_pow2_factor (offset));
3770 if (GET_CODE (to_rtx) == MEM)
3772 /* When the offset is zero, to_rtx is the address of the
3773 structure we are storing into, and hence may be shared.
3774 We must make a new MEM before setting the volatile bit. */
3776 to_rtx = copy_rtx (to_rtx);
3778 MEM_VOLATILE_P (to_rtx) = 1;
3780 #if 0 /* This was turned off because, when a field is volatile
3781 in an object which is not volatile, the object may be in a register,
3782 and then we would abort over here. */
3788 if (TREE_CODE (to) == COMPONENT_REF
3789 && TREE_READONLY (TREE_OPERAND (to, 1)))
3792 to_rtx = copy_rtx (to_rtx);
3794 RTX_UNCHANGING_P (to_rtx) = 1;
3797 /* Check the access. */
3798 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3803 enum machine_mode best_mode;
3805 best_mode = get_best_mode (bitsize, bitpos,
3806 TYPE_ALIGN (TREE_TYPE (tem)),
3808 if (best_mode == VOIDmode)
3811 best_mode_size = GET_MODE_BITSIZE (best_mode);
3812 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3813 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3814 size *= GET_MODE_SIZE (best_mode);
3816 /* Check the access right of the pointer. */
3817 in_check_memory_usage = 1;
3819 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3820 VOIDmode, 3, to_addr, Pmode,
3821 GEN_INT (size), TYPE_MODE (sizetype),
3822 GEN_INT (MEMORY_USE_WO),
3823 TYPE_MODE (integer_type_node));
3824 in_check_memory_usage = 0;
3827 /* If this is a varying-length object, we must get the address of
3828 the source and do an explicit block move. */
3831 unsigned int from_align;
3832 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3834 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3836 emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
3844 if (! can_address_p (to))
3846 to_rtx = copy_rtx (to_rtx);
3847 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3850 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3852 /* Spurious cast for HPUX compiler. */
3853 ? ((enum machine_mode)
3854 TYPE_MODE (TREE_TYPE (to)))
3858 int_size_in_bytes (TREE_TYPE (tem)),
3859 get_alias_set (to));
3861 preserve_temp_slots (result);
3865 /* If the value is meaningful, convert RESULT to the proper mode.
3866 Otherwise, return nothing. */
3867 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3868 TYPE_MODE (TREE_TYPE (from)),
3870 TREE_UNSIGNED (TREE_TYPE (to)))
3875 /* If the rhs is a function call and its value is not an aggregate,
3876 call the function before we start to compute the lhs.
3877 This is needed for correct code for cases such as
3878 val = setjmp (buf) on machines where reference to val
3879 requires loading up part of an address in a separate insn.
3881 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3882 since it might be a promoted variable where the zero- or sign- extension
3883 needs to be done. Handling this in the normal way is safe because no
3884 computation is done before the call. */
3885 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3886 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3887 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3888 && GET_CODE (DECL_RTL (to)) == REG))
3893 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3895 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3897 /* Handle calls that return values in multiple non-contiguous locations.
3898 The Irix 6 ABI has examples of this. */
3899 if (GET_CODE (to_rtx) == PARALLEL)
3900 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3901 TYPE_ALIGN (TREE_TYPE (from)));
3902 else if (GET_MODE (to_rtx) == BLKmode)
3903 emit_block_move (to_rtx, value, expr_size (from));
3906 #ifdef POINTERS_EXTEND_UNSIGNED
3907 if (POINTER_TYPE_P (TREE_TYPE (to))
3908 && GET_MODE (to_rtx) != GET_MODE (value))
3909 value = convert_memory_address (GET_MODE (to_rtx), value);
3911 emit_move_insn (to_rtx, value);
3913 preserve_temp_slots (to_rtx);
3916 return want_value ? to_rtx : NULL_RTX;
3919 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3920 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3923 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3925 /* Don't move directly into a return register. */
3926 if (TREE_CODE (to) == RESULT_DECL
3927 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3932 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3934 if (GET_CODE (to_rtx) == PARALLEL)
3935 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3936 TYPE_ALIGN (TREE_TYPE (from)));
3938 emit_move_insn (to_rtx, temp);
3940 preserve_temp_slots (to_rtx);
3943 return want_value ? to_rtx : NULL_RTX;
3946 /* In case we are returning the contents of an object which overlaps
3947 the place the value is being stored, use a safe function when copying
3948 a value through a pointer into a structure value return block. */
3949 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3950 && current_function_returns_struct
3951 && !current_function_returns_pcc_struct)
3956 size = expr_size (from);
3957 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3958 EXPAND_MEMORY_USE_DONT);
3960 /* Copy the rights of the bitmap. */
3961 if (current_function_check_memory_usage)
3962 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3963 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3964 XEXP (from_rtx, 0), Pmode,
3965 convert_to_mode (TYPE_MODE (sizetype),
3966 size, TREE_UNSIGNED (sizetype)),
3967 TYPE_MODE (sizetype));
3969 #ifdef TARGET_MEM_FUNCTIONS
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TREE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
3977 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3981 size, TREE_UNSIGNED (integer_type_node)),
3982 TYPE_MODE (integer_type_node));
3985 preserve_temp_slots (to_rtx);
3988 return want_value ? to_rtx : NULL_RTX;
3991 /* Compute FROM and store the value in the rtx we got. */
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
3998 return want_value ? result : NULL_RTX;
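/* A minimal usage sketch: a front end expanding the C statement
   "v = e;" as a statement (value unused) calls

     expand_assignment (v_tree, e_tree, 0, 0);

   where v_tree and e_tree are the hypothetical trees for the two
   sides; with want_value == 0 the result is NULL_RTX, and the
   bit-field, CALL_EXPR and RESULT_DECL special cases above are
   bypassed for a simple scalar v.  */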
4001 /* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
4003 TARGET may contain a QUEUED rtx.
4005 If WANT_VALUE is nonzero, return a copy of the value
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4013 If the mode is BLKmode then we may return TARGET itself.
It turns out that in BLKmode it doesn't cause a problem,
because C has no operators that could combine two different
assignments into the same BLKmode object with different values
with no sequence point.  Will other languages need this to
be more careful?
4020 If WANT_VALUE is 0, we return NULL, to make sure
4021 to catch quickly any cases where the caller uses the value
4022 and fails to set WANT_VALUE. */
4025 store_expr (exp, target, want_value)
4031 int dont_return_target = 0;
4032 int dont_store_target = 0;
4034 if (TREE_CODE (exp) == COMPOUND_EXPR)
4036 /* Perform first part of compound expression, then assign from second
4038 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4040 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4042 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4044 /* For conditional expression, get safe form of the target. Then
4045 test the condition, doing the appropriate assignment on either
4046 side. This avoids the creation of unnecessary temporaries.
4047 For non-BLKmode, it is more efficient not to do this. */
4049 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4052 target = protect_from_queue (target, 1);
4054 do_pending_stack_adjust ();
4056 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4057 start_cleanup_deferral ();
4058 store_expr (TREE_OPERAND (exp, 1), target, 0);
4059 end_cleanup_deferral ();
4061 emit_jump_insn (gen_jump (lab2));
4064 start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 2), target, 0);
4066 end_cleanup_deferral ();
4071 return want_value ? target : NULL_RTX;
4073 else if (queued_subexp_p (target))
4074 /* If target contains a postincrement, let's not risk
4075 using it as the place to generate the rhs. */
4077 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4079 /* Expand EXP into a new pseudo. */
4080 temp = gen_reg_rtx (GET_MODE (target));
4081 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4084 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4086 /* If target is volatile, ANSI requires accessing the value
4087 *from* the target, if it is accessed. So make that happen.
4088 In no case return the target itself. */
4089 if (! MEM_VOLATILE_P (target) && want_value)
4090 dont_return_target = 1;
4092 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4093 && GET_MODE (target) != BLKmode)
4094 /* If target is in memory and caller wants value in a register instead,
4095 arrange that. Pass TARGET as target for expand_expr so that,
4096 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4097 We know expand_expr will not use the target in that case.
4098 Don't do this if TARGET is volatile because we are supposed
4099 to write it and then read it. */
4101 temp = expand_expr (exp, target, GET_MODE (target), 0);
4102 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4104 /* If TEMP is already in the desired TARGET, only copy it from
4105 memory and don't store it there again. */
if (temp == target
    || (rtx_equal_p (temp, target)
4108 && ! side_effects_p (temp) && ! side_effects_p (target)))
4109 dont_store_target = 1;
4110 temp = copy_to_reg (temp);
4112 dont_return_target = 1;
4114 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
/* If this is a scalar in a register that is stored in a wider mode
   than the declared mode, compute the result into its declared mode
   and then convert to the wider mode.  Our value is the computed
   expression.  */
4120 /* If we don't want a value, we can do the conversion inside EXP,
4121 which will often result in some optimizations. Do the conversion
4122 in two steps: first change the signedness, if needed, then
4123 the extend. But don't do this if the type of EXP is a subtype
4124 of something else since then the conversion might involve
4125 more than just converting modes. */
4126 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4127 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4129 if (TREE_UNSIGNED (TREE_TYPE (exp))
4130 != SUBREG_PROMOTED_UNSIGNED_P (target))
4133 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4137 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4138 SUBREG_PROMOTED_UNSIGNED_P (target)),
4142 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4144 /* If TEMP is a volatile MEM and we want a result value, make
4145 the access now so it gets done only once. Likewise if
4146 it contains TARGET. */
4147 if (GET_CODE (temp) == MEM && want_value
4148 && (MEM_VOLATILE_P (temp)
4149 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4150 temp = copy_to_reg (temp);
4152 /* If TEMP is a VOIDmode constant, use convert_modes to make
4153 sure that we properly convert it. */
4154 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4156 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4157 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4158 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4159 GET_MODE (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4163 convert_move (SUBREG_REG (target), temp,
4164 SUBREG_PROMOTED_UNSIGNED_P (target));
4166 /* If we promoted a constant, change the mode back down to match
4167 target. Otherwise, the caller might get confused by a result whose
4168 mode is larger than expected. */
4170 if (want_value && GET_MODE (temp) != GET_MODE (target)
4171 && GET_MODE (temp) != VOIDmode)
4173 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4174 SUBREG_PROMOTED_VAR_P (temp) = 1;
4175 SUBREG_PROMOTED_UNSIGNED_P (temp)
4176 = SUBREG_PROMOTED_UNSIGNED_P (target);
4179 return want_value ? temp : NULL_RTX;
4183 temp = expand_expr (exp, target, GET_MODE (target), 0);
4184 /* Return TARGET if it's a specified hardware register.
4185 If TARGET is a volatile mem ref, either return TARGET
4186 or return a reg copied *from* TARGET; ANSI requires this.
4188 Otherwise, if TEMP is not TARGET, return TEMP
4189 if it is constant (for efficiency),
4190 or if we really want the correct value. */
4191 if (!(target && GET_CODE (target) == REG
4192 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4193 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4194 && ! rtx_equal_p (temp, target)
4195 && (CONSTANT_P (temp) || want_value))
4196 dont_return_target = 1;
4199 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4200 the same as that of TARGET, adjust the constant. This is needed, for
example, in case it is a CONST_DOUBLE and we want only a word-sized
value.  */
4203 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4204 && TREE_CODE (exp) != ERROR_MARK
4205 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4206 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4207 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4209 if (current_function_check_memory_usage
4210 && GET_CODE (target) == MEM
4211 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4213 in_check_memory_usage = 1;
4214 if (GET_CODE (temp) == MEM)
4215 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4216 VOIDmode, 3, XEXP (target, 0), Pmode,
4217 XEXP (temp, 0), Pmode,
4218 expr_size (exp), TYPE_MODE (sizetype));
4220 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4221 VOIDmode, 3, XEXP (target, 0), Pmode,
4222 expr_size (exp), TYPE_MODE (sizetype),
4223 GEN_INT (MEMORY_USE_WO),
4224 TYPE_MODE (integer_type_node));
4225 in_check_memory_usage = 0;
4228 /* If value was not generated in the target, store it there.
Convert the value to TARGET's type first if necessary.  */
4230 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
one or both of them are volatile memory refs, we have to distinguish
two cases:
- expand_expr has used TARGET.  In this case, we must not generate
  another copy.  This can be detected by TARGET being equal according
  to == .
4236 - expand_expr has not used TARGET - that means that the source just
4237 happens to have the same RTX form. Since temp will have been created
4238 by expand_expr, it will compare unequal according to == .
4239 We must generate a copy in this case, to reach the correct number
4240 of volatile memory references. */
4242 if ((! rtx_equal_p (temp, target)
4243 || (temp != target && (side_effects_p (temp)
4244 || side_effects_p (target))))
4245 && TREE_CODE (exp) != ERROR_MARK
4246 && ! dont_store_target)
4248 target = protect_from_queue (target, 1);
4249 if (GET_MODE (temp) != GET_MODE (target)
4250 && GET_MODE (temp) != VOIDmode)
4252 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4253 if (dont_return_target)
4255 /* In this case, we will return TEMP,
4256 so make sure it has the proper mode.
4257 But don't forget to store the value into TARGET. */
4258 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4259 emit_move_insn (target, temp);
4262 convert_move (target, temp, unsignedp);
4265 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4267 /* Handle copying a string constant into an array.
4268 The string constant may be shorter than the array.
4269 So copy just the string's actual length, and clear the rest. */
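/* Illustrative example (not part of the original source): for a C
   initializer such as

       char buf[8] = "hi";

   the STRING_CST supplies three bytes (including the terminating NUL),
   so the code below copies those three bytes into BUF and then clears
   the remaining five.  */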
4273 /* Get the size of the data type of the string,
4274 which is actually the size of the target. */
4275 size = expr_size (exp);
4276 if (GET_CODE (size) == CONST_INT
4277 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4278 emit_block_move (target, temp, size);
4281 /* Compute the size of the data to copy from the string. */
4283 = size_binop (MIN_EXPR,
4284 make_tree (sizetype, size),
4285 size_int (TREE_STRING_LENGTH (exp)));
4286 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4290 /* Copy that much. */
4291 emit_block_move (target, temp, copy_size_rtx);
4293 /* Figure out how much is left in TARGET that we have to clear.
4294 Do all calculations in ptr_mode. */
4296 addr = XEXP (target, 0);
4297 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4299 if (GET_CODE (copy_size_rtx) == CONST_INT)
4301 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4302 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4306 addr = force_reg (ptr_mode, addr);
4307 addr = expand_binop (ptr_mode, add_optab, addr,
4308 copy_size_rtx, NULL_RTX, 0,
4311 size = expand_binop (ptr_mode, sub_optab, size,
4312 copy_size_rtx, NULL_RTX, 0,
4315 label = gen_label_rtx ();
4316 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4317 GET_MODE (size), 0, 0, label);
4320 if (size != const0_rtx)
4322 rtx dest = gen_rtx_MEM (BLKmode, addr);
4324 MEM_COPY_ATTRIBUTES (dest, target);
4326 /* Be sure we can write on ADDR. */
4327 in_check_memory_usage = 1;
4328 if (current_function_check_memory_usage)
4329 emit_library_call (chkr_check_addr_libfunc,
4330 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4332 size, TYPE_MODE (sizetype),
4333 GEN_INT (MEMORY_USE_WO),
4334 TYPE_MODE (integer_type_node));
4335 in_check_memory_usage = 0;
4336 clear_storage (dest, size);
4343 /* Handle calls that return values in multiple non-contiguous locations.
4344 The Irix 6 ABI has examples of this. */
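/* Illustrative case (assumed, not taken from this file): under the
   Irix 6 n64 calling convention a return value such as

       struct pair { double d; long i; };

   may come back with D in a floating-point register and I in an
   integer register; the PARALLEL rtx describes both pieces and
   emit_group_load copies each piece separately.  */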
4345 else if (GET_CODE (target) == PARALLEL)
4346 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4347 TYPE_ALIGN (TREE_TYPE (exp)));
4348 else if (GET_MODE (temp) == BLKmode)
4349 emit_block_move (target, temp, expr_size (exp));
4351 emit_move_insn (target, temp);
4354 /* If we don't want a value, return NULL_RTX. */
4358 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4359 ??? The latter test doesn't seem to make sense. */
4360 else if (dont_return_target && GET_CODE (temp) != MEM)
4363 /* Return TARGET itself if it is a hard register. */
4364 else if (want_value && GET_MODE (target) != BLKmode
4365 && ! (GET_CODE (target) == REG
4366 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4367 return copy_to_reg (target);
4373 /* Return 1 if EXP just contains zeros. */
4381 switch (TREE_CODE (exp))
4385 case NON_LVALUE_EXPR:
4386 return is_zeros_p (TREE_OPERAND (exp, 0));
4389 return integer_zerop (exp);
4393 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4396 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4399 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4400 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4401 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4402 if (! is_zeros_p (TREE_VALUE (elt)))
4412 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
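/* For example, the initializer

       int v[4] = { 0, 0, 0, 5 };

   has three zero elements out of four, so the 4 * zeros >= 3 * elts
   test below accepts it as mostly zero (illustrative example).  */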
4415 mostly_zeros_p (exp)
4418 if (TREE_CODE (exp) == CONSTRUCTOR)
4420 int elts = 0, zeros = 0;
4421 tree elt = CONSTRUCTOR_ELTS (exp);
4422 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4424 /* If there are no ranges of true bits, it is all zero. */
4425 return elt == NULL_TREE;
4427 for (; elt; elt = TREE_CHAIN (elt))
4429 /* We do not handle the case where the index is a RANGE_EXPR,
4430 so the statistic will be somewhat inaccurate.
4431 We do make a more accurate count in store_constructor itself,
4432 and since this function is only used for nested array elements,
4433 this should be close enough. */
4434 if (mostly_zeros_p (TREE_VALUE (elt)))
4439 return 4 * zeros >= 3 * elts;
4442 return is_zeros_p (exp);
4445 /* Helper function for store_constructor.
4446 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4447 TYPE is the type of the CONSTRUCTOR, not the element type.
4448 ALIGN and CLEARED are as for store_constructor.
4449 ALIAS_SET is the alias set to use for any stores.
4451 This provides a recursive shortcut back to store_constructor when it isn't
4452 necessary to go through store_field. This is so that we can pass through
4453 the cleared field to let store_constructor know that we may not have to
4454 clear a substructure if the outer structure has already been cleared. */
4457 store_constructor_field (target, bitsize, bitpos,
4458 mode, exp, type, align, cleared, alias_set)
4460 unsigned HOST_WIDE_INT bitsize;
4461 HOST_WIDE_INT bitpos;
4462 enum machine_mode mode;
4468 if (TREE_CODE (exp) == CONSTRUCTOR
4469 && bitpos % BITS_PER_UNIT == 0
4470 /* If we have a non-zero bitpos for a register target, then we just
4471 let store_field do the bitfield handling. This is unlikely to
4472 generate unnecessary clear instructions anyway. */
4473 && (bitpos == 0 || GET_CODE (target) == MEM))
4477 = adjust_address (target,
4478 GET_MODE (target) == BLKmode
4480 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4481 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4484 /* Show the alignment may no longer be what it was and update the alias
4485 set, if required. */
4487 align = MIN (align, (unsigned int) bitpos & - bitpos);
4489 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4490 && MEM_ALIAS_SET (target) != 0)
4491 set_mem_alias_set (target, alias_set);
4493 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4496 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4497 int_size_in_bytes (type), alias_set);
4500 /* Store the value of constructor EXP into the rtx TARGET.
4501 TARGET is either a REG or a MEM.
4502 ALIGN is the maximum known alignment for TARGET.
4503 CLEARED is true if TARGET is known to have been zeroed.
4504 SIZE is the number of bytes of TARGET we are allowed to modify: this
4505 may not be the same as the size of EXP if we are assigning to a field
4506 which has been packed to exclude padding bits. */
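/* Usage sketch (illustrative): for

       struct { int a; char b; } s = { 1, 'x' };

   this function receives the CONSTRUCTOR for the braced list, with
   TARGET being the rtx for S, and stores each field in turn, possibly
   clearing the whole object first if enough of the initializer is
   zero.  */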
4509 store_constructor (exp, target, align, cleared, size)
4516 tree type = TREE_TYPE (exp);
4517 #ifdef WORD_REGISTER_OPERATIONS
4518 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4521 /* We know our target cannot conflict, since safe_from_p has been called. */
4523 /* Don't try copying piece by piece into a hard register
4524 since that is vulnerable to being clobbered by EXP.
4525 Instead, construct in a pseudo register and then copy it all. */
4526 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4528 rtx temp = gen_reg_rtx (GET_MODE (target));
4529 store_constructor (exp, temp, align, cleared, size);
4530 emit_move_insn (target, temp);
4535 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4536 || TREE_CODE (type) == QUAL_UNION_TYPE)
4540 /* Inform later passes that the whole union value is dead. */
4541 if ((TREE_CODE (type) == UNION_TYPE
4542 || TREE_CODE (type) == QUAL_UNION_TYPE)
4545 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4547 /* If the constructor is empty, clear the union. */
4548 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4549 clear_storage (target, expr_size (exp));
4552 /* If we are building a static constructor into a register,
4553 set the initial value as zero so we can fold the value into
4554 a constant. But if more than one register is involved,
4555 this probably loses. */
4556 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4557 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4560 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4565 /* If the constructor has fewer fields than the structure
4566 or if we are initializing the structure to mostly zeros,
4567 clear the whole structure first. Don't do this if TARGET is a
4568 register whose mode size isn't equal to SIZE since clear_storage
4569 can't handle this case. */
4571 && ((list_length (CONSTRUCTOR_ELTS (exp))
4572 != fields_length (type))
4573 || mostly_zeros_p (exp))
4574 && (GET_CODE (target) != REG
4575 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4578 clear_storage (target, GEN_INT (size));
4583 /* Inform later passes that the old value is dead. */
4584 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4586 /* Store each element of the constructor into
4587 the corresponding field of TARGET. */
4589 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4591 tree field = TREE_PURPOSE (elt);
4592 #ifdef WORD_REGISTER_OPERATIONS
4593 tree value = TREE_VALUE (elt);
4595 enum machine_mode mode;
4596 HOST_WIDE_INT bitsize;
4597 HOST_WIDE_INT bitpos = 0;
4600 rtx to_rtx = target;
4602 /* Just ignore missing fields.
4603 We cleared the whole structure, above,
4604 if any fields are missing. */
4608 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4611 if (host_integerp (DECL_SIZE (field), 1))
4612 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4616 unsignedp = TREE_UNSIGNED (field);
4617 mode = DECL_MODE (field);
4618 if (DECL_BIT_FIELD (field))
4621 offset = DECL_FIELD_OFFSET (field);
4622 if (host_integerp (offset, 0)
4623 && host_integerp (bit_position (field), 0))
4625 bitpos = int_bit_position (field);
4629 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4635 if (contains_placeholder_p (offset))
4636 offset = build (WITH_RECORD_EXPR, sizetype,
4637 offset, make_tree (TREE_TYPE (exp), target));
4639 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4640 if (GET_CODE (to_rtx) != MEM)
4643 if (GET_MODE (offset_rtx) != ptr_mode)
4644 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4646 #ifdef POINTERS_EXTEND_UNSIGNED
4647 if (GET_MODE (offset_rtx) != Pmode)
4648 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4651 to_rtx = offset_address (to_rtx, offset_rtx,
4652 highest_pow2_factor (offset));
4654 align = DECL_OFFSET_ALIGN (field);
4657 if (TREE_READONLY (field))
4659 if (GET_CODE (to_rtx) == MEM)
4660 to_rtx = copy_rtx (to_rtx);
4662 RTX_UNCHANGING_P (to_rtx) = 1;
4665 #ifdef WORD_REGISTER_OPERATIONS
4666 /* If this initializes a field that is smaller than a word, at the
4667 start of a word, try to widen it to a full word.
4668 This special case allows us to output C++ member function
4669 initializations in a form that the optimizers can understand. */
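/* Illustrative example: with 32-bit words, initializing the 16-bit
   field TAG in

       struct S { short tag; short rest; } s = { 3, 0 };

   can be done with a full-word store of the constant 3 instead of a
   bit-field insertion.  On a big-endian target the code below first
   shifts the constant left by BITS_PER_WORD - bitsize so that it
   lands in the high-order bits.  */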
4670 if (GET_CODE (target) == REG
4671 && bitsize < BITS_PER_WORD
4672 && bitpos % BITS_PER_WORD == 0
4673 && GET_MODE_CLASS (mode) == MODE_INT
4674 && TREE_CODE (value) == INTEGER_CST
4676 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4678 tree type = TREE_TYPE (value);
4679 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4681 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4682 value = convert (type, value);
4684 if (BYTES_BIG_ENDIAN)
4686 = fold (build (LSHIFT_EXPR, type, value,
4687 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4688 bitsize = BITS_PER_WORD;
4693 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4694 && DECL_NONADDRESSABLE_P (field))
4696 to_rtx = copy_rtx (to_rtx);
4697 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4700 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4701 TREE_VALUE (elt), type, align, cleared,
4702 get_alias_set (TREE_TYPE (field)));
4705 else if (TREE_CODE (type) == ARRAY_TYPE)
4710 tree domain = TYPE_DOMAIN (type);
4711 tree elttype = TREE_TYPE (type);
4712 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4713 && TYPE_MAX_VALUE (domain)
4714 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4715 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4716 HOST_WIDE_INT minelt = 0;
4717 HOST_WIDE_INT maxelt = 0;
4719 /* If we have constant bounds for the range of the type, get them. */
4722 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4723 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4726 /* If the constructor has fewer elements than the array,
4727 clear the whole array first. Similarly if this is
4728 a static constructor of a non-BLKmode object.
4729 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4733 HOST_WIDE_INT count = 0, zero_count = 0;
4734 need_to_clear = ! const_bounds_p;
4736 /* This loop is a more accurate version of the loop in
4737 mostly_zeros_p (it handles RANGE_EXPR in an index).
4738 It is also needed to check for missing elements. */
4739 for (elt = CONSTRUCTOR_ELTS (exp);
4740 elt != NULL_TREE && ! need_to_clear;
4741 elt = TREE_CHAIN (elt))
4743 tree index = TREE_PURPOSE (elt);
4744 HOST_WIDE_INT this_node_count;
4746 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4748 tree lo_index = TREE_OPERAND (index, 0);
4749 tree hi_index = TREE_OPERAND (index, 1);
4751 if (! host_integerp (lo_index, 1)
4752 || ! host_integerp (hi_index, 1))
4758 this_node_count = (tree_low_cst (hi_index, 1)
4759 - tree_low_cst (lo_index, 1) + 1);
4762 this_node_count = 1;
4764 count += this_node_count;
4765 if (mostly_zeros_p (TREE_VALUE (elt)))
4766 zero_count += this_node_count;
4769 /* Clear the entire array first if there are any missing elements,
4770 or if the incidence of zero elements is >= 75%. */
4771 if (! need_to_clear
4772 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4773 need_to_clear = 1;
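/* For instance (illustrative), with

       int a[8] = { 1 };

   only one of the eight elements is given, so COUNT is 1 while the
   range holds 8; the test above forces a clear of the whole array,
   after which only A[0] is stored explicitly.  */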
4776 if (need_to_clear && size > 0)
4779 clear_storage (target, GEN_INT (size));
4782 else if (REG_P (target))
4783 /* Inform later passes that the old value is dead. */
4784 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4786 /* Store each element of the constructor into
4787 the corresponding element of TARGET, determined
4788 by counting the elements. */
4789 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4791 elt = TREE_CHAIN (elt), i++)
4793 enum machine_mode mode;
4794 HOST_WIDE_INT bitsize;
4795 HOST_WIDE_INT bitpos;
4797 tree value = TREE_VALUE (elt);
4798 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4799 tree index = TREE_PURPOSE (elt);
4800 rtx xtarget = target;
4802 if (cleared && is_zeros_p (value))
4805 unsignedp = TREE_UNSIGNED (elttype);
4806 mode = TYPE_MODE (elttype);
4807 if (mode == BLKmode)
4808 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4809 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4812 bitsize = GET_MODE_BITSIZE (mode);
4814 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4816 tree lo_index = TREE_OPERAND (index, 0);
4817 tree hi_index = TREE_OPERAND (index, 1);
4818 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4819 struct nesting *loop;
4820 HOST_WIDE_INT lo, hi, count;
4823 /* If the range is constant and "small", unroll the loop. */
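/* E.g. the GNU C range initializer

       int a[4] = { [0 ... 3] = 7 };

   has constant bounds, so the four element stores are emitted
   directly instead of generating a runtime loop (illustrative
   example; the size test below decides what counts as "small").  */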
4825 && host_integerp (lo_index, 0)
4826 && host_integerp (hi_index, 0)
4827 && (lo = tree_low_cst (lo_index, 0),
4828 hi = tree_low_cst (hi_index, 0),
4829 count = hi - lo + 1,
4830 (GET_CODE (target) != MEM
4832 || (host_integerp (TYPE_SIZE (elttype), 1)
4833 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4836 lo -= minelt; hi -= minelt;
4837 for (; lo <= hi; lo++)
4839 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4841 if (GET_CODE (target) == MEM
4842 && !MEM_KEEP_ALIAS_SET_P (target)
4843 && TYPE_NONALIASED_COMPONENT (type))
4845 target = copy_rtx (target);
4846 MEM_KEEP_ALIAS_SET_P (target) = 1;
4849 store_constructor_field
4850 (target, bitsize, bitpos, mode, value, type, align,
4851 cleared, get_alias_set (elttype));
4856 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4857 loop_top = gen_label_rtx ();
4858 loop_end = gen_label_rtx ();
4860 unsignedp = TREE_UNSIGNED (domain);
4862 index = build_decl (VAR_DECL, NULL_TREE, domain);
4865 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4867 SET_DECL_RTL (index, index_r);
4868 if (TREE_CODE (value) == SAVE_EXPR
4869 && SAVE_EXPR_RTL (value) == 0)
4871 /* Make sure value gets expanded once before the
4872 loop. */
4873 expand_expr (value, const0_rtx, VOIDmode, 0);
4876 store_expr (lo_index, index_r, 0);
4877 loop = expand_start_loop (0);
4879 /* Assign value to element index. */
4881 = convert (ssizetype,
4882 fold (build (MINUS_EXPR, TREE_TYPE (index),
4883 index, TYPE_MIN_VALUE (domain))));
4884 position = size_binop (MULT_EXPR, position,
4886 TYPE_SIZE_UNIT (elttype)));
4888 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4889 xtarget = offset_address (target, pos_rtx,
4890 highest_pow2_factor (position));
4891 xtarget = adjust_address (xtarget, mode, 0);
4892 if (TREE_CODE (value) == CONSTRUCTOR)
4893 store_constructor (value, xtarget, align, cleared,
4894 bitsize / BITS_PER_UNIT);
4896 store_expr (value, xtarget, 0);
4898 expand_exit_loop_if_false (loop,
4899 build (LT_EXPR, integer_type_node,
4902 expand_increment (build (PREINCREMENT_EXPR,
4904 index, integer_one_node), 0, 0);
4906 emit_label (loop_end);
4909 else if ((index != 0 && ! host_integerp (index, 0))
4910 || ! host_integerp (TYPE_SIZE (elttype), 1))
4915 index = ssize_int (1);
4918 index = convert (ssizetype,
4919 fold (build (MINUS_EXPR, index,
4920 TYPE_MIN_VALUE (domain))));
4922 position = size_binop (MULT_EXPR, index,
4924 TYPE_SIZE_UNIT (elttype)));
4925 xtarget = offset_address (target,
4926 expand_expr (position, 0, VOIDmode, 0),
4927 highest_pow2_factor (position));
4928 xtarget = adjust_address (xtarget, mode, 0);
4929 store_expr (value, xtarget, 0);
4934 bitpos = ((tree_low_cst (index, 0) - minelt)
4935 * tree_low_cst (TYPE_SIZE (elttype), 1));
4937 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4939 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4940 && TYPE_NONALIASED_COMPONENT (type))
4942 target = copy_rtx (target);
4943 MEM_KEEP_ALIAS_SET_P (target) = 1;
4946 store_constructor_field (target, bitsize, bitpos, mode, value,
4947 type, align, cleared,
4948 get_alias_set (elttype));
4954 /* Set constructor assignments. */
4955 else if (TREE_CODE (type) == SET_TYPE)
4957 tree elt = CONSTRUCTOR_ELTS (exp);
4958 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4959 tree domain = TYPE_DOMAIN (type);
4960 tree domain_min, domain_max, bitlength;
4962 /* The default implementation strategy is to extract the constant
4963 parts of the constructor, use that to initialize the target,
4964 and then "or" in whatever non-constant ranges we need in addition.
4966 If a large set is all zero or all ones, it is
4967 probably better to set it using memset (if available) or bzero.
4968 Also, if a large set has just a single range, it may also be
4969 better to first clear the whole set (using bzero/memset) and
4970 then set the bits we want. */
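/* Illustrative example: a set over 0..31 whose constructor names the
   constant members 1 and 3 yields, on a target where bits are
   assigned little-endian (! BYTES_BIG_ENDIAN), the word value
   (1 << 1) | (1 << 3) == 0xa, which the loop below builds in WORD and
   stores with a single move.  */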
4972 /* Check for all zeros. */
4973 if (elt == NULL_TREE && size > 0)
4976 clear_storage (target, GEN_INT (size));
4980 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4981 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4982 bitlength = size_binop (PLUS_EXPR,
4983 size_diffop (domain_max, domain_min),
4986 nbits = tree_low_cst (bitlength, 1);
4988 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4989 are "complicated" (more than one range), initialize (the
4990 constant parts) by copying from a constant. */
4991 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4992 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4994 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4995 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4996 char *bit_buffer = (char *) alloca (nbits);
4997 HOST_WIDE_INT word = 0;
4998 unsigned int bit_pos = 0;
4999 unsigned int ibit = 0;
5000 unsigned int offset = 0; /* In bytes from beginning of set. */
5002 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5005 if (bit_buffer[ibit])
5007 if (BYTES_BIG_ENDIAN)
5008 word |= (1 << (set_word_size - 1 - bit_pos));
5010 word |= 1 << bit_pos;
5014 if (bit_pos >= set_word_size || ibit == nbits)
5016 if (word != 0 || ! cleared)
5018 rtx datum = GEN_INT (word);
5021 /* The assumption here is that it is safe to use
5022 XEXP if the set is multi-word, but not if
5023 it's single-word. */
5024 if (GET_CODE (target) == MEM)
5025 to_rtx = adjust_address (target, mode, offset);
5026 else if (offset == 0)
5030 emit_move_insn (to_rtx, datum);
5037 offset += set_word_size / BITS_PER_UNIT;
5042 /* Don't bother clearing storage if the set is all ones. */
5043 if (TREE_CHAIN (elt) != NULL_TREE
5044 || (TREE_PURPOSE (elt) == NULL_TREE
5046 : ( ! host_integerp (TREE_VALUE (elt), 0)
5047 || ! host_integerp (TREE_PURPOSE (elt), 0)
5048 || (tree_low_cst (TREE_VALUE (elt), 0)
5049 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5050 != (HOST_WIDE_INT) nbits))))
5051 clear_storage (target, expr_size (exp));
5053 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5055 /* Start of range of element or NULL. */
5056 tree startbit = TREE_PURPOSE (elt);
5057 /* End of range of element, or element value. */
5058 tree endbit = TREE_VALUE (elt);
5059 #ifdef TARGET_MEM_FUNCTIONS
5060 HOST_WIDE_INT startb, endb;
5062 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5064 bitlength_rtx = expand_expr (bitlength,
5065 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5067 /* Handle non-range tuple element like [ expr ]. */
5068 if (startbit == NULL_TREE)
5070 startbit = save_expr (endbit);
5074 startbit = convert (sizetype, startbit);
5075 endbit = convert (sizetype, endbit);
5076 if (! integer_zerop (domain_min))
5078 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5079 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5081 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5082 EXPAND_CONST_ADDRESS);
5083 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5084 EXPAND_CONST_ADDRESS);
5090 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5093 emit_move_insn (targetx, target);
5096 else if (GET_CODE (target) == MEM)
5101 #ifdef TARGET_MEM_FUNCTIONS
5102 /* Optimization: If startbit and endbit are
5103 constants divisible by BITS_PER_UNIT,
5104 call memset instead. */
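/* For instance (illustrative), a range covering bits 8..23 with
   8-bit units starts and ends on byte boundaries, so STARTB and ENDB
   below come out as 8 and 24, and the two bytes at offset 1 are set
   with a memset of length (24 - 8) / 8 == 2 instead of a __setbits
   call.  */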
5105 if (TREE_CODE (startbit) == INTEGER_CST
5106 && TREE_CODE (endbit) == INTEGER_CST
5107 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5108 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5110 emit_library_call (memset_libfunc, LCT_NORMAL,
5112 plus_constant (XEXP (targetx, 0),
5113 startb / BITS_PER_UNIT),
5115 constm1_rtx, TYPE_MODE (integer_type_node),
5116 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5117 TYPE_MODE (sizetype));
5121 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5122 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5123 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5124 startbit_rtx, TYPE_MODE (sizetype),
5125 endbit_rtx, TYPE_MODE (sizetype));
5128 emit_move_insn (target, targetx);
5136 /* Store the value of EXP (an expression tree)
5137 into a subfield of TARGET which has mode MODE and occupies
5138 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5139 If MODE is VOIDmode, it means that we are storing into a bit-field.
5141 If VALUE_MODE is VOIDmode, return nothing in particular.
5142 UNSIGNEDP is not used in this case.
5144 Otherwise, return an rtx for the value stored. This rtx
5145 has mode VALUE_MODE if that is convenient to do.
5146 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5148 ALIGN is the alignment that TARGET is known to have.
5149 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5151 ALIAS_SET is the alias set for the destination. This value will
5152 (in general) be different from that for TARGET, since TARGET is a
5153 reference to the containing structure. */
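/* Usage sketch (hypothetical values): a store into the field X of

       struct { unsigned x : 5; } s;

   arrives here with BITSIZE == 5, BITPOS == 0 and MODE == VOIDmode,
   and is handled by the store_bit_field path below rather than by an
   ordinary memory store.  */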
5156 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5157 unsignedp, align, total_size, alias_set)
5159 HOST_WIDE_INT bitsize;
5160 HOST_WIDE_INT bitpos;
5161 enum machine_mode mode;
5163 enum machine_mode value_mode;
5166 HOST_WIDE_INT total_size;
5169 HOST_WIDE_INT width_mask = 0;
5171 if (TREE_CODE (exp) == ERROR_MARK)
5174 /* If we have nothing to store, do nothing unless the expression has
5175 side-effects. */
5176 if (bitsize == 0)
5177 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5179 if (bitsize < HOST_BITS_PER_WIDE_INT)
5180 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5182 /* If we are storing into an unaligned field of an aligned union that is
5183 in a register, we may have the mode of TARGET being an integer mode but
5184 MODE == BLKmode. In that case, get an aligned object whose size and
5185 alignment are the same as TARGET and store TARGET into it (we can avoid
5186 the store if the field being stored is the entire width of TARGET). Then
5187 call ourselves recursively to store the field into a BLKmode version of
5188 that object. Finally, load from the object into TARGET. This is not
5189 very efficient in general, but should only be slightly more expensive
5190 than the otherwise-required unaligned accesses. Perhaps this can be
5191 cleaned up later. */
5194 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5198 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5201 rtx blk_object = copy_rtx (object);
5203 PUT_MODE (blk_object, BLKmode);
5205 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5206 emit_move_insn (object, target);
5208 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5209 align, total_size, alias_set);
5211 /* Even though we aren't returning target, we need to
5212 give it the updated value. */
5213 emit_move_insn (target, object);
5218 if (GET_CODE (target) == CONCAT)
5220 /* We're storing into a struct containing a single __complex. */
5224 return store_expr (exp, target, 0);
5227 /* If the structure is in a register or if the component
5228 is a bit field, we cannot use addressing to access it.
5229 Use bit-field techniques or SUBREG to store in it. */
5231 if (mode == VOIDmode
5232 || (mode != BLKmode && ! direct_store[(int) mode]
5233 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5234 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5235 || GET_CODE (target) == REG
5236 || GET_CODE (target) == SUBREG
5237 /* If the field isn't aligned enough to store as an ordinary memref,
5238 store it as a bit field. */
5239 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5240 && (align < GET_MODE_ALIGNMENT (mode)
5241 || bitpos % GET_MODE_ALIGNMENT (mode)))
5242 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5243 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5244 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5245 /* If the RHS and field are a constant size and the size of the
5246 RHS isn't the same size as the bitfield, we must use bitfield
5247 operations. */
5248 || (bitsize >= 0
5249 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5250 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5252 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5254 /* If BITSIZE is narrower than the size of the type of EXP
5255 we will be narrowing TEMP. Normally, what's wanted are the
5256 low-order bits. However, if EXP's type is a record and this is a
5257 big-endian machine, we want the upper BITSIZE bits. */
5258 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5259 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5260 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5261 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5262 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5266 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5267 MODE. */
5268 if (mode != VOIDmode && mode != BLKmode
5269 && mode != TYPE_MODE (TREE_TYPE (exp)))
5270 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5272 /* If the modes of TARGET and TEMP are both BLKmode, both
5273 must be in memory and BITPOS must be aligned on a byte
5274 boundary. If so, we simply do a block copy. */
5275 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5277 unsigned int exp_align = expr_align (exp);
5279 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5280 || bitpos % BITS_PER_UNIT != 0)
5283 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5285 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5286 align = MIN (exp_align, align);
5288 /* Find an alignment that is consistent with the bit position. */
5289 while ((bitpos % align) != 0)
5292 emit_block_move (target, temp,
5293 bitsize == -1 ? expr_size (exp)
5294 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5297 return value_mode == VOIDmode ? const0_rtx : target;
5300 /* Store the value in the bitfield. */
5301 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5302 if (value_mode != VOIDmode)
5304 /* The caller wants an rtx for the value. */
5305 /* If possible, avoid refetching from the bitfield itself. */
5306 if (width_mask != 0
5307 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5310 enum machine_mode tmode;
5313 return expand_and (temp,
5317 GET_MODE (temp) == VOIDmode
5319 : GET_MODE (temp))), NULL_RTX);
5320 tmode = GET_MODE (temp);
5321 if (tmode == VOIDmode)
5323 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5324 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5325 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5327 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5328 NULL_RTX, value_mode, 0, align,
5335 rtx addr = XEXP (target, 0);
5338 /* If a value is wanted, it must be the lhs;
5339 so make the address stable for multiple use. */
5341 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5342 && ! CONSTANT_ADDRESS_P (addr)
5343 /* A frame-pointer reference is already stable. */
5344 && ! (GET_CODE (addr) == PLUS
5345 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5346 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5347 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5348 target = replace_equiv_address (target, copy_to_reg (addr));
5350 /* Now build a reference to just the desired component. */
5352 to_rtx = copy_rtx (adjust_address (target, mode,
5353 bitpos / BITS_PER_UNIT));
5355 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5356 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5357 set_mem_alias_set (to_rtx, alias_set);
5359 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5363 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5364 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5365 codes and find the ultimate containing object, which we return.
5367 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5368 bit position, and *PUNSIGNEDP to the signedness of the field.
5369 If the position of the field is variable, we store a tree
5370 giving the variable offset (in units) in *POFFSET.
5371 This offset is in addition to the bit position.
5372 If the position is not variable, we store 0 in *POFFSET.
5373 We set *PALIGNMENT to the alignment of the address that will be
5374 computed. This is the alignment of the thing we return if *POFFSET
5375 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5377 If any of the extraction expressions is volatile,
5378 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5380 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5381 is a mode that can be used to access the field. In that case, *PBITSIZE
5384 If the field describes a variable-sized object, *PMODE is set to
5385 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5386 this case, but the address of the object can be found. */
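/* Illustrative example: for the reference S.A[I].B this function
   peels the COMPONENT_REF and ARRAY_REF nodes, returns the innermost
   object S, accumulates the constant part of B's position into
   *PBITPOS, and leaves the part that depends on I in *POFFSET.  */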
5389 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5390 punsignedp, pvolatilep, palignment)
5392 HOST_WIDE_INT *pbitsize;
5393 HOST_WIDE_INT *pbitpos;
5395 enum machine_mode *pmode;
5398 unsigned int *palignment;
5401 enum machine_mode mode = VOIDmode;
5402 tree offset = size_zero_node;
5403 tree bit_offset = bitsize_zero_node;
5404 unsigned int alignment = BIGGEST_ALIGNMENT;
5405 tree placeholder_ptr = 0;
5408 /* First get the mode, signedness, and size. We do this from just the
5409 outermost expression. */
5410 if (TREE_CODE (exp) == COMPONENT_REF)
5412 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5413 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5414 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5416 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5418 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5420 size_tree = TREE_OPERAND (exp, 1);
5421 *punsignedp = TREE_UNSIGNED (exp);
5425 mode = TYPE_MODE (TREE_TYPE (exp));
5426 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5428 if (mode == BLKmode)
5429 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5431 *pbitsize = GET_MODE_BITSIZE (mode);
5436 if (! host_integerp (size_tree, 1))
5437 mode = BLKmode, *pbitsize = -1;
5439 *pbitsize = tree_low_cst (size_tree, 1);
5442 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5443 and find the ultimate containing object. */
5446 if (TREE_CODE (exp) == BIT_FIELD_REF)
5447 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5448 else if (TREE_CODE (exp) == COMPONENT_REF)
5450 tree field = TREE_OPERAND (exp, 1);
5451 tree this_offset = DECL_FIELD_OFFSET (field);
5453 /* If this field hasn't been filled in yet, don't go
5454 past it. This should only happen when folding expressions
5455 made during type construction. */
5456 if (this_offset == 0)
5458 else if (! TREE_CONSTANT (this_offset)
5459 && contains_placeholder_p (this_offset))
5460 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5462 offset = size_binop (PLUS_EXPR, offset, this_offset);
5463 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5464 DECL_FIELD_BIT_OFFSET (field));
5466 if (! host_integerp (offset, 0))
5467 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5470 else if (TREE_CODE (exp) == ARRAY_REF
5471 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5473 tree index = TREE_OPERAND (exp, 1);
5474 tree array = TREE_OPERAND (exp, 0);
5475 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5476 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5477 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5479 /* We assume all arrays have sizes that are a multiple of a byte.
5480 First subtract the lower bound, if any, in the type of the
5481 index, then convert to sizetype and multiply by the size of the
5482 array element. */
5483 if (low_bound != 0 && ! integer_zerop (low_bound))
5484 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5487 /* If the index has a self-referential type, pass it to a
5488 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5489 component to one. */
5490 if (! TREE_CONSTANT (index)
5491 && contains_placeholder_p (index))
5492 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5493 if (! TREE_CONSTANT (unit_size)
5494 && contains_placeholder_p (unit_size))
5495 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5497 offset = size_binop (PLUS_EXPR, offset,
5498 size_binop (MULT_EXPR,
5499 convert (sizetype, index),
5503 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5505 exp = find_placeholder (exp, &placeholder_ptr);
5508 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5509 && ! ((TREE_CODE (exp) == NOP_EXPR
5510 || TREE_CODE (exp) == CONVERT_EXPR)
5511 && (TYPE_MODE (TREE_TYPE (exp))
5512 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5515 /* If any reference in the chain is volatile, the effect is volatile. */
5516 if (TREE_THIS_VOLATILE (exp))
5519 /* If the offset is non-constant already, then we can't assume any
5520 alignment more than the alignment here. */
5521 if (! TREE_CONSTANT (offset))
5522 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5524 exp = TREE_OPERAND (exp, 0);
5528 alignment = MIN (alignment, DECL_ALIGN (exp));
5529 else if (TREE_TYPE (exp) != 0)
5530 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5532 /* If OFFSET is constant, see if we can return the whole thing as a
5533 constant bit position. Otherwise, split it up. */
5534 if (host_integerp (offset, 0)
5535 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5537 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5538 && host_integerp (tem, 0))
5539 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5541 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5544 *palignment = alignment;
5548 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5550 static enum memory_use_mode
5551 get_memory_usage_from_modifier (modifier)
5552 enum expand_modifier modifier;
5558 return MEMORY_USE_RO;
5560 case EXPAND_MEMORY_USE_WO:
5561 return MEMORY_USE_WO;
5563 case EXPAND_MEMORY_USE_RW:
5564 return MEMORY_USE_RW;
5566 case EXPAND_MEMORY_USE_DONT:
5567 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5568 MEMORY_USE_DONT, because they are modifiers to a call of
5569 expand_expr in the ADDR_EXPR case of expand_expr. */
5570 case EXPAND_CONST_ADDRESS:
5571 case EXPAND_INITIALIZER:
5572 return MEMORY_USE_DONT;
5573 case EXPAND_MEMORY_USE_BAD:
5579 /* Given an rtx VALUE that may contain additions and multiplications, return
5580 an equivalent value that just refers to a register, memory, or constant.
5581 This is done by generating instructions to perform the arithmetic and
5582 returning a pseudo-register containing the value.
5584 The returned value may be a REG, SUBREG, MEM or constant. */
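/* Usage sketch (illustrative operands): called with
   VALUE == (plus (reg 70) (const_int 4)), this emits an add insn and
   returns a pseudo register holding the sum; a VALUE that is already
   a register, memory reference or constant is returned unchanged.  */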
5587 force_operand (value, target)
5591 /* Use a temporary to force order of execution of calls to
5592 `force_operand'. */
5595 /* Use subtarget as the target for operand 0 of a binary operation. */
5596 rtx subtarget = get_subtarget (target);
5598 /* Check for a PIC address load. */
5600 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5601 && XEXP (value, 0) == pic_offset_table_rtx
5602 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5603 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5604 || GET_CODE (XEXP (value, 1)) == CONST))
5607 subtarget = gen_reg_rtx (GET_MODE (value));
5608 emit_move_insn (subtarget, value);
5612 if (GET_CODE (value) == PLUS)
5613 binoptab = add_optab;
5614 else if (GET_CODE (value) == MINUS)
5615 binoptab = sub_optab;
5616 else if (GET_CODE (value) == MULT)
5618 op2 = XEXP (value, 1);
5619 if (!CONSTANT_P (op2)
5620 && !(GET_CODE (op2) == REG && op2 != subtarget))
5622 tmp = force_operand (XEXP (value, 0), subtarget);
5623 return expand_mult (GET_MODE (value), tmp,
5624 force_operand (op2, NULL_RTX),
5630 op2 = XEXP (value, 1);
5631 if (!CONSTANT_P (op2)
5632 && !(GET_CODE (op2) == REG && op2 != subtarget))
5634 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5636 binoptab = add_optab;
5637 op2 = negate_rtx (GET_MODE (value), op2);
5640 /* Check for an addition with OP2 a constant integer and our first
5641 operand a PLUS of a virtual register and something else. In that
5642 case, we want to emit the sum of the virtual register and the
5643 constant first and then add the other value. This allows virtual
5644 register instantiation to simply modify the constant rather than
5645 creating another one around this addition. */
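/* Illustrative rtx: given
   (plus (plus (reg virtual-stack-vars) (reg 71)) (const_int 8)),
   the code below adds 8 to the virtual register first, so that
   virtual register instantiation can fold the 8 into the frame
   offset instead of keeping a separate addition.  */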
5646 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5647 && GET_CODE (XEXP (value, 0)) == PLUS
5648 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5649 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5650 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5652 rtx temp = expand_binop (GET_MODE (value), binoptab,
5653 XEXP (XEXP (value, 0), 0), op2,
5654 subtarget, 0, OPTAB_LIB_WIDEN);
5655 return expand_binop (GET_MODE (value), binoptab, temp,
5656 force_operand (XEXP (XEXP (value, 0), 1), 0),
5657 target, 0, OPTAB_LIB_WIDEN);
5660 tmp = force_operand (XEXP (value, 0), subtarget);
5661 return expand_binop (GET_MODE (value), binoptab, tmp,
5662 force_operand (op2, NULL_RTX),
5663 target, 0, OPTAB_LIB_WIDEN);
5664 /* We give UNSIGNEDP = 0 to expand_binop
5665 because the only operations we are expanding here are signed ones. */
5670 /* Subroutine of expand_expr: return nonzero iff there is no way that
5671 EXP can reference X, which is being modified. TOP_P is nonzero if this
5672 call is going to be used to determine whether we need a temporary
5673 for EXP, as opposed to a recursive call to this function.
5675 It is always safe for this routine to return zero since it merely
5676 searches for optimization opportunities. */
5679 safe_from_p (x, exp, top_p)
5686 static tree save_expr_list;
5689 /* If EXP has varying size, we MUST use a target since we currently
5690 have no way of allocating temporaries of variable size
5691 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5692 So we assume here that something at a higher level has prevented a
5693 clash. This is somewhat bogus, but the best we can do. Only
5694 do this when X is BLKmode and when we are at the top level. */
5695 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5696 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5697 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5698 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5699 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5701 && GET_MODE (x) == BLKmode)
5702 /* If X is in the outgoing argument area, it is always safe. */
5703 || (GET_CODE (x) == MEM
5704 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5705 || (GET_CODE (XEXP (x, 0)) == PLUS
5706 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5709 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5710 find the underlying pseudo. */
5711 if (GET_CODE (x) == SUBREG)
5714 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5718 /* A SAVE_EXPR might appear many times in the expression passed to the
5719 top-level safe_from_p call, and if it has a complex subexpression,
5720 examining it multiple times could result in a combinatorial explosion.
5721 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5722 with optimization took about 28 minutes to compile -- even though it was
5723 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5724 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5725 we have processed. Note that the only test of top_p was above. */
5734 rtn = safe_from_p (x, exp, 0);
5736 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5737 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5742 /* Now look at our tree code and possibly recurse. */
5743 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5746 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5753 if (TREE_CODE (exp) == TREE_LIST)
5754 return ((TREE_VALUE (exp) == 0
5755 || safe_from_p (x, TREE_VALUE (exp), 0))
5756 && (TREE_CHAIN (exp) == 0
5757 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5758 else if (TREE_CODE (exp) == ERROR_MARK)
5759 return 1; /* An already-visited SAVE_EXPR? */
5764 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5768 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5769 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5773 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5774 the expression. If it is set, we conflict iff we are that rtx or
5775 both are in memory. Otherwise, we check all operands of the
5776 expression recursively. */
5778 switch (TREE_CODE (exp))
5781 return (staticp (TREE_OPERAND (exp, 0))
5782 || TREE_STATIC (exp)
5783 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5786 if (GET_CODE (x) == MEM
5787 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5788 get_alias_set (exp)))
5793 /* Assume that the call will clobber all hard registers and
5795 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5796 || GET_CODE (x) == MEM)
5801 /* If a sequence exists, we would have to scan every instruction
5802 in the sequence to see if it was safe. This is probably not
5803 worthwhile. */
5804 if (RTL_EXPR_SEQUENCE (exp))
5807 exp_rtl = RTL_EXPR_RTL (exp);
5810 case WITH_CLEANUP_EXPR:
5811 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5814 case CLEANUP_POINT_EXPR:
5815 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5818 exp_rtl = SAVE_EXPR_RTL (exp);
5822 /* If we've already scanned this, don't do it again. Otherwise,
5823 show we've scanned it and record for clearing the flag if we're
5824 going on. */
5825 if (TREE_PRIVATE (exp))
5828 TREE_PRIVATE (exp) = 1;
5829 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5831 TREE_PRIVATE (exp) = 0;
5835 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5839 /* The only operand we look at is operand 1. The rest aren't
5840 part of the expression. */
5841 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5843 case METHOD_CALL_EXPR:
5844 /* This takes an rtx argument, but shouldn't appear here. */
5851 /* If we have an rtx, we do not need to scan our operands. */
5855 nops = first_rtl_op (TREE_CODE (exp));
5856 for (i = 0; i < nops; i++)
5857 if (TREE_OPERAND (exp, i) != 0
5858 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5861 /* If this is a language-specific tree code, it may require
5862 special handling. */
5863 if ((unsigned int) TREE_CODE (exp)
5864 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5866 && !(*lang_safe_from_p) (x, exp))
5870 /* If we have an rtl, find any enclosed object. Then see if we conflict
5871 with it. */
5874 if (GET_CODE (exp_rtl) == SUBREG)
5876 exp_rtl = SUBREG_REG (exp_rtl);
5877 if (GET_CODE (exp_rtl) == REG
5878 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5882 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5883 are memory and they conflict. */
5884 return ! (rtx_equal_p (x, exp_rtl)
5885 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5886 && true_dependence (exp_rtl, GET_MODE (x), x,
5887 rtx_addr_varies_p)));
5890 /* If we reach here, it is safe. */
5894 /* Subroutine of expand_expr: return rtx if EXP is a
5895 variable or parameter; else return 0. */
5902 switch (TREE_CODE (exp))
5906 return DECL_RTL (exp);
5912 #ifdef MAX_INTEGER_COMPUTATION_MODE
5915 check_max_integer_computation_mode (exp)
5918 enum tree_code code;
5919 enum machine_mode mode;
5921 /* Strip any NOPs that don't change the mode. */
5923 code = TREE_CODE (exp);
5925 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5926 if (code == NOP_EXPR
5927 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5930 /* First check the type of the overall operation. We need only look at
5931 unary, binary and relational operations. */
5932 if (TREE_CODE_CLASS (code) == '1'
5933 || TREE_CODE_CLASS (code) == '2'
5934 || TREE_CODE_CLASS (code) == '<')
5936 mode = TYPE_MODE (TREE_TYPE (exp));
5937 if (GET_MODE_CLASS (mode) == MODE_INT
5938 && mode > MAX_INTEGER_COMPUTATION_MODE)
5939 internal_error ("unsupported wide integer operation");
5942 /* Check operand of a unary op. */
5943 if (TREE_CODE_CLASS (code) == '1')
5945 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5946 if (GET_MODE_CLASS (mode) == MODE_INT
5947 && mode > MAX_INTEGER_COMPUTATION_MODE)
5948 internal_error ("unsupported wide integer operation");
5951 /* Check operands of a binary/comparison op. */
5952 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5954 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5955 if (GET_MODE_CLASS (mode) == MODE_INT
5956 && mode > MAX_INTEGER_COMPUTATION_MODE)
5957 internal_error ("unsupported wide integer operation");
5959 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5960 if (GET_MODE_CLASS (mode) == MODE_INT
5961 && mode > MAX_INTEGER_COMPUTATION_MODE)
5962 internal_error ("unsupported wide integer operation");
5967 /* Return the highest power of two that EXP is known to be a multiple of.
5968 This is used in updating alignment of MEMs in array references. */
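/* For example, this returns 8 for the constant 24 (24 & -24 == 8),
   and for A + B it returns the smaller of the factors of A and B,
   since the sum is only known to be a multiple of the power of two
   common to both.  */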
5970 static HOST_WIDE_INT
5971 highest_pow2_factor (exp)
5974 HOST_WIDE_INT c0, c1;
5976 switch (TREE_CODE (exp))
5979 /* If the integer is expressible in a HOST_WIDE_INT, we can find
5980 the lowest bit that's a one. If the result is zero or negative,
5981 pessimize by returning 1. This is overly-conservative, but such
5982 things should not happen in the offset expressions that we are
5983 called with. */
5984 if (host_integerp (exp, 0))
5986 c0 = tree_low_cst (exp, 0);
5987 return c0 >= 0 ? c0 & -c0 : 1;
5991 case PLUS_EXPR: case MINUS_EXPR:
5992 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5993 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5994 return MIN (c0, c1);
5997 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5998 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6001 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6003 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6004 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6005 return MAX (1, c0 / c1);
6007 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6008 case COMPOUND_EXPR: case SAVE_EXPR:
6009 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6012 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6013 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6014 return MIN (c0, c1);
6023 /* Return an object on the placeholder list that matches EXP, a
6024 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6025 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6026 tree.def. If no such object is found, abort. If PLIST is nonzero, it is
6027 a location which initially points to a starting location in the
6028 placeholder list (zero means start of the list) and where a pointer into
6029 the placeholder list at which the object is found is placed. */
6032 find_placeholder (exp, plist)
6036 tree type = TREE_TYPE (exp);
6037 tree placeholder_expr;
6039 for (placeholder_expr
6040 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6041 placeholder_expr != 0;
6042 placeholder_expr = TREE_CHAIN (placeholder_expr))
6044 tree need_type = TYPE_MAIN_VARIANT (type);
6047 /* Find the outermost reference that is of the type we want. If none,
6048 see if any object has a type that is a pointer to the type we
6049 want. */
6050 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6051 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6052 || TREE_CODE (elt) == COND_EXPR)
6053 ? TREE_OPERAND (elt, 1)
6054 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6055 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6056 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6057 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6058 ? TREE_OPERAND (elt, 0) : 0))
6059 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6062 *plist = placeholder_expr;
6066 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6068 = ((TREE_CODE (elt) == COMPOUND_EXPR
6069 || TREE_CODE (elt) == COND_EXPR)
6070 ? TREE_OPERAND (elt, 1)
6071 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6072 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6073 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6074 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6075 ? TREE_OPERAND (elt, 0) : 0))
6076 if (POINTER_TYPE_P (TREE_TYPE (elt))
6077 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6081 *plist = placeholder_expr;
6082 return build1 (INDIRECT_REF, need_type, elt);
6089 /* expand_expr: generate code for computing expression EXP.
6090 An rtx for the computed value is returned. The value is never null.
6091 In the case of a void EXP, const0_rtx is returned.
6093 The value may be stored in TARGET if TARGET is nonzero.
6094 TARGET is just a suggestion; callers must assume that
6095 the rtx returned may not be the same as TARGET.
6097 If TARGET is CONST0_RTX, it means that the value will be ignored.
6099 If TMODE is not VOIDmode, it suggests generating the
6100 result in mode TMODE. But this is done only when convenient.
6101 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6102 TMODE is just a suggestion; callers must assume that
6103 the rtx returned may not have mode TMODE.
6105 Note that TARGET may have neither TMODE nor MODE. In that case, it
6106 probably will not be used.
6108 If MODIFIER is EXPAND_SUM then when EXP is an addition
6109 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6110 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6111 products as above, or REG or MEM, or constant.
6112 Ordinarily in such cases we would output mul or add instructions
6113 and then return a pseudo reg containing the sum.
6115 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6116 it also marks a label as absolutely required (it can't be dead).
6117 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6118 This is used for outputting expressions used in initializers.
6120 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6121 with a constant address even if that address is not normally legitimate.
6122 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
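/* Illustrative use of the modifiers: expanding the C expression
   a[i] with EXPAND_SUM may yield an address of the form
   (plus (reg ...) (mult (reg ...) (const_int 4))) for the caller to
   combine further, whereas EXPAND_NORMAL would emit the arithmetic
   and return a single pseudo register.  */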
6125 expand_expr (exp, target, tmode, modifier)
6128 enum machine_mode tmode;
6129 enum expand_modifier modifier;
6132 tree type = TREE_TYPE (exp);
6133 int unsignedp = TREE_UNSIGNED (type);
6134 enum machine_mode mode;
6135 enum tree_code code = TREE_CODE (exp);
6137 rtx subtarget, original_target;
6140 /* Used by check-memory-usage to make modifier read only. */
6141 enum expand_modifier ro_modifier;
6143 /* Handle ERROR_MARK before anybody tries to access its type. */
6144 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6146 op0 = CONST0_RTX (tmode);
6152 mode = TYPE_MODE (type);
6153 /* Use subtarget as the target for operand 0 of a binary operation. */
6154 subtarget = get_subtarget (target);
6155 original_target = target;
6156 ignore = (target == const0_rtx
6157 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6158 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6159 || code == COND_EXPR)
6160 && TREE_CODE (type) == VOID_TYPE));
6162 /* Make a read-only version of the modifier. */
6163 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6164 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6165 ro_modifier = modifier;
6167 ro_modifier = EXPAND_NORMAL;
6169 /* If we are going to ignore this result, we need only do something
6170 if there is a side-effect somewhere in the expression. If there
6171 is, short-circuit the most common cases here. Note that we must
6172 not call expand_expr with anything but const0_rtx in case this
6173 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6177 if (! TREE_SIDE_EFFECTS (exp))
6180 /* Ensure we reference a volatile object even if value is ignored, but
6181 don't do this if all we are doing is taking its address. */
6182 if (TREE_THIS_VOLATILE (exp)
6183 && TREE_CODE (exp) != FUNCTION_DECL
6184 && mode != VOIDmode && mode != BLKmode
6185 && modifier != EXPAND_CONST_ADDRESS)
6187 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6188 if (GET_CODE (temp) == MEM)
6189 temp = copy_to_reg (temp);
6193 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6194 || code == INDIRECT_REF || code == BUFFER_REF)
6195 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6196 VOIDmode, ro_modifier);
6197 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6198 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6200 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6202 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6206 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6207 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6208 /* If the second operand has no side effects, just evaluate
6209 the first. */
6210 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6211 VOIDmode, ro_modifier);
6212 else if (code == BIT_FIELD_REF)
6214 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6216 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6218 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6226 #ifdef MAX_INTEGER_COMPUTATION_MODE
6227 /* Only check stuff here if the mode we want is different from the mode
6228 of the expression; if it's the same, check_max_integer_computation_mode
6229 will handle it. Do we really need to check this stuff at all? */
6232 && GET_MODE (target) != mode
6233 && TREE_CODE (exp) != INTEGER_CST
6234 && TREE_CODE (exp) != PARM_DECL
6235 && TREE_CODE (exp) != ARRAY_REF
6236 && TREE_CODE (exp) != ARRAY_RANGE_REF
6237 && TREE_CODE (exp) != COMPONENT_REF
6238 && TREE_CODE (exp) != BIT_FIELD_REF
6239 && TREE_CODE (exp) != INDIRECT_REF
6240 && TREE_CODE (exp) != CALL_EXPR
6241 && TREE_CODE (exp) != VAR_DECL
6242 && TREE_CODE (exp) != RTL_EXPR)
6244 enum machine_mode mode = GET_MODE (target);
6246 if (GET_MODE_CLASS (mode) == MODE_INT
6247 && mode > MAX_INTEGER_COMPUTATION_MODE)
6248 internal_error ("unsupported wide integer operation");
6252 && TREE_CODE (exp) != INTEGER_CST
6253 && TREE_CODE (exp) != PARM_DECL
6254 && TREE_CODE (exp) != ARRAY_REF
6255 && TREE_CODE (exp) != ARRAY_RANGE_REF
6256 && TREE_CODE (exp) != COMPONENT_REF
6257 && TREE_CODE (exp) != BIT_FIELD_REF
6258 && TREE_CODE (exp) != INDIRECT_REF
6259 && TREE_CODE (exp) != VAR_DECL
6260 && TREE_CODE (exp) != CALL_EXPR
6261 && TREE_CODE (exp) != RTL_EXPR
6262 && GET_MODE_CLASS (tmode) == MODE_INT
6263 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6264 internal_error ("unsupported wide integer operation");
6266 check_max_integer_computation_mode (exp);
6269 /* If we will do cse, generate all results into pseudo registers
6270 since 1) that allows cse to find more things
6271 and 2) otherwise cse could produce an insn the machine cannot support. */
6274 if (! cse_not_expected && mode != BLKmode && target
6275 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6282 tree function = decl_function_context (exp);
6283 /* Handle using a label in a containing function. */
6284 if (function != current_function_decl
6285 && function != inline_function_decl && function != 0)
6287 struct function *p = find_function_data (function);
6288 p->expr->x_forced_labels
6289 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6290 p->expr->x_forced_labels);
6294 if (modifier == EXPAND_INITIALIZER)
6295 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6300 temp = gen_rtx_MEM (FUNCTION_MODE,
6301 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6302 if (function != current_function_decl
6303 && function != inline_function_decl && function != 0)
6304 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6309 if (DECL_RTL (exp) == 0)
6311 error_with_decl (exp, "prior parameter's size depends on `%s'");
6312 return CONST0_RTX (mode);
6315 /* ... fall through ... */
6318 /* If a static var's type was incomplete when the decl was written,
6319 but the type is complete now, lay out the decl now. */
6320 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6321 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6323 layout_decl (exp, 0);
6324 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6327 /* Although static-storage variables start off initialized, according to
6328 ANSI C, a memcpy could overwrite them with uninitialized values. So
6329 we check them too. This also lets us check for read-only variables
6330 accessed via a non-const declaration, in case it won't be detected
6331 any other way (e.g., in an embedded system or OS kernel without memory protection).
6334 Aggregates are not checked here; they're handled elsewhere. */
6335 if (cfun && current_function_check_memory_usage
6337 && GET_CODE (DECL_RTL (exp)) == MEM
6338 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6340 enum memory_use_mode memory_usage;
6341 memory_usage = get_memory_usage_from_modifier (modifier);
6343 in_check_memory_usage = 1;
6344 if (memory_usage != MEMORY_USE_DONT)
6345 emit_library_call (chkr_check_addr_libfunc,
6346 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6347 XEXP (DECL_RTL (exp), 0), Pmode,
6348 GEN_INT (int_size_in_bytes (type)),
6349 TYPE_MODE (sizetype),
6350 GEN_INT (memory_usage),
6351 TYPE_MODE (integer_type_node));
6352 in_check_memory_usage = 0;
6355 /* ... fall through ... */
6359 if (DECL_RTL (exp) == 0)
6362 /* Ensure the variable is marked as used even if it doesn't go through
6363 a parser. If it hasn't been used yet, write out an external definition. */
6365 if (! TREE_USED (exp))
6367 assemble_external (exp);
6368 TREE_USED (exp) = 1;
6371 /* Show we haven't gotten RTL for this yet. */
6374 /* Handle variables inherited from containing functions. */
6375 context = decl_function_context (exp);
6377 /* We treat inline_function_decl as an alias for the current function
6378 because that is the inline function whose vars, types, etc.
6379 are being merged into the current function.
6380 See expand_inline_function. */
6382 if (context != 0 && context != current_function_decl
6383 && context != inline_function_decl
6384 /* If var is static, we don't need a static chain to access it. */
6385 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6386 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6390 /* Mark as non-local and addressable. */
6391 DECL_NONLOCAL (exp) = 1;
6392 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6394 mark_addressable (exp);
6395 if (GET_CODE (DECL_RTL (exp)) != MEM)
6397 addr = XEXP (DECL_RTL (exp), 0);
6398 if (GET_CODE (addr) == MEM)
6400 = replace_equiv_address (addr,
6401 fix_lexical_addr (XEXP (addr, 0), exp));
6403 addr = fix_lexical_addr (addr, exp);
6405 temp = replace_equiv_address (DECL_RTL (exp), addr);
6408 /* This is the case of an array whose size is to be determined
6409 from its initializer, while the initializer is still being parsed. See expand_decl. */
6412 else if (GET_CODE (DECL_RTL (exp)) == MEM
6413 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6414 temp = validize_mem (DECL_RTL (exp));
6416 /* If DECL_RTL is memory, we are in the normal case and either
6417 the address is not valid or it is not a register and -fforce-addr
6418 is specified, get the address into a register. */
6420 else if (GET_CODE (DECL_RTL (exp)) == MEM
6421 && modifier != EXPAND_CONST_ADDRESS
6422 && modifier != EXPAND_SUM
6423 && modifier != EXPAND_INITIALIZER
6424 && (! memory_address_p (DECL_MODE (exp),
6425 XEXP (DECL_RTL (exp), 0))
6427 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6428 temp = replace_equiv_address (DECL_RTL (exp),
6429 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6431 /* If we got something, return it. But first, set the alignment
6432 if the address is a register. */
6435 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6436 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6441 /* If the mode of DECL_RTL does not match that of the decl, it
6442 must be a promoted value. We return a SUBREG of the wanted mode,
6443 but mark it so that we know that it was already extended. */
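     /* A sketch of the promoted-variable case (hypothetical target that
        promotes QImode locals to full word registers): a `char' variable may
        live in (reg:SI 60); when its value is wanted in QImode we hand back
        (subreg:QI (reg:SI 60) 0) with SUBREG_PROMOTED_VAR_P set, so later
        code knows the upper bits already hold a valid extension and need
        not re-extend.  */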
6445 if (GET_CODE (DECL_RTL (exp)) == REG
6446 && GET_MODE (DECL_RTL (exp)) != mode)
6448 /* Get the signedness used for this variable. Ensure we get the
6449 same mode we got when the variable was declared. */
6450 if (GET_MODE (DECL_RTL (exp))
6451 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6452 abort ();
6454 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6455 SUBREG_PROMOTED_VAR_P (temp) = 1;
6456 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6460 return DECL_RTL (exp);
6463 return immed_double_const (TREE_INT_CST_LOW (exp),
6464 TREE_INT_CST_HIGH (exp), mode);
6467 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6468 EXPAND_MEMORY_USE_BAD);
6471 /* If optimized, generate immediate CONST_DOUBLE
6472 which will be turned into memory by reload if necessary.
6474 We used to force a register so that loop.c could see it. But
6475 this does not allow gen_* patterns to perform optimizations with
6476 the constants. It also produces two insns in cases like "x = 1.0;".
6477 On most machines, floating-point constants are not permitted in
6478 many insns, so we'd end up copying it to a register in any case.
6480 Now, we do the copying in expand_binop, if appropriate. */
6481 return immed_real_const (exp);
6485 if (! TREE_CST_RTL (exp))
6486 output_constant_def (exp, 1);
6488 /* TREE_CST_RTL probably contains a constant address.
6489 On RISC machines where a constant address isn't valid,
6490 make some insns to get that address into a register. */
6491 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6492 && modifier != EXPAND_CONST_ADDRESS
6493 && modifier != EXPAND_INITIALIZER
6494 && modifier != EXPAND_SUM
6495 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6497 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6498 return replace_equiv_address (TREE_CST_RTL (exp),
6499 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6500 return TREE_CST_RTL (exp);
6502 case EXPR_WITH_FILE_LOCATION:
6505 const char *saved_input_filename = input_filename;
6506 int saved_lineno = lineno;
6507 input_filename = EXPR_WFL_FILENAME (exp);
6508 lineno = EXPR_WFL_LINENO (exp);
6509 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6510 emit_line_note (input_filename, lineno);
6511 /* Possibly avoid switching back and forth here. */
6512 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6513 input_filename = saved_input_filename;
6514 lineno = saved_lineno;
6519 context = decl_function_context (exp);
6521 /* If this SAVE_EXPR was at global context, assume we are an
6522 initialization function and move it into our context. */
6524 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6526 /* We treat inline_function_decl as an alias for the current function
6527 because that is the inline function whose vars, types, etc.
6528 are being merged into the current function.
6529 See expand_inline_function. */
6530 if (context == current_function_decl || context == inline_function_decl)
6533 /* If this is non-local, handle it. */
6536 /* The following call just exists to abort if the context is
6537 not of a containing function. */
6538 find_function_data (context);
6540 temp = SAVE_EXPR_RTL (exp);
6541 if (temp && GET_CODE (temp) == REG)
6543 put_var_into_stack (exp);
6544 temp = SAVE_EXPR_RTL (exp);
6546 if (temp == 0 || GET_CODE (temp) != MEM)
6549 replace_equiv_address (temp,
6550 fix_lexical_addr (XEXP (temp, 0), exp));
6552 if (SAVE_EXPR_RTL (exp) == 0)
6554 if (mode == VOIDmode)
6557 temp = assign_temp (build_qualified_type (type,
6559 | TYPE_QUAL_CONST)),
6562 SAVE_EXPR_RTL (exp) = temp;
6563 if (!optimize && GET_CODE (temp) == REG)
6564 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6567 /* If the mode of TEMP does not match that of the expression, it
6568 must be a promoted value. We pass store_expr a SUBREG of the
6569 wanted mode but mark it so that we know that it was already
6570 extended. Note that `unsignedp' was modified above in this case. */
6573 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6575 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6576 SUBREG_PROMOTED_VAR_P (temp) = 1;
6577 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6580 if (temp == const0_rtx)
6581 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6582 EXPAND_MEMORY_USE_BAD);
6584 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6586 TREE_USED (exp) = 1;
6589 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6590 must be a promoted value. We return a SUBREG of the wanted mode,
6591 but mark it so that we know that it was already extended. */
6593 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6594 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6596 /* Compute the signedness and make the proper SUBREG. */
6597 promote_mode (type, mode, &unsignedp, 0);
6598 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6599 SUBREG_PROMOTED_VAR_P (temp) = 1;
6600 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6604 return SAVE_EXPR_RTL (exp);
6609 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6610 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6614 case PLACEHOLDER_EXPR:
6616 tree old_list = placeholder_list;
6617 tree placeholder_expr = 0;
6619 exp = find_placeholder (exp, &placeholder_expr);
6620 placeholder_list = TREE_CHAIN (placeholder_expr);
6621 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6622 placeholder_list = old_list;
6626 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6629 case WITH_RECORD_EXPR:
6630 /* Put the object on the placeholder list, expand our first operand,
6631 and pop the list. */
6632 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6634 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6635 tmode, ro_modifier);
6636 placeholder_list = TREE_CHAIN (placeholder_list);
6640 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6641 expand_goto (TREE_OPERAND (exp, 0));
6643 expand_computed_goto (TREE_OPERAND (exp, 0));
6647 expand_exit_loop_if_false (NULL,
6648 invert_truthvalue (TREE_OPERAND (exp, 0)));
6651 case LABELED_BLOCK_EXPR:
6652 if (LABELED_BLOCK_BODY (exp))
6653 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6654 /* Should perhaps use expand_label, but this is simpler and safer. */
6655 do_pending_stack_adjust ();
6656 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6659 case EXIT_BLOCK_EXPR:
6660 if (EXIT_BLOCK_RETURN (exp))
6661 sorry ("returned value in block_exit_expr");
6662 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6667 expand_start_loop (1);
6668 expand_expr_stmt (TREE_OPERAND (exp, 0));
6676 tree vars = TREE_OPERAND (exp, 0);
6677 int vars_need_expansion = 0;
6679 /* Need to open a binding contour here because
6680 if there are any cleanups they must be contained here. */
6681 expand_start_bindings (2);
6683 /* Mark the corresponding BLOCK for output in its proper place. */
6684 if (TREE_OPERAND (exp, 2) != 0
6685 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6686 insert_block (TREE_OPERAND (exp, 2));
6688 /* If VARS have not yet been expanded, expand them now. */
6691 if (!DECL_RTL_SET_P (vars))
6693 vars_need_expansion = 1;
6696 expand_decl_init (vars);
6697 vars = TREE_CHAIN (vars);
6700 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6702 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6708 if (RTL_EXPR_SEQUENCE (exp))
6710 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6712 emit_insns (RTL_EXPR_SEQUENCE (exp));
6713 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6715 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6716 free_temps_for_rtl_expr (exp);
6717 return RTL_EXPR_RTL (exp);
6720 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6725 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6726 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6727 EXPAND_MEMORY_USE_BAD);
6731 /* All elts simple constants => refer to a constant in memory. But
6732 if this is a non-BLKmode mode, let it store a field at a time
6733 since that should make a CONST_INT or CONST_DOUBLE when we
6734 fold. Likewise, if we have a target we can use, it is best to
6735 store directly into the target unless the type is large enough
6736 that memcpy will be used. If we are making an initializer and
6737 all operands are constant, put it in memory as well. */
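      /* Illustration of the trade-off (example declarations, not from this
         code): a large static initializer such as
             static const int t[256] = { ... };
         is emitted once as initialized data and referenced through memory,
         while a small non-BLKmode aggregate is usually cheaper to build
         field by field directly in TARGET or a register.  */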
6738 else if ((TREE_STATIC (exp)
6739 && ((mode == BLKmode
6740 && ! (target != 0 && safe_from_p (target, exp, 1)))
6741 || TREE_ADDRESSABLE (exp)
6742 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6743 && (! MOVE_BY_PIECES_P
6744 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6746 && ! mostly_zeros_p (exp))))
6747 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6749 rtx constructor = output_constant_def (exp, 1);
6751 if (modifier != EXPAND_CONST_ADDRESS
6752 && modifier != EXPAND_INITIALIZER
6753 && modifier != EXPAND_SUM)
6754 constructor = validize_mem (constructor);
6760 /* Handle calls that pass values in multiple non-contiguous
6761 locations. The Irix 6 ABI has examples of this. */
6762 if (target == 0 || ! safe_from_p (target, exp, 1)
6763 || GET_CODE (target) == PARALLEL)
6765 = assign_temp (build_qualified_type (type,
6767 | (TREE_READONLY (exp)
6768 * TYPE_QUAL_CONST))),
6769 TREE_ADDRESSABLE (exp), 1, 1);
6771 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6772 int_size_in_bytes (TREE_TYPE (exp)));
6778 tree exp1 = TREE_OPERAND (exp, 0);
6780 tree string = string_constant (exp1, &index);
6782 /* Try to optimize reads from const strings. */
6784 && TREE_CODE (string) == STRING_CST
6785 && TREE_CODE (index) == INTEGER_CST
6786 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6787 && GET_MODE_CLASS (mode) == MODE_INT
6788 && GET_MODE_SIZE (mode) == 1
6789 && modifier != EXPAND_MEMORY_USE_WO)
6791 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6793 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6794 op0 = memory_address (mode, op0);
6796 if (cfun && current_function_check_memory_usage
6797 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6799 enum memory_use_mode memory_usage;
6800 memory_usage = get_memory_usage_from_modifier (modifier);
6802 if (memory_usage != MEMORY_USE_DONT)
6804 in_check_memory_usage = 1;
6805 emit_library_call (chkr_check_addr_libfunc,
6806 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6807 Pmode, GEN_INT (int_size_in_bytes (type)),
6808 TYPE_MODE (sizetype),
6809 GEN_INT (memory_usage),
6810 TYPE_MODE (integer_type_node));
6811 in_check_memory_usage = 0;
6815 temp = gen_rtx_MEM (mode, op0);
6816 set_mem_attributes (temp, exp, 0);
6818 /* If we are writing to this object and its type is a record with
6819 readonly fields, we must mark it as readonly so it will
6820 conflict with readonly references to those fields. */
6821 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6822 RTX_UNCHANGING_P (temp) = 1;
6828 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6832 tree array = TREE_OPERAND (exp, 0);
6833 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6834 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6835 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6838 /* Optimize the special case of a zero lower bound.
6840 We convert the low_bound to sizetype to avoid some problems
6841 with constant folding. (E.g. suppose the lower bound is 1,
6842 and its mode is QI. Without the conversion, (ARRAY
6843 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6844 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6846 if (! integer_zerop (low_bound))
6847 index = size_diffop (index, convert (sizetype, low_bound));
6849 /* Fold an expression like: "foo"[2].
6850 This is not done in fold so it won't happen inside &.
6851 Don't fold if this is for wide characters since it's too
6852 difficult to do correctly and this is a very rare case. */
6854 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6855 && TREE_CODE (array) == STRING_CST
6856 && TREE_CODE (index) == INTEGER_CST
6857 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6858 && GET_MODE_CLASS (mode) == MODE_INT
6859 && GET_MODE_SIZE (mode) == 1)
6861 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6863 /* If this is a constant index into a constant array,
6864 just get the value from the array. Handle both the cases when
6865 we have an explicit constructor and when our operand is a variable
6866 that was declared const. */
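        /* E.g. (an illustrative case): given
               static const int pow2[4] = { 1, 2, 4, 8 };
           a use of pow2[2] can be expanded directly as the constant 4,
           with no memory reference at all.  */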
6868 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6869 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6870 && TREE_CODE (index) == INTEGER_CST
6871 && 0 > compare_tree_int (index,
6872 list_length (CONSTRUCTOR_ELTS
6873 (TREE_OPERAND (exp, 0)))))
6877 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6878 i = TREE_INT_CST_LOW (index);
6879 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6883 return expand_expr (fold (TREE_VALUE (elem)), target,
6884 tmode, ro_modifier);
6887 else if (optimize >= 1
6888 && modifier != EXPAND_CONST_ADDRESS
6889 && modifier != EXPAND_INITIALIZER
6890 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6891 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6892 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6894 if (TREE_CODE (index) == INTEGER_CST)
6896 tree init = DECL_INITIAL (array);
6898 if (TREE_CODE (init) == CONSTRUCTOR)
6902 for (elem = CONSTRUCTOR_ELTS (init);
6904 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6905 elem = TREE_CHAIN (elem))
6908 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6909 return expand_expr (fold (TREE_VALUE (elem)), target,
6910 tmode, ro_modifier);
6912 else if (TREE_CODE (init) == STRING_CST
6913 && 0 > compare_tree_int (index,
6914 TREE_STRING_LENGTH (init)))
6916 tree type = TREE_TYPE (TREE_TYPE (init));
6917 enum machine_mode mode = TYPE_MODE (type);
6919 if (GET_MODE_CLASS (mode) == MODE_INT
6920 && GET_MODE_SIZE (mode) == 1)
6922 (TREE_STRING_POINTER
6923 (init)[TREE_INT_CST_LOW (index)]));
6932 case ARRAY_RANGE_REF:
6933 /* If the operand is a CONSTRUCTOR, we can just extract the
6934 appropriate field if it is present. Don't do this if we have
6935 already written the data since we want to refer to that copy
6936 and varasm.c assumes that's what we'll do. */
6937 if (code == COMPONENT_REF
6938 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6939 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6943 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6944 elt = TREE_CHAIN (elt))
6945 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6946 /* We can normally use the value of the field in the
6947 CONSTRUCTOR. However, if this is a bitfield in
6948 an integral mode that we can fit in a HOST_WIDE_INT,
6949 we must mask only the number of bits in the bitfield,
6950 since this is done implicitly by the constructor. If
6951 the bitfield does not meet either of those conditions,
6952 we can't do this optimization. */
6953 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6954 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6956 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6957 <= HOST_BITS_PER_WIDE_INT))))
6959 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6960 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6962 HOST_WIDE_INT bitsize
6963 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6965 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6967 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6968 op0 = expand_and (op0, op1, target);
6972 enum machine_mode imode
6973 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6975 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6978 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6980 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
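          /* Worked example (hypothetical field): for a signed 5-bit field
             held in SImode, `count' is 32 - 5 = 27; shifting left by 27 and
             then arithmetically right by 27 replicates the field's sign bit
             through the upper bits, i.e. sign-extends the value.  */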
6990 enum machine_mode mode1;
6991 HOST_WIDE_INT bitsize, bitpos;
6994 unsigned int alignment;
6995 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6996 &mode1, &unsignedp, &volatilep,
7000 /* If we got back the original object, something is wrong. Perhaps
7001 we are evaluating an expression too early. In any event, don't
7002 infinitely recurse. */
7006 /* If TEM's type is a union of variable size, pass TARGET to the inner
7007 computation, since it will need a temporary and TARGET is known
7008 to be safe to use for it. This occurs in unchecked conversion in Ada. */
7012 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7013 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7015 ? target : NULL_RTX),
7017 (modifier == EXPAND_INITIALIZER
7018 || modifier == EXPAND_CONST_ADDRESS)
7019 ? modifier : EXPAND_NORMAL);
7021 /* If this is a constant, put it into a register if it is a
7022 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7023 if (CONSTANT_P (op0))
7025 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7026 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7028 op0 = force_reg (mode, op0);
7030 op0 = validize_mem (force_const_mem (mode, op0));
7035 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7037 /* If this object is in a register, put it into memory.
7038 This case can't occur in C, but can in Ada if we have
7039 unchecked conversion of an expression from a scalar type to
7040 an array or record type. */
7041 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7042 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7044 /* If the operand is a SAVE_EXPR, we can deal with this by
7045 forcing the SAVE_EXPR into memory. */
7046 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7048 put_var_into_stack (TREE_OPERAND (exp, 0));
7049 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7054 = build_qualified_type (TREE_TYPE (tem),
7055 (TYPE_QUALS (TREE_TYPE (tem))
7056 | TYPE_QUAL_CONST));
7057 rtx memloc = assign_temp (nt, 1, 1, 1);
7059 mark_temp_addr_taken (memloc);
7060 emit_move_insn (memloc, op0);
7065 if (GET_CODE (op0) != MEM)
7068 if (GET_MODE (offset_rtx) != ptr_mode)
7069 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7071 #ifdef POINTERS_EXTEND_UNSIGNED
7072 if (GET_MODE (offset_rtx) != Pmode)
7073 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7076 /* A constant address in OP0 can have VOIDmode; we must not try
7077 to call force_reg in that case, so avoid it here. */
7078 if (GET_CODE (op0) == MEM
7079 && GET_MODE (op0) == BLKmode
7080 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7082 && (bitpos % bitsize) == 0
7083 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7084 && alignment == GET_MODE_ALIGNMENT (mode1))
7086 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7088 if (GET_CODE (XEXP (temp, 0)) == REG)
7091 op0 = (replace_equiv_address
7093 force_reg (GET_MODE (XEXP (temp, 0)),
7098 op0 = offset_address (op0, offset_rtx,
7099 highest_pow2_factor (offset));
7102 /* Don't forget about volatility even if this is a bitfield. */
7103 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7105 if (op0 == orig_op0)
7106 op0 = copy_rtx (op0);
7108 MEM_VOLATILE_P (op0) = 1;
7111 /* Check the access. */
7112 if (cfun != 0 && current_function_check_memory_usage
7113 && GET_CODE (op0) == MEM)
7115 enum memory_use_mode memory_usage;
7116 memory_usage = get_memory_usage_from_modifier (modifier);
7118 if (memory_usage != MEMORY_USE_DONT)
7123 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7124 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7126 /* Check the access rights of the pointer. */
7127 in_check_memory_usage = 1;
7128 if (size > BITS_PER_UNIT)
7129 emit_library_call (chkr_check_addr_libfunc,
7130 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7131 Pmode, GEN_INT (size / BITS_PER_UNIT),
7132 TYPE_MODE (sizetype),
7133 GEN_INT (memory_usage),
7134 TYPE_MODE (integer_type_node));
7135 in_check_memory_usage = 0;
7139 /* In cases where an aligned union has an unaligned object
7140 as a field, we might be extracting a BLKmode value from
7141 an integer-mode (e.g., SImode) object. Handle this case
7142 by doing the extract into an object as wide as the field
7143 (which we know to be the width of a basic mode), then
7144 storing into memory, and changing the mode to BLKmode. */
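      /* Sketch of that situation (assumed layout): a union aligned for
         SImode may contain a BLKmode member, so fetching the member means
         pulling bits out of an SImode object; we extract into an integer
         register, spill it to a stack temporary, and return that MEM with
         its mode changed to BLKmode.  */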
7145 if (mode1 == VOIDmode
7146 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7147 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7148 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7149 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7150 && modifier != EXPAND_CONST_ADDRESS
7151 && modifier != EXPAND_INITIALIZER)
7152 /* If the field isn't aligned enough to fetch as a memref,
7153 fetch it as a bit field. */
7154 || (mode1 != BLKmode
7155 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7156 && ((TYPE_ALIGN (TREE_TYPE (tem))
7157 < GET_MODE_ALIGNMENT (mode))
7158 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7159 /* If the type and the field are a constant size and the
7160 size of the type isn't the same size as the bitfield,
7161 we must use bitfield operations. */
7163 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7165 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7168 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7169 && (TYPE_ALIGN (type) > alignment
7170 || bitpos % TYPE_ALIGN (type) != 0)))
7172 enum machine_mode ext_mode = mode;
7174 if (ext_mode == BLKmode
7175 && ! (target != 0 && GET_CODE (op0) == MEM
7176 && GET_CODE (target) == MEM
7177 && bitpos % BITS_PER_UNIT == 0))
7178 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7180 if (ext_mode == BLKmode)
7182 /* In this case, BITPOS must start at a byte boundary and
7183 TARGET, if specified, must be a MEM. */
7184 if (GET_CODE (op0) != MEM
7185 || (target != 0 && GET_CODE (target) != MEM)
7186 || bitpos % BITS_PER_UNIT != 0)
7189 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7191 target = assign_temp (type, 0, 1, 1);
7193 emit_block_move (target, op0,
7194 bitsize == -1 ? expr_size (exp)
7195 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7201 op0 = validize_mem (op0);
7203 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7204 mark_reg_pointer (XEXP (op0, 0), alignment);
7206 op0 = extract_bit_field (op0, bitsize, bitpos,
7207 unsignedp, target, ext_mode, ext_mode,
7209 int_size_in_bytes (TREE_TYPE (tem)));
7211 /* If the result is a record type and BITSIZE is narrower than
7212 the mode of OP0, an integral mode, and this is a big endian
7213 machine, we must put the field into the high-order bits. */
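          /* For example (illustrative numbers): an 8-bit field extracted
             into an SImode value on a big-endian machine is shifted left by
             32 - 8 = 24 so that it occupies the most significant byte, the
             position it would have in memory.  */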
7214 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7215 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7216 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7217 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7218 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7222 if (mode == BLKmode)
7224 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7226 rtx new = assign_temp (nt, 0, 1, 1);
7228 emit_move_insn (new, op0);
7229 op0 = copy_rtx (new);
7230 PUT_MODE (op0, BLKmode);
7236 /* If the result is BLKmode, use that to access the object now as well. */
7238 if (mode == BLKmode)
7241 /* Get a reference to just this component. */
7242 if (modifier == EXPAND_CONST_ADDRESS
7243 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7244 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7246 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7248 if (op0 == orig_op0)
7249 op0 = copy_rtx (op0);
7251 set_mem_attributes (op0, exp, 0);
7252 if (GET_CODE (XEXP (op0, 0)) == REG)
7253 mark_reg_pointer (XEXP (op0, 0), alignment);
7255 MEM_VOLATILE_P (op0) |= volatilep;
7256 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7257 || modifier == EXPAND_CONST_ADDRESS
7258 || modifier == EXPAND_INITIALIZER)
7260 else if (target == 0)
7261 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7263 convert_move (target, op0, unsignedp);
7269 rtx insn, before = get_last_insn (), vtbl_ref;
7271 /* Evaluate the interior expression. */
7272 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7275 /* Get or create an instruction off which to hang a note. */
7276 if (REG_P (subtarget))
7279 insn = get_last_insn ();
7282 if (! INSN_P (insn))
7283 insn = prev_nonnote_insn (insn);
7287 target = gen_reg_rtx (GET_MODE (subtarget));
7288 insn = emit_move_insn (target, subtarget);
7291 /* Collect the data for the note. */
7292 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7293 vtbl_ref = plus_constant (vtbl_ref,
7294 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7295 /* Discard the initial CONST that was added. */
7296 vtbl_ref = XEXP (vtbl_ref, 0);
7299 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7304 /* Intended for a reference to a buffer of a file-object in Pascal.
7305 But it's not certain that a special tree code will really be
7306 necessary for these. INDIRECT_REF might work for them. */
7312 /* Pascal set IN expression.
7315 rlo = set_low - (set_low%bits_per_word);
7316 the_word = set [ (index - rlo)/bits_per_word ];
7317 bit_index = index % bits_per_word;
7318 bitmask = 1 << bit_index;
7319 return !!(the_word & bitmask); */
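        /* A worked instance of the algorithm above (hypothetical values,
           using bits_per_word == 8 for brevity): for set_low == 8 and
           index == 13, rlo is 8, the_word is set[(13 - 8) / 8] == set[0],
           bit_index is 13 % 8 == 5, and the result is bit 5 of that word.  */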
7321 tree set = TREE_OPERAND (exp, 0);
7322 tree index = TREE_OPERAND (exp, 1);
7323 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7324 tree set_type = TREE_TYPE (set);
7325 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7326 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7327 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7328 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7329 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7330 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7331 rtx setaddr = XEXP (setval, 0);
7332 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7334 rtx diff, quo, rem, addr, bit, result;
7336 /* If domain is empty, answer is no. Likewise if index is constant
7337 and out of bounds. */
7338 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7339 && TREE_CODE (set_low_bound) == INTEGER_CST
7340 && tree_int_cst_lt (set_high_bound, set_low_bound))
7341 || (TREE_CODE (index) == INTEGER_CST
7342 && TREE_CODE (set_low_bound) == INTEGER_CST
7343 && tree_int_cst_lt (index, set_low_bound))
7344 || (TREE_CODE (set_high_bound) == INTEGER_CST
7345 && TREE_CODE (index) == INTEGER_CST
7346 && tree_int_cst_lt (set_high_bound, index))))
7350 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7352 /* If we get here, we have to generate the code for both cases
7353 (in range and out of range). */
7355 op0 = gen_label_rtx ();
7356 op1 = gen_label_rtx ();
7358 if (! (GET_CODE (index_val) == CONST_INT
7359 && GET_CODE (lo_r) == CONST_INT))
7361 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7362 GET_MODE (index_val), iunsignedp, 0, op1);
7365 if (! (GET_CODE (index_val) == CONST_INT
7366 && GET_CODE (hi_r) == CONST_INT))
7368 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7369 GET_MODE (index_val), iunsignedp, 0, op1);
7372 /* Calculate the element number of bit zero in the first word of the set. */
7374 if (GET_CODE (lo_r) == CONST_INT)
7375 rlow = GEN_INT (INTVAL (lo_r)
7376 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7378 rlow = expand_binop (index_mode, and_optab, lo_r,
7379 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7380 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7382 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7383 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7385 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7386 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7387 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7388 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7390 addr = memory_address (byte_mode,
7391 expand_binop (index_mode, add_optab, diff,
7392 setaddr, NULL_RTX, iunsignedp,
7395 /* Extract the bit we want to examine. */
7396 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7397 gen_rtx_MEM (byte_mode, addr),
7398 make_tree (TREE_TYPE (index), rem),
7400 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7401 GET_MODE (target) == byte_mode ? target : 0,
7402 1, OPTAB_LIB_WIDEN);
7404 if (result != target)
7405 convert_move (target, result, 1);
7407 /* Output the code to handle the out-of-range case. */
7410 emit_move_insn (target, const0_rtx);
7415 case WITH_CLEANUP_EXPR:
7416 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7418 WITH_CLEANUP_EXPR_RTL (exp)
7419 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7420 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7422 /* That's it for this cleanup. */
7423 TREE_OPERAND (exp, 1) = 0;
7425 return WITH_CLEANUP_EXPR_RTL (exp);
7427 case CLEANUP_POINT_EXPR:
7429 /* Start a new binding layer that will keep track of all cleanup
7430 actions to be performed. */
7431 expand_start_bindings (2);
7433 target_temp_slot_level = temp_slot_level;
7435 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7436 /* If we're going to use this value, load it up now. */
7438 op0 = force_not_mem (op0);
7439 preserve_temp_slots (op0);
7440 expand_end_bindings (NULL_TREE, 0, 0);
7445 /* Check for a built-in function. */
7446 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7447 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7449 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7451 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7452 == BUILT_IN_FRONTEND)
7453 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7455 return expand_builtin (exp, target, subtarget, tmode, ignore);
7458 return expand_call (exp, target, ignore);
7460 case NON_LVALUE_EXPR:
7463 case REFERENCE_EXPR:
7464 if (TREE_OPERAND (exp, 0) == error_mark_node)
7467 if (TREE_CODE (type) == UNION_TYPE)
7469 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7471 /* If both input and output are BLKmode, this conversion
7472 isn't actually doing anything unless we need to make the
7473 alignment stricter. */
7474 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7475 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7476 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7477 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7481 target = assign_temp (type, 0, 1, 1);
7483 if (GET_CODE (target) == MEM)
7484 /* Store data into beginning of memory target. */
7485 store_expr (TREE_OPERAND (exp, 0),
7486 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7488 else if (GET_CODE (target) == REG)
7489 /* Store this field into a union of the proper type. */
7490 store_field (target,
7491 MIN ((int_size_in_bytes (TREE_TYPE
7492 (TREE_OPERAND (exp, 0)))
7494 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7495 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7496 VOIDmode, 0, BITS_PER_UNIT,
7497 int_size_in_bytes (type), 0);
7501 /* Return the entire union. */
7505 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7507 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7510 /* If the signedness of the conversion differs and OP0 is
7511 a promoted SUBREG, clear that indication since we now
7512 have to do the proper extension. */
7513 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7514 && GET_CODE (op0) == SUBREG)
7515 SUBREG_PROMOTED_VAR_P (op0) = 0;
7520 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7521 if (GET_MODE (op0) == mode)
7524 /* If OP0 is a constant, just convert it into the proper mode. */
7525 if (CONSTANT_P (op0))
7527 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7528 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7530 if (modifier == EXPAND_INITIALIZER)
7531 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7535 convert_to_mode (mode, op0,
7536 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7538 convert_move (target, op0,
7539 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7543 /* We come here from MINUS_EXPR when the second operand is a constant. */
7546 this_optab = ! unsignedp && flag_trapv
7547 && (GET_MODE_CLASS(mode) == MODE_INT)
7548 ? addv_optab : add_optab;
7550 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7551 something else, make sure we add the register to the constant and
7552 then to the other thing. This case can occur during strength
7553 reduction and doing it this way will produce better code if the
7554 frame pointer or argument pointer is eliminated.
7556 fold-const.c will ensure that the constant is always in the inner
7557 PLUS_EXPR, so the only case we need to do anything about is if
7558 sp, ap, or fp is our second argument, in which case we must swap
7559 the innermost first argument and our second argument. */
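      /* Illustration of the rewrite (a sketch): given
             (PLUS_EXPR (PLUS_EXPR X C) FP)
         where FP is the frame-pointer RTL_EXPR, we swap X and FP to get
             (PLUS_EXPR (PLUS_EXPR FP C) X)
         so that FP + C can collapse to a single constant address once the
         frame pointer is eliminated.  */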
7561 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7562 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7563 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7564 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7565 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7566 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7568 tree t = TREE_OPERAND (exp, 1);
7570 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7571 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7574 /* If the result is to be ptr_mode and we are adding an integer to
7575 something, we might be forming a constant. So try to use
7576 plus_constant. If it produces a sum and we can't accept it,
7577 use force_operand. This allows P = &ARR[const] to generate
7578 efficient code on machines where a SYMBOL_REF is not a valid address.
7581 If this is an EXPAND_SUM call, always return the sum. */
7582 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7583 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7585 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7586 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7587 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7591 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7593 /* Use immed_double_const to ensure that the constant is
7594 truncated according to the mode of OP1, then sign extended
7595 to a HOST_WIDE_INT. Using the constant directly can result
7596 in non-canonical RTL in a 64x32 cross compile. */
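          /* Concretely (example numbers, assuming a 64-bit host and a
             32-bit target): the tree constant 0xffffffff in SImode must
             become the canonical CONST_INT -1, not 4294967295; going
             through immed_double_const guarantees that truncation.  */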
7598 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7600 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7601 op1 = plus_constant (op1, INTVAL (constant_part));
7602 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7603 op1 = force_operand (op1, target);
7607 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7608 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7609 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7613 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7615 if (! CONSTANT_P (op0))
7617 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7618 VOIDmode, modifier);
7619 /* Don't go to both_summands if modifier
7620 says it's not right to return a PLUS. */
7621 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7625 /* Use immed_double_const to ensure that the constant is
7626 truncated according to the mode of OP1, then sign extended
7627 to a HOST_WIDE_INT. Using the constant directly can result
7628 in non-canonical RTL in a 64x32 cross compile. */
7630 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7632 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7633 op0 = plus_constant (op0, INTVAL (constant_part));
7634 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7635 op0 = force_operand (op0, target);
7640 /* No sense saving up arithmetic to be done
7641 if it's all in the wrong mode to form part of an address.
7642 And force_operand won't know whether to sign-extend or zero-extend. */
7644 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7645 || mode != ptr_mode)
7648 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7651 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7652 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7655 /* Make sure any term that's a sum with a constant comes last. */
7656 if (GET_CODE (op0) == PLUS
7657 && CONSTANT_P (XEXP (op0, 1)))
7663 /* If adding to a sum including a constant,
7664 associate it to put the constant outside. */
7665 if (GET_CODE (op1) == PLUS
7666 && CONSTANT_P (XEXP (op1, 1)))
7668 rtx constant_term = const0_rtx;
7670 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7673 /* Ensure that MULT comes first if there is one. */
7674 else if (GET_CODE (op0) == MULT)
7675 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7677 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7679 /* Let's also eliminate constants from op0 if possible. */
7680 op0 = eliminate_constant_term (op0, &constant_term);
7682 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7683 their sum should be a constant. Form it into OP1, since the
7684 result we want will then be OP0 + OP1. */
7686 temp = simplify_binary_operation (PLUS, mode, constant_term,
7691 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7694 /* Put a constant term last and put a multiplication first. */
7695 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7696 temp = op1, op1 = op0, op0 = temp;
7698 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7699 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7702 /* For initializers, we are allowed to return a MINUS of two
7703 symbolic constants. Here we handle all cases when both operands are constant. */
7705 /* Handle difference of two symbolic constants,
7706 for the sake of an initializer. */
7707 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7708 && really_constant_p (TREE_OPERAND (exp, 0))
7709 && really_constant_p (TREE_OPERAND (exp, 1)))
7711 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7712 VOIDmode, ro_modifier);
7713 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7714 VOIDmode, ro_modifier);
7716 /* If the last operand is a CONST_INT, use plus_constant of
7717 the negated constant. Else make the MINUS. */
7718 if (GET_CODE (op1) == CONST_INT)
7719 return plus_constant (op0, - INTVAL (op1));
7721 return gen_rtx_MINUS (mode, op0, op1);
7723 /* Convert A - const to A + (-const). */
7724 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7726 tree negated = fold (build1 (NEGATE_EXPR, type,
7727 TREE_OPERAND (exp, 1)));
7729 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7730 /* If we can't negate the constant in TYPE, leave it alone and
7731 expand_binop will negate it for us. We used to try to do it
7732 here in the signed version of TYPE, but that doesn't work
7733 on POINTER_TYPEs. */;
7736 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7740 this_optab = ! unsignedp && flag_trapv
7741 && (GET_MODE_CLASS(mode) == MODE_INT)
7742 ? subv_optab : sub_optab;
7746 /* If first operand is constant, swap them.
7747 Thus the following special case checks need only
7748 check the second operand. */
7749 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7751 tree t1 = TREE_OPERAND (exp, 0);
7752 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7753 TREE_OPERAND (exp, 1) = t1;
7756 /* Attempt to return something suitable for generating an
7757 indexed address, for machines that support that. */
7759 if (modifier == EXPAND_SUM && mode == ptr_mode
7760 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7761 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7763 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7766 /* Apply distributive law if OP0 is x+c. */
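          /* That is (sketch): (x + c) * k becomes (x * k) + c*k, with c*k
             folded to a constant at compile time, keeping the result in the
             sum-of-products shape useful for address arithmetic.  */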
7767 if (GET_CODE (op0) == PLUS
7768 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7773 (mode, XEXP (op0, 0),
7774 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7775 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7776 * INTVAL (XEXP (op0, 1))));
7778 if (GET_CODE (op0) != REG)
7779 op0 = force_operand (op0, NULL_RTX);
7780 if (GET_CODE (op0) != REG)
7781 op0 = copy_to_mode_reg (mode, op0);
7784 gen_rtx_MULT (mode, op0,
7785 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7788 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7791 /* Check for multiplying things that have been extended
7792 from a narrower type. If this machine supports multiplying
7793 in that narrower type with a result in the desired type,
7794 do it that way, and avoid the explicit type-conversion. */
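      /* For instance (assuming a target providing a widening multiply
         pattern such as mulhisi3): `(int) a * (int) b' with HImode A and B
         can be emitted as one HImode-by-HImode multiply yielding an SImode
         result, instead of two extensions followed by a full SImode
         multiply.  */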
7795 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7796 && TREE_CODE (type) == INTEGER_TYPE
7797 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7798 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7799 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7800 && int_fits_type_p (TREE_OPERAND (exp, 1),
7801 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7802 /* Don't use a widening multiply if a shift will do. */
7803 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7804 > HOST_BITS_PER_WIDE_INT)
7805 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7807 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7808 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7810 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7811 /* If both operands are extended, they must either both
7812 be zero-extended or both be sign-extended. */
7813 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7815 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7817 enum machine_mode innermode
7818 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7819 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7820 ? smul_widen_optab : umul_widen_optab);
7821 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7822 ? umul_widen_optab : smul_widen_optab);
7823 if (mode == GET_MODE_WIDER_MODE (innermode))
7825 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7827 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7828 NULL_RTX, VOIDmode, 0);
7829 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7830 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7833 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7834 NULL_RTX, VOIDmode, 0);
7837 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7838 && innermode == word_mode)
7841 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7842 NULL_RTX, VOIDmode, 0);
7843 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7844 op1 = convert_modes (innermode, mode,
7845 expand_expr (TREE_OPERAND (exp, 1),
7846 NULL_RTX, VOIDmode, 0),
7849 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7850 NULL_RTX, VOIDmode, 0);
7851 temp = expand_binop (mode, other_optab, op0, op1, target,
7852 unsignedp, OPTAB_LIB_WIDEN);
7853 htem = expand_mult_highpart_adjust (innermode,
7854 gen_highpart (innermode, temp),
7856 gen_highpart (innermode, temp),
7858 emit_move_insn (gen_highpart (innermode, temp), htem);
7863 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7864 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7865 return expand_mult (mode, op0, op1, target, unsignedp);
7867 case TRUNC_DIV_EXPR:
7868 case FLOOR_DIV_EXPR:
7870 case ROUND_DIV_EXPR:
7871 case EXACT_DIV_EXPR:
7872 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7874 /* Possible optimization: compute the dividend with EXPAND_SUM;
7875 then, if the divisor is constant, we can optimize the case
7876 where some terms of the dividend have coefficients divisible by it. */
7877 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7878 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7879 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7882 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving
7883 an expensive divide. If not, combine will rebuild the original
7884 computation. */
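      /* E.g. (illustrative): with several divisions x/d, y/d, z/d in a
         routine, rewriting each as a multiply by (1/d) lets CSE compute the
         reciprocal once; this is only done under
         -funsafe-math-optimizations because it can change rounding.  */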
7885 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7886 && !real_onep (TREE_OPERAND (exp, 0)))
7887 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7888 build (RDIV_EXPR, type,
7889 build_real (type, dconst1),
7890 TREE_OPERAND (exp, 1))),
7891 target, tmode, unsignedp);
7892 this_optab = sdiv_optab;
7895 case TRUNC_MOD_EXPR:
7896 case FLOOR_MOD_EXPR:
7898 case ROUND_MOD_EXPR:
7899 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7901 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7902 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7903 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7905 case FIX_ROUND_EXPR:
7906 case FIX_FLOOR_EXPR:
7908 abort (); /* Not used for C. */
7910 case FIX_TRUNC_EXPR:
7911 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7913 target = gen_reg_rtx (mode);
7914 expand_fix (target, op0, unsignedp);
7918 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7920 target = gen_reg_rtx (mode);
7921 /* expand_float can't figure out what to do if FROM has VOIDmode.
7922 So give it the correct mode. With -O, cse will optimize this. */
7923 if (GET_MODE (op0) == VOIDmode)
7924 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7926 expand_float (target, op0,
7927 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7931 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7932 temp = expand_unop (mode,
7933 ! unsignedp && flag_trapv
7934 && (GET_MODE_CLASS(mode) == MODE_INT)
7935 ? negv_optab : neg_optab, op0, target, 0);
7941 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7943 /* Handle complex values specially. */
7944 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7945 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7946 return expand_complex_abs (mode, op0, target, unsignedp);
7948 /* Unsigned abs is simply the operand. Testing here means we don't
7949 risk generating incorrect code below. */
7950 if (TREE_UNSIGNED (type))
7953 return expand_abs (mode, op0, target, unsignedp,
7954 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7958 target = original_target;
7959 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7960 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7961 || GET_MODE (target) != mode
7962 || (GET_CODE (target) == REG
7963 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7964 target = gen_reg_rtx (mode);
7965 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7966 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7968 /* First try to do it with a special MIN or MAX instruction.
7969 If that does not win, use a conditional jump to select the proper value. */
7971 this_optab = (TREE_UNSIGNED (type)
7972 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7973 : (code == MIN_EXPR ? smin_optab : smax_optab));
7975 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7980 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7983 if (GET_CODE (target) == MEM)
7984 target = gen_reg_rtx (mode);
7987 emit_move_insn (target, op0);
7989 op0 = gen_label_rtx ();
7991 /* If this mode is an integer too wide to compare properly,
7992 compare word by word. Rely on cse to optimize constant cases. */
7993 if (GET_MODE_CLASS (mode) == MODE_INT
7994 && ! can_compare_p (GE, mode, ccp_jump))
7996 if (code == MAX_EXPR)
7997 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7998 target, op1, NULL_RTX, op0);
8000 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8001 op1, target, NULL_RTX, op0);
8005 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8006 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8007 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
8010 emit_move_insn (target, op1);
8015 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8016 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8022 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8023 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8028 /* ??? Can optimize bitwise operations with one arg constant.
8029 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8030 and (a bitwise1 b) bitwise2 b (etc)
8031 but that is probably not worth while. */
8033 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8034 boolean values when we want in all cases to compute both of them. In
8035 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8036 as actual zero-or-1 values and then bitwise anding. In cases where
8037 there cannot be any side effects, better code would be made by
8038 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8039 how to recognize those cases. */
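      /* E.g. (illustrative): `(a < b) & (c < d)' evaluates both comparisons
         to 0-or-1 values and ANDs them with no branches, whereas the `&&'
         form, TRUTH_ANDIF_EXPR, would skip the second comparison whenever
         the first is false.  */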
8041 case TRUTH_AND_EXPR:
8043 this_optab = and_optab;
8048 this_optab = ior_optab;
8051 case TRUTH_XOR_EXPR:
8053 this_optab = xor_optab;
8060 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8062 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8063 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8066 /* Could determine the answer when only additive constants differ. Also,
8067 the addition of one can be handled by changing the condition. */
8074 case UNORDERED_EXPR:
8081 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8085 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8086 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8088 && GET_CODE (original_target) == REG
8089 && (GET_MODE (original_target)
8090 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8092 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8095 if (temp != original_target)
8096 temp = copy_to_reg (temp);
8098 op1 = gen_label_rtx ();
8099 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8100 GET_MODE (temp), unsignedp, 0, op1);
8101 emit_move_insn (temp, const1_rtx);
8106 /* If no set-flag instruction, must generate a conditional
8107 store into a temporary variable. Drop through
8108 and handle this like && and ||. */
8110 case TRUTH_ANDIF_EXPR:
8111 case TRUTH_ORIF_EXPR:
8113 && (target == 0 || ! safe_from_p (target, exp, 1)
8114 /* Make sure we don't have a hard reg (such as function's return
8115 value) live across basic blocks, if not optimizing. */
8116 || (!optimize && GET_CODE (target) == REG
8117 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8118 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8121 emit_clr_insn (target);
8123 op1 = gen_label_rtx ();
8124 jumpifnot (exp, op1);
8127 emit_0_to_1_insn (target);
8130 return ignore ? const0_rtx : target;
8132 case TRUTH_NOT_EXPR:
8133 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8134 /* The parser is careful to generate TRUTH_NOT_EXPR
8135 only with operands that are always zero or one. */
8136 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8137 target, 1, OPTAB_LIB_WIDEN);
8143 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8145 return expand_expr (TREE_OPERAND (exp, 1),
8146 (ignore ? const0_rtx : target),
8150 /* If we would have a "singleton" (see below) were it not for a
8151 conversion in each arm, bring that conversion back out. */
8152 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8153 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8154 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8155 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8157 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8158 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8160 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8161 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8162 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8163 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8164 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8165 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8166 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8167 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8168 return expand_expr (build1 (NOP_EXPR, type,
8169 build (COND_EXPR, TREE_TYPE (iftrue),
8170 TREE_OPERAND (exp, 0),
8172 target, tmode, modifier);
8176 /* Note that COND_EXPRs whose type is a structure or union
8177 are required to be constructed to contain assignments of
8178 a temporary variable, so that we can evaluate them here
8179 for side effect only. If type is void, we must do likewise. */
8181 /* If an arm of the branch requires a cleanup,
8182 only that cleanup is performed. */
8185 tree binary_op = 0, unary_op = 0;
8187 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8188 convert it to our mode, if necessary. */
8189 if (integer_onep (TREE_OPERAND (exp, 1))
8190 && integer_zerop (TREE_OPERAND (exp, 2))
8191 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8195 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8200 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8201 if (GET_MODE (op0) == mode)
8205 target = gen_reg_rtx (mode);
8206 convert_move (target, op0, unsignedp);
8210 /* Check for X ? A + B : A. If we have this, we can copy A to the
8211 output and conditionally add B. Similarly for unary operations.
8212 Don't do this if X has side-effects because those side effects
8213 might affect A or B and the "?" operation is a sequence point in
8214 ANSI. (operand_equal_p tests for side effects.) */
8216 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8217 && operand_equal_p (TREE_OPERAND (exp, 2),
8218 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8219 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8220 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8221 && operand_equal_p (TREE_OPERAND (exp, 1),
8222 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8223 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8224 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8225 && operand_equal_p (TREE_OPERAND (exp, 2),
8226 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8227 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8228 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8229 && operand_equal_p (TREE_OPERAND (exp, 1),
8230 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8231 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8233 /* If we are not to produce a result, we have no target. Otherwise,
8234 if a target was specified use it; it will not be used as an
intermediate target unless it is safe.  If no target, use a
temporary and compile the expression into that.  */
8240 else if (original_target
8241 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8242 || (singleton && GET_CODE (original_target) == REG
8243 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8244 && original_target == var_rtx (singleton)))
8245 && GET_MODE (original_target) == mode
8246 #ifdef HAVE_conditional_move
8247 && (! can_conditionally_move_p (mode)
8248 || GET_CODE (original_target) == REG
8249 || TREE_ADDRESSABLE (type))
8251 && (GET_CODE (original_target) != MEM
8252 || TREE_ADDRESSABLE (type)))
8253 temp = original_target;
8254 else if (TREE_ADDRESSABLE (type))
8257 temp = assign_temp (type, 0, 0, 1);
8259 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8260 do the test of X as a store-flag operation, do this as
8261 A + ((X != 0) << log C). Similarly for other simple binary
8262 operators. Only do for C == 1 if BRANCH_COST is low. */
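/* Illustrative sketch with hypothetical source, not from this file:
   for C == 4 the conditional

	r = x ? a + 4 : a;

   is emitted as if it were

	r = a + ((x != 0) << 2);	(tree_log2 (4) == 2)

   so no branch is needed when a store-flag insn is available.  For
   C == 1 the shift disappears entirely, which is why only that case
   is accepted when BRANCH_COST is below 3.  */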
8263 if (temp && singleton && binary_op
8264 && (TREE_CODE (binary_op) == PLUS_EXPR
8265 || TREE_CODE (binary_op) == MINUS_EXPR
8266 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8267 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8268 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8269 : integer_onep (TREE_OPERAND (binary_op, 1)))
8270 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8273 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8274 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8275 ? addv_optab : add_optab)
8276 : TREE_CODE (binary_op) == MINUS_EXPR
8277 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8278 ? subv_optab : sub_optab)
8279 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8282 /* If we had X ? A : A + 1, do this as A + (X == 0).
8284 We have to invert the truth value here and then put it
8285 back later if do_store_flag fails. We cannot simply copy
8286 TREE_OPERAND (exp, 0) to another variable and modify that
because invert_truthvalue can modify the tree pointed to
by its argument.  */
8289 if (singleton == TREE_OPERAND (exp, 1))
8290 TREE_OPERAND (exp, 0)
8291 = invert_truthvalue (TREE_OPERAND (exp, 0));
result = do_store_flag (TREE_OPERAND (exp, 0),
                        (safe_from_p (temp, singleton, 1)
                         ? temp : NULL_RTX),
                        mode, BRANCH_COST <= 1);
8298 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
result = expand_shift (LSHIFT_EXPR, mode, result,
                       build_int_2 (tree_log2
                                    (TREE_OPERAND (binary_op, 1)),
                                    0),
                       (safe_from_p (temp, singleton, 1)
                        ? temp : NULL_RTX), 0);
8309 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8310 return expand_binop (mode, boptab, op1, result, temp,
8311 unsignedp, OPTAB_LIB_WIDEN);
8313 else if (singleton == TREE_OPERAND (exp, 1))
8314 TREE_OPERAND (exp, 0)
8315 = invert_truthvalue (TREE_OPERAND (exp, 0));
8318 do_pending_stack_adjust ();
8320 op0 = gen_label_rtx ();
8322 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8326 /* If the target conflicts with the other operand of the
8327 binary op, we can't use it. Also, we can't use the target
8328 if it is a hard register, because evaluating the condition
8329 might clobber it. */
8331 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8332 || (GET_CODE (temp) == REG
8333 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8334 temp = gen_reg_rtx (mode);
8335 store_expr (singleton, temp, 0);
8338 expand_expr (singleton,
8339 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8340 if (singleton == TREE_OPERAND (exp, 1))
8341 jumpif (TREE_OPERAND (exp, 0), op0);
8343 jumpifnot (TREE_OPERAND (exp, 0), op0);
8345 start_cleanup_deferral ();
8346 if (binary_op && temp == 0)
8347 /* Just touch the other operand. */
8348 expand_expr (TREE_OPERAND (binary_op, 1),
8349 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8351 store_expr (build (TREE_CODE (binary_op), type,
8352 make_tree (type, temp),
8353 TREE_OPERAND (binary_op, 1)),
8356 store_expr (build1 (TREE_CODE (unary_op), type,
8357 make_tree (type, temp)),
8361 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8362 comparison operator. If we have one of these cases, set the
8363 output to A, branch on A (cse will merge these two references),
8364 then set the output to FOO. */
8366 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8367 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8368 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8369 TREE_OPERAND (exp, 1), 0)
8370 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8371 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8372 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8374 if (GET_CODE (temp) == REG
8375 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8376 temp = gen_reg_rtx (mode);
8377 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8378 jumpif (TREE_OPERAND (exp, 0), op0);
8380 start_cleanup_deferral ();
8381 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8385 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8386 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8387 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8388 TREE_OPERAND (exp, 2), 0)
8389 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8390 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8391 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8393 if (GET_CODE (temp) == REG
8394 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8395 temp = gen_reg_rtx (mode);
8396 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8397 jumpifnot (TREE_OPERAND (exp, 0), op0);
8399 start_cleanup_deferral ();
8400 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8405 op1 = gen_label_rtx ();
8406 jumpifnot (TREE_OPERAND (exp, 0), op0);
8408 start_cleanup_deferral ();
/* One branch of the cond can be void, if it never returns.  For
   example: A ? throw : E.  */
8413 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8414 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8416 expand_expr (TREE_OPERAND (exp, 1),
8417 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8418 end_cleanup_deferral ();
8420 emit_jump_insn (gen_jump (op1));
8423 start_cleanup_deferral ();
8425 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8426 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8428 expand_expr (TREE_OPERAND (exp, 2),
8429 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8432 end_cleanup_deferral ();
8443 /* Something needs to be initialized, but we didn't know
8444 where that thing was when building the tree. For example,
8445 it could be the return value of a function, or a parameter
to a function which is laid down on the stack, or a temporary
8447 variable which must be passed by reference.
8449 We guarantee that the expression will either be constructed
8450 or copied into our original target. */
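/* Illustrative sketch, hypothetical front-end source: a C++ call such as

	f (make_temporary ());

   produces a TARGET_EXPR whose slot is the temporary the argument must
   be constructed into; depending on context that slot may end up being
   a return-value location, a stack parameter, or a by-reference
   temporary, which is why its RTL may or may not exist yet.  */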
8452 tree slot = TREE_OPERAND (exp, 0);
8453 tree cleanups = NULL_TREE;
8456 if (TREE_CODE (slot) != VAR_DECL)
8460 target = original_target;
8462 /* Set this here so that if we get a target that refers to a
8463 register variable that's already been used, put_reg_into_stack
8464 knows that it should fix up those uses. */
8465 TREE_USED (slot) = 1;
8469 if (DECL_RTL_SET_P (slot))
8471 target = DECL_RTL (slot);
/* If we have already expanded the slot, don't do it again.  */
8474 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8479 target = assign_temp (type, 2, 0, 1);
8480 /* All temp slots at this level must not conflict. */
8481 preserve_temp_slots (target);
8482 SET_DECL_RTL (slot, target);
8483 if (TREE_ADDRESSABLE (slot))
8484 put_var_into_stack (slot);
8486 /* Since SLOT is not known to the called function
8487 to belong to its stack frame, we must build an explicit
8488 cleanup. This case occurs when we must build up a reference
8489 to pass the reference as an argument. In this case,
it is very likely that such a reference need not be built here.  */
8493 if (TREE_OPERAND (exp, 2) == 0)
8494 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8495 cleanups = TREE_OPERAND (exp, 2);
8500 /* This case does occur, when expanding a parameter which
8501 needs to be constructed on the stack. The target
8502 is the actual stack address that we want to initialize.
8503 The function we call will perform the cleanup in this case. */
8505 /* If we have already assigned it space, use that space,
8506 not target that we were passed in, as our target
8507 parameter is only a hint. */
8508 if (DECL_RTL_SET_P (slot))
8510 target = DECL_RTL (slot);
/* If we have already expanded the slot, don't do it again.  */
8513 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8518 SET_DECL_RTL (slot, target);
8519 /* If we must have an addressable slot, then make sure that
8520 the RTL that we just stored in slot is OK. */
8521 if (TREE_ADDRESSABLE (slot))
8522 put_var_into_stack (slot);
8526 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8527 /* Mark it as expanded. */
8528 TREE_OPERAND (exp, 1) = NULL_TREE;
8530 store_expr (exp1, target, 0);
8532 expand_decl_cleanup (NULL_TREE, cleanups);
8539 tree lhs = TREE_OPERAND (exp, 0);
8540 tree rhs = TREE_OPERAND (exp, 1);
8542 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8548 /* If lhs is complex, expand calls in rhs before computing it.
8549 That's so we don't compute a pointer and save it over a
8550 call. If lhs is simple, compute it first so we can give it
8551 as a target if the rhs is just a call. This avoids an
8552 extra temp and copy and that prevents a partial-subsumption
8553 which makes bad code. Actually we could treat
8554 component_ref's of vars like vars. */
8556 tree lhs = TREE_OPERAND (exp, 0);
8557 tree rhs = TREE_OPERAND (exp, 1);
/* Check for |= or &= of a bitfield of size one into another bitfield
   of size one.  In this case, (unless we need the result of the
   assignment) we can do this more efficiently with a
   test followed by an assignment, if necessary.

   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
   things change so we do, this code should be enhanced to
   support it.  */
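/* Illustrative sketch with a hypothetical struct, not from this file:

	struct s { unsigned a : 1, b : 1; } x;

	x.a |= x.b;	is emitted as	if (x.b) x.a = 1;
	x.a &= x.b;	is emitted as	if (! x.b) x.a = 0;

   replacing a read-modify-write of the destination bit with a test
   and, at most, one conditional store.  */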
8570 && TREE_CODE (lhs) == COMPONENT_REF
8571 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8572 || TREE_CODE (rhs) == BIT_AND_EXPR)
8573 && TREE_OPERAND (rhs, 0) == lhs
8574 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8575 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8576 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8578 rtx label = gen_label_rtx ();
8580 do_jump (TREE_OPERAND (rhs, 1),
8581 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8582 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8583 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8584 (TREE_CODE (rhs) == BIT_IOR_EXPR
8586 : integer_zero_node)),
8588 do_pending_stack_adjust ();
8593 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8599 if (!TREE_OPERAND (exp, 0))
8600 expand_null_return ();
8602 expand_return (TREE_OPERAND (exp, 0));
8605 case PREINCREMENT_EXPR:
8606 case PREDECREMENT_EXPR:
8607 return expand_increment (exp, 0, ignore);
8609 case POSTINCREMENT_EXPR:
8610 case POSTDECREMENT_EXPR:
8611 /* Faster to treat as pre-increment if result is not used. */
8612 return expand_increment (exp, ! ignore, ignore);
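/* Illustrative sketch, not part of the original source: when the value
   is not used, the statement

	i++;

   is expanded exactly like  ++i;  so the copy of the old value that a
   true post-increment would require is never made.  */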
8615 /* If nonzero, TEMP will be set to the address of something that might
8616 be a MEM corresponding to a stack slot. */
8619 /* Are we taking the address of a nested function? */
8620 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8621 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8622 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8623 && ! TREE_STATIC (exp))
8625 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8626 op0 = force_operand (op0, target);
/* If we are taking the address of something erroneous, just
   use zero.  */
else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
  return const0_rtx;
8634 /* We make sure to pass const0_rtx down if we came in with
8635 ignore set, to avoid doing the cleanups twice for something. */
8636 op0 = expand_expr (TREE_OPERAND (exp, 0),
8637 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8638 (modifier == EXPAND_INITIALIZER
8639 ? modifier : EXPAND_CONST_ADDRESS));
8641 /* If we are going to ignore the result, OP0 will have been set
8642 to const0_rtx, so just return it. Don't get confused and
8643 think we are taking the address of the constant. */
8647 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
clever and return a REG when given a MEM.  */
8649 op0 = protect_from_queue (op0, 1);
8651 /* We would like the object in memory. If it is a constant, we can
8652 have it be statically allocated into memory. For a non-constant,
8653 we need to allocate some memory and store the value into it. */
8655 if (CONSTANT_P (op0))
8656 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8658 else if (GET_CODE (op0) == MEM)
8660 mark_temp_addr_taken (op0);
8661 temp = XEXP (op0, 0);
8664 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8665 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8666 || GET_CODE (op0) == PARALLEL)
/* If this object is in a register, it cannot be addressed directly;
   copy it into a const-qualified memory temporary whose address we
   can take.  */
8670 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8671 tree nt = build_qualified_type (inner_type,
8672 (TYPE_QUALS (inner_type)
8673 | TYPE_QUAL_CONST));
8674 rtx memloc = assign_temp (nt, 1, 1, 1);
8676 mark_temp_addr_taken (memloc);
8677 if (GET_CODE (op0) == PARALLEL)
8678 /* Handle calls that pass values in multiple non-contiguous
8679 locations. The Irix 6 ABI has examples of this. */
8680 emit_group_store (memloc, op0,
8681 int_size_in_bytes (inner_type),
8682 TYPE_ALIGN (inner_type));
8684 emit_move_insn (memloc, op0);
8688 if (GET_CODE (op0) != MEM)
8691 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8693 temp = XEXP (op0, 0);
8694 #ifdef POINTERS_EXTEND_UNSIGNED
8695 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8696 && mode == ptr_mode)
8697 temp = convert_memory_address (ptr_mode, temp);
8702 op0 = force_operand (XEXP (op0, 0), target);
8705 if (flag_force_addr && GET_CODE (op0) != REG)
8706 op0 = force_reg (Pmode, op0);
8708 if (GET_CODE (op0) == REG
8709 && ! REG_USERVAR_P (op0))
8710 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
/* If we might have had a temp slot, add an equivalent address
   for it.  */
if (temp != 0)
  update_temp_slot_address (temp, op0);
8717 #ifdef POINTERS_EXTEND_UNSIGNED
8718 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8719 && mode == ptr_mode)
8720 op0 = convert_memory_address (ptr_mode, op0);
8725 case ENTRY_VALUE_EXPR:
8728 /* COMPLEX type for Extended Pascal & Fortran */
8731 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8734 /* Get the rtx code of the operands. */
8735 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8736 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8739 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8743 /* Move the real (op0) and imaginary (op1) parts to their location. */
8744 emit_move_insn (gen_realpart (mode, target), op0);
8745 emit_move_insn (gen_imagpart (mode, target), op1);
8747 insns = get_insns ();
8750 /* Complex construction should appear as a single unit. */
8751 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8752 each with a separate pseudo as destination.
8753 It's not correct for flow to treat them as a unit. */
8754 if (GET_CODE (target) != CONCAT)
8755 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8763 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8764 return gen_realpart (mode, op0);
8767 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8768 return gen_imagpart (mode, op0);
8772 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8776 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8779 target = gen_reg_rtx (mode);
8783 /* Store the realpart and the negated imagpart to target. */
8784 emit_move_insn (gen_realpart (partmode, target),
8785 gen_realpart (partmode, op0));
8787 imag_t = gen_imagpart (partmode, target);
8788 temp = expand_unop (partmode,
8789 ! unsignedp && flag_trapv
8790 && (GET_MODE_CLASS(partmode) == MODE_INT)
8791 ? negv_optab : neg_optab,
8792 gen_imagpart (partmode, op0), imag_t, 0);
8794 emit_move_insn (imag_t, temp);
8796 insns = get_insns ();
/* Conjugate should appear as a single unit.
   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8801 each with a separate pseudo as destination.
8802 It's not correct for flow to treat them as a unit. */
8803 if (GET_CODE (target) != CONCAT)
8804 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8811 case TRY_CATCH_EXPR:
8813 tree handler = TREE_OPERAND (exp, 1);
8815 expand_eh_region_start ();
8817 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8819 expand_eh_region_end_cleanup (handler);
8824 case TRY_FINALLY_EXPR:
8826 tree try_block = TREE_OPERAND (exp, 0);
8827 tree finally_block = TREE_OPERAND (exp, 1);
8828 rtx finally_label = gen_label_rtx ();
8829 rtx done_label = gen_label_rtx ();
8830 rtx return_link = gen_reg_rtx (Pmode);
8831 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8832 (tree) finally_label, (tree) return_link);
8833 TREE_SIDE_EFFECTS (cleanup) = 1;
8835 /* Start a new binding layer that will keep track of all cleanup
8836 actions to be performed. */
8837 expand_start_bindings (2);
8839 target_temp_slot_level = temp_slot_level;
8841 expand_decl_cleanup (NULL_TREE, cleanup);
8842 op0 = expand_expr (try_block, target, tmode, modifier);
8844 preserve_temp_slots (op0);
8845 expand_end_bindings (NULL_TREE, 0, 0);
8846 emit_jump (done_label);
8847 emit_label (finally_label);
8848 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8849 emit_indirect_jump (return_link);
8850 emit_label (done_label);
8854 case GOTO_SUBROUTINE_EXPR:
8856 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8857 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8858 rtx return_address = gen_label_rtx ();
8859 emit_move_insn (return_link,
8860 gen_rtx_LABEL_REF (Pmode, return_address));
8862 emit_label (return_address);
8867 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8870 return get_exception_pointer (cfun);
/* Function descriptors are not valid except as
   initialization constants, and should not be expanded.  */
8878 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
/* Here to do an ordinary binary operator, generating an instruction
   from the optab already placed in `this_optab'.  */
binop:
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
  subtarget = 0;
8886 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8887 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8889 temp = expand_binop (mode, this_optab, op0, op1, target,
8890 unsignedp, OPTAB_LIB_WIDEN);
8896 /* Similar to expand_expr, except that we don't specify a target, target
8897 mode, or modifier and we return the alignment of the inner type. This is
8898 used in cases where it is not necessary to align the result to the
8899 alignment of its type as long as we know the alignment of the result, for
8900 example for comparisons of BLKmode values. */
8903 expand_expr_unaligned (exp, palign)
8905 unsigned int *palign;
8908 tree type = TREE_TYPE (exp);
8909 enum machine_mode mode = TYPE_MODE (type);
8911 /* Default the alignment we return to that of the type. */
8912 *palign = TYPE_ALIGN (type);
/* The only case in which we do anything special is if the resulting mode
   is BLKmode.  */
8916 if (mode != BLKmode)
8917 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8919 switch (TREE_CODE (exp))
8923 case NON_LVALUE_EXPR:
8924 /* Conversions between BLKmode values don't change the underlying
8925 alignment or value. */
8926 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8927 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8931 /* Much of the code for this case is copied directly from expand_expr.
8932 We need to duplicate it here because we will do something different
in the fall-through case, so we need to handle the same exceptions
it does.  */
8936 tree array = TREE_OPERAND (exp, 0);
8937 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8938 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8939 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8942 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8945 /* Optimize the special-case of a zero lower bound.
8947 We convert the low_bound to sizetype to avoid some problems
8948 with constant folding. (E.g. suppose the lower bound is 1,
8949 and its mode is QI. Without the conversion, (ARRAY
8950 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8951 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8953 if (! integer_zerop (low_bound))
8954 index = size_diffop (index, convert (sizetype, low_bound));
8956 /* If this is a constant index into a constant array,
8957 just get the value from the array. Handle both the cases when
8958 we have an explicit constructor and when our operand is a variable
8959 that was declared const. */
8961 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8962 && host_integerp (index, 0)
8963 && 0 > compare_tree_int (index,
8964 list_length (CONSTRUCTOR_ELTS
8965 (TREE_OPERAND (exp, 0)))))
8969 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8970 i = tree_low_cst (index, 0);
8971 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8975 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8978 else if (optimize >= 1
8979 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8980 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8981 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8983 if (TREE_CODE (index) == INTEGER_CST)
8985 tree init = DECL_INITIAL (array);
8987 if (TREE_CODE (init) == CONSTRUCTOR)
8991 for (elem = CONSTRUCTOR_ELTS (init);
8992 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8993 elem = TREE_CHAIN (elem))
return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
9007 case ARRAY_RANGE_REF:
9008 /* If the operand is a CONSTRUCTOR, we can just extract the
9009 appropriate field if it is present. Don't do this if we have
9010 already written the data since we want to refer to that copy
9011 and varasm.c assumes that's what we'll do. */
9012 if (TREE_CODE (exp) == COMPONENT_REF
9013 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9014 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
9018 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
9019 elt = TREE_CHAIN (elt))
9020 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
9021 /* Note that unlike the case in expand_expr, we know this is
9022 BLKmode and hence not an integer. */
9023 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9027 enum machine_mode mode1;
9028 HOST_WIDE_INT bitsize, bitpos;
9031 unsigned int alignment;
9033 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9034 &mode1, &unsignedp, &volatilep,
9037 /* If we got back the original object, something is wrong. Perhaps
9038 we are evaluating an expression too early. In any event, don't
9039 infinitely recurse. */
9043 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
/* If this is a constant, put it into a register if it is a
   legitimate constant and OFFSET is 0; otherwise put it into memory.  */
9047 if (CONSTANT_P (op0))
9049 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9051 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9053 op0 = force_reg (inner_mode, op0);
9055 op0 = validize_mem (force_const_mem (inner_mode, op0));
9060 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9062 /* If this object is in a register, put it into memory.
9063 This case can't occur in C, but can in Ada if we have
9064 unchecked conversion of an expression from a scalar type to
9065 an array or record type. */
9066 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9067 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9069 tree nt = build_qualified_type (TREE_TYPE (tem),
9070 (TYPE_QUALS (TREE_TYPE (tem))
9071 | TYPE_QUAL_CONST));
9072 rtx memloc = assign_temp (nt, 1, 1, 1);
9074 mark_temp_addr_taken (memloc);
9075 emit_move_insn (memloc, op0);
9079 if (GET_CODE (op0) != MEM)
9082 if (GET_MODE (offset_rtx) != ptr_mode)
9083 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9085 #ifdef POINTERS_EXTEND_UNSIGNED
9086 if (GET_MODE (offset_rtx) != Pmode)
9087 offset_rtx = convert_memory_address (Pmode, offset_rtx);
9090 op0 = offset_address (op0, offset_rtx,
9091 highest_pow2_factor (offset));
9094 /* Don't forget about volatility even if this is a bitfield. */
9095 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9097 op0 = copy_rtx (op0);
9098 MEM_VOLATILE_P (op0) = 1;
9101 /* Check the access. */
9102 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9107 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9108 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9110 /* Check the access right of the pointer. */
9111 in_check_memory_usage = 1;
9112 if (size > BITS_PER_UNIT)
9113 emit_library_call (chkr_check_addr_libfunc,
9114 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9115 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9116 TYPE_MODE (sizetype),
9117 GEN_INT (MEMORY_USE_RO),
9118 TYPE_MODE (integer_type_node));
9119 in_check_memory_usage = 0;
9122 /* In cases where an aligned union has an unaligned object
9123 as a field, we might be extracting a BLKmode value from
9124 an integer-mode (e.g., SImode) object. Handle this case
9125 by doing the extract into an object as wide as the field
9126 (which we know to be the width of a basic mode), then
9127 storing into memory, and changing the mode to BLKmode.
9128 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9129 EXPAND_INITIALIZER), then we must not copy to a temporary. */
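/* Illustrative sketch with hypothetical sizes, not from this file: a
   misaligned 16-bit field stored inside an SImode word is pulled out
   by extract_bit_field into an HImode register, spilled to a fresh
   stack temporary, and the temporary is then relabelled BLKmode so
   the caller sees a memory reference of the field's own size.  */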
9130 if (mode1 == VOIDmode
9131 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9132 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9133 && (TYPE_ALIGN (type) > alignment
9134 || bitpos % TYPE_ALIGN (type) != 0)))
9136 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9138 if (ext_mode == BLKmode)
9140 /* In this case, BITPOS must start at a byte boundary. */
9141 if (GET_CODE (op0) != MEM
9142 || bitpos % BITS_PER_UNIT != 0)
9145 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
                                TYPE_QUAL_CONST);
rtx new = assign_temp (nt, 0, 1, 1);
9153 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9154 unsignedp, NULL_RTX, ext_mode,
9155 ext_mode, alignment,
9156 int_size_in_bytes (TREE_TYPE (tem)));
9158 /* If the result is a record type and BITSIZE is narrower than
9159 the mode of OP0, an integral mode, and this is a big endian
9160 machine, we must put the field into the high-order bits. */
9161 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9162 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9163 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                    size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                              - bitsize),
                    op0, 1);
9170 emit_move_insn (new, op0);
9171 op0 = copy_rtx (new);
9172 PUT_MODE (op0, BLKmode);
9176 /* Get a reference to just this component. */
9177 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9179 set_mem_attributes (op0, exp, 0);
/* Adjust the alignment in case the bit position is not
   a multiple of the alignment of the inner object.  */
while (bitpos % alignment != 0)
  alignment >>= 1;
9186 if (GET_CODE (XEXP (op0, 0)) == REG)
9187 mark_reg_pointer (XEXP (op0, 0), alignment);
9189 MEM_IN_STRUCT_P (op0) = 1;
9190 MEM_VOLATILE_P (op0) |= volatilep;
9192 *palign = alignment;
9201 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
/* Return the tree node if ARG corresponds to a string constant or zero
9205 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9206 in bytes within the string that ARG is accessing. The type of the
9207 offset will be `sizetype'. */
9210 string_constant (arg, ptr_offset)
9216 if (TREE_CODE (arg) == ADDR_EXPR
9217 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9219 *ptr_offset = size_zero_node;
9220 return TREE_OPERAND (arg, 0);
9222 else if (TREE_CODE (arg) == PLUS_EXPR)
9224 tree arg0 = TREE_OPERAND (arg, 0);
9225 tree arg1 = TREE_OPERAND (arg, 1);
9230 if (TREE_CODE (arg0) == ADDR_EXPR
9231 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9233 *ptr_offset = convert (sizetype, arg1);
9234 return TREE_OPERAND (arg0, 0);
9236 else if (TREE_CODE (arg1) == ADDR_EXPR
9237 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9239 *ptr_offset = convert (sizetype, arg0);
9240 return TREE_OPERAND (arg1, 0);
9247 /* Expand code for a post- or pre- increment or decrement
9248 and return the RTX for the result.
9249 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9252 expand_increment (exp, post, ignore)
9258 tree incremented = TREE_OPERAND (exp, 0);
9259 optab this_optab = add_optab;
9261 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9262 int op0_is_copy = 0;
9263 int single_insn = 0;
/* 1 means we can't store into OP0 directly,
   because it is a subreg narrower than a word,
   and we don't dare clobber the rest of the word.  */
int bad_subreg = 0;
9269 /* Stabilize any component ref that might need to be
9270 evaluated more than once below. */
9272 || TREE_CODE (incremented) == BIT_FIELD_REF
9273 || (TREE_CODE (incremented) == COMPONENT_REF
9274 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9275 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9276 incremented = stabilize_reference (incremented);
9277 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9278 ones into save exprs so that they don't accidentally get evaluated
9279 more than once by the code below. */
9280 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9281 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9282 incremented = save_expr (incremented);
9284 /* Compute the operands as RTX.
9285 Note whether OP0 is the actual lvalue or a copy of it:
9286 I believe it is a copy iff it is a register or subreg
9287 and insns were generated in computing it. */
9289 temp = get_last_insn ();
9290 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9292 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9293 in place but instead must do sign- or zero-extension during assignment,
9294 so we copy it into a new register and let the code below use it as
Note that we can safely modify this SUBREG since it is known not to be
9298 shared (it was made by the expand_expr call above). */
9300 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9303 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9307 else if (GET_CODE (op0) == SUBREG
9308 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9310 /* We cannot increment this SUBREG in place. If we are
9311 post-incrementing, get a copy of the old value. Otherwise,
9312 just mark that we cannot increment in place. */
9314 op0 = copy_to_reg (op0);
9319 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9320 && temp != get_last_insn ());
9321 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9322 EXPAND_MEMORY_USE_BAD);
9324 /* Decide whether incrementing or decrementing. */
9325 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9326 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9327 this_optab = sub_optab;
9329 /* Convert decrement by a constant into a negative increment. */
9330 if (this_optab == sub_optab
9331 && GET_CODE (op1) == CONST_INT)
9333 op1 = GEN_INT (-INTVAL (op1));
9334 this_optab = add_optab;
9337 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9338 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9340 /* For a preincrement, see if we can do this with a single instruction. */
9343 icode = (int) this_optab->handlers[(int) mode].insn_code;
9344 if (icode != (int) CODE_FOR_nothing
9345 /* Make sure that OP0 is valid for operands 0 and 1
9346 of the insn we want to queue. */
9347 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9348 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9349 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9353 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9354 then we cannot just increment OP0. We must therefore contrive to
9355 increment the original value. Then, for postincrement, we can return
9356 OP0 since it is a copy of the old value. For preincrement, expand here
9357 unless we can do it with a single insn.
9359 Likewise if storing directly into OP0 would clobber high bits
9360 we need to preserve (bad_subreg). */
9361 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9363 /* This is the easiest way to increment the value wherever it is.
9364 Problems with multiple evaluation of INCREMENTED are prevented
9365 because either (1) it is a component_ref or preincrement,
9366 in which case it was stabilized above, or (2) it is an array_ref
9367 with constant index in an array in a register, which is
9368 safe to reevaluate. */
9369 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9370 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9371 ? MINUS_EXPR : PLUS_EXPR),
9374 TREE_OPERAND (exp, 1));
9376 while (TREE_CODE (incremented) == NOP_EXPR
9377 || TREE_CODE (incremented) == CONVERT_EXPR)
9379 newexp = convert (TREE_TYPE (incremented), newexp);
9380 incremented = TREE_OPERAND (incremented, 0);
temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9384 return post ? op0 : temp;
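/* Illustrative sketch, not part of the original source: NEWEXP above
   simply re-expresses the increment as an ordinary assignment, e.g.

	i++	becomes the tree for	i = i + 1

   which expand_assignment already knows how to store into any lvalue.  */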
9389 /* We have a true reference to the value in OP0.
9390 If there is an insn to add or subtract in this mode, queue it.
9391 Queueing the increment insn avoids the register shuffling
9392 that often results if we must increment now and first save
9393 the old value for subsequent use. */
9395 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9396 op0 = stabilize (op0);
9399 icode = (int) this_optab->handlers[(int) mode].insn_code;
9400 if (icode != (int) CODE_FOR_nothing
9401 /* Make sure that OP0 is valid for operands 0 and 1
9402 of the insn we want to queue. */
9403 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9404 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9406 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9407 op1 = force_reg (mode, op1);
9409 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9411 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9413 rtx addr = (general_operand (XEXP (op0, 0), mode)
9414 ? force_reg (Pmode, XEXP (op0, 0))
9415 : copy_to_reg (XEXP (op0, 0)));
9418 op0 = replace_equiv_address (op0, addr);
9419 temp = force_reg (GET_MODE (op0), op0);
9420 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9421 op1 = force_reg (mode, op1);
9423 /* The increment queue is LIFO, thus we have to `queue'
9424 the instructions in reverse order. */
9425 enqueue_insn (op0, gen_move_insn (op0, temp));
9426 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
/* Preincrement, or we can't increment with one simple insn.  */
if (post)
  /* Save a copy of the value before inc or dec, to return it later.  */
  temp = value = copy_to_reg (op0);
else
  /* Arrange to return the incremented value.  */
  /* Copy the rtx because expand_binop will protect from the queue,
     and the results of that would be invalid for us to return
     if our caller does emit_queue before using our result.  */
  temp = copy_rtx (value = op0);
9442 /* Increment however we can. */
9443 op1 = expand_binop (mode, this_optab, value, op1,
9444 current_function_check_memory_usage ? NULL_RTX : op0,
9445 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9446 /* Make sure the value is stored into OP0. */
9448 emit_move_insn (op0, op1);
9453 /* At the start of a function, record that we have no previously-pushed
9454 arguments waiting to be popped. */
9457 init_pending_stack_adjust ()
9459 pending_stack_adjust = 0;
9462 /* When exiting from function, if safe, clear out any pending stack adjust
9463 so the adjustment won't get done.
9465 Note, if the current function calls alloca, then it must have a
9466 frame pointer regardless of the value of flag_omit_frame_pointer. */
9469 clear_pending_stack_adjust ()
9471 #ifdef EXIT_IGNORE_STACK
9473 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9474 && EXIT_IGNORE_STACK
9475 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9476 && ! flag_inline_functions)
9478 stack_pointer_delta -= pending_stack_adjust,
9479 pending_stack_adjust = 0;
9484 /* Pop any previously-pushed arguments that have not been popped yet. */
9487 do_pending_stack_adjust ()
9489 if (inhibit_defer_pop == 0)
9491 if (pending_stack_adjust != 0)
9492 adjust_stack (GEN_INT (pending_stack_adjust));
9493 pending_stack_adjust = 0;
9497 /* Expand conditional expressions. */
9499 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
LABEL is an rtx of code CODE_LABEL, in this function and all the
functions here.  */
9504 jumpifnot (exp, label)
9508 do_jump (exp, label, NULL_RTX);
9511 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9518 do_jump (exp, NULL_RTX, label);
9521 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9522 the result is zero, or IF_TRUE_LABEL if the result is one.
9523 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9524 meaning fall through in that case.
9526 do_jump always does any pending stack adjust except when it does not
9527 actually perform a jump. An example where there is no jump
9528 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9530 This function is responsible for optimizing cases such as
9531 &&, || and comparison operators in EXP. */
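/* Usage sketch with hypothetical labels, not from this file: for

	if (a && b) ... else ...

   a caller emits  do_jump (cond, else_label, NULL_RTX)  and the
   TRUTH_ANDIF_EXPR case below recurses roughly as

	do_jump (a, else_label, NULL_RTX);	(falls through when A is true)
	do_jump (b, else_label, NULL_RTX);

   so the value of the && itself is never materialized.  */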
9534 do_jump (exp, if_false_label, if_true_label)
9536 rtx if_false_label, if_true_label;
9538 enum tree_code code = TREE_CODE (exp);
9539 /* Some cases need to create a label to jump to
9540 in order to properly fall through.
9541 These cases set DROP_THROUGH_LABEL nonzero. */
9542 rtx drop_through_label = 0;
9546 enum machine_mode mode;
9548 #ifdef MAX_INTEGER_COMPUTATION_MODE
9549 check_max_integer_computation_mode (exp);
9560 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9566 /* This is not true with #pragma weak */
9568 /* The address of something can never be zero. */
9570 emit_jump (if_true_label);
9575 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9576 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9577 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9578 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
/* If we are narrowing the operand, we have to do the compare in the
   narrower mode.  */
9583 if ((TYPE_PRECISION (TREE_TYPE (exp))
9584 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9586 case NON_LVALUE_EXPR:
9587 case REFERENCE_EXPR:
9592 /* These cannot change zero->non-zero or vice versa. */
9593 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9596 case WITH_RECORD_EXPR:
9597 /* Put the object on the placeholder list, recurse through our first
9598 operand, and pop the list. */
9599 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9601 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9602 placeholder_list = TREE_CHAIN (placeholder_list);
/* This is never fewer insns than evaluating the PLUS_EXPR followed by
   a test and can be longer if the test is eliminated.  */
9609 /* Reduce to minus. */
9610 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9611 TREE_OPERAND (exp, 0),
9612 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9613 TREE_OPERAND (exp, 1))));
9614 /* Process as MINUS. */
9618 /* Non-zero iff operands of minus differ. */
9619 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9620 TREE_OPERAND (exp, 0),
9621 TREE_OPERAND (exp, 1)),
9622 NE, NE, if_false_label, if_true_label);
9626 /* If we are AND'ing with a small constant, do this comparison in the
9627 smallest type that fits. If the machine doesn't have comparisons
9628 that small, it will be converted back to the wider comparison.
9629 This helps if we are testing the sign bit of a narrower object.
9630 combine can't do this for us because it can't know whether a
9631 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9633 if (! SLOW_BYTE_ACCESS
9634 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9635 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9636 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9637 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9638 && (type = type_for_mode (mode, 1)) != 0
9639 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9640 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9641 != CODE_FOR_nothing))
9643 do_jump (convert (type, exp), if_false_label, if_true_label);
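/* Illustrative sketch with hypothetical source, not from this file:
   testing the sign bit of a byte-wide value kept in an int,

	if (x & 0x80) ...

   has tree_floor_log2 (0x80) == 7, so the test is redone as a QImode
   comparison when the target provides one, instead of a full SImode
   compare of the widened value.  */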
9648 case TRUTH_NOT_EXPR:
9649 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9652 case TRUTH_ANDIF_EXPR:
9653 if (if_false_label == 0)
9654 if_false_label = drop_through_label = gen_label_rtx ();
9655 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9656 start_cleanup_deferral ();
9657 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9658 end_cleanup_deferral ();
9661 case TRUTH_ORIF_EXPR:
9662 if (if_true_label == 0)
9663 if_true_label = drop_through_label = gen_label_rtx ();
9664 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9665 start_cleanup_deferral ();
9666 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9667 end_cleanup_deferral ();
9672 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9673 preserve_temp_slots (NULL_RTX);
9677 do_pending_stack_adjust ();
9678 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9684 case ARRAY_RANGE_REF:
9686 HOST_WIDE_INT bitsize, bitpos;
9688 enum machine_mode mode;
9692 unsigned int alignment;
9694 /* Get description of this reference. We don't actually care
9695 about the underlying object here. */
9696 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9697 &unsignedp, &volatilep, &alignment);
9699 type = type_for_size (bitsize, unsignedp);
9700 if (! SLOW_BYTE_ACCESS
9701 && type != 0 && bitsize >= 0
9702 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9703 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9704 != CODE_FOR_nothing))
9706 do_jump (convert (type, exp), if_false_label, if_true_label);
9713 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9714 if (integer_onep (TREE_OPERAND (exp, 1))
9715 && integer_zerop (TREE_OPERAND (exp, 2)))
9716 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9718 else if (integer_zerop (TREE_OPERAND (exp, 1))
9719 && integer_onep (TREE_OPERAND (exp, 2)))
9720 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9724 rtx label1 = gen_label_rtx ();
9725 drop_through_label = gen_label_rtx ();
9727 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9729 start_cleanup_deferral ();
9730 /* Now the THEN-expression. */
9731 do_jump (TREE_OPERAND (exp, 1),
9732 if_false_label ? if_false_label : drop_through_label,
9733 if_true_label ? if_true_label : drop_through_label);
9734 /* In case the do_jump just above never jumps. */
9735 do_pending_stack_adjust ();
9736 emit_label (label1);
9738 /* Now the ELSE-expression. */
9739 do_jump (TREE_OPERAND (exp, 2),
9740 if_false_label ? if_false_label : drop_through_label,
9741 if_true_label ? if_true_label : drop_through_label);
9742 end_cleanup_deferral ();
9748 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9750 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9751 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9753 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9754 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9757 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9758 fold (build (EQ_EXPR, TREE_TYPE (exp),
9759 fold (build1 (REALPART_EXPR,
9760 TREE_TYPE (inner_type),
9762 fold (build1 (REALPART_EXPR,
9763 TREE_TYPE (inner_type),
9765 fold (build (EQ_EXPR, TREE_TYPE (exp),
9766 fold (build1 (IMAGPART_EXPR,
9767 TREE_TYPE (inner_type),
9769 fold (build1 (IMAGPART_EXPR,
9770 TREE_TYPE (inner_type),
9772 if_false_label, if_true_label);
9775 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9776 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9778 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9779 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9780 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9782 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9788 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9790 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9791 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9793 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9794 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9797 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9798 fold (build (NE_EXPR, TREE_TYPE (exp),
9799 fold (build1 (REALPART_EXPR,
9800 TREE_TYPE (inner_type),
9802 fold (build1 (REALPART_EXPR,
9803 TREE_TYPE (inner_type),
9805 fold (build (NE_EXPR, TREE_TYPE (exp),
9806 fold (build1 (IMAGPART_EXPR,
9807 TREE_TYPE (inner_type),
9809 fold (build1 (IMAGPART_EXPR,
9810 TREE_TYPE (inner_type),
9812 if_false_label, if_true_label);
9815 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9816 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9818 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9819 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9820 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9822 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9827 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9828 if (GET_MODE_CLASS (mode) == MODE_INT
9829 && ! can_compare_p (LT, mode, ccp_jump))
9830 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9832 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9836 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9837 if (GET_MODE_CLASS (mode) == MODE_INT
9838 && ! can_compare_p (LE, mode, ccp_jump))
9839 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9841 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9845 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9846 if (GET_MODE_CLASS (mode) == MODE_INT
9847 && ! can_compare_p (GT, mode, ccp_jump))
9848 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9850 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9854 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9855 if (GET_MODE_CLASS (mode) == MODE_INT
9856 && ! can_compare_p (GE, mode, ccp_jump))
9857 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9859 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9862 case UNORDERED_EXPR:
9865 enum rtx_code cmp, rcmp;
9868 if (code == UNORDERED_EXPR)
9869 cmp = UNORDERED, rcmp = ORDERED;
9871 cmp = ORDERED, rcmp = UNORDERED;
9872 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9875 if (! can_compare_p (cmp, mode, ccp_jump)
9876 && (can_compare_p (rcmp, mode, ccp_jump)
9877 /* If the target doesn't provide either UNORDERED or ORDERED
9878 comparisons, canonicalize on UNORDERED for the library. */
9879 || rcmp == UNORDERED))
9883 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9885 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9890 enum rtx_code rcode1;
9891 enum tree_code tcode2;
9915 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9916 if (can_compare_p (rcode1, mode, ccp_jump))
9917 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9921 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9922 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9925 /* If the target doesn't support combined unordered
9926 compares, decompose into UNORDERED + comparison. */
9927 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9928 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9929 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9930 do_jump (exp, if_false_label, if_true_label);
/* Handle the special cases of
     __builtin_expect (<test>, 0)  and
     __builtin_expect (<test>, 1)

   We need to do this here, so that <test> is not converted to a SCC
   operation on machines that use condition code registers and COMPARE
   like the PowerPC, and then the jump is done based on whether the SCC
   operation produced a 1 or 0.  */
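/* Illustrative sketch, not part of the original source: for

	if (__builtin_expect (x == 0, 0)) ...

   expand_builtin_expect_jump emits the test as a direct conditional
   branch carrying the "unlikely" prediction, rather than first
   materializing the value of  x == 0  in a register and jumping on
   that.  */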
9944 /* Check for a built-in function. */
9945 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9947 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9948 tree arglist = TREE_OPERAND (exp, 1);
9950 if (TREE_CODE (fndecl) == FUNCTION_DECL
9951 && DECL_BUILT_IN (fndecl)
9952 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9953 && arglist != NULL_TREE
9954 && TREE_CHAIN (arglist) != NULL_TREE)
9956 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9959 if (seq != NULL_RTX)
9966 /* fall through and generate the normal code. */
9970 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* This is not needed any more and causes poor code since it causes
   comparisons and tests from non-SI objects to have different code
   paths.  */
9975 /* Copy to register to avoid generating bad insns by cse
9976 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9977 if (!cse_not_expected && GET_CODE (temp) == MEM)
9978 temp = copy_to_reg (temp);
9980 do_pending_stack_adjust ();
9981 /* Do any postincrements in the expression that was tested. */
9984 if (GET_CODE (temp) == CONST_INT
9985 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9986 || GET_CODE (temp) == LABEL_REF)
9988 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9992 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9993 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9994 /* Note swapping the labels gives us not-equal. */
9995 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9996 else if (GET_MODE (temp) != VOIDmode)
9997 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9998 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9999 GET_MODE (temp), NULL_RTX, 0,
10000 if_false_label, if_true_label);
10005 if (drop_through_label)
10007 /* If do_jump produces code that might be jumped around,
10008 do any stack adjusts from that code, before the place
10009 where control merges in. */
10010 do_pending_stack_adjust ();
10011 emit_label (drop_through_label);
10015 /* Given a comparison expression EXP for values too wide to be compared
10016 with one insn, test the comparison and jump to the appropriate label.
10017 The code of EXP is ignored; we always test GT if SWAP is 0,
10018 and LT if SWAP is 1. */
10021 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10024 rtx if_false_label, if_true_label;
10026 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10027 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10028 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10029 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10031 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10034 /* Compare OP0 with OP1, word at a time, in mode MODE.
10035 UNSIGNEDP says to do unsigned comparison.
10036 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10039 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10040 enum machine_mode mode;
10043 rtx if_false_label, if_true_label;
10045 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10046 rtx drop_through_label = 0;
10049 if (! if_true_label || ! if_false_label)
10050 drop_through_label = gen_label_rtx ();
10051 if (! if_true_label)
10052 if_true_label = drop_through_label;
10053 if (! if_false_label)
10054 if_false_label = drop_through_label;
10056 /* Compare a word at a time, high order first. */
10057 for (i = 0; i < nwords; i++)
10059 rtx op0_word, op1_word;
10061 if (WORDS_BIG_ENDIAN)
10063 op0_word = operand_subword_force (op0, i, mode);
10064 op1_word = operand_subword_force (op1, i, mode);
10068 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10069 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10072 /* All but high-order word must be compared as unsigned. */
10073 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10074 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10075 NULL_RTX, if_true_label);
10077 /* Consider lower words only if these are equal. */
10078 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10079 NULL_RTX, 0, NULL_RTX, if_false_label);
10082 if (if_false_label)
10083 emit_jump (if_false_label);
10084 if (drop_through_label)
10085 emit_label (drop_through_label);
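/* Illustrative sketch for hypothetical 64-bit operands A and B on a
   32-bit target, not from this file: the loop above emits roughly

	if (A.hi > B.hi) goto if_true_label;	(signedness of the original
	if (A.hi != B.hi) goto if_false_label;	 comparison applies here only)
	if ((unsigned) A.lo > (unsigned) B.lo) goto if_true_label;
	goto if_false_label;

   every word below the most significant one is compared unsigned.  */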
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, 0, if_false_label,
			     NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
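/* Illustrative sketch: for a two-word EQ_EXPR the loop above emits,
   word by word,

	if (word0 (op0) != word0 (op1)) goto if_false_label;
	if (word1 (op0) != word1 (op1)) goto if_false_label;
	goto if_true_label;

   word0/word1 here are just shorthand for the operand_subword_force
   results; any mismatching word sends control to IF_FALSE_LABEL.  */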
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
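/* Illustrative sketch: for a two-word OP0 the preferred strategy above
   reduces the whole test to a single word_mode comparison, conceptually

	part = word0 (op0) | word1 (op0);
	if (part == 0) goto if_true_label; else goto if_false_label;

   which is why one IOR per extra word usually beats a chain of
   per-word comparisons.  */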
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set (CC0)
   according to the result.  The decision as to signed or unsigned
   comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
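/* Illustrative note (names X_RTX and COND are only for this example):
   a caller that wants "x == 0" as a cc0 test can write

	cond = compare_from_rtx (x_rtx, const0_rtx, EQ, 0, SImode,
				 NULL_RTX, 0);

   and, unless the comparison folded to a constant, COND will be the
   rtx (eq (cc0) (const_int 0)) with the cc0-setting insn already
   emitted into the instruction stream.  */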
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}
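/* Illustrative note (EXP and LABEL stand for the caller's tree and
   label): for a C-level test like "if (a < b)" the expander calls

	do_compare_and_jump (exp, LT, LTU, NULL_RTX, label);

   and the signedness of the operand type selects LT or LTU; passing
   NULL_RTX for IF_FALSE_LABEL makes the false case drop through.  */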
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
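  /* Illustrative sketch: for a source-level test like "(x & 8) != 0"
     the transformation above computes the flag directly, conceptually

	op0 = (x >> 3) & 1;

     (with the AND omitted when bit 3 happens to be the sign bit),
     instead of going through an scc instruction.  */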
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
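/* Illustrative sketch: when emit_store_flag fails, the fallback above
   materializes the flag with a set/compare/jump/set sequence,
   conceptually

	target = 1;
	if (op0 <cond> op1) goto label;
	target = 0;
     label:

   with the two constants exchanged when INVERT is set.  */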
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
      > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (type_for_size (index_bits, 0),
				index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
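/* Illustrative note: for a switch whose case values span
   MINVAL .. MINVAL+RANGE this routine emits, roughly,

	casesi (index, minval, range, table_label, default_label)

   after coercing each operand to the mode and predicate that the
   target's casesi pattern advertises in insn_data.  */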
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
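/* Illustrative sketch: with 4-byte table entries the dispatch built
   above is, conceptually,

	entry = *(table_label + index * 4);
	goto entry;

   i.e. exactly the PLUS/MULT/LABEL_REF address constructed from the
   size of CASE_VECTOR_MODE, followed by a load of the entry and an
   indirect jump through it.  */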
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
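/* Illustrative note: the folded index expression built above is

	(index_type) index_expr - (index_type) minval

   so do_tablejump sees a zero-based index that a single unsigned
   comparison against RANGE can bounds-check.  */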