1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "typeclass.h"
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
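/* For example, on a machine whose stack grows downward while argument
   offsets within a block grow upward, exactly one of the two macros
   tested below is defined, so PUSH_ARGS_REVERSED gets defined and the
   arguments are processed from last to first.  */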
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
62 #else
63 #define STACK_PUSH_CODE PRE_INC
64 #endif
65 #endif
67 /* Assume that case vectors are not pc-relative. */
68 #ifndef CASE_VECTOR_PC_RELATIVE
69 #define CASE_VECTOR_PC_RELATIVE 0
70 #endif
72 /* Hook called by safe_from_p for language-specific tree codes. It is
73 up to the language front-end to install a hook if it has any such
74 codes that safe_from_p needs to know about. Since safe_from_p will
75 recursively explore the TREE_OPERANDs of an expression, this hook
76 should not reexamine those pieces. This routine may recursively
77 call safe_from_p; it should always pass `0' as the TOP_P
78 parameter. */
79 int (*lang_safe_from_p) PARAMS ((rtx, tree));
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
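/* For instance, while cse_not_expected is zero an indirect address such
   as (mem (mem (reg))) is rejected and the inner load is forced into a
   register where cse can see it; once the flag is nonzero, the
   indirect form is emitted as-is.  */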
89 /* Don't check memory usage, since code is being emitted to check a memory
90 usage. Used when current_function_check_memory_usage is true, to avoid
91 infinite recursion. */
92 static int in_check_memory_usage;
94 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
95 static tree placeholder_list = 0;
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces
100 {
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
112 };
114 /* This structure is used by store_by_pieces to describe the clear to
115 be performed. */
117 struct store_by_pieces
118 {
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
126 PTR constfundata;
127 int reverse;
128 };
130 extern struct obstack permanent_obstack;
132 static rtx get_push_address PARAMS ((int));
134 static rtx enqueue_insn PARAMS ((rtx, rtx));
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
136 PARAMS ((unsigned HOST_WIDE_INT,
137 unsigned int));
138 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
139 struct move_by_pieces *));
140 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
141 enum machine_mode));
142 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 unsigned int));
144 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
145 unsigned int));
146 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
147 enum machine_mode,
148 struct store_by_pieces *));
149 static rtx get_subtarget PARAMS ((rtx));
150 static int is_zeros_p PARAMS ((tree));
151 static int mostly_zeros_p PARAMS ((tree));
152 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
153 HOST_WIDE_INT, enum machine_mode,
154 tree, tree, unsigned int, int,
155 int));
156 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
157 HOST_WIDE_INT));
158 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
159 HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int,
161 unsigned int, HOST_WIDE_INT, int));
162 static enum memory_use_mode
163 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
164 static tree save_noncopied_parts PARAMS ((tree, tree));
165 static tree init_noncopied_parts PARAMS ((tree, tree));
166 static int fixed_type_p PARAMS ((tree));
167 static rtx var_rtx PARAMS ((tree));
168 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
169 static rtx expand_increment PARAMS ((tree, int, int));
170 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
171 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
172 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
173 rtx, rtx));
174 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
184 /* If a memory-to-memory move would take MOVE_RATIO or more simple
185 move-instruction sequences, we will do a movstr or libcall instead. */
187 #ifndef MOVE_RATIO
188 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
189 #define MOVE_RATIO 2
190 #else
191 /* If we are optimizing for space (-Os), cut down the default move ratio. */
192 #define MOVE_RATIO (optimize_size ? 3 : 15)
193 #endif
194 #endif
196 /* This macro is used to determine whether move_by_pieces should be called
197 to perform a structure copy. */
198 #ifndef MOVE_BY_PIECES_P
199 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
201 #endif
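/* For example, with 4-byte words, MOVE_MAX of 4 and a known 32-bit
   alignment, a 16-byte copy takes four SImode moves, so
   MOVE_BY_PIECES_P (16, 32) is true whenever MOVE_RATIO exceeds 4 and
   the copy is expanded inline rather than through movstr or a
   libcall.  */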
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
206 /* This array records the insn_code of insns to perform block clears. */
207 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
209 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
211 #ifndef SLOW_UNALIGNED_ACCESS
212 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
213 #endif
215 /* This is run once per compilation to set up which modes can be used
216 directly in memory and to initialize the block move optab. */
218 void
219 init_expr_once ()
220 {
221 rtx insn, pat;
222 enum machine_mode mode;
223 int num_clobbers;
224 rtx mem, mem1;
226 start_sequence ();
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
235 pat = PATTERN (insn);
237 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
238 mode = (enum machine_mode) ((int) mode + 1))
239 {
240 int regno;
241 register rtx reg;
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
247 /* See if there is some register that can be used in this mode and
248 directly loaded or stored from memory. */
250 if (mode != VOIDmode && mode != BLKmode)
251 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
252 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
253 regno++)
254 {
255 if (! HARD_REGNO_MODE_OK (regno, mode))
256 continue;
258 reg = gen_rtx_REG (mode, regno);
260 SET_SRC (pat) = mem;
261 SET_DEST (pat) = reg;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_load[(int) mode] = 1;
265 SET_SRC (pat) = mem1;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = reg;
271 SET_DEST (pat) = mem;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_store[(int) mode] = 1;
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem1;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
279 }
280 }
282 end_sequence ();
283 }
285 /* This is run at the start of compiling a function. */
287 void
288 init_expr ()
289 {
290 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
292 pending_chain = 0;
293 pending_stack_adjust = 0;
294 stack_pointer_delta = 0;
295 inhibit_defer_pop = 0;
296 saveregs_value = 0;
297 apply_args_value = 0;
298 forced_labels = 0;
299 }
301 void
302 mark_expr_status (p)
303 struct expr_status *p;
304 {
305 if (p == NULL)
306 return;
308 ggc_mark_rtx (p->x_saveregs_value);
309 ggc_mark_rtx (p->x_apply_args_value);
310 ggc_mark_rtx (p->x_forced_labels);
321 /* Small sanity check that the queue is empty at the end of a function. */
323 void
324 finish_expr_for_function ()
325 {
326 if (pending_chain)
327 abort ();
328 }
330 /* Manage the queue of increment instructions to be output
331 for POSTINCREMENT_EXPR expressions, etc. */
333 /* Queue up to increment (or change) VAR later. BODY says how:
334 BODY should be the same thing you would pass to emit_insn
335 to increment right away. It will go to emit_insn later on.
337 The value is a QUEUED expression to be used in place of VAR
338 where you want to guarantee the pre-incrementation value of VAR. */
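/* As an illustrative sketch, expanding VAR++ for a 4-byte VAR might
   enqueue the addition like this:

	rtx queued = enqueue_insn (var, gen_add2_insn (var, GEN_INT (4)));

   and then use QUEUED wherever the pre-increment value of VAR is
   needed; the addition itself is emitted later by emit_queue.  */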
340 static rtx
341 enqueue_insn (var, body)
342 rtx var, body;
343 {
344 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
345 body, pending_chain);
346 return pending_chain;
347 }
349 /* Use protect_from_queue to convert a QUEUED expression
350 into something that you can put immediately into an instruction.
351 If the queued incrementation has not happened yet,
352 protect_from_queue returns the variable itself.
353 If the incrementation has happened, protect_from_queue returns a temp
354 that contains a copy of the old value of the variable.
356 Any time an rtx which might possibly be a QUEUED is to be put
357 into an instruction, it must be passed through protect_from_queue first.
358 QUEUED expressions are not meaningful in instructions.
360 Do not pass a value through protect_from_queue and then hold
361 on to it for a while before putting it in an instruction!
362 If the queue is flushed in between, incorrect code will result. */
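/* A typical (illustrative) use, immediately before emitting an insn
   that reads X and writes Y:

	x = protect_from_queue (x, 0);
	y = protect_from_queue (y, 1);
	emit_move_insn (y, x);

   The results must be used right away; holding them across a call to
   emit_queue can yield incorrect code, as described above.  */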
364 rtx
365 protect_from_queue (x, modify)
366 register rtx x;
367 int modify;
368 {
369 register RTX_CODE code = GET_CODE (x);
371 #if 0 /* A QUEUED can hang around after the queue is forced out. */
372 /* Shortcut for most common case. */
373 if (pending_chain == 0)
374 return x;
375 #endif
379 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
380 use of autoincrement. Make a copy of the contents of the memory
381 location rather than a copy of the address, but not if the value is
382 of mode BLKmode. Don't modify X in place since it might be
383 shared. */
384 if (code == MEM && GET_MODE (x) != BLKmode
385 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
387 register rtx y = XEXP (x, 0);
388 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
390 MEM_COPY_ATTRIBUTES (new, x);
392 if (QUEUED_INSN (y))
393 {
394 register rtx temp = gen_reg_rtx (GET_MODE (new));
395 emit_insn_before (gen_move_insn (temp, new),
396 QUEUED_INSN (y));
397 return temp;
398 }
399 /* Copy the address into a pseudo, so that the returned value
400 remains correct across calls to emit_queue. */
401 XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
402 return new;
404 /* Otherwise, recursively protect the subexpressions of all
405 the kinds of rtx's that can contain a QUEUED. */
406 if (code == MEM)
407 {
408 rtx tem = protect_from_queue (XEXP (x, 0), 0);
409 if (tem != XEXP (x, 0))
410 {
411 x = copy_rtx (x);
412 XEXP (x, 0) = tem;
413 }
414 }
415 else if (code == PLUS || code == MULT)
417 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
418 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
419 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
420 {
421 x = copy_rtx (x);
422 XEXP (x, 0) = new0;
423 XEXP (x, 1) = new1;
424 }
425 }
426 return x;
427 }
428 /* If the increment has not happened, use the variable itself. Copy it
429 into a new pseudo so that the value remains correct across calls to
430 emit_queue. */
431 if (QUEUED_INSN (x) == 0)
432 return copy_to_reg (QUEUED_VAR (x));
433 /* If the increment has happened and a pre-increment copy exists,
434 use that copy. */
435 if (QUEUED_COPY (x) != 0)
436 return QUEUED_COPY (x);
437 /* The increment has happened but we haven't set up a pre-increment copy.
438 Set one up now, and use it. */
439 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
440 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
441 QUEUED_INSN (x));
442 return QUEUED_COPY (x);
443 }
445 /* Return nonzero if X contains a QUEUED expression:
446 if it contains anything that will be altered by a queued increment.
447 We handle only combinations of MEM, PLUS, MINUS and MULT operators
448 since memory addresses generally contain only those. */
450 static int
451 queued_subexp_p (x)
452 rtx x;
453 {
454 register enum rtx_code code = GET_CODE (x);
455 switch (code)
456 {
457 case QUEUED:
458 return 1;
459 case MEM:
460 return queued_subexp_p (XEXP (x, 0));
461 case MULT:
462 case PLUS:
463 case MINUS:
464 return (queued_subexp_p (XEXP (x, 0))
465 || queued_subexp_p (XEXP (x, 1)));
466 default:
467 return 0;
468 }
469 }
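/* For example, (mem:SI (queued ...)) and
   (plus:SI (queued ...) (const_int 4)) both contain a QUEUED, so
   queued_subexp_p returns 1 for them, while it returns 0 for a plain
   (reg:SI 100).  */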
471 /* Perform all the pending incrementations. */
473 void
474 emit_queue ()
475 {
476 register rtx p;
477 while ((p = pending_chain))
479 rtx body = QUEUED_BODY (p);
481 if (GET_CODE (body) == SEQUENCE)
483 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
484 emit_insn (QUEUED_BODY (p));
485 }
486 else
487 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
488 pending_chain = QUEUED_NEXT (p);
492 /* Copy data from FROM to TO, where the machine modes are not the same.
493 Both modes may be integer, or both may be floating.
494 UNSIGNEDP should be nonzero if FROM is an unsigned type.
495 This causes zero-extension instead of sign-extension. */
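/* For example, to narrow a DImode value FROM into an SImode register
   TO, a caller writes

	convert_move (to, from, 0);

   and this routine picks a truncdisi2 pattern when the target has
   one, or falls back to a lowpart move.  */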
497 void
498 convert_move (to, from, unsignedp)
499 register rtx to, from;
500 int unsignedp;
501 {
502 enum machine_mode to_mode = GET_MODE (to);
503 enum machine_mode from_mode = GET_MODE (from);
504 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
505 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
509 /* rtx code for making an equivalent value. */
510 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
512 to = protect_from_queue (to, 1);
513 from = protect_from_queue (from, 0);
515 if (to_real != from_real)
516 abort ();
518 /* If FROM is a SUBREG that indicates that we have already done at least
519 the required extension, strip it. We don't handle such SUBREGs as
520 TO here. */
522 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
523 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
524 >= GET_MODE_SIZE (to_mode))
525 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
526 from = gen_lowpart (to_mode, from), from_mode = to_mode;
528 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
529 abort ();
531 if (to_mode == from_mode
532 || (from_mode == VOIDmode && CONSTANT_P (from)))
533 {
534 emit_move_insn (to, from);
535 return;
536 }
538 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
540 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
541 abort ();
543 if (VECTOR_MODE_P (to_mode))
544 from = gen_rtx_SUBREG (to_mode, from, 0);
545 else
546 to = gen_rtx_SUBREG (from_mode, to, 0);
548 emit_move_insn (to, from);
549 return;
550 }
552 if (to_real != from_real)
553 abort ();
555 if (to_real)
556 {
557 rtx value;
559 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
561 /* Try converting directly if the insn is supported. */
562 if ((code = can_extend_p (to_mode, from_mode, 0))
565 emit_unop_insn (code, to, from, UNKNOWN);
570 #ifdef HAVE_trunchfqf2
571 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
573 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
577 #ifdef HAVE_trunctqfqf2
578 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
580 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
584 #ifdef HAVE_truncsfqf2
585 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
587 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
591 #ifdef HAVE_truncdfqf2
592 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
594 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
598 #ifdef HAVE_truncxfqf2
599 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
601 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
605 #ifdef HAVE_trunctfqf2
606 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
608 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
613 #ifdef HAVE_trunctqfhf2
614 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
616 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
620 #ifdef HAVE_truncsfhf2
621 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
623 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
627 #ifdef HAVE_truncdfhf2
628 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
630 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
634 #ifdef HAVE_truncxfhf2
635 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
637 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
641 #ifdef HAVE_trunctfhf2
642 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
644 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
649 #ifdef HAVE_truncsftqf2
650 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
652 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
656 #ifdef HAVE_truncdftqf2
657 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
659 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
663 #ifdef HAVE_truncxftqf2
664 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
666 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
670 #ifdef HAVE_trunctftqf2
671 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
673 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
678 #ifdef HAVE_truncdfsf2
679 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
681 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
685 #ifdef HAVE_truncxfsf2
686 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
688 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
692 #ifdef HAVE_trunctfsf2
693 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
695 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
699 #ifdef HAVE_truncxfdf2
700 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
702 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
706 #ifdef HAVE_trunctfdf2
707 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
709 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
721 libcall = extendsfdf2_libfunc;
725 libcall = extendsfxf2_libfunc;
729 libcall = extendsftf2_libfunc;
741 libcall = truncdfsf2_libfunc;
745 libcall = extenddfxf2_libfunc;
749 libcall = extenddftf2_libfunc;
761 libcall = truncxfsf2_libfunc;
765 libcall = truncxfdf2_libfunc;
777 libcall = trunctfsf2_libfunc;
781 libcall = trunctfdf2_libfunc;
793 if (libcall == (rtx) 0)
794 /* This conversion is not implemented yet. */
795 abort ();
797 start_sequence ();
798 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
799 1, from, from_mode);
800 insns = get_insns ();
801 end_sequence ();
802 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
803 from));
804 return;
805 }
807 /* Now both modes are integers. */
809 /* Handle expanding beyond a word. */
810 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
811 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
818 enum machine_mode lowpart_mode;
819 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
821 /* Try converting directly if the insn is supported. */
822 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
825 /* If FROM is a SUBREG, put it into a register. Do this
826 so that we always generate the same set of insns for
827 better cse'ing; if an intermediate assignment occurred,
828 we won't be doing the operation directly on the SUBREG. */
829 if (optimize > 0 && GET_CODE (from) == SUBREG)
830 from = force_reg (from_mode, from);
831 emit_unop_insn (code, to, from, equiv_code);
832 return;
833 }
834 /* Next, try converting via full word. */
835 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
836 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
837 != CODE_FOR_nothing))
839 if (GET_CODE (to) == REG)
840 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
841 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
842 emit_unop_insn (code, to,
843 gen_lowpart (word_mode, to), equiv_code);
844 return;
845 }
847 /* No special multiword conversion insn; do it by hand. */
850 /* Since we will turn this into a no conflict block, we must ensure
851 that the source does not overlap the target. */
853 if (reg_overlap_mentioned_p (to, from))
854 from = force_reg (from_mode, from);
856 /* Get a copy of FROM widened to a word, if necessary. */
857 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
858 lowpart_mode = word_mode;
860 lowpart_mode = from_mode;
862 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
864 lowpart = gen_lowpart (lowpart_mode, to);
865 emit_move_insn (lowpart, lowfrom);
867 /* Compute the value to put in each remaining word. */
868 if (unsignedp)
869 fill_value = const0_rtx;
870 else
871 {
872 #ifdef HAVE_slt
873 if (HAVE_slt
874 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
875 && STORE_FLAG_VALUE == -1)
877 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
878 lowpart_mode, 0);
879 fill_value = gen_reg_rtx (word_mode);
880 emit_insn (gen_slt (fill_value));
885 fill_value
886 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
887 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
888 NULL_RTX, 0);
889 fill_value = convert_to_mode (word_mode, fill_value, 1);
893 /* Fill the remaining words. */
894 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
896 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
897 rtx subword = operand_subword (to, index, 1, to_mode);
899 if (subword == 0)
900 abort ();
902 if (fill_value != subword)
903 emit_move_insn (subword, fill_value);
906 insns = get_insns ();
909 emit_no_conflict_block (insns, to, from, NULL_RTX,
910 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
911 return;
912 }
914 /* Truncating multi-word to a word or less. */
915 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
916 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
918 if (!((GET_CODE (from) == MEM
919 && ! MEM_VOLATILE_P (from)
920 && direct_load[(int) to_mode]
921 && ! mode_dependent_address_p (XEXP (from, 0)))
922 || GET_CODE (from) == REG
923 || GET_CODE (from) == SUBREG))
924 from = force_reg (from_mode, from);
925 convert_move (to, gen_lowpart (word_mode, from), 0);
926 return;
927 }
929 /* Handle pointer conversion. */ /* SPEE 900220. */
930 if (to_mode == PQImode)
932 if (from_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
935 #ifdef HAVE_truncqipqi2
936 if (HAVE_truncqipqi2)
938 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
941 #endif /* HAVE_truncqipqi2 */
945 if (from_mode == PQImode)
947 if (to_mode != QImode)
948 {
949 from = convert_to_mode (QImode, from, unsignedp);
950 from_mode = QImode;
951 }
952 else
953 {
954 #ifdef HAVE_extendpqiqi2
955 if (HAVE_extendpqiqi2)
957 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
960 #endif /* HAVE_extendpqiqi2 */
965 if (to_mode == PSImode)
967 if (from_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
970 #ifdef HAVE_truncsipsi2
971 if (HAVE_truncsipsi2)
973 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
976 #endif /* HAVE_truncsipsi2 */
980 if (from_mode == PSImode)
982 if (to_mode != SImode)
983 {
984 from = convert_to_mode (SImode, from, unsignedp);
985 from_mode = SImode;
986 }
987 else
988 {
989 #ifdef HAVE_extendpsisi2
990 if (! unsignedp && HAVE_extendpsisi2)
992 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
995 #endif /* HAVE_extendpsisi2 */
996 #ifdef HAVE_zero_extendpsisi2
997 if (unsignedp && HAVE_zero_extendpsisi2)
999 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1002 #endif /* HAVE_zero_extendpsisi2 */
1007 if (to_mode == PDImode)
1009 if (from_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1012 #ifdef HAVE_truncdipdi2
1013 if (HAVE_truncdipdi2)
1015 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1018 #endif /* HAVE_truncdipdi2 */
1022 if (from_mode == PDImode)
1024 if (to_mode != DImode)
1025 {
1026 from = convert_to_mode (DImode, from, unsignedp);
1027 from_mode = DImode;
1028 }
1029 else
1030 {
1031 #ifdef HAVE_extendpdidi2
1032 if (HAVE_extendpdidi2)
1034 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1037 #endif /* HAVE_extendpdidi2 */
1042 /* Now follow all the conversions between integers
1043 no more than a word long. */
1045 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1046 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (from_mode)))
1050 if (!((GET_CODE (from) == MEM
1051 && ! MEM_VOLATILE_P (from)
1052 && direct_load[(int) to_mode]
1053 && ! mode_dependent_address_p (XEXP (from, 0)))
1054 || GET_CODE (from) == REG
1055 || GET_CODE (from) == SUBREG))
1056 from = force_reg (from_mode, from);
1057 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1058 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1059 from = copy_to_reg (from);
1060 emit_move_insn (to, gen_lowpart (to_mode, from));
1061 return;
1062 }
1064 /* Handle extension. */
1065 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1067 /* Convert directly if that works. */
1068 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1069 != CODE_FOR_nothing)
1071 emit_unop_insn (code, to, from, equiv_code);
1072 return;
1073 }
1074 else
1075 {
1076 enum machine_mode intermediate;
1077 rtx tmp;
1078 tree shift_amount;
1080 /* Search for a mode to convert via. */
1081 for (intermediate = from_mode; intermediate != VOIDmode;
1082 intermediate = GET_MODE_WIDER_MODE (intermediate))
1083 if (((can_extend_p (to_mode, intermediate, unsignedp)
1084 != CODE_FOR_nothing)
1085 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1086 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1087 GET_MODE_BITSIZE (intermediate))))
1088 && (can_extend_p (intermediate, from_mode, unsignedp)
1089 != CODE_FOR_nothing))
1091 convert_move (to, convert_to_mode (intermediate, from,
1092 unsignedp), unsignedp);
1093 return;
1094 }
1096 /* No suitable intermediate mode.
1097 Generate what we need with shifts. */
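/* For example, extending a signed QImode value to SImode with no
   usable extend insn shifts it left by 24 bits and arithmetically
   back right by 24 bits, replicating the sign bit through the upper
   three bytes.  */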
1098 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1099 - GET_MODE_BITSIZE (from_mode), 0);
1100 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1101 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1102 to, unsignedp);
1103 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1104 to, unsignedp);
1105 if (tmp != to)
1106 emit_move_insn (to, tmp);
1107 return;
1111 /* Support special truncate insns for certain modes. */
1113 if (from_mode == DImode && to_mode == SImode)
1115 #ifdef HAVE_truncdisi2
1116 if (HAVE_truncdisi2)
1118 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1126 if (from_mode == DImode && to_mode == HImode)
1128 #ifdef HAVE_truncdihi2
1129 if (HAVE_truncdihi2)
1131 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1139 if (from_mode == DImode && to_mode == QImode)
1141 #ifdef HAVE_truncdiqi2
1142 if (HAVE_truncdiqi2)
1144 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1152 if (from_mode == SImode && to_mode == HImode)
1154 #ifdef HAVE_truncsihi2
1155 if (HAVE_truncsihi2)
1157 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1165 if (from_mode == SImode && to_mode == QImode)
1167 #ifdef HAVE_truncsiqi2
1168 if (HAVE_truncsiqi2)
1170 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1178 if (from_mode == HImode && to_mode == QImode)
1180 #ifdef HAVE_trunchiqi2
1181 if (HAVE_trunchiqi2)
1183 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1191 if (from_mode == TImode && to_mode == DImode)
1193 #ifdef HAVE_trunctidi2
1194 if (HAVE_trunctidi2)
1196 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1204 if (from_mode == TImode && to_mode == SImode)
1206 #ifdef HAVE_trunctisi2
1207 if (HAVE_trunctisi2)
1209 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1217 if (from_mode == TImode && to_mode == HImode)
1219 #ifdef HAVE_trunctihi2
1220 if (HAVE_trunctihi2)
1222 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1230 if (from_mode == TImode && to_mode == QImode)
1232 #ifdef HAVE_trunctiqi2
1233 if (HAVE_trunctiqi2)
1235 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1239 convert_move (to, force_reg (from_mode, from), unsignedp);
1243 /* Handle truncation of volatile memrefs, and so on;
1244 the things that couldn't be truncated directly,
1245 and for which there was no special instruction. */
1246 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1248 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1249 emit_move_insn (to, temp);
1250 return;
1251 }
1253 /* Mode combination is not recognized. */
1254 abort ();
1255 }
1257 /* Return an rtx for a value that would result
1258 from converting X to mode MODE.
1259 Both X and MODE may be floating, or both integer.
1260 UNSIGNEDP is nonzero if X is an unsigned value.
1261 This can be done by referring to a part of X in place
1262 or by copying to a new temporary with conversion.
1264 This function *must not* call protect_from_queue
1265 except when putting X into an insn (in which case convert_move does it). */
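/* For example,

	rtx wide = convert_to_mode (SImode, x, 1);

   returns an SImode rtx holding the zero-extended (UNSIGNEDP nonzero)
   value of X, either by reusing part of X in place or via a fresh
   pseudo.  */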
1267 rtx
1268 convert_to_mode (mode, x, unsignedp)
1269 enum machine_mode mode;
1270 rtx x;
1271 int unsignedp;
1272 {
1273 return convert_modes (mode, VOIDmode, x, unsignedp);
1274 }
1276 /* Return an rtx for a value that would result
1277 from converting X from mode OLDMODE to mode MODE.
1278 Both modes may be floating, or both integer.
1279 UNSIGNEDP is nonzero if X is an unsigned value.
1281 This can be done by referring to a part of X in place
1282 or by copying to a new temporary with conversion.
1284 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1286 This function *must not* call protect_from_queue
1287 except when putting X into an insn (in which case convert_move does it). */
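/* For example, a CONST_INT carries VOIDmode, so a caller that knows
   the constant arose from a QImode computation writes

	y = convert_modes (SImode, QImode, x, 0);

   letting OLDMODE supply the signedness information that the rtx
   itself cannot.  */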
1289 rtx
1290 convert_modes (mode, oldmode, x, unsignedp)
1291 enum machine_mode mode, oldmode;
1292 rtx x;
1293 int unsignedp;
1294 {
1295 register rtx temp;
1297 /* If FROM is a SUBREG that indicates that we have already done at least
1298 the required extension, strip it. */
1300 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1301 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1302 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1303 x = gen_lowpart (mode, x);
1305 if (GET_MODE (x) != VOIDmode)
1306 oldmode = GET_MODE (x);
1308 if (mode == oldmode)
1309 return x;
1311 /* There is one case that we must handle specially: If we are converting
1312 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1313 we are to interpret the constant as unsigned, gen_lowpart will do
1314 the wrong thing if the constant appears negative. What we want to do is
1315 make the high-order word of the constant zero, not all ones. */
1317 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1318 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1319 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1321 HOST_WIDE_INT val = INTVAL (x);
1323 if (oldmode != VOIDmode
1324 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1326 int width = GET_MODE_BITSIZE (oldmode);
1328 /* We need to zero extend VAL. */
1329 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1332 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1335 /* We can do this with a gen_lowpart if both desired and current modes
1336 are integer, and this is either a constant integer, a register, or a
1337 non-volatile MEM. Except for the constant case where MODE is no
1338 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1340 if ((GET_CODE (x) == CONST_INT
1341 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1342 || (GET_MODE_CLASS (mode) == MODE_INT
1343 && GET_MODE_CLASS (oldmode) == MODE_INT
1344 && (GET_CODE (x) == CONST_DOUBLE
1345 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1346 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1347 && direct_load[(int) mode])
1348 || (GET_CODE (x) == REG
1349 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1350 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1352 /* ?? If we don't know OLDMODE, we have to assume here that
1353 X does not need sign- or zero-extension. This may not be
1354 the case, but it's the best we can do. */
1355 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1356 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1358 HOST_WIDE_INT val = INTVAL (x);
1359 int width = GET_MODE_BITSIZE (oldmode);
1361 /* We must sign or zero-extend in this case. Start by
1362 zero-extending, then sign extend if we need to. */
1363 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1364 if (! unsignedp
1365 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1366 val |= (HOST_WIDE_INT) (-1) << width;
1368 return GEN_INT (trunc_int_for_mode (val, mode));
1371 return gen_lowpart (mode, x);
1374 temp = gen_reg_rtx (mode);
1375 convert_move (temp, x, unsignedp);
1376 return temp;
1377 }
1379 /* This macro is used to determine what the largest unit size that
1380 move_by_pieces can use is. */
1382 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1383 move efficiently, as opposed to MOVE_MAX which is the maximum
1384 number of bytes we can move with a single instruction. */
1386 #ifndef MOVE_MAX_PIECES
1387 #define MOVE_MAX_PIECES MOVE_MAX
1388 #endif
1390 /* Generate several move instructions to copy LEN bytes
1391 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1392 The caller must pass FROM and TO
1393 through protect_from_queue before calling.
1395 When TO is NULL, the emit_single_push_insn is used to push the
1396 data onto the stack.
1398 ALIGN is maximum alignment we can assume. */
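/* For example, copying 12 bytes between two word-aligned BLKmode MEMs
   via

	move_by_pieces (to, from, 12, 32);

   emits three SImode moves on a typical 32-bit target.  */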
1400 void
1401 move_by_pieces (to, from, len, align)
1402 rtx to, from;
1403 unsigned HOST_WIDE_INT len;
1404 unsigned int align;
1405 {
1406 struct move_by_pieces data;
1407 rtx to_addr, from_addr = XEXP (from, 0);
1408 unsigned int max_size = MOVE_MAX_PIECES + 1;
1409 enum machine_mode mode = VOIDmode, tmode;
1410 enum insn_code icode;
1413 data.from_addr = from_addr;
1414 if (to)
1415 {
1416 to_addr = XEXP (to, 0);
1417 data.to = to;
1418 data.autinc_to
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1423 }
1424 else
1425 {
1426 to_addr = NULL_RTX;
1427 data.to = NULL_RTX;
1428 data.autinc_to = 1;
1429 #ifdef STACK_GROWS_DOWNWARD
1430 data.reverse = 1;
1431 #else
1432 data.reverse = 0;
1433 #endif
1434 }
1435 data.to_addr = to_addr;
1437 data.autinc_from
1438 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1439 || GET_CODE (from_addr) == POST_INC
1440 || GET_CODE (from_addr) == POST_DEC);
1442 data.explicit_inc_from = 0;
1443 data.explicit_inc_to = 0;
1444 if (data.reverse) data.offset = len;
1445 data.len = len;
1447 /* If copying requires more than two move insns,
1448 copy addresses to registers (to make displacements shorter)
1449 and use post-increment if available. */
1450 if (!(data.autinc_from && data.autinc_to)
1451 && move_by_pieces_ninsns (len, align) > 2)
1453 /* Find the mode of the largest move... */
1454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1456 if (GET_MODE_SIZE (tmode) < max_size)
1457 mode = tmode;
1459 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1461 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1462 data.autinc_from = 1;
1463 data.explicit_inc_from = -1;
1465 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 data.autinc_from = 1;
1469 data.explicit_inc_from = 1;
1471 if (!data.autinc_from && CONSTANT_P (from_addr))
1472 data.from_addr = copy_addr_to_reg (from_addr);
1473 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1475 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1476 data.autinc_to = 1;
1477 data.explicit_inc_to = -1;
1479 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1481 data.to_addr = copy_addr_to_reg (to_addr);
1482 data.autinc_to = 1;
1483 data.explicit_inc_to = 1;
1485 if (!data.autinc_to && CONSTANT_P (to_addr))
1486 data.to_addr = copy_addr_to_reg (to_addr);
1489 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1490 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1491 align = MOVE_MAX * BITS_PER_UNIT;
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1496 while (max_size > 1)
1498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1500 if (GET_MODE_SIZE (tmode) < max_size)
1501 mode = tmode;
1503 if (mode == VOIDmode)
1504 break;
1506 icode = mov_optab->handlers[(int) mode].insn_code;
1507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1508 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1510 max_size = GET_MODE_SIZE (mode);
1513 /* The code above should have handled everything. */
1514 if (data.len > 0)
1515 abort ();
1516 }
1518 /* Return number of insns required to move L bytes by pieces.
1519 ALIGN (in bits) is maximum alignment we can assume. */
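/* For example, with MOVE_MAX of 4 and 32-bit alignment, a 10-byte
   block takes two SImode moves plus one HImode move, so
   move_by_pieces_ninsns (10, 32) returns 3.  */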
1521 static unsigned HOST_WIDE_INT
1522 move_by_pieces_ninsns (l, align)
1523 unsigned HOST_WIDE_INT l;
1524 unsigned int align;
1525 {
1526 unsigned HOST_WIDE_INT n_insns = 0;
1527 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1530 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1531 align = MOVE_MAX * BITS_PER_UNIT;
1533 while (max_size > 1)
1535 enum machine_mode mode = VOIDmode, tmode;
1536 enum insn_code icode;
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1541 mode = tmode;
1543 if (mode == VOIDmode)
1544 break;
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1548 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1550 max_size = GET_MODE_SIZE (mode);
1551 }
1553 return n_insns;
1554 }
1558 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1559 with move instructions for mode MODE. GENFUN is the gen_... function
1560 to make a move insn for that mode. DATA has all the other info. */
1562 static void
1563 move_by_pieces_1 (genfun, mode, data)
1564 rtx (*genfun) PARAMS ((rtx, ...));
1565 enum machine_mode mode;
1566 struct move_by_pieces *data;
1567 {
1568 unsigned int size = GET_MODE_SIZE (mode);
1571 while (data->len >= size)
1572 {
1573 if (data->reverse)
1574 data->offset -= size;
1578 if (data->autinc_to)
1580 to1 = gen_rtx_MEM (mode, data->to_addr);
1581 MEM_COPY_ATTRIBUTES (to1, data->to);
1582 }
1583 else
1584 to1 = adjust_address (data->to, mode, data->offset);
1587 if (data->autinc_from)
1589 from1 = gen_rtx_MEM (mode, data->from_addr);
1590 MEM_COPY_ATTRIBUTES (from1, data->from);
1591 }
1592 else
1593 from1 = adjust_address (data->from, mode, data->offset);
1595 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1596 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1597 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1598 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1600 if (data->to)
1601 emit_insn ((*genfun) (to1, from1));
1602 else
1603 emit_single_push_insn (mode, from1, NULL);
1605 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1606 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1607 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1608 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1610 if (! data->reverse)
1611 data->offset += size;
1613 data->len -= size;
1614 }
1615 }
1617 /* Emit code to move a block Y to a block X.
1618 This may be done with string-move instructions,
1619 with multiple scalar move instructions, or with a library call.
1621 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1623 SIZE is an rtx that says how long they are.
1624 ALIGN is the maximum alignment we can assume they have.
1626 Return the address of the new block, if memcpy is called and returns it,
1627 0 otherwise. */
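/* For example, a 64-byte structure assignment with known 32-bit
   alignment arrives here as

	emit_block_move (x, y, GEN_INT (64), 32);

   and is expanded by move_by_pieces, by a movstr pattern, or by a
   library call, in that order of preference.  */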
1629 rtx
1630 emit_block_move (x, y, size, align)
1631 rtx x, y, size;
1632 unsigned int align;
1633 {
1634 rtx retval = 0;
1636 #ifdef TARGET_MEM_FUNCTIONS
1637 static tree fn;
1638 tree call_expr, arg_list;
1639 #endif
1641 if (GET_MODE (x) != BLKmode)
1642 abort ();
1644 if (GET_MODE (y) != BLKmode)
1645 abort ();
1647 x = protect_from_queue (x, 1);
1648 y = protect_from_queue (y, 0);
1649 size = protect_from_queue (size, 0);
1651 if (GET_CODE (x) != MEM)
1652 abort ();
1653 if (GET_CODE (y) != MEM)
1654 abort ();
1658 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1659 move_by_pieces (x, y, INTVAL (size), align);
1660 else
1661 {
1662 /* Try the most limited insn first, because there's no point
1663 including more than one in the machine description unless
1664 the more limited one has some advantage. */
1666 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1667 enum machine_mode mode;
1669 /* Since this is a move insn, we don't care about volatility. */
1670 volatile_ok = 1;
1672 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1673 mode = GET_MODE_WIDER_MODE (mode))
1675 enum insn_code code = movstr_optab[(int) mode];
1676 insn_operand_predicate_fn pred;
1678 if (code != CODE_FOR_nothing
1679 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1680 here because if SIZE is less than the mode mask, as it is
1681 returned by the macro, it will definitely be less than the
1682 actual mode mask. */
1683 && ((GET_CODE (size) == CONST_INT
1684 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1685 <= (GET_MODE_MASK (mode) >> 1)))
1686 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1687 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1688 || (*pred) (x, BLKmode))
1689 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1690 || (*pred) (y, BLKmode))
1691 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1692 || (*pred) (opalign, VOIDmode)))
1695 rtx last = get_last_insn ();
1698 op2 = convert_to_mode (mode, size, 1);
1699 pred = insn_data[(int) code].operand[2].predicate;
1700 if (pred != 0 && ! (*pred) (op2, mode))
1701 op2 = copy_to_mode_reg (mode, op2);
1703 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1705 if (pat)
1706 {
1707 emit_insn (pat);
1708 return 0;
1709 }
1710 else
1711 delete_insns_since (last);
1717 /* X, Y, or SIZE may have been passed through protect_from_queue.
1719 It is unsafe to save the value generated by protect_from_queue
1720 and reuse it later. Consider what happens if emit_queue is
1721 called before the return value from protect_from_queue is used.
1723 Expansion of the CALL_EXPR below will call emit_queue before
1724 we are finished emitting RTL for argument setup. So if we are
1725 not careful we could get the wrong value for an argument.
1727 To avoid this problem we go ahead and emit code to copy X, Y &
1728 SIZE into new pseudos. We can then place those new pseudos
1729 into an RTL_EXPR and use them later, even after a call to
1730 emit_queue.
1732 Note this is not strictly needed for library calls since they
1733 do not call emit_queue before loading their arguments. However,
1734 we may need to have library calls call emit_queue in the future
1735 since failing to do so could cause problems for targets which
1736 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1737 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1738 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1740 #ifdef TARGET_MEM_FUNCTIONS
1741 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1743 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1744 TREE_UNSIGNED (integer_type_node));
1745 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1748 #ifdef TARGET_MEM_FUNCTIONS
1749 /* It is incorrect to use the libcall calling conventions to call
1750 memcpy in this context.
1752 This could be a user call to memcpy and the user may wish to
1753 examine the return value from memcpy.
1755 For targets where libcalls and normal calls have different conventions
1756 for returning pointers, we could end up generating incorrect code.
1758 So instead of using a libcall sequence we build up a suitable
1759 CALL_EXPR and expand the call in the normal fashion. */
1760 if (fn == NULL_TREE)
1764 /* This was copied from except.c, I don't know if all this is
1765 necessary in this context or not. */
1766 fn = get_identifier ("memcpy");
1767 fntype = build_pointer_type (void_type_node);
1768 fntype = build_function_type (fntype, NULL_TREE);
1769 fn = build_decl (FUNCTION_DECL, fn, fntype);
1770 ggc_add_tree_root (&fn, 1);
1771 DECL_EXTERNAL (fn) = 1;
1772 TREE_PUBLIC (fn) = 1;
1773 DECL_ARTIFICIAL (fn) = 1;
1774 TREE_NOTHROW (fn) = 1;
1775 make_decl_rtl (fn, NULL);
1776 assemble_external (fn);
1779 /* We need to make an argument list for the function call.
1781 memcpy has three arguments, the first two are void * addresses and
1782 the last is a size_t byte count for the copy. */
1783 arg_list
1784 = build_tree_list (NULL_TREE,
1785 make_tree (build_pointer_type (void_type_node), x));
1786 TREE_CHAIN (arg_list)
1787 = build_tree_list (NULL_TREE,
1788 make_tree (build_pointer_type (void_type_node), y));
1789 TREE_CHAIN (TREE_CHAIN (arg_list))
1790 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1791 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1793 /* Now we have to build up the CALL_EXPR itself. */
1794 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1795 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1796 call_expr, arg_list, NULL_TREE);
1797 TREE_SIDE_EFFECTS (call_expr) = 1;
1799 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1800 #else
1801 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1802 VOIDmode, 3, y, Pmode, x, Pmode,
1803 convert_to_mode (TYPE_MODE (integer_type_node), size,
1804 TREE_UNSIGNED (integer_type_node)),
1805 TYPE_MODE (integer_type_node));
1806 #endif
1808 return retval;
1809 }
1812 /* Copy all or part of a value X into registers starting at REGNO.
1813 The number of registers to be filled is NREGS. */
1815 void
1816 move_block_to_reg (regno, x, nregs, mode)
1817 int regno;
1818 rtx x;
1819 int nregs;
1820 enum machine_mode mode;
1821 {
1822 int i;
1823 #ifdef HAVE_load_multiple
1824 rtx pat;
1825 rtx last;
1826 #endif
1828 if (nregs == 0)
1829 return;
1831 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1832 x = validize_mem (force_const_mem (mode, x));
1834 /* See if the machine can do this with a load multiple insn. */
1835 #ifdef HAVE_load_multiple
1836 if (HAVE_load_multiple)
1838 last = get_last_insn ();
1839 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1840 GEN_INT (nregs));
1841 if (pat)
1842 {
1843 emit_insn (pat);
1844 return;
1845 }
1846 else
1847 delete_insns_since (last);
1848 }
1849 #endif
1851 for (i = 0; i < nregs; i++)
1852 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1853 operand_subword_force (x, i, mode));
1856 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1857 The number of registers to be filled is NREGS. SIZE indicates the number
1858 of bytes in the object X. */
1860 void
1861 move_block_from_reg (regno, x, nregs, size)
1862 int regno;
1863 rtx x;
1864 int nregs;
1865 int size;
1866 {
1867 int i;
1868 #ifdef HAVE_store_multiple
1869 rtx pat;
1870 rtx last;
1871 #endif
1872 enum machine_mode mode;
1874 if (nregs == 0)
1875 return;
1877 /* If SIZE is that of a mode no bigger than a word, just use that
1878 mode's store operation. */
1879 if (size <= UNITS_PER_WORD
1880 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1881 {
1882 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1883 return;
1884 }
1886 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1887 to the left before storing to memory. Note that the previous test
1888 doesn't handle all cases (e.g. SIZE == 3). */
1889 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1891 rtx tem = operand_subword (x, 0, 1, BLKmode);
1892 rtx shift;
1894 if (tem == 0)
1895 abort ();
1897 shift = expand_shift (LSHIFT_EXPR, word_mode,
1898 gen_rtx_REG (word_mode, regno),
1899 build_int_2 ((UNITS_PER_WORD - size)
1900 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1901 emit_move_insn (tem, shift);
1902 return;
1903 }
1905 /* See if the machine can do this with a store multiple insn. */
1906 #ifdef HAVE_store_multiple
1907 if (HAVE_store_multiple)
1909 last = get_last_insn ();
1910 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1911 GEN_INT (nregs));
1912 if (pat)
1913 {
1914 emit_insn (pat);
1915 return;
1916 }
1917 else
1918 delete_insns_since (last);
1919 }
1920 #endif
1922 for (i = 0; i < nregs; i++)
1924 rtx tem = operand_subword (x, i, 1, BLKmode);
1926 if (tem == 0)
1927 abort ();
1929 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1933 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1934 registers represented by a PARALLEL. SSIZE represents the total size of
1935 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1936 ORIG_SRC. */
1937 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1938 the balance will be in what would be the low-order memory addresses, i.e.
1939 left justified for big endian, right justified for little endian. This
1940 happens to be true for the targets currently using this support. If this
1941 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1942 would also be needed. */
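/* For example, a DImode value split across two non-adjacent SImode
   registers might be described as

	(parallel [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 5) (const_int 4))])

   where each constant is the byte offset of that piece within the
   block.  */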
1944 void
1945 emit_group_load (dst, orig_src, ssize, align)
1946 rtx dst, orig_src;
1947 int ssize;
1948 unsigned int align;
1949 {
1950 rtx *tmps, src;
1951 int start, i;
1953 if (GET_CODE (dst) != PARALLEL)
1954 abort ();
1956 /* Check for a NULL entry, used to indicate that the parameter goes
1957 both on the stack and in registers. */
1958 if (XEXP (XVECEXP (dst, 0, 0), 0))
1959 start = 0;
1960 else
1961 start = 1;
1963 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1965 /* Process the pieces. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1968 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1969 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1970 unsigned int bytelen = GET_MODE_SIZE (mode);
1973 /* Handle trailing fragments that run over the size of the struct. */
1974 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1976 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1977 bytelen = ssize - bytepos;
1982 /* If we won't be loading directly from memory, protect the real source
1983 from strange tricks we might play; but make sure that the source can
1984 be loaded directly into the destination. */
1986 if (GET_CODE (orig_src) != MEM
1987 && (!CONSTANT_P (orig_src)
1988 || (GET_MODE (orig_src) != mode
1989 && GET_MODE (orig_src) != VOIDmode)))
1991 if (GET_MODE (orig_src) == VOIDmode)
1992 src = gen_reg_rtx (mode);
1993 else
1994 src = gen_reg_rtx (GET_MODE (orig_src));
1995 emit_move_insn (src, orig_src);
1998 /* Optimize the access just a bit. */
1999 if (GET_CODE (src) == MEM
2000 && align >= GET_MODE_ALIGNMENT (mode)
2001 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2002 && bytelen == GET_MODE_SIZE (mode))
2004 tmps[i] = gen_reg_rtx (mode);
2005 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2007 else if (GET_CODE (src) == CONCAT)
2008 {
2009 if (bytepos == 0
2010 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2011 tmps[i] = XEXP (src, 0);
2012 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2013 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2014 tmps[i] = XEXP (src, 1);
2015 else
2016 abort ();
2017 }
2018 else if (CONSTANT_P (src)
2019 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2020 tmps[i] = src;
2021 else
2022 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2023 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2024 mode, mode, align, ssize);
2026 if (BYTES_BIG_ENDIAN && shift)
2027 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2028 tmps[i], 0, OPTAB_WIDEN);
2033 /* Copy the extracted pieces into the proper (probable) hard regs. */
2034 for (i = start; i < XVECLEN (dst, 0); i++)
2035 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2038 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2039 registers represented by a PARALLEL. SSIZE represents the total size of
2040 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2042 void
2043 emit_group_store (orig_dst, src, ssize, align)
2044 rtx orig_dst, src;
2045 int ssize;
2046 unsigned int align;
2047 {
2048 rtx *tmps, dst;
2049 int start, i;
2051 if (GET_CODE (src) != PARALLEL)
2052 abort ();
2054 /* Check for a NULL entry, used to indicate that the parameter goes
2055 both on the stack and in registers. */
2056 if (XEXP (XVECEXP (src, 0, 0), 0))
2057 start = 0;
2058 else
2059 start = 1;
2061 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2063 /* Copy the (probable) hard regs into pseudos. */
2064 for (i = start; i < XVECLEN (src, 0); i++)
2066 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2067 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2068 emit_move_insn (tmps[i], reg);
2072 /* If we won't be storing directly into memory, protect the real destination
2073 from strange tricks we might play. */
2075 if (GET_CODE (dst) == PARALLEL)
2076 {
2077 rtx temp;
2079 /* We can get a PARALLEL dst if there is a conditional expression in
2080 a return statement. In that case, the dst and src are the same,
2081 so no action is necessary. */
2082 if (rtx_equal_p (dst, src))
2083 return;
2085 /* It is unclear if we can ever reach here, but we may as well handle
2086 it. Allocate a temporary, and split this into a store/load to/from
2087 the stack. */
2089 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2090 emit_group_store (temp, src, ssize, align);
2091 emit_group_load (dst, temp, ssize, align);
2092 return;
2093 }
2094 else if (GET_CODE (dst) != MEM)
2096 dst = gen_reg_rtx (GET_MODE (orig_dst));
2097 /* Make life a bit easier for combine. */
2098 emit_move_insn (dst, const0_rtx);
2101 /* Process the pieces. */
2102 for (i = start; i < XVECLEN (src, 0); i++)
2104 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2105 enum machine_mode mode = GET_MODE (tmps[i]);
2106 unsigned int bytelen = GET_MODE_SIZE (mode);
2108 /* Handle trailing fragments that run over the size of the struct. */
2109 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2111 if (BYTES_BIG_ENDIAN)
2113 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2114 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2115 tmps[i], 0, OPTAB_WIDEN);
2117 bytelen = ssize - bytepos;
2120 /* Optimize the access just a bit. */
2121 if (GET_CODE (dst) == MEM
2122 && align >= GET_MODE_ALIGNMENT (mode)
2123 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2124 && bytelen == GET_MODE_SIZE (mode))
2125 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2126 else
2127 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2128 mode, tmps[i], align, ssize);
2133 /* Copy from the pseudo into the (probable) hard reg. */
2134 if (GET_CODE (dst) == REG)
2135 emit_move_insn (orig_dst, dst);
2138 /* Generate code to copy a BLKmode object of TYPE out of a
2139 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2140 is null, a stack temporary is created. TGTBLK is returned.
2142 The primary purpose of this routine is to handle functions
2143 that return BLKmode structures in registers. Some machines
2144 (the PA for example) want to return all small structures
2145 in registers regardless of the structure's alignment. */
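/* For example, on a 32-bit big-endian machine a 6-byte structure
   returned in two word registers has bytes % UNITS_PER_WORD == 2, so
   the big-endian correction below is 16 and extraction starts 16 bits
   into the first register, past the unused high-order bytes.  */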
2147 rtx
2148 copy_blkmode_from_reg (tgtblk, srcreg, type)
2149 rtx tgtblk;
2150 rtx srcreg;
2151 tree type;
2152 {
2153 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2154 rtx src = NULL, dst = NULL;
2155 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2156 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2158 if (tgtblk == 0)
2159 {
2160 tgtblk = assign_temp (build_qualified_type (type,
2161 (TYPE_QUALS (type)
2162 | TYPE_QUAL_CONST)),
2163 0, 1, 1);
2164 preserve_temp_slots (tgtblk);
2165 }
2167 /* This code assumes srcreg is at least a full word. If it isn't,
2168 copy it into a new pseudo which is a full word. */
2169 if (GET_MODE (srcreg) != BLKmode
2170 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2171 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2173 /* Structures whose size is not a multiple of a word are aligned
2174 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2175 machine, this means we must skip the empty high order bytes when
2176 calculating the bit offset. */
2177 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2178 big_endian_correction
2179 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
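/* For example, with 32-bit words a 3-byte structure is right
   justified within its word, so the correction above is
   32 - 3 * 8 = 8 bits.  */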
2181 /* Copy the structure BITSIZE bits at a time.
2183 We could probably emit more efficient code for machines which do not use
2184 strict alignment, but it doesn't seem worth the effort at the current
2186 for (bitpos = 0, xbitpos = big_endian_correction;
2187 bitpos < bytes * BITS_PER_UNIT;
2188 bitpos += bitsize, xbitpos += bitsize)
2190 /* We need a new source operand each time xbitpos is on a
2191 word boundary and when xbitpos == big_endian_correction
2192 (the first time through). */
2193 if (xbitpos % BITS_PER_WORD == 0
2194 || xbitpos == big_endian_correction)
2195 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2196 BLKmode);
2198 /* We need a new destination operand each time bitpos is on
2199 a word boundary. */
2200 if (bitpos % BITS_PER_WORD == 0)
2201 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2203 /* Use xbitpos for the source extraction (right justified) and
2204 bitpos for the destination store (left justified). */
2205 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2206 extract_bit_field (src, bitsize,
2207 xbitpos % BITS_PER_WORD, 1,
2208 NULL_RTX, word_mode, word_mode,
2209 bitsize, BITS_PER_WORD),
2210 bitsize, BITS_PER_WORD);
2211 }
2213 return tgtblk;
2214 }
2216 /* Add a USE expression for REG to the (possibly empty) list pointed
2217 to by CALL_FUSAGE. REG must denote a hard register. */
2219 void
2220 use_reg (call_fusage, reg)
2221 rtx *call_fusage, reg;
2222 {
2223 if (GET_CODE (reg) != REG
2224 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2225 abort ();
2227 *call_fusage
2228 = gen_rtx_EXPR_LIST (VOIDmode,
2229 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2232 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2233 starting at REGNO. All of these registers must be hard registers. */
2235 void
2236 use_regs (call_fusage, regno, nregs)
2237 rtx *call_fusage;
2238 int regno;
2239 int nregs;
2240 {
2241 int i;
2243 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2244 abort ();
2246 for (i = 0; i < nregs; i++)
2247 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2250 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2251 PARALLEL REGS. This is for calls that pass values in multiple
2252 non-contiguous locations. The Irix 6 ABI has examples of this. */
2254 void
2255 use_group_regs (call_fusage, regs)
2256 rtx *call_fusage;
2257 rtx regs;
2258 {
2259 int i;
2261 for (i = 0; i < XVECLEN (regs, 0); i++)
2263 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2265 /* A NULL entry means the parameter goes both on the stack and in
2266 registers. This can also be a MEM for targets that pass values
2267 partially on the stack and partially in registers. */
2268 if (reg != 0 && GET_CODE (reg) == REG)
2269 use_reg (call_fusage, reg);
2275 can_store_by_pieces (len, constfun, constfundata, align)
2276 unsigned HOST_WIDE_INT len;
2277 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2281 unsigned HOST_WIDE_INT max_size, l;
2282 HOST_WIDE_INT offset = 0;
2283 enum machine_mode mode, tmode;
2284 enum insn_code icode;
2288 if (! MOVE_BY_PIECES_P (len, align))
2291 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2292 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2293 align = MOVE_MAX * BITS_PER_UNIT;
2295 /* We would first store what we can in the largest integer mode, then go to
2296 successively smaller modes. */
2299 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2304 max_size = MOVE_MAX_PIECES + 1;
2305 while (max_size > 1)
2307 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2308 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2309 if (GET_MODE_SIZE (tmode) < max_size)
2312 if (mode == VOIDmode)
2315 icode = mov_optab->handlers[(int) mode].insn_code;
2316 if (icode != CODE_FOR_nothing
2317 && align >= GET_MODE_ALIGNMENT (mode))
2319 unsigned int size = GET_MODE_SIZE (mode);
2326 cst = (*constfun) (constfundata, offset, mode);
2327 if (!LEGITIMATE_CONSTANT_P (cst))
2337 max_size = GET_MODE_SIZE (mode);
2340 /* The code above should have handled everything. */
2348 /* Generate several move instructions to store LEN bytes generated by
2349 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2350 pointer which will be passed as argument in every CONSTFUN call.
2351 ALIGN is maximum alignment we can assume. */
2354 store_by_pieces (to, len, constfun, constfundata, align)
2356 unsigned HOST_WIDE_INT len;
2357 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2361 struct store_by_pieces data;
2363 if (! MOVE_BY_PIECES_P (len, align))
2365 to = protect_from_queue (to, 1);
2366 data.constfun = constfun;
2367 data.constfundata = constfundata;
2370 store_by_pieces_1 (&data, align);
2373 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). The caller must pass TO through protect_from_queue
2375 before calling. ALIGN is maximum alignment we can assume. */
2378 clear_by_pieces (to, len, align)
2380 unsigned HOST_WIDE_INT len;
2383 struct store_by_pieces data;
2385 data.constfun = clear_by_pieces_1;
2386 data.constfundata = NULL;
2389 store_by_pieces_1 (&data, align);
2392 /* Callback routine for clear_by_pieces.
2393 Return const0_rtx unconditionally. */
2396 clear_by_pieces_1 (data, offset, mode)
2397 PTR data ATTRIBUTE_UNUSED;
2398 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2399 enum machine_mode mode ATTRIBUTE_UNUSED;
2404 /* Subroutine of clear_by_pieces and store_by_pieces.
2405 Generate several move instructions to store LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). The caller must pass TO through protect_from_queue
2407 before calling. ALIGN is maximum alignment we can assume. */
2410 store_by_pieces_1 (data, align)
2411 struct store_by_pieces *data;
2414 rtx to_addr = XEXP (data->to, 0);
2415 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2416 enum machine_mode mode = VOIDmode, tmode;
2417 enum insn_code icode;
2420 data->to_addr = to_addr;
2422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2425 data->explicit_inc_to = 0;
2427 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2429 data->offset = data->len;
2431 /* If storing requires more than two move insns,
2432 copy addresses to registers (to make displacements shorter)
2433 and use post-increment if available. */
2434 if (!data->autinc_to
2435 && move_by_pieces_ninsns (data->len, align) > 2)
2437 /* Determine the main mode we'll be using. */
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2443 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2445 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2446 data->autinc_to = 1;
2447 data->explicit_inc_to = -1;
2450 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2451 && ! data->autinc_to)
2453 data->to_addr = copy_addr_to_reg (to_addr);
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = 1;
2458 if ( !data->autinc_to && CONSTANT_P (to_addr))
2459 data->to_addr = copy_addr_to_reg (to_addr);
2462 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2463 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2464 align = MOVE_MAX * BITS_PER_UNIT;
2466 /* First store what we can in the largest integer mode, then go to
2467 successively smaller modes. */
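  /* For example (assuming a 32-bit target with MOVE_MAX_PIECES == 4 and
     sufficient alignment), storing LEN == 7 bytes emits one SImode store,
     then one HImode store, then one QImode store: 4 + 2 + 1 == 7.  */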
2469 while (max_size > 1)
2471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473 if (GET_MODE_SIZE (tmode) < max_size)
2476 if (mode == VOIDmode)
2479 icode = mov_optab->handlers[(int) mode].insn_code;
2480 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2481 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2483 max_size = GET_MODE_SIZE (mode);
2486 /* The code above should have handled everything. */
2491 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2492 with move instructions for mode MODE. GENFUN is the gen_... function
2493 to make a move insn for that mode. DATA has all the other info. */
2496 store_by_pieces_2 (genfun, mode, data)
2497 rtx (*genfun) PARAMS ((rtx, ...));
2498 enum machine_mode mode;
2499 struct store_by_pieces *data;
2501 unsigned int size = GET_MODE_SIZE (mode);
2504 while (data->len >= size)
2507 data->offset -= size;
2509 if (data->autinc_to)
2511 to1 = gen_rtx_MEM (mode, data->to_addr);
2512 MEM_COPY_ATTRIBUTES (to1, data->to);
2515 to1 = adjust_address (data->to, mode, data->offset);
2517 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2518 emit_insn (gen_add2_insn (data->to_addr,
2519 GEN_INT (-(HOST_WIDE_INT) size)));
2521 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2522 emit_insn ((*genfun) (to1, cst));
2524 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2525 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2527 if (! data->reverse)
2528 data->offset += size;
2534 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
   its length in bytes and ALIGN is the maximum alignment we can assume.
2537 If we call a function that returns the length of the block, return it. */
2540 clear_storage (object, size, align)
2545 #ifdef TARGET_MEM_FUNCTIONS
2547 tree call_expr, arg_list;
2551 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2552 just move a zero. Otherwise, do this a piece at a time. */
2553 if (GET_MODE (object) != BLKmode
2554 && GET_CODE (size) == CONST_INT
2555 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2556 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2559 object = protect_from_queue (object, 1);
2560 size = protect_from_queue (size, 0);
2562 if (GET_CODE (size) == CONST_INT
2563 && MOVE_BY_PIECES_P (INTVAL (size), align))
2564 clear_by_pieces (object, INTVAL (size), align);
2567 /* Try the most limited insn first, because there's no point
2568 including more than one in the machine description unless
2569 the more limited one has some advantage. */
2571 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2572 enum machine_mode mode;
2574 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2575 mode = GET_MODE_WIDER_MODE (mode))
2577 enum insn_code code = clrstr_optab[(int) mode];
2578 insn_operand_predicate_fn pred;
2580 if (code != CODE_FOR_nothing
2581 /* We don't need MODE to be narrower than
2582 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2583 the mode mask, as it is returned by the macro, it will
2584 definitely be less than the actual mode mask. */
2585 && ((GET_CODE (size) == CONST_INT
2586 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2587 <= (GET_MODE_MASK (mode) >> 1)))
2588 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2589 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2590 || (*pred) (object, BLKmode))
2591 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2592 || (*pred) (opalign, VOIDmode)))
2595 rtx last = get_last_insn ();
2598 op1 = convert_to_mode (mode, size, 1);
2599 pred = insn_data[(int) code].operand[1].predicate;
2600 if (pred != 0 && ! (*pred) (op1, mode))
2601 op1 = copy_to_mode_reg (mode, op1);
2603 pat = GEN_FCN ((int) code) (object, op1, opalign);
2610 delete_insns_since (last);
2614 /* OBJECT or SIZE may have been passed through protect_from_queue.
2616 It is unsafe to save the value generated by protect_from_queue
2617 and reuse it later. Consider what happens if emit_queue is
2618 called before the return value from protect_from_queue is used.
2620 Expansion of the CALL_EXPR below will call emit_queue before
2621 we are finished emitting RTL for argument setup. So if we are
2622 not careful we could get the wrong value for an argument.
2624 To avoid this problem we go ahead and emit code to copy OBJECT
2625 and SIZE into new pseudos. We can then place those new pseudos
2626 into an RTL_EXPR and use them later, even after a call to
2629 Note this is not strictly needed for library calls since they
2630 do not call emit_queue before loading their arguments. However,
2631 we may need to have library calls call emit_queue in the future
2632 since failing to do so could cause problems for targets which
2633 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2634 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2636 #ifdef TARGET_MEM_FUNCTIONS
2637 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2639 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2640 TREE_UNSIGNED (integer_type_node));
2641 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2644 #ifdef TARGET_MEM_FUNCTIONS
2645 /* It is incorrect to use the libcall calling conventions to call
2646 memset in this context.
2648 This could be a user call to memset and the user may wish to
2649 examine the return value from memset.
2651 For targets where libcalls and normal calls have different
     conventions for returning pointers, we could end up generating
     incorrect code.
2655 So instead of using a libcall sequence we build up a suitable
2656 CALL_EXPR and expand the call in the normal fashion. */
2657 if (fn == NULL_TREE)
      /* This was copied from except.c; I don't know whether all of it is
	 necessary in this context or not.  */
2663 fn = get_identifier ("memset");
2664 fntype = build_pointer_type (void_type_node);
2665 fntype = build_function_type (fntype, NULL_TREE);
2666 fn = build_decl (FUNCTION_DECL, fn, fntype);
2667 ggc_add_tree_root (&fn, 1);
2668 DECL_EXTERNAL (fn) = 1;
2669 TREE_PUBLIC (fn) = 1;
2670 DECL_ARTIFICIAL (fn) = 1;
2671 TREE_NOTHROW (fn) = 1;
2672 make_decl_rtl (fn, NULL);
2673 assemble_external (fn);
      /* We need to make an argument list for the function call.

	 memset has three arguments: the first is a void * address, the
	 second an integer with the initialization value, and the last a
	 size_t byte count for the copy.  */
2682 = build_tree_list (NULL_TREE,
2683 make_tree (build_pointer_type (void_type_node),
2685 TREE_CHAIN (arg_list)
2686 = build_tree_list (NULL_TREE,
2687 make_tree (integer_type_node, const0_rtx));
2688 TREE_CHAIN (TREE_CHAIN (arg_list))
2689 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2690 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2692 /* Now we have to build up the CALL_EXPR itself. */
2693 call_expr = build1 (ADDR_EXPR,
2694 build_pointer_type (TREE_TYPE (fn)), fn);
2695 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2696 call_expr, arg_list, NULL_TREE);
2697 TREE_SIDE_EFFECTS (call_expr) = 1;
2699 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2701 emit_library_call (bzero_libfunc, LCT_NORMAL,
2702 VOIDmode, 2, object, Pmode, size,
2703 TYPE_MODE (integer_type_node));
2711 /* Generate code to copy Y into X.
2712 Both Y and X must have the same mode, except that
2713 Y can be a constant with VOIDmode.
2714 This mode cannot be BLKmode; use emit_block_move for that.
2716 Return the last instruction emitted. */
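/* Usage sketch (illustrative): loading the constant 42 into a fresh
   pseudo register,

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));

   GEN_INT yields a VOIDmode CONST_INT, which is allowed here.  */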
2719 emit_move_insn (x, y)
2722 enum machine_mode mode = GET_MODE (x);
2723 rtx y_cst = NULL_RTX;
2726 x = protect_from_queue (x, 1);
2727 y = protect_from_queue (y, 0);
2729 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2732 /* Never force constant_p_rtx to memory. */
2733 if (GET_CODE (y) == CONSTANT_P_RTX)
2735 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2738 y = force_const_mem (mode, y);
  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
2743 if (GET_CODE (x) == MEM
2744 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2745 && ! push_operand (x, GET_MODE (x)))
2747 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2748 x = validize_mem (x);
2750 if (GET_CODE (y) == MEM
2751 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2753 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2754 y = validize_mem (y);
2756 if (mode == BLKmode)
2759 last_insn = emit_move_insn_1 (x, y);
2761 if (y_cst && GET_CODE (x) == REG)
2762 REG_NOTES (last_insn)
2763 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2768 /* Low level part of emit_move_insn.
2769 Called just like emit_move_insn, but assumes X and Y
2770 are basically valid. */
2773 emit_move_insn_1 (x, y)
2776 enum machine_mode mode = GET_MODE (x);
2777 enum machine_mode submode;
2778 enum mode_class class = GET_MODE_CLASS (mode);
2781 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2784 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2786 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2788 /* Expand complex moves by moving real part and imag part, if possible. */
2789 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2790 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2792 (class == MODE_COMPLEX_INT
2793 ? MODE_INT : MODE_FLOAT),
2795 && (mov_optab->handlers[(int) submode].insn_code
2796 != CODE_FOR_nothing))
2798 /* Don't split destination if it is a stack push. */
2799 int stack = push_operand (x, GET_MODE (x));
2801 #ifdef PUSH_ROUNDING
      /* In case we output to the stack, but the size is smaller than the
	 machine can push exactly, we need to use move instructions.  */
2805 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2808 int offset1, offset2;
2810 /* Do not use anti_adjust_stack, since we don't want to update
2811 stack_pointer_delta. */
2812 temp = expand_binop (Pmode,
2813 #ifdef STACK_GROWS_DOWNWARD
2820 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2824 if (temp != stack_pointer_rtx)
2825 emit_move_insn (stack_pointer_rtx, temp);
2826 #ifdef STACK_GROWS_DOWNWARD
2828 offset2 = GET_MODE_SIZE (submode);
2830 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2831 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2832 + GET_MODE_SIZE (submode));
2834 emit_move_insn (change_address (x, submode,
2835 gen_rtx_PLUS (Pmode,
2837 GEN_INT (offset1))),
2838 gen_realpart (submode, y));
2839 emit_move_insn (change_address (x, submode,
2840 gen_rtx_PLUS (Pmode,
2842 GEN_INT (offset2))),
2843 gen_imagpart (submode, y));
	  /* If this is a stack push, push the highpart first, so it
	     will be in the argument order.
2850 In that case, change_address is used only to convert
2851 the mode, not to change the address. */
2854 /* Note that the real part always precedes the imag part in memory
2855 regardless of machine's endianness. */
2856 #ifdef STACK_GROWS_DOWNWARD
2857 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2858 (gen_rtx_MEM (submode, XEXP (x, 0)),
2859 gen_imagpart (submode, y)));
2860 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2861 (gen_rtx_MEM (submode, XEXP (x, 0)),
2862 gen_realpart (submode, y)));
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_realpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_imagpart (submode, y)));
2874 rtx realpart_x, realpart_y;
2875 rtx imagpart_x, imagpart_y;
2877 /* If this is a complex value with each part being smaller than a
2878 word, the usual calling sequence will likely pack the pieces into
2879 a single register. Unfortunately, SUBREG of hard registers only
2880 deals in terms of words, so we have a problem converting input
2881 arguments to the CONCAT of two registers that is used elsewhere
2882 for complex values. If this is before reload, we can copy it into
2883 memory and reload. FIXME, we should see about using extract and
2884 insert on integer registers, but complex short and complex char
2885 variables should be rarely used. */
2886 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2887 && (reload_in_progress | reload_completed) == 0)
2889 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2890 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2892 if (packed_dest_p || packed_src_p)
2894 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2895 ? MODE_FLOAT : MODE_INT);
2897 enum machine_mode reg_mode
2898 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2900 if (reg_mode != BLKmode)
2902 rtx mem = assign_stack_temp (reg_mode,
2903 GET_MODE_SIZE (mode), 0);
2904 rtx cmem = adjust_address (mem, mode, 0);
2907 = N_("function using short complex types cannot be inline");
2911 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2912 emit_move_insn_1 (cmem, y);
2913 return emit_move_insn_1 (sreg, mem);
2917 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2918 emit_move_insn_1 (mem, sreg);
2919 return emit_move_insn_1 (x, cmem);
2925 realpart_x = gen_realpart (submode, x);
2926 realpart_y = gen_realpart (submode, y);
2927 imagpart_x = gen_imagpart (submode, x);
2928 imagpart_y = gen_imagpart (submode, y);
2930 /* Show the output dies here. This is necessary for SUBREGs
2931 of pseudos since we cannot track their lifetimes correctly;
2932 hard regs shouldn't appear here except as return values.
2933 We never want to emit such a clobber after reload. */
2935 && ! (reload_in_progress || reload_completed)
2936 && (GET_CODE (realpart_x) == SUBREG
2937 || GET_CODE (imagpart_x) == SUBREG))
2939 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2942 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2943 (realpart_x, realpart_y));
2944 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2945 (imagpart_x, imagpart_y));
2948 return get_last_insn ();
2951 /* This will handle any multi-word mode that lacks a move_insn pattern.
2952 However, you will get better code if you define such patterns,
2953 even if they must turn into multiple assembler instructions. */
2954 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2960 #ifdef PUSH_ROUNDING
2962 /* If X is a push on the stack, do the push now and replace
2963 X with a reference to the stack pointer. */
2964 if (push_operand (x, GET_MODE (x)))
2969 /* Do not use anti_adjust_stack, since we don't want to update
2970 stack_pointer_delta. */
2971 temp = expand_binop (Pmode,
2972 #ifdef STACK_GROWS_DOWNWARD
2979 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2983 if (temp != stack_pointer_rtx)
2984 emit_move_insn (stack_pointer_rtx, temp);
2986 code = GET_CODE (XEXP (x, 0));
2987 /* Just hope that small offsets off SP are OK. */
2988 if (code == POST_INC)
2989 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2990 GEN_INT (-(HOST_WIDE_INT)
2991 GET_MODE_SIZE (GET_MODE (x))));
2992 else if (code == POST_DEC)
2993 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2994 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2996 temp = stack_pointer_rtx;
2998 x = change_address (x, VOIDmode, temp);
3002 /* If we are in reload, see if either operand is a MEM whose address
3003 is scheduled for replacement. */
3004 if (reload_in_progress && GET_CODE (x) == MEM
3005 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3007 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
3009 MEM_COPY_ATTRIBUTES (new, x);
3012 if (reload_in_progress && GET_CODE (y) == MEM
3013 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3015 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
3017 MEM_COPY_ATTRIBUTES (new, y);
3025 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3028 rtx xpart = operand_subword (x, i, 1, mode);
3029 rtx ypart = operand_subword (y, i, 1, mode);
3031 /* If we can't get a part of Y, put Y into memory if it is a
3032 constant. Otherwise, force it into a register. If we still
3033 can't get a part of Y, abort. */
3034 if (ypart == 0 && CONSTANT_P (y))
3036 y = force_const_mem (mode, y);
3037 ypart = operand_subword (y, i, 1, mode);
3039 else if (ypart == 0)
3040 ypart = operand_subword_force (y, i, mode);
3042 if (xpart == 0 || ypart == 0)
3045 need_clobber |= (GET_CODE (xpart) == SUBREG);
3047 last_insn = emit_move_insn (xpart, ypart);
3050 seq = gen_sequence ();
3053 /* Show the output dies here. This is necessary for SUBREGs
3054 of pseudos since we cannot track their lifetimes correctly;
3055 hard regs shouldn't appear here except as return values.
3056 We never want to emit such a clobber after reload. */
3058 && ! (reload_in_progress || reload_completed)
3059 && need_clobber != 0)
3061 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3072 /* Pushing data onto the stack. */
3074 /* Push a block of length SIZE (perhaps variable)
3075 and return an rtx to address the beginning of the block.
3076 Note that it is not possible for the value returned to be a QUEUED.
3077 The value may be virtual_outgoing_args_rtx.
3079 EXTRA is the number of bytes of padding to push in addition to SIZE.
3080 BELOW nonzero means this padding comes at low addresses;
3081 otherwise, the padding comes at high addresses. */
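/* Usage sketch (illustrative): pushing a 16-byte scratch block with no
   extra padding,

	rtx addr = push_block (GEN_INT (16), 0, 0);

   adjusts the stack by 16 bytes and yields the address of the start of
   the block.  */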
3084 push_block (size, extra, below)
3090 size = convert_modes (Pmode, ptr_mode, size, 1);
3091 if (CONSTANT_P (size))
3092 anti_adjust_stack (plus_constant (size, extra));
3093 else if (GET_CODE (size) == REG && extra == 0)
3094 anti_adjust_stack (size);
3097 temp = copy_to_mode_reg (Pmode, size);
3099 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3100 temp, 0, OPTAB_LIB_WIDEN);
3101 anti_adjust_stack (temp);
3104 #ifndef STACK_GROWS_DOWNWARD
3105 #ifdef ARGS_GROW_DOWNWARD
3106 if (!ACCUMULATE_OUTGOING_ARGS)
  /* Return the lowest stack address when STACK or ARGS grow downward and
     we are not accumulating outgoing arguments (the c4x port uses such
     conventions).  */
3117 temp = virtual_outgoing_args_rtx;
3118 if (extra != 0 && below)
3119 temp = plus_constant (temp, extra);
3123 if (GET_CODE (size) == CONST_INT)
3124 temp = plus_constant (virtual_outgoing_args_rtx,
3125 -INTVAL (size) - (below ? 0 : extra));
3126 else if (extra != 0 && !below)
3127 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3128 negate_rtx (Pmode, plus_constant (size, extra)));
3130 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3131 negate_rtx (Pmode, size));
3134 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
/* Return an rtx for the address of the beginning of an as-if-it-were-pushed
   block of SIZE bytes.  */
3142 get_push_address (size)
3147 if (STACK_PUSH_CODE == POST_DEC)
3148 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3149 else if (STACK_PUSH_CODE == POST_INC)
3150 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3152 temp = stack_pointer_rtx;
3154 return copy_to_reg (temp);
3157 /* Emit single push insn. */
3159 emit_single_push_insn (mode, x, type)
3161 enum machine_mode mode;
3164 #ifdef PUSH_ROUNDING
3166 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
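  /* For example, on a hypothetical port where PUSH_ROUNDING rounds up to
     a 4-byte boundary, PUSH_ROUNDING (2) == 4, so a HImode push cannot
     use the bare STACK_PUSH_CODE form and takes the PRE_MODIFY path
     below instead.  */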
3169 if (GET_MODE_SIZE (mode) == rounded_size)
3170 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3173 #ifdef STACK_GROWS_DOWNWARD
3174 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3175 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3177 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3178 GEN_INT (rounded_size));
3180 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3183 dest = gen_rtx_MEM (mode, dest_addr);
3185 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3189 set_mem_attributes (dest, type, 1);
3190 /* Function incoming arguments may overlap with sibling call
3191 outgoing arguments and we cannot allow reordering of reads
3192 from function arguments with stores to outgoing arguments
3193 of sibling calls. */
3194 MEM_ALIAS_SET (dest) = 0;
3196 emit_move_insn (dest, x);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
3206 SIZE is an rtx for the size of data to be copied (in bytes),
3207 needed only if X is BLKmode.
3209 ALIGN (in bits) is maximum alignment we can assume.
3211 If PARTIAL and REG are both nonzero, then copy that many of the first
3212 words of X into registers starting with REG, and push the rest of X.
3213 The amount of space pushed is decreased by PARTIAL words,
3214 rounded *down* to a multiple of PARM_BOUNDARY.
3215 REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.
3220 EXTRA is the amount in bytes of extra space to leave next to this arg.
3221 This is ignored if an argument block has already been allocated.
3223 On a machine that lacks real push insns, ARGS_ADDR is the address of
3224 the bottom of the argument block for this call. We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.
3228 ARGS_SO_FAR is the size of args previously pushed for this call.
3230 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3231 for arguments passed in registers. If nonzero, it will be the number
3232 of bytes required. */
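/* For example (illustrative): with PARTIAL == 2 and REG a hard register,
   the first two words of X end up in REG and REG+1 (via the
   move_block_to_reg call at the end of this function), only the
   remainder of X is pushed, and the stack space consumed shrinks by two
   words rounded down to a multiple of PARM_BOUNDARY.  */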
3235 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3236 args_addr, args_so_far, reg_parm_stack_space,
3239 enum machine_mode mode;
3248 int reg_parm_stack_space;
3252 enum direction stack_direction
3253 #ifdef STACK_GROWS_DOWNWARD
3259 /* Decide where to pad the argument: `downward' for below,
3260 `upward' for above, or `none' for don't pad it.
3261 Default is below for small data on big-endian machines; else above. */
3262 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3264 /* Invert direction if stack is post-update. */
3265 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3266 if (where_pad != none)
3267 where_pad = (where_pad == downward ? upward : downward);
3269 xinner = x = protect_from_queue (x, 0);
3271 if (mode == BLKmode)
3273 /* Copy a block into the stack, entirely or partially. */
3276 int used = partial * UNITS_PER_WORD;
3277 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
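      /* For example (illustrative numbers): UNITS_PER_WORD == 4,
	 PARM_BOUNDARY == 64 and PARTIAL == 3 give USED == 12 and
	 OFFSET == 12 % 8 == 4, i.e. the register part covers half of a
	 parameter-boundary slot.  */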
3285 /* USED is now the # of bytes we need not copy to the stack
3286 because registers will take care of them. */
3289 xinner = adjust_address (xinner, BLKmode, used);
3291 /* If the partial register-part of the arg counts in its stack size,
3292 skip the part of stack space corresponding to the registers.
3293 Otherwise, start copying to the beginning of the stack space,
3294 by setting SKIP to 0. */
3295 skip = (reg_parm_stack_space == 0) ? 0 : used;
3297 #ifdef PUSH_ROUNDING
3298 /* Do it with several push insns if that doesn't take lots of insns
3299 and if there is no difficulty with push insns that skip bytes
3300 on the stack for alignment purposes. */
3303 && GET_CODE (size) == CONST_INT
3305 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3306 /* Here we avoid the case of a structure whose weak alignment
3307 forces many pushes of a small amount of data,
3308 and such small pushes do rounding that causes trouble. */
3309 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3310 || align >= BIGGEST_ALIGNMENT
3311 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3312 == (align / BITS_PER_UNIT)))
3313 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3315 /* Push padding now if padding above and stack grows down,
3316 or if padding below and stack grows up.
3317 But if space already allocated, this has already been done. */
3318 if (extra && args_addr == 0
3319 && where_pad != none && where_pad != stack_direction)
3320 anti_adjust_stack (GEN_INT (extra));
3322 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3324 if (current_function_check_memory_usage && ! in_check_memory_usage)
3328 in_check_memory_usage = 1;
3329 temp = get_push_address (INTVAL (size) - used);
3330 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3331 emit_library_call (chkr_copy_bitmap_libfunc,
3332 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3333 Pmode, XEXP (xinner, 0), Pmode,
3334 GEN_INT (INTVAL (size) - used),
3335 TYPE_MODE (sizetype));
3337 emit_library_call (chkr_set_right_libfunc,
3338 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3339 Pmode, GEN_INT (INTVAL (size) - used),
3340 TYPE_MODE (sizetype),
3341 GEN_INT (MEMORY_USE_RW),
3342 TYPE_MODE (integer_type_node));
3343 in_check_memory_usage = 0;
3347 #endif /* PUSH_ROUNDING */
3351 /* Otherwise make space on the stack and copy the data
3352 to the address of that space. */
3354 /* Deduct words put into registers from the size we must copy. */
3357 if (GET_CODE (size) == CONST_INT)
3358 size = GEN_INT (INTVAL (size) - used);
3360 size = expand_binop (GET_MODE (size), sub_optab, size,
3361 GEN_INT (used), NULL_RTX, 0,
3365 /* Get the address of the stack space.
3366 In this case, we do not deal with EXTRA separately.
3367 A single stack adjust will do. */
3370 temp = push_block (size, extra, where_pad == downward);
3373 else if (GET_CODE (args_so_far) == CONST_INT)
3374 temp = memory_address (BLKmode,
3375 plus_constant (args_addr,
3376 skip + INTVAL (args_so_far)));
3378 temp = memory_address (BLKmode,
3379 plus_constant (gen_rtx_PLUS (Pmode,
3383 if (current_function_check_memory_usage && ! in_check_memory_usage)
3385 in_check_memory_usage = 1;
3386 target = copy_to_reg (temp);
3387 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3388 emit_library_call (chkr_copy_bitmap_libfunc,
3389 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3391 XEXP (xinner, 0), Pmode,
3392 size, TYPE_MODE (sizetype));
3394 emit_library_call (chkr_set_right_libfunc,
3395 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3397 size, TYPE_MODE (sizetype),
3398 GEN_INT (MEMORY_USE_RW),
3399 TYPE_MODE (integer_type_node));
3400 in_check_memory_usage = 0;
3403 target = gen_rtx_MEM (BLKmode, temp);
3407 set_mem_attributes (target, type, 1);
3408 /* Function incoming arguments may overlap with sibling call
3409 outgoing arguments and we cannot allow reordering of reads
3410 from function arguments with stores to outgoing arguments
3411 of sibling calls. */
3412 MEM_ALIAS_SET (target) = 0;
3415 /* TEMP is the address of the block. Copy the data there. */
3416 if (GET_CODE (size) == CONST_INT
3417 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3419 move_by_pieces (target, xinner, INTVAL (size), align);
3424 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3425 enum machine_mode mode;
3427 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3429 mode = GET_MODE_WIDER_MODE (mode))
3431 enum insn_code code = movstr_optab[(int) mode];
3432 insn_operand_predicate_fn pred;
3434 if (code != CODE_FOR_nothing
3435 && ((GET_CODE (size) == CONST_INT
3436 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3437 <= (GET_MODE_MASK (mode) >> 1)))
3438 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3439 && (!(pred = insn_data[(int) code].operand[0].predicate)
3440 || ((*pred) (target, BLKmode)))
3441 && (!(pred = insn_data[(int) code].operand[1].predicate)
3442 || ((*pred) (xinner, BLKmode)))
3443 && (!(pred = insn_data[(int) code].operand[3].predicate)
3444 || ((*pred) (opalign, VOIDmode))))
3446 rtx op2 = convert_to_mode (mode, size, 1);
3447 rtx last = get_last_insn ();
3450 pred = insn_data[(int) code].operand[2].predicate;
3451 if (pred != 0 && ! (*pred) (op2, mode))
3452 op2 = copy_to_mode_reg (mode, op2);
3454 pat = GEN_FCN ((int) code) (target, xinner,
3462 delete_insns_since (last);
3467 if (!ACCUMULATE_OUTGOING_ARGS)
3469 /* If the source is referenced relative to the stack pointer,
3470 copy it to another register to stabilize it. We do not need
3471 to do this if we know that we won't be changing sp. */
3473 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3474 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3475 temp = copy_to_reg (temp);
3478 /* Make inhibit_defer_pop nonzero around the library call
3479 to force it to pop the bcopy-arguments right away. */
3481 #ifdef TARGET_MEM_FUNCTIONS
3482 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3483 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3484 convert_to_mode (TYPE_MODE (sizetype),
3485 size, TREE_UNSIGNED (sizetype)),
3486 TYPE_MODE (sizetype));
3488 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3489 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3490 convert_to_mode (TYPE_MODE (integer_type_node),
3492 TREE_UNSIGNED (integer_type_node)),
3493 TYPE_MODE (integer_type_node));
3498 else if (partial > 0)
3500 /* Scalar partly in registers. */
3502 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3505 /* # words of start of argument
3506 that we must make space for but need not store. */
3507 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3508 int args_offset = INTVAL (args_so_far);
3511 /* Push padding now if padding above and stack grows down,
3512 or if padding below and stack grows up.
3513 But if space already allocated, this has already been done. */
3514 if (extra && args_addr == 0
3515 && where_pad != none && where_pad != stack_direction)
3516 anti_adjust_stack (GEN_INT (extra));
3518 /* If we make space by pushing it, we might as well push
3519 the real data. Otherwise, we can leave OFFSET nonzero
3520 and leave the space uninitialized. */
3524 /* Now NOT_STACK gets the number of words that we don't need to
3525 allocate on the stack. */
3526 not_stack = partial - offset;
3528 /* If the partial register-part of the arg counts in its stack size,
3529 skip the part of stack space corresponding to the registers.
3530 Otherwise, start copying to the beginning of the stack space,
3531 by setting SKIP to 0. */
3532 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3534 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3535 x = validize_mem (force_const_mem (mode, x));
3537 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3538 SUBREGs of such registers are not allowed. */
3539 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3540 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3541 x = copy_to_reg (x);
3543 /* Loop over all the words allocated on the stack for this arg. */
3544 /* We can do it by words, because any scalar bigger than a word
3545 has a size a multiple of a word. */
3546 #ifndef PUSH_ARGS_REVERSED
3547 for (i = not_stack; i < size; i++)
3549 for (i = size - 1; i >= not_stack; i--)
3551 if (i >= not_stack + offset)
3552 emit_push_insn (operand_subword_force (x, i, mode),
3553 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3555 GEN_INT (args_offset + ((i - not_stack + skip)
3557 reg_parm_stack_space, alignment_pad);
3562 rtx target = NULL_RTX;
3565 /* Push padding now if padding above and stack grows down,
3566 or if padding below and stack grows up.
3567 But if space already allocated, this has already been done. */
3568 if (extra && args_addr == 0
3569 && where_pad != none && where_pad != stack_direction)
3570 anti_adjust_stack (GEN_INT (extra));
3572 #ifdef PUSH_ROUNDING
3573 if (args_addr == 0 && PUSH_ARGS)
3574 emit_single_push_insn (mode, x, type);
3578 if (GET_CODE (args_so_far) == CONST_INT)
3580 = memory_address (mode,
3581 plus_constant (args_addr,
3582 INTVAL (args_so_far)));
3584 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3587 dest = gen_rtx_MEM (mode, addr);
3590 set_mem_attributes (dest, type, 1);
3591 /* Function incoming arguments may overlap with sibling call
3592 outgoing arguments and we cannot allow reordering of reads
3593 from function arguments with stores to outgoing arguments
3594 of sibling calls. */
3595 MEM_ALIAS_SET (dest) = 0;
3598 emit_move_insn (dest, x);
3602 if (current_function_check_memory_usage && ! in_check_memory_usage)
3604 in_check_memory_usage = 1;
3606 target = get_push_address (GET_MODE_SIZE (mode));
3608 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3609 emit_library_call (chkr_copy_bitmap_libfunc,
3610 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3611 Pmode, XEXP (x, 0), Pmode,
3612 GEN_INT (GET_MODE_SIZE (mode)),
3613 TYPE_MODE (sizetype));
3615 emit_library_call (chkr_set_right_libfunc,
3616 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3617 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3618 TYPE_MODE (sizetype),
3619 GEN_INT (MEMORY_USE_RW),
3620 TYPE_MODE (integer_type_node));
3621 in_check_memory_usage = 0;
3626 /* If part should go in registers, copy that part
3627 into the appropriate registers. Do this now, at the end,
3628 since mem-to-mem copies above may do function calls. */
3629 if (partial > 0 && reg != 0)
3631 /* Handle calls that pass values in multiple non-contiguous locations.
3632 The Irix 6 ABI has examples of this. */
3633 if (GET_CODE (reg) == PARALLEL)
3634 emit_group_load (reg, x, -1, align); /* ??? size? */
3636 move_block_to_reg (REGNO (reg), x, partial, mode);
3639 if (extra && args_addr == 0 && where_pad == stack_direction)
3640 anti_adjust_stack (GEN_INT (extra));
3642 if (alignment_pad && args_addr == 0)
3643 anti_adjust_stack (alignment_pad);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.
3654 /* Only registers can be subtargets. */
3655 || GET_CODE (x) != REG
3656 /* If the register is readonly, it can't be set more than once. */
3657 || RTX_UNCHANGING_P (x)
3658 /* Don't use hard regs to avoid extending their life. */
3659 || REGNO (x) < FIRST_PSEUDO_REGISTER
3660 /* Avoid subtargets inside loops,
3661 since they hide some invariant expressions. */
3662 || preserve_subexpressions_p ())
3666 /* Expand an assignment that stores the value of FROM into TO.
3667 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3668 (This may contain a QUEUED rtx;
3669 if the value is constant, this rtx is a constant.)
3670 Otherwise, the returned value is NULL_RTX.
3672 SUGGEST_REG is no longer actually used.
3673 It used to mean, copy the value through a register
3674 and return that register, if that is possible.
3675 We now use WANT_VALUE to decide whether to do this. */
3678 expand_assignment (to, from, want_value, suggest_reg)
3681 int suggest_reg ATTRIBUTE_UNUSED;
3683 register rtx to_rtx = 0;
3686 /* Don't crash if the lhs of the assignment was erroneous. */
3688 if (TREE_CODE (to) == ERROR_MARK)
3690 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3691 return want_value ? result : NULL_RTX;
3694 /* Assignment of a structure component needs special treatment
3695 if the structure component's rtx is not simply a MEM.
3696 Assignment of an array element at a constant index, and assignment of
   an array element in an unaligned packed structure field, has the same
   problem.
3700 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3701 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3703 enum machine_mode mode1;
3704 HOST_WIDE_INT bitsize, bitpos;
3709 unsigned int alignment;
3712 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3713 &unsignedp, &volatilep, &alignment);
3715 /* If we are going to use store_bit_field and extract_bit_field,
3716 make sure to_rtx will be safe for multiple use. */
3718 if (mode1 == VOIDmode && want_value)
3719 tem = stabilize_reference (tem);
3721 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3724 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3726 if (GET_CODE (to_rtx) != MEM)
3729 if (GET_MODE (offset_rtx) != ptr_mode)
3731 #ifdef POINTERS_EXTEND_UNSIGNED
3732 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3734 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3738 /* A constant address in TO_RTX can have VOIDmode, we must not try
3739 to call force_reg for that case. Avoid that case. */
3740 if (GET_CODE (to_rtx) == MEM
3741 && GET_MODE (to_rtx) == BLKmode
3742 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3744 && (bitpos % bitsize) == 0
3745 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3746 && alignment == GET_MODE_ALIGNMENT (mode1))
3749 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3751 if (GET_CODE (XEXP (temp, 0)) == REG)
3754 to_rtx = (replace_equiv_address
3755 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3760 to_rtx = change_address (to_rtx, VOIDmode,
3761 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3762 force_reg (ptr_mode,
3768 if (GET_CODE (to_rtx) == MEM)
3770 /* When the offset is zero, to_rtx is the address of the
3771 structure we are storing into, and hence may be shared.
3772 We must make a new MEM before setting the volatile bit. */
3774 to_rtx = copy_rtx (to_rtx);
3776 MEM_VOLATILE_P (to_rtx) = 1;
3778 #if 0 /* This was turned off because, when a field is volatile
3779 in an object which is not volatile, the object may be in a register,
3780 and then we would abort over here. */
3786 if (TREE_CODE (to) == COMPONENT_REF
3787 && TREE_READONLY (TREE_OPERAND (to, 1)))
3790 to_rtx = copy_rtx (to_rtx);
3792 RTX_UNCHANGING_P (to_rtx) = 1;
3795 /* Check the access. */
3796 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3801 enum machine_mode best_mode;
3803 best_mode = get_best_mode (bitsize, bitpos,
3804 TYPE_ALIGN (TREE_TYPE (tem)),
3806 if (best_mode == VOIDmode)
3809 best_mode_size = GET_MODE_BITSIZE (best_mode);
3810 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3811 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3812 size *= GET_MODE_SIZE (best_mode);
3814 /* Check the access right of the pointer. */
3815 in_check_memory_usage = 1;
3817 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3818 VOIDmode, 3, to_addr, Pmode,
3819 GEN_INT (size), TYPE_MODE (sizetype),
3820 GEN_INT (MEMORY_USE_WO),
3821 TYPE_MODE (integer_type_node));
3822 in_check_memory_usage = 0;
3825 /* If this is a varying-length object, we must get the address of
3826 the source and do an explicit block move. */
3829 unsigned int from_align;
3830 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3832 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3834 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3835 MIN (alignment, from_align));
3842 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3844 /* Spurious cast for HPUX compiler. */
3845 ? ((enum machine_mode)
3846 TYPE_MODE (TREE_TYPE (to)))
3850 int_size_in_bytes (TREE_TYPE (tem)),
3851 get_alias_set (to));
3853 preserve_temp_slots (result);
3857 /* If the value is meaningful, convert RESULT to the proper mode.
3858 Otherwise, return nothing. */
3859 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3860 TYPE_MODE (TREE_TYPE (from)),
3862 TREE_UNSIGNED (TREE_TYPE (to)))
3867 /* If the rhs is a function call and its value is not an aggregate,
3868 call the function before we start to compute the lhs.
3869 This is needed for correct code for cases such as
3870 val = setjmp (buf) on machines where reference to val
3871 requires loading up part of an address in a separate insn.
3873 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3874 since it might be a promoted variable where the zero- or sign- extension
3875 needs to be done. Handling this in the normal way is safe because no
3876 computation is done before the call. */
3877 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3878 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3879 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3880 && GET_CODE (DECL_RTL (to)) == REG))
3885 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3887 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3889 /* Handle calls that return values in multiple non-contiguous locations.
3890 The Irix 6 ABI has examples of this. */
3891 if (GET_CODE (to_rtx) == PARALLEL)
3892 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3893 TYPE_ALIGN (TREE_TYPE (from)));
3894 else if (GET_MODE (to_rtx) == BLKmode)
3895 emit_block_move (to_rtx, value, expr_size (from),
3896 TYPE_ALIGN (TREE_TYPE (from)));
3899 #ifdef POINTERS_EXTEND_UNSIGNED
3900 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3901 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3902 value = convert_memory_address (GET_MODE (to_rtx), value);
3904 emit_move_insn (to_rtx, value);
3906 preserve_temp_slots (to_rtx);
3909 return want_value ? to_rtx : NULL_RTX;
3912 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3913 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3917 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3918 if (GET_CODE (to_rtx) == MEM)
3919 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3922 /* Don't move directly into a return register. */
3923 if (TREE_CODE (to) == RESULT_DECL
3924 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3929 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3931 if (GET_CODE (to_rtx) == PARALLEL)
3932 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3933 TYPE_ALIGN (TREE_TYPE (from)));
3935 emit_move_insn (to_rtx, temp);
3937 preserve_temp_slots (to_rtx);
3940 return want_value ? to_rtx : NULL_RTX;
3943 /* In case we are returning the contents of an object which overlaps
3944 the place the value is being stored, use a safe function when copying
3945 a value through a pointer into a structure value return block. */
3946 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3947 && current_function_returns_struct
3948 && !current_function_returns_pcc_struct)
3953 size = expr_size (from);
3954 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3955 EXPAND_MEMORY_USE_DONT);
3957 /* Copy the rights of the bitmap. */
3958 if (current_function_check_memory_usage)
3959 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3960 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3961 XEXP (from_rtx, 0), Pmode,
3962 convert_to_mode (TYPE_MODE (sizetype),
3963 size, TREE_UNSIGNED (sizetype)),
3964 TYPE_MODE (sizetype));
3966 #ifdef TARGET_MEM_FUNCTIONS
3967 emit_library_call (memmove_libfunc, LCT_NORMAL,
3968 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3969 XEXP (from_rtx, 0), Pmode,
3970 convert_to_mode (TYPE_MODE (sizetype),
3971 size, TREE_UNSIGNED (sizetype)),
3972 TYPE_MODE (sizetype));
3974 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3975 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3976 XEXP (to_rtx, 0), Pmode,
3977 convert_to_mode (TYPE_MODE (integer_type_node),
3978 size, TREE_UNSIGNED (integer_type_node)),
3979 TYPE_MODE (integer_type_node));
3982 preserve_temp_slots (to_rtx);
3985 return want_value ? to_rtx : NULL_RTX;
3988 /* Compute FROM and store the value in the rtx we got. */
3991 result = store_expr (from, to_rtx, want_value);
3992 preserve_temp_slots (result);
3995 return want_value ? result : NULL_RTX;
3998 /* Generate code for computing expression EXP,
3999 and storing the value into TARGET.
4000 TARGET may contain a QUEUED rtx.
4002 If WANT_VALUE is nonzero, return a copy of the value
4003 not in TARGET, so that we can be sure to use the proper
4004 value in a containing expression even if TARGET has something
4005 else stored in it. If possible, we copy the value through a pseudo
4006 and return that pseudo. Or, if the value is constant, we try to
4007 return the constant. In some cases, we return a pseudo
4008 copied *from* TARGET.
4010 If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?
4017 If WANT_VALUE is 0, we return NULL, to make sure
4018 to catch quickly any cases where the caller uses the value
4019 and fails to set WANT_VALUE. */
4022 store_expr (exp, target, want_value)
4024 register rtx target;
4028 int dont_return_target = 0;
4029 int dont_store_target = 0;
4031 if (TREE_CODE (exp) == COMPOUND_EXPR)
4033 /* Perform first part of compound expression, then assign from second
4035 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4037 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4039 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4041 /* For conditional expression, get safe form of the target. Then
4042 test the condition, doing the appropriate assignment on either
4043 side. This avoids the creation of unnecessary temporaries.
4044 For non-BLKmode, it is more efficient not to do this. */
4046 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4049 target = protect_from_queue (target, 1);
4051 do_pending_stack_adjust ();
4053 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4054 start_cleanup_deferral ();
4055 store_expr (TREE_OPERAND (exp, 1), target, 0);
4056 end_cleanup_deferral ();
4058 emit_jump_insn (gen_jump (lab2));
4061 start_cleanup_deferral ();
4062 store_expr (TREE_OPERAND (exp, 2), target, 0);
4063 end_cleanup_deferral ();
4068 return want_value ? target : NULL_RTX;
4070 else if (queued_subexp_p (target))
4071 /* If target contains a postincrement, let's not risk
4072 using it as the place to generate the rhs. */
4074 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4076 /* Expand EXP into a new pseudo. */
4077 temp = gen_reg_rtx (GET_MODE (target));
4078 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4081 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4083 /* If target is volatile, ANSI requires accessing the value
4084 *from* the target, if it is accessed. So make that happen.
4085 In no case return the target itself. */
4086 if (! MEM_VOLATILE_P (target) && want_value)
4087 dont_return_target = 1;
4089 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4090 && GET_MODE (target) != BLKmode)
4091 /* If target is in memory and caller wants value in a register instead,
4092 arrange that. Pass TARGET as target for expand_expr so that,
4093 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4094 We know expand_expr will not use the target in that case.
4095 Don't do this if TARGET is volatile because we are supposed
4096 to write it and then read it. */
4098 temp = expand_expr (exp, target, GET_MODE (target), 0);
4099 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4101 /* If TEMP is already in the desired TARGET, only copy it from
4102 memory and don't store it there again. */
4104 || (rtx_equal_p (temp, target)
4105 && ! side_effects_p (temp) && ! side_effects_p (target)))
4106 dont_store_target = 1;
4107 temp = copy_to_reg (temp);
4109 dont_return_target = 1;
4111 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
4113 than the declared mode, compute the result into its declared mode
4114 and then convert to the wider mode. Our value is the computed
4117 /* If we don't want a value, we can do the conversion inside EXP,
4118 which will often result in some optimizations. Do the conversion
4119 in two steps: first change the signedness, if needed, then
4120 the extend. But don't do this if the type of EXP is a subtype
4121 of something else since then the conversion might involve
4122 more than just converting modes. */
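      /* For example (illustrative; little-endian layout assumed): a
	 `signed char' variable that the target promotes to SImode lives in
	 (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set.  We
	 compute the value in QImode, and the convert_move below
	 sign-extends it into the full SImode register.  */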
4123 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4124 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4126 if (TREE_UNSIGNED (TREE_TYPE (exp))
4127 != SUBREG_PROMOTED_UNSIGNED_P (target))
4130 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4134 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4135 SUBREG_PROMOTED_UNSIGNED_P (target)),
4139 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4141 /* If TEMP is a volatile MEM and we want a result value, make
4142 the access now so it gets done only once. Likewise if
4143 it contains TARGET. */
4144 if (GET_CODE (temp) == MEM && want_value
4145 && (MEM_VOLATILE_P (temp)
4146 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4147 temp = copy_to_reg (temp);
4149 /* If TEMP is a VOIDmode constant, use convert_modes to make
4150 sure that we properly convert it. */
4151 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4152 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4153 TYPE_MODE (TREE_TYPE (exp)), temp,
4154 SUBREG_PROMOTED_UNSIGNED_P (target));
4156 convert_move (SUBREG_REG (target), temp,
4157 SUBREG_PROMOTED_UNSIGNED_P (target));
4159 /* If we promoted a constant, change the mode back down to match
4160 target. Otherwise, the caller might get confused by a result whose
4161 mode is larger than expected. */
4163 if (want_value && GET_MODE (temp) != GET_MODE (target)
4164 && GET_MODE (temp) != VOIDmode)
4166 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4167 SUBREG_PROMOTED_VAR_P (temp) = 1;
4168 SUBREG_PROMOTED_UNSIGNED_P (temp)
4169 = SUBREG_PROMOTED_UNSIGNED_P (target);
4172 return want_value ? temp : NULL_RTX;
4176 temp = expand_expr (exp, target, GET_MODE (target), 0);
4177 /* Return TARGET if it's a specified hardware register.
4178 If TARGET is a volatile mem ref, either return TARGET
4179 or return a reg copied *from* TARGET; ANSI requires this.
4181 Otherwise, if TEMP is not TARGET, return TEMP
4182 if it is constant (for efficiency),
4183 or if we really want the correct value. */
4184 if (!(target && GET_CODE (target) == REG
4185 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4186 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4187 && ! rtx_equal_p (temp, target)
4188 && (CONSTANT_P (temp) || want_value))
4189 dont_return_target = 1;
4192 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4193 the same as that of TARGET, adjust the constant. This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.
4196 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4197 && TREE_CODE (exp) != ERROR_MARK
4198 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4199 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4200 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4202 if (current_function_check_memory_usage
4203 && GET_CODE (target) == MEM
4204 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4206 in_check_memory_usage = 1;
4207 if (GET_CODE (temp) == MEM)
4208 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4209 VOIDmode, 3, XEXP (target, 0), Pmode,
4210 XEXP (temp, 0), Pmode,
4211 expr_size (exp), TYPE_MODE (sizetype));
4213 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4214 VOIDmode, 3, XEXP (target, 0), Pmode,
4215 expr_size (exp), TYPE_MODE (sizetype),
4216 GEN_INT (MEMORY_USE_WO),
4217 TYPE_MODE (integer_type_node));
4218 in_check_memory_usage = 0;
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
4223 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
4226 - expand_expr has used TARGET. In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
4229 - expand_expr has not used TARGET - that means that the source just
4230 happens to have the same RTX form. Since temp will have been created
4231 by expand_expr, it will compare unequal according to == .
4232 We must generate a copy in this case, to reach the correct number
4233 of volatile memory references. */
4235 if ((! rtx_equal_p (temp, target)
4236 || (temp != target && (side_effects_p (temp)
4237 || side_effects_p (target))))
4238 && TREE_CODE (exp) != ERROR_MARK
4239 && ! dont_store_target)
4241 target = protect_from_queue (target, 1);
4242 if (GET_MODE (temp) != GET_MODE (target)
4243 && GET_MODE (temp) != VOIDmode)
4245 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4246 if (dont_return_target)
4248 /* In this case, we will return TEMP,
4249 so make sure it has the proper mode.
4250 But don't forget to store the value into TARGET. */
4251 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4252 emit_move_insn (target, temp);
4255 convert_move (target, temp, unsignedp);
4258 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4260 /* Handle copying a string constant into an array.
4261 The string constant may be shorter than the array.
4262 So copy just the string's actual length, and clear the rest. */
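/* Added illustrative note (not from the original sources): for

       char buf[8] = "ab";

   the string constant occupies three bytes (including the trailing
   '\0'), so three bytes are copied into BUF and the remaining five
   are cleared by the code below.  */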
4266 /* Get the size of the data type of the string,
4267 which is actually the size of the target. */
4268 size = expr_size (exp);
4269 if (GET_CODE (size) == CONST_INT
4270 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4271 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4274 /* Compute the size of the data to copy from the string. */
4276 tree copy_size = size_binop (MIN_EXPR,
4277 make_tree (sizetype, size),
4278 size_int (TREE_STRING_LENGTH (exp)));
4279 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4280 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4284 /* Copy that much. */
4285 emit_block_move (target, temp, copy_size_rtx,
4286 TYPE_ALIGN (TREE_TYPE (exp)));
4288 /* Figure out how much is left in TARGET that we have to clear.
4289 Do all calculations in ptr_mode. */
4291 addr = XEXP (target, 0);
4292 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4294 if (GET_CODE (copy_size_rtx) == CONST_INT)
4296 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4297 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4299 align = MIN (align, (unsigned int) (BITS_PER_UNIT
4300 * (INTVAL (copy_size_rtx)
4301 & - INTVAL (copy_size_rtx))));
4305 addr = force_reg (ptr_mode, addr);
4306 addr = expand_binop (ptr_mode, add_optab, addr,
4307 copy_size_rtx, NULL_RTX, 0,
4310 size = expand_binop (ptr_mode, sub_optab, size,
4311 copy_size_rtx, NULL_RTX, 0,
4314 align = BITS_PER_UNIT;
4315 label = gen_label_rtx ();
4316 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4317 GET_MODE (size), 0, 0, label);
4319 align = MIN (align, expr_align (copy_size));
4321 if (size != const0_rtx)
4323 rtx dest = gen_rtx_MEM (BLKmode, addr);
4325 MEM_COPY_ATTRIBUTES (dest, target);
4327 /* Be sure we can write on ADDR. */
4328 in_check_memory_usage = 1;
4329 if (current_function_check_memory_usage)
4330 emit_library_call (chkr_check_addr_libfunc,
4331 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4333 size, TYPE_MODE (sizetype),
4334 GEN_INT (MEMORY_USE_WO),
4335 TYPE_MODE (integer_type_node));
4336 in_check_memory_usage = 0;
4337 clear_storage (dest, size, align);
4344 /* Handle calls that return values in multiple non-contiguous locations.
4345 The Irix 6 ABI has examples of this. */
4346 else if (GET_CODE (target) == PARALLEL)
4347 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4348 TYPE_ALIGN (TREE_TYPE (exp)));
4349 else if (GET_MODE (temp) == BLKmode)
4350 emit_block_move (target, temp, expr_size (exp),
4351 TYPE_ALIGN (TREE_TYPE (exp)));
4353 emit_move_insn (target, temp);
4356 /* If we don't want a value, return NULL_RTX. */
4360 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4361 ??? The latter test doesn't seem to make sense. */
4362 else if (dont_return_target && GET_CODE (temp) != MEM)
4365 /* Return TARGET itself if it is a hard register. */
4366 else if (want_value && GET_MODE (target) != BLKmode
4367 && ! (GET_CODE (target) == REG
4368 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4369 return copy_to_reg (target);
4375 /* Return 1 if EXP just contains zeros. */
4383 switch (TREE_CODE (exp))
4387 case NON_LVALUE_EXPR:
4388 return is_zeros_p (TREE_OPERAND (exp, 0));
4391 return integer_zerop (exp);
4395 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4398 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4401 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4402 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4403 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4404 if (! is_zeros_p (TREE_VALUE (elt)))
4414 /* Return 1 if EXP contains mostly (3/4) zeros. */
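/* Added illustrative note (not from the original sources): for

       int a[8] = { 0, 0, 0, 0, 0, 0, 1, 2 };

   the constructor has 8 elements of which 6 are zero, and
   4 * 6 >= 3 * 8 holds, so the array counts as mostly zero and a
   caller may clear it wholesale and store only the nonzero tail.  */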
4417 mostly_zeros_p (exp)
4420 if (TREE_CODE (exp) == CONSTRUCTOR)
4422 int elts = 0, zeros = 0;
4423 tree elt = CONSTRUCTOR_ELTS (exp);
4424 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4426 /* If there are no ranges of true bits, it is all zero. */
4427 return elt == NULL_TREE;
4429 for (; elt; elt = TREE_CHAIN (elt))
4431 /* We do not handle the case where the index is a RANGE_EXPR,
4432 so the statistic will be somewhat inaccurate.
4433 We do make a more accurate count in store_constructor itself,
4434 so since this function is only used for nested array elements,
4435 this should be close enough. */
4436 if (mostly_zeros_p (TREE_VALUE (elt)))
4441 return 4 * zeros >= 3 * elts;
4444 return is_zeros_p (exp);
4447 /* Helper function for store_constructor.
4448 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4449 TYPE is the type of the CONSTRUCTOR, not the element type.
4450 ALIGN and CLEARED are as for store_constructor.
4451 ALIAS_SET is the alias set to use for any stores.
4453 This provides a recursive shortcut back to store_constructor when it isn't
4454 necessary to go through store_field. This is so that we can pass through
4455 the cleared field to let store_constructor know that we may not have to
4456 clear a substructure if the outer structure has already been cleared. */
4459 store_constructor_field (target, bitsize, bitpos,
4460 mode, exp, type, align, cleared, alias_set)
4462 unsigned HOST_WIDE_INT bitsize;
4463 HOST_WIDE_INT bitpos;
4464 enum machine_mode mode;
4470 if (TREE_CODE (exp) == CONSTRUCTOR
4471 && bitpos % BITS_PER_UNIT == 0
4472 /* If we have a non-zero bitpos for a register target, then we just
4473 let store_field do the bitfield handling. This is unlikely to
4474 generate unnecessary clear instructions anyways. */
4475 && (bitpos == 0 || GET_CODE (target) == MEM))
4479 = adjust_address (target,
4480 GET_MODE (target) == BLKmode
4482 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4483 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4486 /* Show the alignment may no longer be what it was and update the alias
4487 set, if required. */
4489 align = MIN (align, (unsigned int) bitpos & - bitpos);
4490 if (GET_CODE (target) == MEM)
4491 MEM_ALIAS_SET (target) = alias_set;
4493 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4496 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4497 int_size_in_bytes (type), alias_set);
4500 /* Store the value of constructor EXP into the rtx TARGET.
4501 TARGET is either a REG or a MEM.
4502 ALIGN is the maximum known alignment for TARGET.
4503 CLEARED is true if TARGET is known to have been zero'd.
4504 SIZE is the number of bytes of TARGET we are allowed to modify: this
4505 may not be the same as the size of EXP if we are assigning to a field
4506 which has been packed to exclude padding bits. */
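/* Added illustrative note (not from the original sources): for

       struct s { int a, b, c, d; };
       struct s x = { 5 };

   the constructor supplies fewer values than the structure has
   fields, so the code below first clears all of X and then stores
   only the explicit value 5 into X.A.  */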
4509 store_constructor (exp, target, align, cleared, size)
4516 tree type = TREE_TYPE (exp);
4517 #ifdef WORD_REGISTER_OPERATIONS
4518 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4521 /* We know our target cannot conflict, since safe_from_p has been called. */
4523 /* Don't try copying piece by piece into a hard register
4524 since that is vulnerable to being clobbered by EXP.
4525 Instead, construct in a pseudo register and then copy it all. */
4526 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4528 rtx temp = gen_reg_rtx (GET_MODE (target));
4529 store_constructor (exp, temp, align, cleared, size);
4530 emit_move_insn (target, temp);
4535 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4536 || TREE_CODE (type) == QUAL_UNION_TYPE)
4540 /* Inform later passes that the whole union value is dead. */
4541 if ((TREE_CODE (type) == UNION_TYPE
4542 || TREE_CODE (type) == QUAL_UNION_TYPE)
4545 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4547 /* If the constructor is empty, clear the union. */
4548 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4549 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4552 /* If we are building a static constructor into a register,
4553 set the initial value as zero so we can fold the value into
4554 a constant. But if more than one register is involved,
4555 this probably loses. */
4556 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4557 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4560 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4565 /* If the constructor has fewer fields than the structure
4566 or if we are initializing the structure to mostly zeros,
4567 clear the whole structure first. Don't do this if TARGET is a
4568 register whose mode size isn't equal to SIZE since clear_storage
4569 can't handle this case. */
4571 && ((list_length (CONSTRUCTOR_ELTS (exp))
4572 != fields_length (type))
4573 || mostly_zeros_p (exp))
4574 && (GET_CODE (target) != REG
4575 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4578 clear_storage (target, GEN_INT (size), align);
4583 /* Inform later passes that the old value is dead. */
4584 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4586 /* Store each element of the constructor into
4587 the corresponding field of TARGET. */
4589 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4591 register tree field = TREE_PURPOSE (elt);
4592 #ifdef WORD_REGISTER_OPERATIONS
4593 tree value = TREE_VALUE (elt);
4595 register enum machine_mode mode;
4596 HOST_WIDE_INT bitsize;
4597 HOST_WIDE_INT bitpos = 0;
4600 rtx to_rtx = target;
4602 /* Just ignore missing fields.
4603 We cleared the whole structure, above,
4604 if any fields are missing. */
4608 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4611 if (host_integerp (DECL_SIZE (field), 1))
4612 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4616 unsignedp = TREE_UNSIGNED (field);
4617 mode = DECL_MODE (field);
4618 if (DECL_BIT_FIELD (field))
4621 offset = DECL_FIELD_OFFSET (field);
4622 if (host_integerp (offset, 0)
4623 && host_integerp (bit_position (field), 0))
4625 bitpos = int_bit_position (field);
4629 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4635 if (contains_placeholder_p (offset))
4636 offset = build (WITH_RECORD_EXPR, sizetype,
4637 offset, make_tree (TREE_TYPE (exp), target));
4639 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4640 if (GET_CODE (to_rtx) != MEM)
4643 if (GET_MODE (offset_rtx) != ptr_mode)
4645 #ifdef POINTERS_EXTEND_UNSIGNED
4646 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4648 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4653 = change_address (to_rtx, VOIDmode,
4654 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4655 force_reg (ptr_mode,
4657 align = DECL_OFFSET_ALIGN (field);
4660 if (TREE_READONLY (field))
4662 if (GET_CODE (to_rtx) == MEM)
4663 to_rtx = copy_rtx (to_rtx);
4665 RTX_UNCHANGING_P (to_rtx) = 1;
4668 #ifdef WORD_REGISTER_OPERATIONS
4669 /* If this initializes a field that is smaller than a word, at the
4670 start of a word, try to widen it to a full word.
4671 This special case allows us to output C++ member function
4672 initializations in a form that the optimizers can understand. */
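/* Added illustrative sketch (not from the original sources): storing
   the constant 5 into a 16-bit field at bit 0 of a register on a
   32-bit target becomes a full-word store of 5; if the target is
   big-endian, the constant is first shifted left by
   BITS_PER_WORD - 16 = 16 so it lands in the upper half.  */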
4673 if (GET_CODE (target) == REG
4674 && bitsize < BITS_PER_WORD
4675 && bitpos % BITS_PER_WORD == 0
4676 && GET_MODE_CLASS (mode) == MODE_INT
4677 && TREE_CODE (value) == INTEGER_CST
4679 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4681 tree type = TREE_TYPE (value);
4682 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4684 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4685 value = convert (type, value);
4687 if (BYTES_BIG_ENDIAN)
4689 = fold (build (LSHIFT_EXPR, type, value,
4690 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4691 bitsize = BITS_PER_WORD;
4695 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4696 TREE_VALUE (elt), type, align, cleared,
4697 (DECL_NONADDRESSABLE_P (field)
4698 && GET_CODE (to_rtx) == MEM)
4699 ? MEM_ALIAS_SET (to_rtx)
4700 : get_alias_set (TREE_TYPE (field)));
4703 else if (TREE_CODE (type) == ARRAY_TYPE)
4708 tree domain = TYPE_DOMAIN (type);
4709 tree elttype = TREE_TYPE (type);
4710 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4711 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4712 HOST_WIDE_INT minelt;
4713 HOST_WIDE_INT maxelt;
4715 /* If we have constant bounds for the range of the type, get them. */
4718 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4719 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4722 /* If the constructor has fewer elements than the array,
4723 clear the whole array first. Similarly if this is
4724 a static constructor of a non-BLKmode object. */
4725 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4729 HOST_WIDE_INT count = 0, zero_count = 0;
4730 need_to_clear = ! const_bounds_p;
4732 /* This loop is a more accurate version of the loop in
4733 mostly_zeros_p (it handles RANGE_EXPR in an index).
4734 It is also needed to check for missing elements. */
4735 for (elt = CONSTRUCTOR_ELTS (exp);
4736 elt != NULL_TREE && ! need_to_clear;
4737 elt = TREE_CHAIN (elt))
4739 tree index = TREE_PURPOSE (elt);
4740 HOST_WIDE_INT this_node_count;
4742 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4744 tree lo_index = TREE_OPERAND (index, 0);
4745 tree hi_index = TREE_OPERAND (index, 1);
4747 if (! host_integerp (lo_index, 1)
4748 || ! host_integerp (hi_index, 1))
4754 this_node_count = (tree_low_cst (hi_index, 1)
4755 - tree_low_cst (lo_index, 1) + 1);
4758 this_node_count = 1;
4760 count += this_node_count;
4761 if (mostly_zeros_p (TREE_VALUE (elt)))
4762 zero_count += this_node_count;
4765 /* Clear the entire array first if there are any missing elements,
4766 or if the incidence of zero elements is >= 75%. */
4768 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4772 if (need_to_clear && size > 0)
4775 clear_storage (target, GEN_INT (size), align);
4779 /* Inform later passes that the old value is dead. */
4780 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4782 /* Store each element of the constructor into
4783 the corresponding element of TARGET, determined
4784 by counting the elements. */
4785 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4787 elt = TREE_CHAIN (elt), i++)
4789 register enum machine_mode mode;
4790 HOST_WIDE_INT bitsize;
4791 HOST_WIDE_INT bitpos;
4793 tree value = TREE_VALUE (elt);
4794 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4795 tree index = TREE_PURPOSE (elt);
4796 rtx xtarget = target;
4798 if (cleared && is_zeros_p (value))
4801 unsignedp = TREE_UNSIGNED (elttype);
4802 mode = TYPE_MODE (elttype);
4803 if (mode == BLKmode)
4804 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4805 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4808 bitsize = GET_MODE_BITSIZE (mode);
4810 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4812 tree lo_index = TREE_OPERAND (index, 0);
4813 tree hi_index = TREE_OPERAND (index, 1);
4814 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4815 struct nesting *loop;
4816 HOST_WIDE_INT lo, hi, count;
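/* Added illustrative note (not from the original sources): a
   RANGE_EXPR index arises from the GNU C designated-range
   initializer

       int a[10] = { [2 ... 5] = 7 };

   for which LO_INDEX is 2 and HI_INDEX is 5.  */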
4819 /* If the range is constant and "small", unroll the loop. */
4821 && host_integerp (lo_index, 0)
4822 && host_integerp (hi_index, 0)
4823 && (lo = tree_low_cst (lo_index, 0),
4824 hi = tree_low_cst (hi_index, 0),
4825 count = hi - lo + 1,
4826 (GET_CODE (target) != MEM
4828 || (host_integerp (TYPE_SIZE (elttype), 1)
4829 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4832 lo -= minelt; hi -= minelt;
4833 for (; lo <= hi; lo++)
4835 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4836 store_constructor_field
4837 (target, bitsize, bitpos, mode, value, type, align,
4839 TYPE_NONALIASED_COMPONENT (type)
4840 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4845 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4846 loop_top = gen_label_rtx ();
4847 loop_end = gen_label_rtx ();
4849 unsignedp = TREE_UNSIGNED (domain);
4851 index = build_decl (VAR_DECL, NULL_TREE, domain);
4854 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4856 SET_DECL_RTL (index, index_r);
4857 if (TREE_CODE (value) == SAVE_EXPR
4858 && SAVE_EXPR_RTL (value) == 0)
4860 /* Make sure value gets expanded once before the loop. */
4862 expand_expr (value, const0_rtx, VOIDmode, 0);
4865 store_expr (lo_index, index_r, 0);
4866 loop = expand_start_loop (0);
4868 /* Assign value to element index. */
4870 = convert (ssizetype,
4871 fold (build (MINUS_EXPR, TREE_TYPE (index),
4872 index, TYPE_MIN_VALUE (domain))));
4873 position = size_binop (MULT_EXPR, position,
4875 TYPE_SIZE_UNIT (elttype)));
4877 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4878 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4879 xtarget = change_address (target, mode, addr);
4880 if (TREE_CODE (value) == CONSTRUCTOR)
4881 store_constructor (value, xtarget, align, cleared,
4882 bitsize / BITS_PER_UNIT);
4884 store_expr (value, xtarget, 0);
4886 expand_exit_loop_if_false (loop,
4887 build (LT_EXPR, integer_type_node,
4890 expand_increment (build (PREINCREMENT_EXPR,
4892 index, integer_one_node), 0, 0);
4894 emit_label (loop_end);
4897 else if ((index != 0 && ! host_integerp (index, 0))
4898 || ! host_integerp (TYPE_SIZE (elttype), 1))
4904 index = ssize_int (1);
4907 index = convert (ssizetype,
4908 fold (build (MINUS_EXPR, index,
4909 TYPE_MIN_VALUE (domain))));
4911 position = size_binop (MULT_EXPR, index,
4913 TYPE_SIZE_UNIT (elttype)));
4914 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4915 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4916 xtarget = change_address (target, mode, addr);
4917 store_expr (value, xtarget, 0);
4922 bitpos = ((tree_low_cst (index, 0) - minelt)
4923 * tree_low_cst (TYPE_SIZE (elttype), 1));
4925 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4927 store_constructor_field (target, bitsize, bitpos, mode, value,
4928 type, align, cleared,
4929 TYPE_NONALIASED_COMPONENT (type)
4930 && GET_CODE (target) == MEM
4931 ? MEM_ALIAS_SET (target) :
4932 get_alias_set (elttype));
4938 /* Set constructor assignments. */
4939 else if (TREE_CODE (type) == SET_TYPE)
4941 tree elt = CONSTRUCTOR_ELTS (exp);
4942 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4943 tree domain = TYPE_DOMAIN (type);
4944 tree domain_min, domain_max, bitlength;
4946 /* The default implementation strategy is to extract the constant
4947 parts of the constructor, use that to initialize the target,
4948 and then "or" in whatever non-constant ranges we need in addition.
4950 If a large set is all zero or all ones, it is
4951 probably better to set it using memset (if available) or bzero.
4952 Also, if a large set has just a single range, it may also be
4953 better to first clear the whole set (using
4954 bzero/memset), and then set the bits we want. */
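/* Added illustrative note (not from the original sources): for a
   Pascal-style set over 0..63 whose constructor contains only the
   range [8..15], it can be cheaper to clear all eight bytes and
   then store the single all-ones byte covering bits 8-15 than to
   assemble the full bit pattern one word at a time.  */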
4956 /* Check for all zeros. */
4957 if (elt == NULL_TREE && size > 0)
4960 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4964 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4965 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4966 bitlength = size_binop (PLUS_EXPR,
4967 size_diffop (domain_max, domain_min),
4970 nbits = tree_low_cst (bitlength, 1);
4972 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4973 are "complicated" (more than one range), initialize (the
4974 constant parts) by copying from a constant. */
4975 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4976 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4978 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4979 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4980 char *bit_buffer = (char *) alloca (nbits);
4981 HOST_WIDE_INT word = 0;
4982 unsigned int bit_pos = 0;
4983 unsigned int ibit = 0;
4984 unsigned int offset = 0; /* In bytes from beginning of set. */
4986 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4989 if (bit_buffer[ibit])
4991 if (BYTES_BIG_ENDIAN)
4992 word |= (1 << (set_word_size - 1 - bit_pos));
4994 word |= 1 << bit_pos;
4998 if (bit_pos >= set_word_size || ibit == nbits)
5000 if (word != 0 || ! cleared)
5002 rtx datum = GEN_INT (word);
5005 /* The assumption here is that it is safe to use
5006 XEXP if the set is multi-word, but not if
5007 it's single-word. */
5008 if (GET_CODE (target) == MEM)
5009 to_rtx = adjust_address (target, mode, offset);
5010 else if (offset == 0)
5014 emit_move_insn (to_rtx, datum);
5021 offset += set_word_size / BITS_PER_UNIT;
5026 /* Don't bother clearing storage if the set is all ones. */
5027 if (TREE_CHAIN (elt) != NULL_TREE
5028 || (TREE_PURPOSE (elt) == NULL_TREE
5030 : ( ! host_integerp (TREE_VALUE (elt), 0)
5031 || ! host_integerp (TREE_PURPOSE (elt), 0)
5032 || (tree_low_cst (TREE_VALUE (elt), 0)
5033 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5034 != (HOST_WIDE_INT) nbits))))
5035 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5037 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5039 /* Start of range of element or NULL. */
5040 tree startbit = TREE_PURPOSE (elt);
5041 /* End of range of element, or element value. */
5042 tree endbit = TREE_VALUE (elt);
5043 #ifdef TARGET_MEM_FUNCTIONS
5044 HOST_WIDE_INT startb, endb;
5046 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5048 bitlength_rtx = expand_expr (bitlength,
5049 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5051 /* Handle non-range tuple element like [ expr ]. */
5052 if (startbit == NULL_TREE)
5054 startbit = save_expr (endbit);
5058 startbit = convert (sizetype, startbit);
5059 endbit = convert (sizetype, endbit);
5060 if (! integer_zerop (domain_min))
5062 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5063 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5065 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5066 EXPAND_CONST_ADDRESS);
5067 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5068 EXPAND_CONST_ADDRESS);
5074 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5077 emit_move_insn (targetx, target);
5080 else if (GET_CODE (target) == MEM)
5085 #ifdef TARGET_MEM_FUNCTIONS
5086 /* Optimization: If startbit and endbit are
5087 constants divisible by BITS_PER_UNIT,
5088 call memset instead. */
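/* Added worked example (not from the original sources): with
   STARTBIT = 8 and ENDBIT = 23, both 8 and 23 + 1 = 24 are
   divisible by BITS_PER_UNIT (8), so the range covers bytes 1
   and 2 exactly and a memset of (24 - 8) / 8 = 2 bytes with the
   all-ones value suffices.  */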
5089 if (TREE_CODE (startbit) == INTEGER_CST
5090 && TREE_CODE (endbit) == INTEGER_CST
5091 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5092 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5094 emit_library_call (memset_libfunc, LCT_NORMAL,
5096 plus_constant (XEXP (targetx, 0),
5097 startb / BITS_PER_UNIT),
5099 constm1_rtx, TYPE_MODE (integer_type_node),
5100 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5101 TYPE_MODE (sizetype));
5105 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5106 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5107 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5108 startbit_rtx, TYPE_MODE (sizetype),
5109 endbit_rtx, TYPE_MODE (sizetype));
5112 emit_move_insn (target, targetx);
5120 /* Store the value of EXP (an expression tree)
5121 into a subfield of TARGET which has mode MODE and occupies
5122 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5123 If MODE is VOIDmode, it means that we are storing into a bit-field.
5125 If VALUE_MODE is VOIDmode, return nothing in particular.
5126 UNSIGNEDP is not used in this case.
5128 Otherwise, return an rtx for the value stored. This rtx
5129 has mode VALUE_MODE if that is convenient to do.
5130 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5132 ALIGN is the alignment that TARGET is known to have.
5133 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5135 ALIAS_SET is the alias set for the destination. This value will
5136 (in general) be different from that for TARGET, since TARGET is a
5137 reference to the containing structure. */
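/* Added illustrative note (not from the original sources): for

       struct s { unsigned int f : 5; } x;
       x.f = v;

   store_field is reached with BITSIZE = 5, BITPOS = 0 and
   MODE = VOIDmode, so the assignment goes through the bit-field
   machinery rather than an ordinary memory reference.  */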
5140 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5141 unsignedp, align, total_size, alias_set)
5143 HOST_WIDE_INT bitsize;
5144 HOST_WIDE_INT bitpos;
5145 enum machine_mode mode;
5147 enum machine_mode value_mode;
5150 HOST_WIDE_INT total_size;
5153 HOST_WIDE_INT width_mask = 0;
5155 if (TREE_CODE (exp) == ERROR_MARK)
5158 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5161 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5163 if (bitsize < HOST_BITS_PER_WIDE_INT)
5164 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5166 /* If we are storing into an unaligned field of an aligned union that is
5167 in a register, we may have the mode of TARGET being an integer mode but
5168 MODE == BLKmode. In that case, get an aligned object whose size and
5169 alignment are the same as TARGET and store TARGET into it (we can avoid
5170 the store if the field being stored is the entire width of TARGET). Then
5171 call ourselves recursively to store the field into a BLKmode version of
5172 that object. Finally, load from the object into TARGET. This is not
5173 very efficient in general, but should only be slightly more expensive
5174 than the otherwise-required unaligned accesses. Perhaps this can be
5175 cleaned up later. */
5178 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5182 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5185 rtx blk_object = copy_rtx (object);
5187 PUT_MODE (blk_object, BLKmode);
5189 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5190 emit_move_insn (object, target);
5192 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5193 align, total_size, alias_set);
5195 /* Even though we aren't returning target, we need to
5196 give it the updated value. */
5197 emit_move_insn (target, object);
5202 if (GET_CODE (target) == CONCAT)
5204 /* We're storing into a struct containing a single __complex. */
5208 return store_expr (exp, target, 0);
5211 /* If the structure is in a register or if the component
5212 is a bit field, we cannot use addressing to access it.
5213 Use bit-field techniques or SUBREG to store in it. */
5215 if (mode == VOIDmode
5216 || (mode != BLKmode && ! direct_store[(int) mode]
5217 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5218 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5219 || GET_CODE (target) == REG
5220 || GET_CODE (target) == SUBREG
5221 /* If the field isn't aligned enough to store as an ordinary memref,
5222 store it as a bit field. */
5223 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5224 && (align < GET_MODE_ALIGNMENT (mode)
5225 || bitpos % GET_MODE_ALIGNMENT (mode)))
5226 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5227 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5228 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5229 /* If the RHS and field are a constant size and the size of the
5230 RHS isn't the same size as the bitfield, we must use bitfield operations. */
|| (bitsize >= 0
5233 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5234 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5236 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5238 /* If BITSIZE is narrower than the size of the type of EXP
5239 we will be narrowing TEMP. Normally, what's wanted are the
5240 low-order bits. However, if EXP's type is a record and this is a
5241 big-endian machine, we want the upper BITSIZE bits. */
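/* Added illustrative sketch (not from the original sources): if TEMP
   holds a record value in a 32-bit register and BITSIZE is 16, a
   big-endian target keeps the record's bytes in the high-order end,
   so the shift below moves them down by 32 - 16 = 16 bits before
   the narrowing store.  */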
5242 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5243 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5244 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5245 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5246 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5250 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5252 if (mode != VOIDmode && mode != BLKmode
5253 && mode != TYPE_MODE (TREE_TYPE (exp)))
5254 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5256 /* If the modes of TARGET and TEMP are both BLKmode, both
5257 must be in memory and BITPOS must be aligned on a byte
5258 boundary. If so, we simply do a block copy. */
5259 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5261 unsigned int exp_align = expr_align (exp);
5263 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5264 || bitpos % BITS_PER_UNIT != 0)
5267 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5269 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5270 align = MIN (exp_align, align);
5272 /* Find an alignment that is consistent with the bit position. */
5273 while ((bitpos % align) != 0)
5276 emit_block_move (target, temp,
5277 bitsize == -1 ? expr_size (exp)
5278 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5282 return value_mode == VOIDmode ? const0_rtx : target;
5285 /* Store the value in the bitfield. */
5286 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5287 if (value_mode != VOIDmode)
5289 /* The caller wants an rtx for the value. */
5290 /* If possible, avoid refetching from the bitfield itself. */
5292 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5295 enum machine_mode tmode;
5298 return expand_and (temp,
5302 GET_MODE (temp) == VOIDmode
5304 : GET_MODE (temp))), NULL_RTX);
5305 tmode = GET_MODE (temp);
5306 if (tmode == VOIDmode)
5308 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5309 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5310 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5312 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5313 NULL_RTX, value_mode, 0, align,
5320 rtx addr = XEXP (target, 0);
5323 /* If a value is wanted, it must be the lhs;
5324 so make the address stable for multiple use. */
5326 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5327 && ! CONSTANT_ADDRESS_P (addr)
5328 /* A frame-pointer reference is already stable. */
5329 && ! (GET_CODE (addr) == PLUS
5330 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5331 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5332 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5333 target = replace_equiv_address (target, copy_to_reg (addr));
5335 /* Now build a reference to just the desired component. */
5337 to_rtx = copy_rtx (adjust_address (target, mode,
5338 bitpos / BITS_PER_UNIT));
5340 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5341 /* If the address of the structure varies, then it might be on
5342 the stack. And, stack slots may be shared across scopes.
5343 So, two different structures, of different types, can end up
5344 at the same location. We will give the structures alias set
5345 zero; here we must be careful not to give non-zero alias sets to their users. */
5347 if (!rtx_varies_p (addr, /*for_alias=*/0))
5348 MEM_ALIAS_SET (to_rtx) = alias_set;
5350 MEM_ALIAS_SET (to_rtx) = 0;
5352 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5356 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5357 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5358 codes and find the ultimate containing object, which we return.
5360 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5361 bit position, and *PUNSIGNEDP to the signedness of the field.
5362 If the position of the field is variable, we store a tree
5363 giving the variable offset (in units) in *POFFSET.
5364 This offset is in addition to the bit position.
5365 If the position is not variable, we store 0 in *POFFSET.
5366 We set *PALIGNMENT to the alignment of the address that will be
5367 computed. This is the alignment of the thing we return if *POFFSET
5368 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5370 If any of the extraction expressions is volatile,
5371 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5373 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5374 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5377 If the field describes a variable-sized object, *PMODE is set to
5378 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5379 this case, but the address of the object can be found. */
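/* Added illustrative note (not from the original sources): on a
   typical 32-bit target, the reference P->F for

       struct s { int pad; short f; } *p;

   yields the containing object *P with *PBITSIZE = 16,
   *PBITPOS = 32, *POFFSET = 0 and *PMODE = HImode.  */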
5382 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5383 punsignedp, pvolatilep, palignment)
5385 HOST_WIDE_INT *pbitsize;
5386 HOST_WIDE_INT *pbitpos;
5388 enum machine_mode *pmode;
5391 unsigned int *palignment;
5394 enum machine_mode mode = VOIDmode;
5395 tree offset = size_zero_node;
5396 tree bit_offset = bitsize_zero_node;
5397 unsigned int alignment = BIGGEST_ALIGNMENT;
5400 /* First get the mode, signedness, and size. We do this from just the
5401 outermost expression. */
5402 if (TREE_CODE (exp) == COMPONENT_REF)
5404 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5405 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5406 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5408 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5410 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5412 size_tree = TREE_OPERAND (exp, 1);
5413 *punsignedp = TREE_UNSIGNED (exp);
5417 mode = TYPE_MODE (TREE_TYPE (exp));
5418 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5420 if (mode == BLKmode)
5421 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5423 *pbitsize = GET_MODE_BITSIZE (mode);
5428 if (! host_integerp (size_tree, 1))
5429 mode = BLKmode, *pbitsize = -1;
5431 *pbitsize = tree_low_cst (size_tree, 1);
5434 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5435 and find the ultimate containing object. */
5438 if (TREE_CODE (exp) == BIT_FIELD_REF)
5439 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5440 else if (TREE_CODE (exp) == COMPONENT_REF)
5442 tree field = TREE_OPERAND (exp, 1);
5443 tree this_offset = DECL_FIELD_OFFSET (field);
5445 /* If this field hasn't been filled in yet, don't go
5446 past it. This should only happen when folding expressions
5447 made during type construction. */
5448 if (this_offset == 0)
5450 else if (! TREE_CONSTANT (this_offset)
5451 && contains_placeholder_p (this_offset))
5452 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5454 offset = size_binop (PLUS_EXPR, offset, this_offset);
5455 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5456 DECL_FIELD_BIT_OFFSET (field));
5458 if (! host_integerp (offset, 0))
5459 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5462 else if (TREE_CODE (exp) == ARRAY_REF
5463 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5465 tree index = TREE_OPERAND (exp, 1);
5466 tree array = TREE_OPERAND (exp, 0);
5467 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5468 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5469 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5471 /* We assume all arrays have sizes that are a multiple of a byte.
5472 First subtract the lower bound, if any, in the type of the
5473 index, then convert to sizetype and multiply by the size of the element. */
5475 if (low_bound != 0 && ! integer_zerop (low_bound))
5476 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5479 /* If the index has a self-referential type, pass it to a
5480 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5481 component to one. */
5482 if (! TREE_CONSTANT (index)
5483 && contains_placeholder_p (index))
5484 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5485 if (! TREE_CONSTANT (unit_size)
5486 && contains_placeholder_p (unit_size))
5487 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5489 offset = size_binop (PLUS_EXPR, offset,
5490 size_binop (MULT_EXPR,
5491 convert (sizetype, index),
5495 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5496 && ! ((TREE_CODE (exp) == NOP_EXPR
5497 || TREE_CODE (exp) == CONVERT_EXPR)
5498 && (TYPE_MODE (TREE_TYPE (exp))
5499 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5502 /* If any reference in the chain is volatile, the effect is volatile. */
5503 if (TREE_THIS_VOLATILE (exp))
5506 /* If the offset is non-constant already, then we can't assume any
5507 alignment more than the alignment here. */
5508 if (! TREE_CONSTANT (offset))
5509 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5511 exp = TREE_OPERAND (exp, 0);
5515 alignment = MIN (alignment, DECL_ALIGN (exp));
5516 else if (TREE_TYPE (exp) != 0)
5517 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5519 /* If OFFSET is constant, see if we can return the whole thing as a
5520 constant bit position. Otherwise, split it up. */
5521 if (host_integerp (offset, 0)
5522 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5524 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5525 && host_integerp (tem, 0))
5526 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5528 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5531 *palignment = alignment;
5535 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5537 static enum memory_use_mode
5538 get_memory_usage_from_modifier (modifier)
5539 enum expand_modifier modifier;
5545 return MEMORY_USE_RO;
5547 case EXPAND_MEMORY_USE_WO:
5548 return MEMORY_USE_WO;
5550 case EXPAND_MEMORY_USE_RW:
5551 return MEMORY_USE_RW;
5553 case EXPAND_MEMORY_USE_DONT:
5554 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5555 MEMORY_USE_DONT, because they are modifiers to a call of
5556 expand_expr in the ADDR_EXPR case of expand_expr. */
5557 case EXPAND_CONST_ADDRESS:
5558 case EXPAND_INITIALIZER:
5559 return MEMORY_USE_DONT;
5560 case EXPAND_MEMORY_USE_BAD:
5566 /* Given an rtx VALUE that may contain additions and multiplications, return
5567 an equivalent value that just refers to a register, memory, or constant.
5568 This is done by generating instructions to perform the arithmetic and
5569 returning a pseudo-register containing the value.
5571 The returned value may be a REG, SUBREG, MEM or constant. */
5574 force_operand (value, target)
5577 register optab binoptab = 0;
5578 /* Use a temporary to force order of execution of calls to `force_operand'. */
5582 /* Use subtarget as the target for operand 0 of a binary operation. */
5583 register rtx subtarget = get_subtarget (target);
5585 /* Check for a PIC address load. */
5587 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5588 && XEXP (value, 0) == pic_offset_table_rtx
5589 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5590 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5591 || GET_CODE (XEXP (value, 1)) == CONST))
5594 subtarget = gen_reg_rtx (GET_MODE (value));
5595 emit_move_insn (subtarget, value);
5599 if (GET_CODE (value) == PLUS)
5600 binoptab = add_optab;
5601 else if (GET_CODE (value) == MINUS)
5602 binoptab = sub_optab;
5603 else if (GET_CODE (value) == MULT)
5605 op2 = XEXP (value, 1);
5606 if (!CONSTANT_P (op2)
5607 && !(GET_CODE (op2) == REG && op2 != subtarget))
5609 tmp = force_operand (XEXP (value, 0), subtarget);
5610 return expand_mult (GET_MODE (value), tmp,
5611 force_operand (op2, NULL_RTX),
5617 op2 = XEXP (value, 1);
5618 if (!CONSTANT_P (op2)
5619 && !(GET_CODE (op2) == REG && op2 != subtarget))
5621 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5623 binoptab = add_optab;
5624 op2 = negate_rtx (GET_MODE (value), op2);
5627 /* Check for an addition with OP2 a constant integer and our first
5628 operand a PLUS of a virtual register and something else. In that
5629 case, we want to emit the sum of the virtual register and the
5630 constant first and then add the other value. This allows virtual
5631 register instantiation to simply modify the constant rather than
5632 creating another one around this addition. */
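/* Added illustrative RTL sketch (not from the original sources):
   given

       (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 4))

   the sum of the virtual register and the constant 4 is emitted
   first, so instantiation can fold the 4 into the register's
   offset, and (reg 70) is added afterwards.  */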
5633 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5634 && GET_CODE (XEXP (value, 0)) == PLUS
5635 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5636 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5637 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5639 rtx temp = expand_binop (GET_MODE (value), binoptab,
5640 XEXP (XEXP (value, 0), 0), op2,
5641 subtarget, 0, OPTAB_LIB_WIDEN);
5642 return expand_binop (GET_MODE (value), binoptab, temp,
5643 force_operand (XEXP (XEXP (value, 0), 1), 0),
5644 target, 0, OPTAB_LIB_WIDEN);
5647 tmp = force_operand (XEXP (value, 0), subtarget);
5648 return expand_binop (GET_MODE (value), binoptab, tmp,
5649 force_operand (op2, NULL_RTX),
5650 target, 0, OPTAB_LIB_WIDEN);
5651 /* We give UNSIGNEDP = 0 to expand_binop
5652 because the only operations we are expanding here are signed ones. */
5657 /* Subroutine of expand_expr:
5658 save the non-copied parts (LIST) of an expr (LHS), and return a list
5659 which can restore these values to their previous values,
5660 should something modify their storage. */
5663 save_noncopied_parts (lhs, list)
5670 for (tail = list; tail; tail = TREE_CHAIN (tail))
5671 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5672 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5675 tree part = TREE_VALUE (tail);
5676 tree part_type = TREE_TYPE (part);
5677 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5679 = assign_temp (build_qualified_type (part_type,
5680 (TYPE_QUALS (part_type)
5681 | TYPE_QUAL_CONST)),
5684 parts = tree_cons (to_be_saved,
5685 build (RTL_EXPR, part_type, NULL_TREE,
5686 (tree) validize_mem (target)),
5688 store_expr (TREE_PURPOSE (parts),
5689 RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5694 /* Subroutine of expand_expr:
5695 record the non-copied parts (LIST) of an expr (LHS), and return a list
5696 which specifies the initial values of these parts. */
5699 init_noncopied_parts (lhs, list)
5706 for (tail = list; tail; tail = TREE_CHAIN (tail))
5707 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5708 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5709 else if (TREE_PURPOSE (tail))
5711 tree part = TREE_VALUE (tail);
5712 tree part_type = TREE_TYPE (part);
5713 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5714 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5719 /* Subroutine of expand_expr: return nonzero iff there is no way that
5720 EXP can reference X, which is being modified. TOP_P is nonzero if this
5721 call is going to be used to determine whether we need a temporary
5722 for EXP, as opposed to a recursive call to this function.
5724 It is always safe for this routine to return zero since it merely
5725 searches for optimization opportunities. */
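/* Added illustrative note (not from the original sources): when
   expanding the assignment

       a = b + a;

   the right-hand side references the destination A, so safe_from_p
   returns zero for A's rtx and the sum is computed into a temporary
   rather than directly into A.  */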
5728 safe_from_p (x, exp, top_p)
5735 static tree save_expr_list;
5738 /* If EXP has varying size, we MUST use a target since we currently
5739 have no way of allocating temporaries of variable size
5740 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5741 So we assume here that something at a higher level has prevented a
5742 clash. This is somewhat bogus, but the best we can do. Only
5743 do this when X is BLKmode and when we are at the top level. */
5744 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5745 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5746 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5747 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5748 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5750 && GET_MODE (x) == BLKmode)
5751 /* If X is in the outgoing argument area, it is always safe. */
5752 || (GET_CODE (x) == MEM
5753 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5754 || (GET_CODE (XEXP (x, 0)) == PLUS
5755 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5758 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5759 find the underlying pseudo. */
5760 if (GET_CODE (x) == SUBREG)
5763 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5767 /* A SAVE_EXPR might appear many times in the expression passed to the
5768 top-level safe_from_p call, and if it has a complex subexpression,
5769 examining it multiple times could result in a combinatorial explosion.
5770 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5771 with optimization took about 28 minutes to compile -- even though it was
5772 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5773 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5774 we have processed. Note that the only test of top_p was above. */
5783 rtn = safe_from_p (x, exp, 0);
5785 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5786 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5791 /* Now look at our tree code and possibly recurse. */
5792 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5795 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5802 if (TREE_CODE (exp) == TREE_LIST)
5803 return ((TREE_VALUE (exp) == 0
5804 || safe_from_p (x, TREE_VALUE (exp), 0))
5805 && (TREE_CHAIN (exp) == 0
5806 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5807 else if (TREE_CODE (exp) == ERROR_MARK)
5808 return 1; /* An already-visited SAVE_EXPR? */
5813 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5817 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5818 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5822 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5823 the expression. If it is set, we conflict iff we are that rtx or
5824 both are in memory. Otherwise, we check all operands of the
5825 expression recursively. */
5827 switch (TREE_CODE (exp))
5830 return (staticp (TREE_OPERAND (exp, 0))
5831 || TREE_STATIC (exp)
5832 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5835 if (GET_CODE (x) == MEM
5836 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5837 get_alias_set (exp)))
5842 /* Assume that the call will clobber all hard registers and all of memory. */
5844 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5845 || GET_CODE (x) == MEM)
5850 /* If a sequence exists, we would have to scan every instruction
5851 in the sequence to see if it was safe. This is probably not worthwhile. */
5853 if (RTL_EXPR_SEQUENCE (exp))
5856 exp_rtl = RTL_EXPR_RTL (exp);
5859 case WITH_CLEANUP_EXPR:
5860 exp_rtl = RTL_EXPR_RTL (exp);
5863 case CLEANUP_POINT_EXPR:
5864 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5867 exp_rtl = SAVE_EXPR_RTL (exp);
5871 /* If we've already scanned this, don't do it again. Otherwise,
5872 show we've scanned it and record for clearing the flag if we're going to. */
5874 if (TREE_PRIVATE (exp))
5877 TREE_PRIVATE (exp) = 1;
5878 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5880 TREE_PRIVATE (exp) = 0;
5884 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5888 /* The only operand we look at is operand 1. The rest aren't
5889 part of the expression. */
5890 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5892 case METHOD_CALL_EXPR:
5893 /* This takes a rtx argument, but shouldn't appear here. */
5900 /* If we have an rtx, we do not need to scan our operands. */
5904 nops = first_rtl_op (TREE_CODE (exp));
5905 for (i = 0; i < nops; i++)
5906 if (TREE_OPERAND (exp, i) != 0
5907 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5910 /* If this is a language-specific tree code, it may require
5911 special handling. */
5912 if ((unsigned int) TREE_CODE (exp)
5913 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5915 && !(*lang_safe_from_p) (x, exp))
5919 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5923 if (GET_CODE (exp_rtl) == SUBREG)
5925 exp_rtl = SUBREG_REG (exp_rtl);
5926 if (GET_CODE (exp_rtl) == REG
5927 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5931 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5932 are memory and they conflict. */
5933 return ! (rtx_equal_p (x, exp_rtl)
5934 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5935 && true_dependence (exp_rtl, GET_MODE (x), x,
5936 rtx_addr_varies_p)));
5939 /* If we reach here, it is safe. */
5943 /* Subroutine of expand_expr: return nonzero iff EXP is an
5944 expression whose type is statically determinable. */
5950 if (TREE_CODE (exp) == PARM_DECL
5951 || TREE_CODE (exp) == VAR_DECL
5952 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5953 || TREE_CODE (exp) == COMPONENT_REF
5954 || TREE_CODE (exp) == ARRAY_REF)
5959 /* Subroutine of expand_expr: return rtx if EXP is a
5960 variable or parameter; else return 0. */
5967 switch (TREE_CODE (exp))
5971 return DECL_RTL (exp);
5977 #ifdef MAX_INTEGER_COMPUTATION_MODE
5980 check_max_integer_computation_mode (exp)
5983 enum tree_code code;
5984 enum machine_mode mode;
5986 /* Strip any NOPs that don't change the mode. */
5988 code = TREE_CODE (exp);
5990 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5991 if (code == NOP_EXPR
5992 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5995 /* First check the type of the overall operation. We need only look at
5996 unary, binary and relational operations. */
5997 if (TREE_CODE_CLASS (code) == '1'
5998 || TREE_CODE_CLASS (code) == '2'
5999 || TREE_CODE_CLASS (code) == '<')
6001 mode = TYPE_MODE (TREE_TYPE (exp));
6002 if (GET_MODE_CLASS (mode) == MODE_INT
6003 && mode > MAX_INTEGER_COMPUTATION_MODE)
6004 internal_error ("unsupported wide integer operation");
6007 /* Check operand of a unary op. */
6008 if (TREE_CODE_CLASS (code) == '1')
6010 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6011 if (GET_MODE_CLASS (mode) == MODE_INT
6012 && mode > MAX_INTEGER_COMPUTATION_MODE)
6013 internal_error ("unsupported wide integer operation");
6016 /* Check operands of a binary/comparison op. */
6017 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6019 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6020 if (GET_MODE_CLASS (mode) == MODE_INT
6021 && mode > MAX_INTEGER_COMPUTATION_MODE)
6022 internal_error ("unsupported wide integer operation");
6024 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6025 if (GET_MODE_CLASS (mode) == MODE_INT
6026 && mode > MAX_INTEGER_COMPUTATION_MODE)
6027 internal_error ("unsupported wide integer operation");
6032 /* expand_expr: generate code for computing expression EXP.
6033 An rtx for the computed value is returned. The value is never null.
6034 In the case of a void EXP, const0_rtx is returned.
6036 The value may be stored in TARGET if TARGET is nonzero.
6037 TARGET is just a suggestion; callers must assume that
6038 the rtx returned may not be the same as TARGET.
6040 If TARGET is CONST0_RTX, it means that the value will be ignored.
6042 If TMODE is not VOIDmode, it suggests generating the
6043 result in mode TMODE. But this is done only when convenient.
6044 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6045 TMODE is just a suggestion; callers must assume that
6046 the rtx returned may not have mode TMODE.
6048 Note that TARGET may have neither TMODE nor MODE. In that case, it
6049 probably will not be used.
6051 If MODIFIER is EXPAND_SUM then when EXP is an addition
6052 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6053 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6054 products as above, or REG or MEM, or constant.
6055 Ordinarily in such cases we would output mul or add instructions
6056 and then return a pseudo reg containing the sum.
6058 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6059 it also marks a label as absolutely required (it can't be dead).
6060 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6061 This is used for outputting expressions used in initializers.
6063 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6064 with a constant address even if that address is not normally legitimate.
6065 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
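/* Added illustrative note (not from the original sources): with
   MODIFIER == EXPAND_SUM, expanding the address of A[3] for an
   array of 4-byte ints may return

       (plus (reg) (const_int 12))

   directly, instead of emitting an add and returning a pseudo.  */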
6068 expand_expr (exp, target, tmode, modifier)
6071 enum machine_mode tmode;
6072 enum expand_modifier modifier;
6074 register rtx op0, op1, temp;
6075 tree type = TREE_TYPE (exp);
6076 int unsignedp = TREE_UNSIGNED (type);
6077 register enum machine_mode mode;
6078 register enum tree_code code = TREE_CODE (exp);
6080 rtx subtarget, original_target;
6083 /* Used by check-memory-usage to make modifier read only. */
6084 enum expand_modifier ro_modifier;
6086 /* Handle ERROR_MARK before anybody tries to access its type. */
6087 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6089 op0 = CONST0_RTX (tmode);
6095 mode = TYPE_MODE (type);
6096 /* Use subtarget as the target for operand 0 of a binary operation. */
6097 subtarget = get_subtarget (target);
6098 original_target = target;
6099 ignore = (target == const0_rtx
6100 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6101 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6102 || code == COND_EXPR)
6103 && TREE_CODE (type) == VOID_TYPE));
6105 /* Make a read-only version of the modifier. */
6106 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6107 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6108 ro_modifier = modifier;
6110 ro_modifier = EXPAND_NORMAL;
6112 /* If we are going to ignore this result, we need only do something
6113 if there is a side-effect somewhere in the expression. If there
6114 is, short-circuit the most common cases here. Note that we must
6115 not call expand_expr with anything but const0_rtx in case this
6116 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6120 if (! TREE_SIDE_EFFECTS (exp))
6123 /* Ensure we reference a volatile object even if value is ignored, but
6124 don't do this if all we are doing is taking its address. */
6125 if (TREE_THIS_VOLATILE (exp)
6126 && TREE_CODE (exp) != FUNCTION_DECL
6127 && mode != VOIDmode && mode != BLKmode
6128 && modifier != EXPAND_CONST_ADDRESS)
6130 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6131 if (GET_CODE (temp) == MEM)
6132 temp = copy_to_reg (temp);
6136 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6137 || code == INDIRECT_REF || code == BUFFER_REF)
6138 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6139 VOIDmode, ro_modifier);
6140 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6141 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6143 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6145 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6149 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6150 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6151 /* If the second operand has no side effects, just evaluate the first. */
6153 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6154 VOIDmode, ro_modifier);
6155 else if (code == BIT_FIELD_REF)
6157 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6159 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6161 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6169 #ifdef MAX_INTEGER_COMPUTATION_MODE
6170 /* Only check stuff here if the mode we want is different from the mode
6171 of the expression; if it's the same, check_max_integer_computation_mode
6172 will handle it. Do we really need to check this stuff at all? */
6175 && GET_MODE (target) != mode
6176 && TREE_CODE (exp) != INTEGER_CST
6177 && TREE_CODE (exp) != PARM_DECL
6178 && TREE_CODE (exp) != ARRAY_REF
6179 && TREE_CODE (exp) != ARRAY_RANGE_REF
6180 && TREE_CODE (exp) != COMPONENT_REF
6181 && TREE_CODE (exp) != BIT_FIELD_REF
6182 && TREE_CODE (exp) != INDIRECT_REF
6183 && TREE_CODE (exp) != CALL_EXPR
6184 && TREE_CODE (exp) != VAR_DECL
6185 && TREE_CODE (exp) != RTL_EXPR)
6187 enum machine_mode mode = GET_MODE (target);
6189 if (GET_MODE_CLASS (mode) == MODE_INT
6190 && mode > MAX_INTEGER_COMPUTATION_MODE)
6191 internal_error ("unsupported wide integer operation");
6195 && TREE_CODE (exp) != INTEGER_CST
6196 && TREE_CODE (exp) != PARM_DECL
6197 && TREE_CODE (exp) != ARRAY_REF
6198 && TREE_CODE (exp) != ARRAY_RANGE_REF
6199 && TREE_CODE (exp) != COMPONENT_REF
6200 && TREE_CODE (exp) != BIT_FIELD_REF
6201 && TREE_CODE (exp) != INDIRECT_REF
6202 && TREE_CODE (exp) != VAR_DECL
6203 && TREE_CODE (exp) != CALL_EXPR
6204 && TREE_CODE (exp) != RTL_EXPR
6205 && GET_MODE_CLASS (tmode) == MODE_INT
6206 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6207 internal_error ("unsupported wide integer operation");
6209 check_max_integer_computation_mode (exp);
6212 /* If we will do cse, generate all results into pseudo registers
6213 since 1) that allows cse to find more things
6214 and 2) otherwise cse could produce an insn the machine cannot support. */
6217 if (! cse_not_expected && mode != BLKmode && target
6218 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6225 tree function = decl_function_context (exp);
6226 /* Handle using a label in a containing function. */
6227 if (function != current_function_decl
6228 && function != inline_function_decl && function != 0)
6230 struct function *p = find_function_data (function);
6231 p->expr->x_forced_labels
6232 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6233 p->expr->x_forced_labels);
6237 if (modifier == EXPAND_INITIALIZER)
6238 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6243 temp = gen_rtx_MEM (FUNCTION_MODE,
6244 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6245 if (function != current_function_decl
6246 && function != inline_function_decl && function != 0)
6247 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6252 if (DECL_RTL (exp) == 0)
6254 error_with_decl (exp, "prior parameter's size depends on `%s'");
6255 return CONST0_RTX (mode);
6258 /* ... fall through ... */
6261 /* If a static var's type was incomplete when the decl was written,
6262 but the type is complete now, lay out the decl now. */
6263 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6264 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6266 layout_decl (exp, 0);
6267 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6270 /* Although static-storage variables start off initialized, according to
6271 ANSI C, a memcpy could overwrite them with uninitialized values. So
6272 we check them too. This also lets us check for read-only variables
6273 accessed via a non-const declaration, in case it won't be detected
6274 any other way (e.g., in an embedded system or OS kernel without
6277 Aggregates are not checked here; they're handled elsewhere. */
6278 if (cfun && current_function_check_memory_usage
6280 && GET_CODE (DECL_RTL (exp)) == MEM
6281 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6283 enum memory_use_mode memory_usage;
6284 memory_usage = get_memory_usage_from_modifier (modifier);
6286 in_check_memory_usage = 1;
6287 if (memory_usage != MEMORY_USE_DONT)
6288 emit_library_call (chkr_check_addr_libfunc,
6289 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6290 XEXP (DECL_RTL (exp), 0), Pmode,
6291 GEN_INT (int_size_in_bytes (type)),
6292 TYPE_MODE (sizetype),
6293 GEN_INT (memory_usage),
6294 TYPE_MODE (integer_type_node));
6295 in_check_memory_usage = 0;
6298 /* ... fall through ... */
6302 if (DECL_RTL (exp) == 0)
6305 /* Ensure variable marked as used even if it doesn't go through
6306 a parser. If it hasn't be used yet, write out an external
6308 if (! TREE_USED (exp))
6310 assemble_external (exp);
6311 TREE_USED (exp) = 1;
6314 /* Show we haven't gotten RTL for this yet. */
6317 /* Handle variables inherited from containing functions. */
6318 context = decl_function_context (exp);
6320 /* We treat inline_function_decl as an alias for the current function
6321 because that is the inline function whose vars, types, etc.
6322 are being merged into the current function.
6323 See expand_inline_function. */
6325 if (context != 0 && context != current_function_decl
6326 && context != inline_function_decl
6327 /* If var is static, we don't need a static chain to access it. */
6328 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6329 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6333 /* Mark as non-local and addressable. */
6334 DECL_NONLOCAL (exp) = 1;
6335 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6337 mark_addressable (exp);
6338 if (GET_CODE (DECL_RTL (exp)) != MEM)
6340 addr = XEXP (DECL_RTL (exp), 0);
6341 if (GET_CODE (addr) == MEM)
6343 = replace_equiv_address (addr,
6344 fix_lexical_addr (XEXP (addr, 0), exp));
6346 addr = fix_lexical_addr (addr, exp);
6348 temp = replace_equiv_address (DECL_RTL (exp), addr);
6351 /* This is the case of an array whose size is to be determined
6352 from its initializer, while the initializer is still being parsed.
6355 else if (GET_CODE (DECL_RTL (exp)) == MEM
6356 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6357 temp = validize_mem (DECL_RTL (exp));
6359 /* If DECL_RTL is memory, we are in the normal case and either
6360 the address is not valid or it is not a register and -fforce-addr
6361 is specified, get the address into a register. */
6363 else if (GET_CODE (DECL_RTL (exp)) == MEM
6364 && modifier != EXPAND_CONST_ADDRESS
6365 && modifier != EXPAND_SUM
6366 && modifier != EXPAND_INITIALIZER
6367 && (! memory_address_p (DECL_MODE (exp),
6368 XEXP (DECL_RTL (exp), 0))
6370 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6371 temp = replace_equiv_address (DECL_RTL (exp),
6372 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6374 /* If we got something, return it. But first, set the alignment
6375 if the address is a register. */
6378 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6379 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6384 /* If the mode of DECL_RTL does not match that of the decl, it
6385 must be a promoted value. We return a SUBREG of the wanted mode,
6386 but mark it so that we know that it was already extended. */
6388 if (GET_CODE (DECL_RTL (exp)) == REG
6389 && GET_MODE (DECL_RTL (exp)) != mode)
6391 /* Get the signedness used for this variable. Ensure we get the
6392 same mode we got when the variable was declared. */
6393 if (GET_MODE (DECL_RTL (exp))
6394 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6397 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6398 SUBREG_PROMOTED_VAR_P (temp) = 1;
6399 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6403 return DECL_RTL (exp);
6406 return immed_double_const (TREE_INT_CST_LOW (exp),
6407 TREE_INT_CST_HIGH (exp), mode);
6410 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6411 EXPAND_MEMORY_USE_BAD);
6414 /* If optimized, generate immediate CONST_DOUBLE
6415 which will be turned into memory by reload if necessary.
6417 We used to force a register so that loop.c could see it. But
6418 this does not allow gen_* patterns to perform optimizations with
6419 the constants. It also produces two insns in cases like "x = 1.0;".
6420 On most machines, floating-point constants are not permitted in
6421 many insns, so we'd end up copying it to a register in any case.
6423 Now, we do the copying in expand_binop, if appropriate. */
6424 return immed_real_const (exp);
6428 if (! TREE_CST_RTL (exp))
6429 output_constant_def (exp, 1);
6431 /* TREE_CST_RTL probably contains a constant address.
6432 On RISC machines where a constant address isn't valid,
6433 make some insns to get that address into a register. */
6434 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6435 && modifier != EXPAND_CONST_ADDRESS
6436 && modifier != EXPAND_INITIALIZER
6437 && modifier != EXPAND_SUM
6438 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6440 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6441 return replace_equiv_address (TREE_CST_RTL (exp),
6442 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6443 return TREE_CST_RTL (exp);
6445 case EXPR_WITH_FILE_LOCATION:
6448 const char *saved_input_filename = input_filename;
6449 int saved_lineno = lineno;
6450 input_filename = EXPR_WFL_FILENAME (exp);
6451 lineno = EXPR_WFL_LINENO (exp);
6452 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6453 emit_line_note (input_filename, lineno);
6454 /* Possibly avoid switching back and force here. */
6455 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6456 input_filename = saved_input_filename;
6457 lineno = saved_lineno;
6462 context = decl_function_context (exp);
6464 /* If this SAVE_EXPR was at global context, assume we are an
6465 initialization function and move it into our context. */
6467 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6469 /* We treat inline_function_decl as an alias for the current function
6470 because that is the inline function whose vars, types, etc.
6471 are being merged into the current function.
6472 See expand_inline_function. */
6473 if (context == current_function_decl || context == inline_function_decl)
6476 /* If this is non-local, handle it. */
6479 /* The following call just exists to abort if the context is
6480 not of a containing function. */
6481 find_function_data (context);
6483 temp = SAVE_EXPR_RTL (exp);
6484 if (temp && GET_CODE (temp) == REG)
6486 put_var_into_stack (exp);
6487 temp = SAVE_EXPR_RTL (exp);
6489 if (temp == 0 || GET_CODE (temp) != MEM)
6492 replace_equiv_address (temp,
6493 fix_lexical_addr (XEXP (temp, 0), exp));
6495 if (SAVE_EXPR_RTL (exp) == 0)
6497 if (mode == VOIDmode)
6500 temp = assign_temp (build_qualified_type (type,
6502 | TYPE_QUAL_CONST)),
6505 SAVE_EXPR_RTL (exp) = temp;
6506 if (!optimize && GET_CODE (temp) == REG)
6507 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6510 /* If the mode of TEMP does not match that of the expression, it
6511 must be a promoted value. We pass store_expr a SUBREG of the
6512 wanted mode but mark it so that we know that it was already
6513 extended. Note that `unsignedp' was modified above in
6516 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6518 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6519 SUBREG_PROMOTED_VAR_P (temp) = 1;
6520 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6523 if (temp == const0_rtx)
6524 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6525 EXPAND_MEMORY_USE_BAD);
6527 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6529 TREE_USED (exp) = 1;
6532 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6533 must be a promoted value. We return a SUBREG of the wanted mode,
6534 but mark it so that we know that it was already extended. */
6536 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6537 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6539 /* Compute the signedness and make the proper SUBREG. */
6540 promote_mode (type, mode, &unsignedp, 0);
6541 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6542 SUBREG_PROMOTED_VAR_P (temp) = 1;
6543 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6547 return SAVE_EXPR_RTL (exp);
6552 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6553 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6557 case PLACEHOLDER_EXPR:
6559 tree placeholder_expr;
6561 /* If there is an object on the head of the placeholder list,
6562 see if some object in it of type TYPE or a pointer to it. For
6563 further information, see tree.def. */
6564 for (placeholder_expr = placeholder_list;
6565 placeholder_expr != 0;
6566 placeholder_expr = TREE_CHAIN (placeholder_expr))
6568 tree need_type = TYPE_MAIN_VARIANT (type);
6570 tree old_list = placeholder_list;
6573 /* Find the outermost reference that is of the type we want.
6574 If none, see if any object has a type that is a pointer to
6575 the type we want. */
6576 for (elt = TREE_PURPOSE (placeholder_expr);
6577 elt != 0 && object == 0;
6579 = ((TREE_CODE (elt) == COMPOUND_EXPR
6580 || TREE_CODE (elt) == COND_EXPR)
6581 ? TREE_OPERAND (elt, 1)
6582 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6583 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6584 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6585 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6586 ? TREE_OPERAND (elt, 0) : 0))
6587 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6590 for (elt = TREE_PURPOSE (placeholder_expr);
6591 elt != 0 && object == 0;
6593 = ((TREE_CODE (elt) == COMPOUND_EXPR
6594 || TREE_CODE (elt) == COND_EXPR)
6595 ? TREE_OPERAND (elt, 1)
6596 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6597 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6598 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6599 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6600 ? TREE_OPERAND (elt, 0) : 0))
6601 if (POINTER_TYPE_P (TREE_TYPE (elt))
6602 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6604 object = build1 (INDIRECT_REF, need_type, elt);
6608 /* Expand this object skipping the list entries before
6609 it was found in case it is also a PLACEHOLDER_EXPR.
6610 In that case, we want to translate it using subsequent
6612 placeholder_list = TREE_CHAIN (placeholder_expr);
6613 temp = expand_expr (object, original_target, tmode,
6615 placeholder_list = old_list;
6621 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6624 case WITH_RECORD_EXPR:
6625 /* Put the object on the placeholder list, expand our first operand,
6626 and pop the list. */
6627 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6629 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6630 tmode, ro_modifier);
6631 placeholder_list = TREE_CHAIN (placeholder_list);
6635 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6636 expand_goto (TREE_OPERAND (exp, 0));
6638 expand_computed_goto (TREE_OPERAND (exp, 0));
6642 expand_exit_loop_if_false (NULL,
6643 invert_truthvalue (TREE_OPERAND (exp, 0)));
6646 case LABELED_BLOCK_EXPR:
6647 if (LABELED_BLOCK_BODY (exp))
6648 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6649 /* Should perhaps use expand_label, but this is simpler and safer. */
6650 do_pending_stack_adjust ();
6651 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6654 case EXIT_BLOCK_EXPR:
6655 if (EXIT_BLOCK_RETURN (exp))
6656 sorry ("returned value in block_exit_expr");
6657 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6662 expand_start_loop (1);
6663 expand_expr_stmt (TREE_OPERAND (exp, 0));
6671 tree vars = TREE_OPERAND (exp, 0);
6672 int vars_need_expansion = 0;
6674 /* Need to open a binding contour here because
6675 if there are any cleanups they must be contained here. */
6676 expand_start_bindings (2);
6678 /* Mark the corresponding BLOCK for output in its proper place. */
6679 if (TREE_OPERAND (exp, 2) != 0
6680 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6681 insert_block (TREE_OPERAND (exp, 2));
6683 /* If VARS have not yet been expanded, expand them now. */
6686 if (!DECL_RTL_SET_P (vars))
6688 vars_need_expansion = 1;
6691 expand_decl_init (vars);
6692 vars = TREE_CHAIN (vars);
6695 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6697 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6703 if (RTL_EXPR_SEQUENCE (exp))
6705 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6707 emit_insns (RTL_EXPR_SEQUENCE (exp));
6708 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6710 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6711 free_temps_for_rtl_expr (exp);
6712 return RTL_EXPR_RTL (exp);
6715 /* If we don't need the result, just ensure we evaluate any
6720 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6721 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6722 EXPAND_MEMORY_USE_BAD);
6726 /* All elts simple constants => refer to a constant in memory. But
6727 if this is a non-BLKmode mode, let it store a field at a time
6728 since that should make a CONST_INT or CONST_DOUBLE when we
6729 fold. Likewise, if we have a target we can use, it is best to
6730 store directly into the target unless the type is large enough
6731 that memcpy will be used. If we are making an initializer and
6732 all operands are constant, put it in memory as well. */
6733 else if ((TREE_STATIC (exp)
6734 && ((mode == BLKmode
6735 && ! (target != 0 && safe_from_p (target, exp, 1)))
6736 || TREE_ADDRESSABLE (exp)
6737 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6738 && (! MOVE_BY_PIECES_P
6739 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6741 && ! mostly_zeros_p (exp))))
6742 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6744 rtx constructor = output_constant_def (exp, 1);
6746 if (modifier != EXPAND_CONST_ADDRESS
6747 && modifier != EXPAND_INITIALIZER
6748 && modifier != EXPAND_SUM)
6749 constructor = validize_mem (constructor);
6755 /* Handle calls that pass values in multiple non-contiguous
6756 locations. The Irix 6 ABI has examples of this. */
6757 if (target == 0 || ! safe_from_p (target, exp, 1)
6758 || GET_CODE (target) == PARALLEL)
6760 = assign_temp (build_qualified_type (type,
6762 | (TREE_READONLY (exp)
6763 * TYPE_QUAL_CONST))),
6764 TREE_ADDRESSABLE (exp), 1, 1);
6766 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6767 int_size_in_bytes (TREE_TYPE (exp)));
6773 tree exp1 = TREE_OPERAND (exp, 0);
6775 tree string = string_constant (exp1, &index);
6777 /* Try to optimize reads from const strings. */
6779 && TREE_CODE (string) == STRING_CST
6780 && TREE_CODE (index) == INTEGER_CST
6781 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6782 && GET_MODE_CLASS (mode) == MODE_INT
6783 && GET_MODE_SIZE (mode) == 1
6784 && modifier != EXPAND_MEMORY_USE_WO)
6786 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6788 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6789 op0 = memory_address (mode, op0);
6791 if (cfun && current_function_check_memory_usage
6792 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6794 enum memory_use_mode memory_usage;
6795 memory_usage = get_memory_usage_from_modifier (modifier);
6797 if (memory_usage != MEMORY_USE_DONT)
6799 in_check_memory_usage = 1;
6800 emit_library_call (chkr_check_addr_libfunc,
6801 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6802 Pmode, GEN_INT (int_size_in_bytes (type)),
6803 TYPE_MODE (sizetype),
6804 GEN_INT (memory_usage),
6805 TYPE_MODE (integer_type_node));
6806 in_check_memory_usage = 0;
6810 temp = gen_rtx_MEM (mode, op0);
6811 set_mem_attributes (temp, exp, 0);
6813 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6814 here, because, in C and C++, the fact that a location is accessed
6815 through a pointer to const does not mean that the value there can
6816 never change. Languages where it can never change should
6817 also set TREE_STATIC. */
6818 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6820 /* If we are writing to this object and its type is a record with
6821 readonly fields, we must mark it as readonly so it will
6822 conflict with readonly references to those fields. */
6823 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6824 RTX_UNCHANGING_P (temp) = 1;
6830 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6834 tree array = TREE_OPERAND (exp, 0);
6835 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6836 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6837 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6840 /* Optimize the special-case of a zero lower bound.
6842 We convert the low_bound to sizetype to avoid some problems
6843 with constant folding. (E.g. suppose the lower bound is 1,
6844 and its mode is QI. Without the conversion, (ARRAY
6845 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6846 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6848 if (! integer_zerop (low_bound))
6849 index = size_diffop (index, convert (sizetype, low_bound));
6851 /* Fold an expression like: "foo"[2].
6852 This is not done in fold so it won't happen inside &.
6853 Don't fold if this is for wide characters since it's too
6854 difficult to do correctly and this is a very rare case. */
6856 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6857 && TREE_CODE (array) == STRING_CST
6858 && TREE_CODE (index) == INTEGER_CST
6859 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6860 && GET_MODE_CLASS (mode) == MODE_INT
6861 && GET_MODE_SIZE (mode) == 1)
6863 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6865 /* If this is a constant index into a constant array,
6866 just get the value from the array. Handle both the cases when
6867 we have an explicit constructor and when our operand is a variable
6868 that was declared const. */
6870 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6871 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6872 && TREE_CODE (index) == INTEGER_CST
6873 && 0 > compare_tree_int (index,
6874 list_length (CONSTRUCTOR_ELTS
6875 (TREE_OPERAND (exp, 0)))))
6879 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6880 i = TREE_INT_CST_LOW (index);
6881 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6885 return expand_expr (fold (TREE_VALUE (elem)), target,
6886 tmode, ro_modifier);
6889 else if (optimize >= 1
6890 && modifier != EXPAND_CONST_ADDRESS
6891 && modifier != EXPAND_INITIALIZER
6892 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6893 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6894 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6896 if (TREE_CODE (index) == INTEGER_CST)
6898 tree init = DECL_INITIAL (array);
6900 if (TREE_CODE (init) == CONSTRUCTOR)
6904 for (elem = CONSTRUCTOR_ELTS (init);
6906 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6907 elem = TREE_CHAIN (elem))
6910 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6911 return expand_expr (fold (TREE_VALUE (elem)), target,
6912 tmode, ro_modifier);
6914 else if (TREE_CODE (init) == STRING_CST
6915 && 0 > compare_tree_int (index,
6916 TREE_STRING_LENGTH (init)))
6918 tree type = TREE_TYPE (TREE_TYPE (init));
6919 enum machine_mode mode = TYPE_MODE (type);
6921 if (GET_MODE_CLASS (mode) == MODE_INT
6922 && GET_MODE_SIZE (mode) == 1)
6924 (TREE_STRING_POINTER
6925 (init)[TREE_INT_CST_LOW (index)]));
6934 case ARRAY_RANGE_REF:
6935 /* If the operand is a CONSTRUCTOR, we can just extract the
6936 appropriate field if it is present. Don't do this if we have
6937 already written the data since we want to refer to that copy
6938 and varasm.c assumes that's what we'll do. */
6939 if (code == COMPONENT_REF
6940 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6941 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6945 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6946 elt = TREE_CHAIN (elt))
6947 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6948 /* We can normally use the value of the field in the
6949 CONSTRUCTOR. However, if this is a bitfield in
6950 an integral mode that we can fit in a HOST_WIDE_INT,
6951 we must mask only the number of bits in the bitfield,
6952 since this is done implicitly by the constructor. If
6953 the bitfield does not meet either of those conditions,
6954 we can't do this optimization. */
6955 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6956 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6958 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6959 <= HOST_BITS_PER_WIDE_INT))))
6961 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6962 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6964 HOST_WIDE_INT bitsize
6965 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6967 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6969 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6970 op0 = expand_and (op0, op1, target);
6974 enum machine_mode imode
6975 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6977 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6980 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6982 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6992 enum machine_mode mode1;
6993 HOST_WIDE_INT bitsize, bitpos;
6996 unsigned int alignment;
6997 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6998 &mode1, &unsignedp, &volatilep,
7001 /* If we got back the original object, something is wrong. Perhaps
7002 we are evaluating an expression too early. In any event, don't
7003 infinitely recurse. */
7007 /* If TEM's type is a union of variable size, pass TARGET to the inner
7008 computation, since it will need a temporary and TARGET is known
7009 to have to do. This occurs in unchecked conversion in Ada. */
7011 op0 = expand_expr (tem,
7012 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7013 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7015 ? target : NULL_RTX),
7017 (modifier == EXPAND_INITIALIZER
7018 || modifier == EXPAND_CONST_ADDRESS)
7019 ? modifier : EXPAND_NORMAL);
7021 /* If this is a constant, put it into a register if it is a
7022 legitimate constant and OFFSET is 0 and memory if it isn't. */
7023 if (CONSTANT_P (op0))
7025 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7026 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7028 op0 = force_reg (mode, op0);
7030 op0 = validize_mem (force_const_mem (mode, op0));
7035 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7037 /* If this object is in a register, put it into memory.
7038 This case can't occur in C, but can in Ada if we have
7039 unchecked conversion of an expression from a scalar type to
7040 an array or record type. */
7041 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7042 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7044 /* If the operand is a SAVE_EXPR, we can deal with this by
7045 forcing the SAVE_EXPR into memory. */
7046 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7048 put_var_into_stack (TREE_OPERAND (exp, 0));
7049 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7054 = build_qualified_type (TREE_TYPE (tem),
7055 (TYPE_QUALS (TREE_TYPE (tem))
7056 | TYPE_QUAL_CONST));
7057 rtx memloc = assign_temp (nt, 1, 1, 1);
7059 mark_temp_addr_taken (memloc);
7060 emit_move_insn (memloc, op0);
7065 if (GET_CODE (op0) != MEM)
7068 if (GET_MODE (offset_rtx) != ptr_mode)
7070 #ifdef POINTERS_EXTEND_UNSIGNED
7071 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7073 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7077 /* A constant address in OP0 can have VOIDmode, we must not try
7078 to call force_reg for that case. Avoid that case. */
7079 if (GET_CODE (op0) == MEM
7080 && GET_MODE (op0) == BLKmode
7081 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7083 && (bitpos % bitsize) == 0
7084 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7085 && alignment == GET_MODE_ALIGNMENT (mode1))
7087 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7089 if (GET_CODE (XEXP (temp, 0)) == REG)
7092 op0 = (replace_equiv_address
7094 force_reg (GET_MODE (XEXP (temp, 0)),
7099 op0 = change_address (op0, VOIDmode,
7100 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7101 force_reg (ptr_mode,
7105 /* Don't forget about volatility even if this is a bitfield. */
7106 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7108 op0 = copy_rtx (op0);
7109 MEM_VOLATILE_P (op0) = 1;
7112 /* Check the access. */
7113 if (cfun != 0 && current_function_check_memory_usage
7114 && GET_CODE (op0) == MEM)
7116 enum memory_use_mode memory_usage;
7117 memory_usage = get_memory_usage_from_modifier (modifier);
7119 if (memory_usage != MEMORY_USE_DONT)
7124 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7125 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7127 /* Check the access right of the pointer. */
7128 in_check_memory_usage = 1;
7129 if (size > BITS_PER_UNIT)
7130 emit_library_call (chkr_check_addr_libfunc,
7131 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7132 Pmode, GEN_INT (size / BITS_PER_UNIT),
7133 TYPE_MODE (sizetype),
7134 GEN_INT (memory_usage),
7135 TYPE_MODE (integer_type_node));
7136 in_check_memory_usage = 0;
7140 /* In cases where an aligned union has an unaligned object
7141 as a field, we might be extracting a BLKmode value from
7142 an integer-mode (e.g., SImode) object. Handle this case
7143 by doing the extract into an object as wide as the field
7144 (which we know to be the width of a basic mode), then
7145 storing into memory, and changing the mode to BLKmode. */
7146 if (mode1 == VOIDmode
7147 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7148 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7149 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7150 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7151 /* If the field isn't aligned enough to fetch as a memref,
7152 fetch it as a bit field. */
7153 || (mode1 != BLKmode
7154 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7155 && ((TYPE_ALIGN (TREE_TYPE (tem))
7156 < GET_MODE_ALIGNMENT (mode))
7157 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7158 /* If the type and the field are a constant size and the
7159 size of the type isn't the same size as the bitfield,
7160 we must use bitfield operations. */
7162 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7164 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7167 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7168 && (TYPE_ALIGN (type) > alignment
7169 || bitpos % TYPE_ALIGN (type) != 0)))
7171 enum machine_mode ext_mode = mode;
7173 if (ext_mode == BLKmode
7174 && ! (target != 0 && GET_CODE (op0) == MEM
7175 && GET_CODE (target) == MEM
7176 && bitpos % BITS_PER_UNIT == 0))
7177 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7179 if (ext_mode == BLKmode)
7181 /* In this case, BITPOS must start at a byte boundary and
7182 TARGET, if specified, must be a MEM. */
7183 if (GET_CODE (op0) != MEM
7184 || (target != 0 && GET_CODE (target) != MEM)
7185 || bitpos % BITS_PER_UNIT != 0)
7188 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7190 target = assign_temp (type, 0, 1, 1);
7192 emit_block_move (target, op0,
7193 bitsize == -1 ? expr_size (exp)
7194 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7201 op0 = validize_mem (op0);
7203 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7204 mark_reg_pointer (XEXP (op0, 0), alignment);
7206 op0 = extract_bit_field (op0, bitsize, bitpos,
7207 unsignedp, target, ext_mode, ext_mode,
7209 int_size_in_bytes (TREE_TYPE (tem)));
7211 /* If the result is a record type and BITSIZE is narrower than
7212 the mode of OP0, an integral mode, and this is a big endian
7213 machine, we must put the field into the high-order bits. */
7214 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7215 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7216 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7217 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7218 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7222 if (mode == BLKmode)
7224 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7226 rtx new = assign_temp (nt, 0, 1, 1);
7228 emit_move_insn (new, op0);
7229 op0 = copy_rtx (new);
7230 PUT_MODE (op0, BLKmode);
7236 /* If the result is BLKmode, use that to access the object
7238 if (mode == BLKmode)
7241 /* Get a reference to just this component. */
7242 if (modifier == EXPAND_CONST_ADDRESS
7243 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7245 rtx new = gen_rtx_MEM (mode1,
7246 plus_constant (XEXP (op0, 0),
7247 (bitpos / BITS_PER_UNIT)));
7249 MEM_COPY_ATTRIBUTES (new, op0);
7253 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7255 set_mem_attributes (op0, exp, 0);
7256 if (GET_CODE (XEXP (op0, 0)) == REG)
7257 mark_reg_pointer (XEXP (op0, 0), alignment);
7259 MEM_VOLATILE_P (op0) |= volatilep;
7260 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7261 || modifier == EXPAND_CONST_ADDRESS
7262 || modifier == EXPAND_INITIALIZER)
7264 else if (target == 0)
7265 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7267 convert_move (target, op0, unsignedp);
7271 /* Intended for a reference to a buffer of a file-object in Pascal.
7272 But it's not certain that a special tree code will really be
7273 necessary for these. INDIRECT_REF might work for them. */
7279 /* Pascal set IN expression.
7282 rlo = set_low - (set_low%bits_per_word);
7283 the_word = set [ (index - rlo)/bits_per_word ];
7284 bit_index = index % bits_per_word;
7285 bitmask = 1 << bit_index;
7286 return !!(the_word & bitmask); */
7288 tree set = TREE_OPERAND (exp, 0);
7289 tree index = TREE_OPERAND (exp, 1);
7290 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7291 tree set_type = TREE_TYPE (set);
7292 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7293 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7294 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7295 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7296 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7297 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7298 rtx setaddr = XEXP (setval, 0);
7299 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7301 rtx diff, quo, rem, addr, bit, result;
7303 /* If domain is empty, answer is no. Likewise if index is constant
7304 and out of bounds. */
7305 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7306 && TREE_CODE (set_low_bound) == INTEGER_CST
7307 && tree_int_cst_lt (set_high_bound, set_low_bound))
7308 || (TREE_CODE (index) == INTEGER_CST
7309 && TREE_CODE (set_low_bound) == INTEGER_CST
7310 && tree_int_cst_lt (index, set_low_bound))
7311 || (TREE_CODE (set_high_bound) == INTEGER_CST
7312 && TREE_CODE (index) == INTEGER_CST
7313 && tree_int_cst_lt (set_high_bound, index))))
7317 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7319 /* If we get here, we have to generate the code for both cases
7320 (in range and out of range). */
7322 op0 = gen_label_rtx ();
7323 op1 = gen_label_rtx ();
7325 if (! (GET_CODE (index_val) == CONST_INT
7326 && GET_CODE (lo_r) == CONST_INT))
7328 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7329 GET_MODE (index_val), iunsignedp, 0, op1);
7332 if (! (GET_CODE (index_val) == CONST_INT
7333 && GET_CODE (hi_r) == CONST_INT))
7335 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7336 GET_MODE (index_val), iunsignedp, 0, op1);
7339 /* Calculate the element number of bit zero in the first word
7341 if (GET_CODE (lo_r) == CONST_INT)
7342 rlow = GEN_INT (INTVAL (lo_r)
7343 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7345 rlow = expand_binop (index_mode, and_optab, lo_r,
7346 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7347 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7349 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7350 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7352 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7353 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7354 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7355 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7357 addr = memory_address (byte_mode,
7358 expand_binop (index_mode, add_optab, diff,
7359 setaddr, NULL_RTX, iunsignedp,
7362 /* Extract the bit we want to examine. */
7363 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7364 gen_rtx_MEM (byte_mode, addr),
7365 make_tree (TREE_TYPE (index), rem),
7367 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7368 GET_MODE (target) == byte_mode ? target : 0,
7369 1, OPTAB_LIB_WIDEN);
7371 if (result != target)
7372 convert_move (target, result, 1);
7374 /* Output the code to handle the out-of-range case. */
7377 emit_move_insn (target, const0_rtx);
7382 case WITH_CLEANUP_EXPR:
7383 if (RTL_EXPR_RTL (exp) == 0)
7386 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7387 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7389 /* That's it for this cleanup. */
7390 TREE_OPERAND (exp, 2) = 0;
7392 return RTL_EXPR_RTL (exp);
7394 case CLEANUP_POINT_EXPR:
7396 /* Start a new binding layer that will keep track of all cleanup
7397 actions to be performed. */
7398 expand_start_bindings (2);
7400 target_temp_slot_level = temp_slot_level;
7402 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7403 /* If we're going to use this value, load it up now. */
7405 op0 = force_not_mem (op0);
7406 preserve_temp_slots (op0);
7407 expand_end_bindings (NULL_TREE, 0, 0);
7412 /* Check for a built-in function. */
7413 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7414 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7416 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7418 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7419 == BUILT_IN_FRONTEND)
7420 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7422 return expand_builtin (exp, target, subtarget, tmode, ignore);
7425 return expand_call (exp, target, ignore);
7427 case NON_LVALUE_EXPR:
7430 case REFERENCE_EXPR:
7431 if (TREE_OPERAND (exp, 0) == error_mark_node)
7434 if (TREE_CODE (type) == UNION_TYPE)
7436 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7438 /* If both input and output are BLKmode, this conversion
7439 isn't actually doing anything unless we need to make the
7440 alignment stricter. */
7441 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7442 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7443 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7444 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7448 target = assign_temp (type, 0, 1, 1);
7450 if (GET_CODE (target) == MEM)
7451 /* Store data into beginning of memory target. */
7452 store_expr (TREE_OPERAND (exp, 0),
7453 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7455 else if (GET_CODE (target) == REG)
7456 /* Store this field into a union of the proper type. */
7457 store_field (target,
7458 MIN ((int_size_in_bytes (TREE_TYPE
7459 (TREE_OPERAND (exp, 0)))
7461 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7462 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7463 VOIDmode, 0, BITS_PER_UNIT,
7464 int_size_in_bytes (type), 0);
7468 /* Return the entire union. */
7472 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7477 /* If the signedness of the conversion differs and OP0 is
7478 a promoted SUBREG, clear that indication since we now
7479 have to do the proper extension. */
7480 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7481 && GET_CODE (op0) == SUBREG)
7482 SUBREG_PROMOTED_VAR_P (op0) = 0;
7487 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7488 if (GET_MODE (op0) == mode)
7491 /* If OP0 is a constant, just convert it into the proper mode. */
7492 if (CONSTANT_P (op0))
7494 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7495 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7497 if (modifier == EXPAND_INITIALIZER)
7498 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7502 convert_to_mode (mode, op0,
7503 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7505 convert_move (target, op0,
7506 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7510 /* We come here from MINUS_EXPR when the second operand is a
7513 this_optab = ! unsignedp && flag_trapv
7514 && (GET_MODE_CLASS(mode) == MODE_INT)
7515 ? addv_optab : add_optab;
7517 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7518 something else, make sure we add the register to the constant and
7519 then to the other thing. This case can occur during strength
7520 reduction and doing it this way will produce better code if the
7521 frame pointer or argument pointer is eliminated.
7523 fold-const.c will ensure that the constant is always in the inner
7524 PLUS_EXPR, so the only case we need to do anything about is if
7525 sp, ap, or fp is our second argument, in which case we must swap
7526 the innermost first argument and our second argument. */
7528 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7529 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7530 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7531 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7532 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7533 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7535 tree t = TREE_OPERAND (exp, 1);
7537 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7538 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7541 /* If the result is to be ptr_mode and we are adding an integer to
7542 something, we might be forming a constant. So try to use
7543 plus_constant. If it produces a sum and we can't accept it,
7544 use force_operand. This allows P = &ARR[const] to generate
7545 efficient code on machines where a SYMBOL_REF is not a valid
7548 If this is an EXPAND_SUM call, always return the sum. */
7549 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7550 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7552 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7553 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7554 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7558 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7560 /* Use immed_double_const to ensure that the constant is
7561 truncated according to the mode of OP1, then sign extended
7562 to a HOST_WIDE_INT. Using the constant directly can result
7563 in non-canonical RTL in a 64x32 cross compile. */
7565 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7567 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7568 op1 = plus_constant (op1, INTVAL (constant_part));
7569 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7570 op1 = force_operand (op1, target);
7574 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7575 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7576 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7580 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7582 if (! CONSTANT_P (op0))
7584 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7585 VOIDmode, modifier);
7586 /* Don't go to both_summands if modifier
7587 says it's not right to return a PLUS. */
7588 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7592 /* Use immed_double_const to ensure that the constant is
7593 truncated according to the mode of OP1, then sign extended
7594 to a HOST_WIDE_INT. Using the constant directly can result
7595 in non-canonical RTL in a 64x32 cross compile. */
7597 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7599 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7600 op0 = plus_constant (op0, INTVAL (constant_part));
7601 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7602 op0 = force_operand (op0, target);
7607 /* No sense saving up arithmetic to be done
7608 if it's all in the wrong mode to form part of an address.
7609 And force_operand won't know whether to sign-extend or
7611 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7612 || mode != ptr_mode)
7615 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7618 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7619 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7622 /* Make sure any term that's a sum with a constant comes last. */
7623 if (GET_CODE (op0) == PLUS
7624 && CONSTANT_P (XEXP (op0, 1)))
7630 /* If adding to a sum including a constant,
7631 associate it to put the constant outside. */
7632 if (GET_CODE (op1) == PLUS
7633 && CONSTANT_P (XEXP (op1, 1)))
7635 rtx constant_term = const0_rtx;
7637 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7640 /* Ensure that MULT comes first if there is one. */
7641 else if (GET_CODE (op0) == MULT)
7642 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7644 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7646 /* Let's also eliminate constants from op0 if possible. */
7647 op0 = eliminate_constant_term (op0, &constant_term);
7649 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7650 their sum should be a constant. Form it into OP1, since the
7651 result we want will then be OP0 + OP1. */
7653 temp = simplify_binary_operation (PLUS, mode, constant_term,
7658 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7661 /* Put a constant term last and put a multiplication first. */
7662 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7663 temp = op1, op1 = op0, op0 = temp;
7665 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7666 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7669 /* For initializers, we are allowed to return a MINUS of two
7670 symbolic constants. Here we handle all cases when both operands
7672 /* Handle difference of two symbolic constants,
7673 for the sake of an initializer. */
7674 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7675 && really_constant_p (TREE_OPERAND (exp, 0))
7676 && really_constant_p (TREE_OPERAND (exp, 1)))
7678 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7679 VOIDmode, ro_modifier);
7680 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7681 VOIDmode, ro_modifier);
7683 /* If the last operand is a CONST_INT, use plus_constant of
7684 the negated constant. Else make the MINUS. */
7685 if (GET_CODE (op1) == CONST_INT)
7686 return plus_constant (op0, - INTVAL (op1));
7688 return gen_rtx_MINUS (mode, op0, op1);
7690 /* Convert A - const to A + (-const). */
7691 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7693 tree negated = fold (build1 (NEGATE_EXPR, type,
7694 TREE_OPERAND (exp, 1)));
7696 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7697 /* If we can't negate the constant in TYPE, leave it alone and
7698 expand_binop will negate it for us. We used to try to do it
7699 here in the signed version of TYPE, but that doesn't work
7700 on POINTER_TYPEs. */;
7703 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7707 this_optab = ! unsignedp && flag_trapv
7708 && (GET_MODE_CLASS(mode) == MODE_INT)
7709 ? subv_optab : sub_optab;
7713 /* If first operand is constant, swap them.
7714 Thus the following special case checks need only
7715 check the second operand. */
7716 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7718 register tree t1 = TREE_OPERAND (exp, 0);
7719 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7720 TREE_OPERAND (exp, 1) = t1;
7723 /* Attempt to return something suitable for generating an
7724 indexed address, for machines that support that. */
7726 if (modifier == EXPAND_SUM && mode == ptr_mode
7727 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7728 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7730 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7733 /* Apply distributive law if OP0 is x+c. */
7734 if (GET_CODE (op0) == PLUS
7735 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7740 (mode, XEXP (op0, 0),
7741 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7742 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7743 * INTVAL (XEXP (op0, 1))));
7745 if (GET_CODE (op0) != REG)
7746 op0 = force_operand (op0, NULL_RTX);
7747 if (GET_CODE (op0) != REG)
7748 op0 = copy_to_mode_reg (mode, op0);
7751 gen_rtx_MULT (mode, op0,
7752 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7755 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7758 /* Check for multiplying things that have been extended
7759 from a narrower type. If this machine supports multiplying
7760 in that narrower type with a result in the desired type,
7761 do it that way, and avoid the explicit type-conversion. */
7762 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7763 && TREE_CODE (type) == INTEGER_TYPE
7764 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7765 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7766 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7767 && int_fits_type_p (TREE_OPERAND (exp, 1),
7768 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7769 /* Don't use a widening multiply if a shift will do. */
7770 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7771 > HOST_BITS_PER_WIDE_INT)
7772 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7774 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7775 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7777 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7778 /* If both operands are extended, they must either both
7779 be zero-extended or both be sign-extended. */
7780 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7782 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7784 enum machine_mode innermode
7785 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7786 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7787 ? smul_widen_optab : umul_widen_optab);
7788 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7789 ? umul_widen_optab : smul_widen_optab);
7790 if (mode == GET_MODE_WIDER_MODE (innermode))
7792 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7794 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7795 NULL_RTX, VOIDmode, 0);
7796 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7797 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7800 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7801 NULL_RTX, VOIDmode, 0);
7804 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7805 && innermode == word_mode)
7808 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7809 NULL_RTX, VOIDmode, 0);
7810 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7811 op1 = convert_modes (innermode, mode,
7812 expand_expr (TREE_OPERAND (exp, 1),
7813 NULL_RTX, VOIDmode, 0),
7816 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7817 NULL_RTX, VOIDmode, 0);
7818 temp = expand_binop (mode, other_optab, op0, op1, target,
7819 unsignedp, OPTAB_LIB_WIDEN);
7820 htem = expand_mult_highpart_adjust (innermode,
7821 gen_highpart (innermode, temp),
7823 gen_highpart (innermode, temp),
7825 emit_move_insn (gen_highpart (innermode, temp), htem);
7830 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7831 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7832 return expand_mult (mode, op0, op1, target, unsignedp);
7834 case TRUNC_DIV_EXPR:
7835 case FLOOR_DIV_EXPR:
7837 case ROUND_DIV_EXPR:
7838 case EXACT_DIV_EXPR:
7839 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7841 /* Possible optimization: compute the dividend with EXPAND_SUM
7842 then if the divisor is constant can optimize the case
7843 where some terms of the dividend have coeffs divisible by it. */
7844 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7845 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7846 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7849 this_optab = flodiv_optab;
7852 case TRUNC_MOD_EXPR:
7853 case FLOOR_MOD_EXPR:
7855 case ROUND_MOD_EXPR:
7856 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7858 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7859 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7860 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7862 case FIX_ROUND_EXPR:
7863 case FIX_FLOOR_EXPR:
7865 abort (); /* Not used for C. */
7867 case FIX_TRUNC_EXPR:
7868 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7870 target = gen_reg_rtx (mode);
7871 expand_fix (target, op0, unsignedp);
7875 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7877 target = gen_reg_rtx (mode);
7878 /* expand_float can't figure out what to do if FROM has VOIDmode.
7879 So give it the correct mode. With -O, cse will optimize this. */
7880 if (GET_MODE (op0) == VOIDmode)
7881 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7883 expand_float (target, op0,
7884 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7888 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7889 temp = expand_unop (mode,
7890 ! unsignedp && flag_trapv
7891 && (GET_MODE_CLASS(mode) == MODE_INT)
7892 ? negv_optab : neg_optab, op0, target, 0);
7898 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7900 /* Handle complex values specially. */
7901 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7902 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7903 return expand_complex_abs (mode, op0, target, unsignedp);
7905 /* Unsigned abs is simply the operand. Testing here means we don't
7906 risk generating incorrect code below. */
7907 if (TREE_UNSIGNED (type))
7910 return expand_abs (mode, op0, target, unsignedp,
7911 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7915 target = original_target;
7916 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7917 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7918 || GET_MODE (target) != mode
7919 || (GET_CODE (target) == REG
7920 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7921 target = gen_reg_rtx (mode);
7922 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7923 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7925 /* First try to do it with a special MIN or MAX instruction.
7926 If that does not win, use a conditional jump to select the proper
7928 this_optab = (TREE_UNSIGNED (type)
7929 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7930 : (code == MIN_EXPR ? smin_optab : smax_optab));
7932 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7937 /* At this point, a MEM target is no longer useful; we will get better
7940 if (GET_CODE (target) == MEM)
7941 target = gen_reg_rtx (mode);
7944 emit_move_insn (target, op0);
7946 op0 = gen_label_rtx ();
7948 /* If this mode is an integer too wide to compare properly,
7949 compare word by word. Rely on cse to optimize constant cases. */
7950 if (GET_MODE_CLASS (mode) == MODE_INT
7951 && ! can_compare_p (GE, mode, ccp_jump))
7953 if (code == MAX_EXPR)
7954 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7955 target, op1, NULL_RTX, op0);
7957 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7958 op1, target, NULL_RTX, op0);
7962 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7963 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7964 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7967 emit_move_insn (target, op1);
7972 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7973 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7979 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7980 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7985 /* ??? Can optimize bitwise operations with one arg constant.
7986 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7987 and (a bitwise1 b) bitwise2 b (etc)
7988 but that is probably not worth while. */
7990 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7991 boolean values when we want in all cases to compute both of them. In
7992 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7993 as actual zero-or-1 values and then bitwise anding. In cases where
7994 there cannot be any side effects, better code would be made by
7995 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7996 how to recognize those cases. */
7998 case TRUTH_AND_EXPR:
8000 this_optab = and_optab;
8005 this_optab = ior_optab;
8008 case TRUTH_XOR_EXPR:
8010 this_optab = xor_optab;
8017 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8019 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8020 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8023 /* Could determine the answer when only additive constants differ. Also,
8024 the addition of one can be handled by changing the condition. */
8031 case UNORDERED_EXPR:
8038 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8042 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8043 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8045 && GET_CODE (original_target) == REG
8046 && (GET_MODE (original_target)
8047 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8049 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8052 if (temp != original_target)
8053 temp = copy_to_reg (temp);
8055 op1 = gen_label_rtx ();
8056 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8057 GET_MODE (temp), unsignedp, 0, op1);
8058 emit_move_insn (temp, const1_rtx);
8063 /* If no set-flag instruction, must generate a conditional
8064 store into a temporary variable. Drop through
8065 and handle this like && and ||. */
8067 case TRUTH_ANDIF_EXPR:
8068 case TRUTH_ORIF_EXPR:
8070 && (target == 0 || ! safe_from_p (target, exp, 1)
8071 /* Make sure we don't have a hard reg (such as function's return
8072 value) live across basic blocks, if not optimizing. */
8073 || (!optimize && GET_CODE (target) == REG
8074 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8075 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8078 emit_clr_insn (target);
8080 op1 = gen_label_rtx ();
8081 jumpifnot (exp, op1);
8084 emit_0_to_1_insn (target);
8087 return ignore ? const0_rtx : target;
8089 case TRUTH_NOT_EXPR:
8090 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8091 /* The parser is careful to generate TRUTH_NOT_EXPR
8092 only with operands that are always zero or one. */
8093 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8094 target, 1, OPTAB_LIB_WIDEN);
8100 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8102 return expand_expr (TREE_OPERAND (exp, 1),
8103 (ignore ? const0_rtx : target),
8107 /* If we would have a "singleton" (see below) were it not for a
8108 conversion in each arm, bring that conversion back out. */
8109 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8110 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8111 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8112 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8114 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8115 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8117 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8118 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8119 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8120 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8121 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8122 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8123 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8124 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8125 return expand_expr (build1 (NOP_EXPR, type,
8126 build (COND_EXPR, TREE_TYPE (iftrue),
8127 TREE_OPERAND (exp, 0),
8129 target, tmode, modifier);
8133 /* Note that COND_EXPRs whose type is a structure or union
8134 are required to be constructed to contain assignments of
8135 a temporary variable, so that we can evaluate them here
8136 for side effect only. If type is void, we must do likewise. */
8138 /* If an arm of the branch requires a cleanup,
8139 only that cleanup is performed. */
8142 tree binary_op = 0, unary_op = 0;
8144 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8145 convert it to our mode, if necessary. */
8146 if (integer_onep (TREE_OPERAND (exp, 1))
8147 && integer_zerop (TREE_OPERAND (exp, 2))
8148 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8152 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8157 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8158 if (GET_MODE (op0) == mode)
8162 target = gen_reg_rtx (mode);
8163 convert_move (target, op0, unsignedp);
8167 /* Check for X ? A + B : A. If we have this, we can copy A to the
8168 output and conditionally add B. Similarly for unary operations.
8169 Don't do this if X has side-effects because those side effects
8170 might affect A or B and the "?" operation is a sequence point in
8171 ANSI. (operand_equal_p tests for side effects.) */
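/* Illustrative sketch (hypothetical user source, not from this file):
   for "r = x ? a + b : a;" with a side-effect-free X we can emit

       r = a;
       if (x) r += b;

   copying A unconditionally and making only the addition of B
   conditional.  */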
8173 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8174 && operand_equal_p (TREE_OPERAND (exp, 2),
8175 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8176 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8177 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8178 && operand_equal_p (TREE_OPERAND (exp, 1),
8179 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8180 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8181 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8182 && operand_equal_p (TREE_OPERAND (exp, 2),
8183 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8184 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8185 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8186 && operand_equal_p (TREE_OPERAND (exp, 1),
8187 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8188 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8190 /* If we are not to produce a result, we have no target. Otherwise,
8191 if a target was specified use it; it will not be used as an
8192 intermediate target unless it is safe.  If no target, use a temporary and mark it as such.  */
8197 else if (original_target
8198 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8199 || (singleton && GET_CODE (original_target) == REG
8200 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8201 && original_target == var_rtx (singleton)))
8202 && GET_MODE (original_target) == mode
8203 #ifdef HAVE_conditional_move
8204 && (! can_conditionally_move_p (mode)
8205 || GET_CODE (original_target) == REG
8206 || TREE_ADDRESSABLE (type))
8208 && ! (GET_CODE (original_target) == MEM
8209 && MEM_VOLATILE_P (original_target)))
8210 temp = original_target;
8211 else if (TREE_ADDRESSABLE (type))
8214 temp = assign_temp (type, 0, 0, 1);
8216 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8217 do the test of X as a store-flag operation, do this as
8218 A + ((X != 0) << log C). Similarly for other simple binary
8219 operators. Only do for C == 1 if BRANCH_COST is low. */
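/* A sketch of the idea (hypothetical user source): for
   "r = x < y ? a + 8 : a;" we can emit the branch-free

       r = a + ((x < y) << 3);

   since 8 is a power of 2, so the 0-or-1 store-flag result only
   needs a shift by log2 (8) == 3.  */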
8220 if (temp && singleton && binary_op
8221 && (TREE_CODE (binary_op) == PLUS_EXPR
8222 || TREE_CODE (binary_op) == MINUS_EXPR
8223 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8224 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8225 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8226 : integer_onep (TREE_OPERAND (binary_op, 1)))
8227 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8230 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8231 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8232 ? addv_optab : add_optab)
8233 : TREE_CODE (binary_op) == MINUS_EXPR
8234 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8235 ? subv_optab : sub_optab)
8236 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab : xor_optab;
8239 /* If we had X ? A : A + 1, do this as A + (X == 0).
8241 We have to invert the truth value here and then put it
8242 back later if do_store_flag fails. We cannot simply copy
8243 TREE_OPERAND (exp, 0) to another variable and modify that
8244 because invert_truthvalue can modify the tree pointed to by its argument.  */
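/* Illustration (hypothetical user source): in "r = x ? a : a + 1;"
   the singleton A is the THEN arm, so we invert the condition and
   compute r = a + (x == 0).  */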
8246 if (singleton == TREE_OPERAND (exp, 1))
8247 TREE_OPERAND (exp, 0)
8248 = invert_truthvalue (TREE_OPERAND (exp, 0));
8250 result = do_store_flag (TREE_OPERAND (exp, 0),
8251 (safe_from_p (temp, singleton, 1)
8253 mode, BRANCH_COST <= 1);
8255 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8256 result = expand_shift (LSHIFT_EXPR, mode, result,
8257 build_int_2 (tree_log2
8261 (safe_from_p (temp, singleton, 1)
8262 ? temp : NULL_RTX), 0);
8266 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8267 return expand_binop (mode, boptab, op1, result, temp,
8268 unsignedp, OPTAB_LIB_WIDEN);
8270 else if (singleton == TREE_OPERAND (exp, 1))
8271 TREE_OPERAND (exp, 0)
8272 = invert_truthvalue (TREE_OPERAND (exp, 0));
8275 do_pending_stack_adjust ();
8277 op0 = gen_label_rtx ();
8279 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8283 /* If the target conflicts with the other operand of the
8284 binary op, we can't use it. Also, we can't use the target
8285 if it is a hard register, because evaluating the condition
8286 might clobber it. */
8288 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8289 || (GET_CODE (temp) == REG
8290 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8291 temp = gen_reg_rtx (mode);
8292 store_expr (singleton, temp, 0);
8295 expand_expr (singleton,
8296 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8297 if (singleton == TREE_OPERAND (exp, 1))
8298 jumpif (TREE_OPERAND (exp, 0), op0);
8300 jumpifnot (TREE_OPERAND (exp, 0), op0);
8302 start_cleanup_deferral ();
8303 if (binary_op && temp == 0)
8304 /* Just touch the other operand. */
8305 expand_expr (TREE_OPERAND (binary_op, 1),
8306 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8308 store_expr (build (TREE_CODE (binary_op), type,
8309 make_tree (type, temp),
8310 TREE_OPERAND (binary_op, 1)),
8313 store_expr (build1 (TREE_CODE (unary_op), type,
8314 make_tree (type, temp)),
8318 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8319 comparison operator. If we have one of these cases, set the
8320 output to A, branch on A (cse will merge these two references),
8321 then set the output to FOO. */
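/* Rough shape of the output (hypothetical user source
   "r = a > 0 ? a : foo"):

       r = a;
       if (a > 0) goto done;
       r = foo;
    done:

   letting cse merge the two references to A.  */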
8323 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8324 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8325 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8326 TREE_OPERAND (exp, 1), 0)
8327 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8328 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8329 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8331 if (GET_CODE (temp) == REG
8332 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8333 temp = gen_reg_rtx (mode);
8334 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8335 jumpif (TREE_OPERAND (exp, 0), op0);
8337 start_cleanup_deferral ();
8338 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8342 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8343 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8344 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8345 TREE_OPERAND (exp, 2), 0)
8346 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8347 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8348 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8350 if (GET_CODE (temp) == REG
8351 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8352 temp = gen_reg_rtx (mode);
8353 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8354 jumpifnot (TREE_OPERAND (exp, 0), op0);
8356 start_cleanup_deferral ();
8357 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8362 op1 = gen_label_rtx ();
8363 jumpifnot (TREE_OPERAND (exp, 0), op0);
8365 start_cleanup_deferral ();
8367 /* One branch of the cond can be void, if it never returns. For
8368 example A ? throw : E */
8370 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8371 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8373 expand_expr (TREE_OPERAND (exp, 1),
8374 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8375 end_cleanup_deferral ();
8377 emit_jump_insn (gen_jump (op1));
8380 start_cleanup_deferral ();
8382 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8383 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8385 expand_expr (TREE_OPERAND (exp, 2),
8386 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8389 end_cleanup_deferral ();
8400 /* Something needs to be initialized, but we didn't know
8401 where that thing was when building the tree. For example,
8402 it could be the return value of a function, or a parameter
8403 to a function that is passed on the stack, or a temporary
8404 variable which must be passed by reference.
8406 We guarantee that the expression will either be constructed
8407 or copied into our original target. */
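/* For illustration (hypothetical C++-style source): in
   "struct S s = f ();" the TARGET_EXPR's slot is S, and expanding
   the initializer with S's rtl as the target lets f construct its
   return value in place rather than into a temporary that is then
   copied.  */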
8409 tree slot = TREE_OPERAND (exp, 0);
8410 tree cleanups = NULL_TREE;
8413 if (TREE_CODE (slot) != VAR_DECL)
8417 target = original_target;
8419 /* Set this here so that if we get a target that refers to a
8420 register variable that's already been used, put_reg_into_stack
8421 knows that it should fix up those uses. */
8422 TREE_USED (slot) = 1;
8426 if (DECL_RTL_SET_P (slot))
8428 target = DECL_RTL (slot);
8429 /* We have already expanded the slot, so don't do anything else here.  */
8431 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8436 target = assign_temp (type, 2, 0, 1);
8437 /* All temp slots at this level must not conflict. */
8438 preserve_temp_slots (target);
8439 SET_DECL_RTL (slot, target);
8440 if (TREE_ADDRESSABLE (slot))
8441 put_var_into_stack (slot);
8443 /* Since SLOT is not known to the called function
8444 to belong to its stack frame, we must build an explicit
8445 cleanup. This case occurs when we must build up a reference
8446 to pass the reference as an argument. In this case,
8447 it is very likely that such a reference need not be built here.  */
8450 if (TREE_OPERAND (exp, 2) == 0)
8451 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8452 cleanups = TREE_OPERAND (exp, 2);
8457 /* This case does occur when expanding a parameter that
8458 needs to be constructed on the stack. The target
8459 is the actual stack address that we want to initialize.
8460 The function we call will perform the cleanup in this case. */
8462 /* If we have already assigned it space, use that space,
8463 not the target that we were passed in, as our target
8464 parameter is only a hint. */
8465 if (DECL_RTL_SET_P (slot))
8467 target = DECL_RTL (slot);
8468 /* We have already expanded the slot, so don't do anything else here.  */
8470 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8475 SET_DECL_RTL (slot, target);
8476 /* If we must have an addressable slot, then make sure that
8477 the RTL that we just stored in slot is OK. */
8478 if (TREE_ADDRESSABLE (slot))
8479 put_var_into_stack (slot);
8483 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8484 /* Mark it as expanded. */
8485 TREE_OPERAND (exp, 1) = NULL_TREE;
8487 store_expr (exp1, target, 0);
8489 expand_decl_cleanup (NULL_TREE, cleanups);
8496 tree lhs = TREE_OPERAND (exp, 0);
8497 tree rhs = TREE_OPERAND (exp, 1);
8498 tree noncopied_parts = 0;
8499 tree lhs_type = TREE_TYPE (lhs);
8501 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8502 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8504 = init_noncopied_parts (stabilize_reference (lhs),
8505 TYPE_NONCOPIED_PARTS (lhs_type));
8507 while (noncopied_parts != 0)
8509 expand_assignment (TREE_VALUE (noncopied_parts),
8510 TREE_PURPOSE (noncopied_parts), 0, 0);
8511 noncopied_parts = TREE_CHAIN (noncopied_parts);
8518 /* If lhs is complex, expand calls in rhs before computing it.
8519 That's so we don't compute a pointer and save it over a call.
8520 If lhs is simple, compute it first so we can give it as a
8521 target if the rhs is just a call. This avoids an extra temp and copy
8522 and prevents a partial subsumption that makes bad code.
8523 Actually we could treat component_ref's of vars like vars. */
8525 tree lhs = TREE_OPERAND (exp, 0);
8526 tree rhs = TREE_OPERAND (exp, 1);
8527 tree noncopied_parts = 0;
8528 tree lhs_type = TREE_TYPE (lhs);
8532 /* Check for |= or &= of a bitfield of size one into another bitfield
8533 of size 1. In this case, (unless we need the result of the
8534 assignment) we can do this more efficiently with a
8535 test followed by an assignment, if necessary.
8537 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8538 things change so we do, this code should be enhanced to support it.  */
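/* Sketch (hypothetical user source): given
       struct { unsigned a : 1, b : 1; } s;
   the statement "s.a |= s.b;" can be emitted as
       if (s.b) s.a = 1;
   avoiding the extract/or/insert sequence.  */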
8541 && TREE_CODE (lhs) == COMPONENT_REF
8542 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8543 || TREE_CODE (rhs) == BIT_AND_EXPR)
8544 && TREE_OPERAND (rhs, 0) == lhs
8545 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8546 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8547 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8549 rtx label = gen_label_rtx ();
8551 do_jump (TREE_OPERAND (rhs, 1),
8552 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8553 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8554 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8555 (TREE_CODE (rhs) == BIT_IOR_EXPR
8557 : integer_zero_node)),
8559 do_pending_stack_adjust ();
8564 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8565 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8567 = save_noncopied_parts (stabilize_reference (lhs),
8568 TYPE_NONCOPIED_PARTS (lhs_type));
8570 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8571 while (noncopied_parts != 0)
8573 expand_assignment (TREE_PURPOSE (noncopied_parts),
8574 TREE_VALUE (noncopied_parts), 0, 0);
8575 noncopied_parts = TREE_CHAIN (noncopied_parts);
8581 if (!TREE_OPERAND (exp, 0))
8582 expand_null_return ();
8584 expand_return (TREE_OPERAND (exp, 0));
8587 case PREINCREMENT_EXPR:
8588 case PREDECREMENT_EXPR:
8589 return expand_increment (exp, 0, ignore);
8591 case POSTINCREMENT_EXPR:
8592 case POSTDECREMENT_EXPR:
8593 /* Faster to treat as pre-increment if result is not used. */
8594 return expand_increment (exp, ! ignore, ignore);
8597 /* If nonzero, TEMP will be set to the address of something that might
8598 be a MEM corresponding to a stack slot. */
8601 /* Are we taking the address of a nested function? */
8602 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8603 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8604 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8605 && ! TREE_STATIC (exp))
8607 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8608 op0 = force_operand (op0, target);
8610 /* If we are taking the address of something erroneous, just use zero.  */
8612 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8616 /* We make sure to pass const0_rtx down if we came in with
8617 ignore set, to avoid doing the cleanups twice for something. */
8618 op0 = expand_expr (TREE_OPERAND (exp, 0),
8619 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8620 (modifier == EXPAND_INITIALIZER
8621 ? modifier : EXPAND_CONST_ADDRESS));
8623 /* If we are going to ignore the result, OP0 will have been set
8624 to const0_rtx, so just return it. Don't get confused and
8625 think we are taking the address of the constant. */
8629 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8630 clever and return a REG when given a MEM.  */
8631 op0 = protect_from_queue (op0, 1);
8633 /* We would like the object in memory. If it is a constant, we can
8634 have it be statically allocated into memory. For a non-constant,
8635 we need to allocate some memory and store the value into it. */
8637 if (CONSTANT_P (op0))
8638 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8640 else if (GET_CODE (op0) == MEM)
8642 mark_temp_addr_taken (op0);
8643 temp = XEXP (op0, 0);
8646 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8647 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8648 || GET_CODE (op0) == PARALLEL)
8650 /* If this object is in a register, it must be copied into memory, since we need its address.  */
8652 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8653 tree nt = build_qualified_type (inner_type,
8654 (TYPE_QUALS (inner_type)
8655 | TYPE_QUAL_CONST));
8656 rtx memloc = assign_temp (nt, 1, 1, 1);
8658 mark_temp_addr_taken (memloc);
8659 if (GET_CODE (op0) == PARALLEL)
8660 /* Handle calls that pass values in multiple non-contiguous
8661 locations. The Irix 6 ABI has examples of this. */
8662 emit_group_store (memloc, op0,
8663 int_size_in_bytes (inner_type),
8664 TYPE_ALIGN (inner_type));
8666 emit_move_insn (memloc, op0);
8670 if (GET_CODE (op0) != MEM)
8673 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8675 temp = XEXP (op0, 0);
8676 #ifdef POINTERS_EXTEND_UNSIGNED
8677 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8678 && mode == ptr_mode)
8679 temp = convert_memory_address (ptr_mode, temp);
8684 op0 = force_operand (XEXP (op0, 0), target);
8687 if (flag_force_addr && GET_CODE (op0) != REG)
8688 op0 = force_reg (Pmode, op0);
8690 if (GET_CODE (op0) == REG
8691 && ! REG_USERVAR_P (op0))
8692 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8694 /* If we might have had a temp slot, add an equivalent address for it.  */
8697 update_temp_slot_address (temp, op0);
8699 #ifdef POINTERS_EXTEND_UNSIGNED
8700 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8701 && mode == ptr_mode)
8702 op0 = convert_memory_address (ptr_mode, op0);
8707 case ENTRY_VALUE_EXPR:
8710 /* COMPLEX type for Extended Pascal & Fortran */
8713 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8716 /* Get the rtx code of the operands. */
8717 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8718 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8721 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8725 /* Move the real (op0) and imaginary (op1) parts to their location. */
8726 emit_move_insn (gen_realpart (mode, target), op0);
8727 emit_move_insn (gen_imagpart (mode, target), op1);
8729 insns = get_insns ();
8732 /* Complex construction should appear as a single unit. */
8733 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8734 each with a separate pseudo as destination.
8735 It's not correct for flow to treat them as a unit. */
8736 if (GET_CODE (target) != CONCAT)
8737 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8745 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8746 return gen_realpart (mode, op0);
8749 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8750 return gen_imagpart (mode, op0);
8754 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8758 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8761 target = gen_reg_rtx (mode);
8765 /* Store the realpart and the negated imagpart to target. */
8766 emit_move_insn (gen_realpart (partmode, target),
8767 gen_realpart (partmode, op0));
8769 imag_t = gen_imagpart (partmode, target);
8770 temp = expand_unop (partmode,
8771 ! unsignedp && flag_trapv
8772 && (GET_MODE_CLASS(partmode) == MODE_INT)
8773 ? negv_optab : neg_optab,
8774 gen_imagpart (partmode, op0), imag_t, 0);
8776 emit_move_insn (imag_t, temp);
8778 insns = get_insns ();
8781 /* Conjugate should appear as a single unit.
8782 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8783 each with a separate pseudo as destination.
8784 It's not correct for flow to treat them as a unit. */
8785 if (GET_CODE (target) != CONCAT)
8786 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8793 case TRY_CATCH_EXPR:
8795 tree handler = TREE_OPERAND (exp, 1);
8797 expand_eh_region_start ();
8799 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8801 expand_eh_region_end_cleanup (handler);
8806 case TRY_FINALLY_EXPR:
8808 tree try_block = TREE_OPERAND (exp, 0);
8809 tree finally_block = TREE_OPERAND (exp, 1);
8810 rtx finally_label = gen_label_rtx ();
8811 rtx done_label = gen_label_rtx ();
8812 rtx return_link = gen_reg_rtx (Pmode);
8813 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8814 (tree) finally_label, (tree) return_link);
8815 TREE_SIDE_EFFECTS (cleanup) = 1;
8817 /* Start a new binding layer that will keep track of all cleanup
8818 actions to be performed. */
8819 expand_start_bindings (2);
8821 target_temp_slot_level = temp_slot_level;
8823 expand_decl_cleanup (NULL_TREE, cleanup);
8824 op0 = expand_expr (try_block, target, tmode, modifier);
8826 preserve_temp_slots (op0);
8827 expand_end_bindings (NULL_TREE, 0, 0);
8828 emit_jump (done_label);
8829 emit_label (finally_label);
8830 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8831 emit_indirect_jump (return_link);
8832 emit_label (done_label);
8836 case GOTO_SUBROUTINE_EXPR:
8838 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8839 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8840 rtx return_address = gen_label_rtx ();
8841 emit_move_insn (return_link,
8842 gen_rtx_LABEL_REF (Pmode, return_address));
8844 emit_label (return_address);
8849 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8852 return get_exception_pointer (cfun);
8855 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8858 /* Here to do an ordinary binary operator, generating an instruction
8859 from the optab already placed in `this_optab'. */
8861 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) subtarget = 0;
8863 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8864 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8866 temp = expand_binop (mode, this_optab, op0, op1, target,
8867 unsignedp, OPTAB_LIB_WIDEN);
8873 /* Similar to expand_expr, except that we don't specify a target, target
8874 mode, or modifier and we return the alignment of the inner type. This is
8875 used in cases where it is not necessary to align the result to the
8876 alignment of its type as long as we know the alignment of the result, for
8877 example for comparisons of BLKmode values. */
8880 expand_expr_unaligned (exp, palign)
8882 unsigned int *palign;
8885 tree type = TREE_TYPE (exp);
8886 register enum machine_mode mode = TYPE_MODE (type);
8888 /* Default the alignment we return to that of the type. */
8889 *palign = TYPE_ALIGN (type);
8891 /* The only case in which we do anything special is when the resulting mode is BLKmode.  */
8893 if (mode != BLKmode)
8894 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8896 switch (TREE_CODE (exp))
8900 case NON_LVALUE_EXPR:
8901 /* Conversions between BLKmode values don't change the underlying
8902 alignment or value. */
8903 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8904 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8908 /* Much of the code for this case is copied directly from expand_expr.
8909 We need to duplicate it here because we will do something different
8910 in the fall-through case, so we need to handle the same exceptions it does.  */
8913 tree array = TREE_OPERAND (exp, 0);
8914 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8915 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8916 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8919 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE) abort ();
8922 /* Optimize the special-case of a zero lower bound.
8924 We convert the low_bound to sizetype to avoid some problems
8925 with constant folding. (E.g. suppose the lower bound is 1,
8926 and its mode is QI. Without the conversion, (ARRAY
8927 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8928 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8930 if (! integer_zerop (low_bound))
8931 index = size_diffop (index, convert (sizetype, low_bound));
8933 /* If this is a constant index into a constant array,
8934 just get the value from the array. Handle both the cases when
8935 we have an explicit constructor and when our operand is a variable
8936 that was declared const. */
8938 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8939 && host_integerp (index, 0)
8940 && 0 > compare_tree_int (index,
8941 list_length (CONSTRUCTOR_ELTS
8942 (TREE_OPERAND (exp, 0)))))
8946 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8947 i = tree_low_cst (index, 0);
8948 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8952 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8955 else if (optimize >= 1
8956 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8957 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8958 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8960 if (TREE_CODE (index) == INTEGER_CST)
8962 tree init = DECL_INITIAL (array);
8964 if (TREE_CODE (init) == CONSTRUCTOR)
8968 for (elem = CONSTRUCTOR_ELTS (init);
8969 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8970 elem = TREE_CHAIN (elem))
8974 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8984 case ARRAY_RANGE_REF:
8985 /* If the operand is a CONSTRUCTOR, we can just extract the
8986 appropriate field if it is present. Don't do this if we have
8987 already written the data since we want to refer to that copy
8988 and varasm.c assumes that's what we'll do. */
8989 if (TREE_CODE (exp) == COMPONENT_REF
8990 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8991 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8995 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8996 elt = TREE_CHAIN (elt))
8997 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8998 /* Note that unlike the case in expand_expr, we know this is
8999 BLKmode and hence not an integer. */
9000 return expand_expr_unaligned (TREE_VALUE (elt), palign);
9004 enum machine_mode mode1;
9005 HOST_WIDE_INT bitsize, bitpos;
9008 unsigned int alignment;
9010 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9011 &mode1, &unsignedp, &volatilep,
9014 /* If we got back the original object, something is wrong. Perhaps
9015 we are evaluating an expression too early. In any event, don't
9016 infinitely recurse. */
9020 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9022 /* If this is a constant, put it into a register if it is a
9023 legitimate constant and OFFSET is 0; put it into memory if it isn't.  */
9024 if (CONSTANT_P (op0))
9026 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
9028 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
9030 op0 = force_reg (inner_mode, op0);
9032 op0 = validize_mem (force_const_mem (inner_mode, op0));
9037 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
9039 /* If this object is in a register, put it into memory.
9040 This case can't occur in C, but can in Ada if we have
9041 unchecked conversion of an expression from a scalar type to
9042 an array or record type. */
9043 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9044 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
9046 tree nt = build_qualified_type (TREE_TYPE (tem),
9047 (TYPE_QUALS (TREE_TYPE (tem))
9048 | TYPE_QUAL_CONST));
9049 rtx memloc = assign_temp (nt, 1, 1, 1);
9051 mark_temp_addr_taken (memloc);
9052 emit_move_insn (memloc, op0);
9056 if (GET_CODE (op0) != MEM)
9059 if (GET_MODE (offset_rtx) != ptr_mode)
9061 #ifdef POINTERS_EXTEND_UNSIGNED
9062 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
9064 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9068 op0 = change_address (op0, VOIDmode,
9069 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9070 force_reg (ptr_mode,
9074 /* Don't forget about volatility even if this is a bitfield. */
9075 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9077 op0 = copy_rtx (op0);
9078 MEM_VOLATILE_P (op0) = 1;
9081 /* Check the access. */
9082 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9087 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9088 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9090 /* Check the access rights of the pointer.  */
9091 in_check_memory_usage = 1;
9092 if (size > BITS_PER_UNIT)
9093 emit_library_call (chkr_check_addr_libfunc,
9094 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9095 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9096 TYPE_MODE (sizetype),
9097 GEN_INT (MEMORY_USE_RO),
9098 TYPE_MODE (integer_type_node));
9099 in_check_memory_usage = 0;
9102 /* In cases where an aligned union has an unaligned object
9103 as a field, we might be extracting a BLKmode value from
9104 an integer-mode (e.g., SImode) object. Handle this case
9105 by doing the extract into an object as wide as the field
9106 (which we know to be the width of a basic mode), then
9107 storing into memory, and changing the mode to BLKmode.
9108 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9109 EXPAND_INITIALIZER), then we must not copy to a temporary. */
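/* Concrete example (hypothetical types): if an SImode union member
   contains a packed 3-byte BLKmode record at a nonzero bit offset,
   we extract the bits into an integer register as wide as the field,
   store that register into a stack temporary, and return the
   temporary with its mode changed to BLKmode.  */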
9110 if (mode1 == VOIDmode
9111 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9112 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9113 && (TYPE_ALIGN (type) > alignment
9114 || bitpos % TYPE_ALIGN (type) != 0)))
9116 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9118 if (ext_mode == BLKmode)
9120 /* In this case, BITPOS must start at a byte boundary. */
9121 if (GET_CODE (op0) != MEM
9122 || bitpos % BITS_PER_UNIT != 0) abort ();
9125 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9129 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9131 rtx new = assign_temp (nt, 0, 1, 1);
9133 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9134 unsignedp, NULL_RTX, ext_mode,
9135 ext_mode, alignment,
9136 int_size_in_bytes (TREE_TYPE (tem)));
9138 /* If the result is a record type and BITSIZE is narrower than
9139 the mode of OP0, an integral mode, and this is a big endian
9140 machine, we must put the field into the high-order bits. */
9141 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9142 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9143 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9144 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9145 size_int (GET_MODE_BITSIZE
9150 emit_move_insn (new, op0);
9151 op0 = copy_rtx (new);
9152 PUT_MODE (op0, BLKmode);
9156 /* Get a reference to just this component. */
9157 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9159 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9161 /* Adjust the alignment in case the bit position is not
9162 a multiple of the alignment of the inner object. */
9163 while (bitpos % alignment != 0) alignment >>= 1;
9166 if (GET_CODE (XEXP (op0, 0)) == REG)
9167 mark_reg_pointer (XEXP (op0, 0), alignment);
9169 MEM_IN_STRUCT_P (op0) = 1;
9170 MEM_VOLATILE_P (op0) |= volatilep;
9172 *palign = alignment;
9181 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9184 /* Return the tree node if ARG corresponds to a string constant or zero
9185 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9186 in bytes within the string that ARG is accessing. The type of the
9187 offset will be `sizetype'. */
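/* Example (hypothetical caller): for the pointer expression "hello" + 2,
   a PLUS_EXPR whose first operand is an ADDR_EXPR of a STRING_CST,
   string_constant returns the STRING_CST "hello" and sets *PTR_OFFSET
   to the sizetype constant 2.  */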
9190 string_constant (arg, ptr_offset)
9196 if (TREE_CODE (arg) == ADDR_EXPR
9197 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9199 *ptr_offset = size_zero_node;
9200 return TREE_OPERAND (arg, 0);
9202 else if (TREE_CODE (arg) == PLUS_EXPR)
9204 tree arg0 = TREE_OPERAND (arg, 0);
9205 tree arg1 = TREE_OPERAND (arg, 1);
9210 if (TREE_CODE (arg0) == ADDR_EXPR
9211 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9213 *ptr_offset = convert (sizetype, arg1);
9214 return TREE_OPERAND (arg0, 0);
9216 else if (TREE_CODE (arg1) == ADDR_EXPR
9217 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9219 *ptr_offset = convert (sizetype, arg0);
9220 return TREE_OPERAND (arg1, 0);
9227 /* Expand code for a post- or pre- increment or decrement
9228 and return the RTX for the result.
9229 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
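/* For instance (hypothetical source): expanding "y = x++" uses POST == 1
   and returns an rtx holding the old value of x, while "y = ++x" uses
   POST == 0 and returns an rtx holding the incremented value.  */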
9232 expand_increment (exp, post, ignore)
9236 register rtx op0, op1;
9237 register rtx temp, value;
9238 register tree incremented = TREE_OPERAND (exp, 0);
9239 optab this_optab = add_optab;
9241 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9242 int op0_is_copy = 0;
9243 int single_insn = 0;
9244 /* 1 means we can't store into OP0 directly,
9245 because it is a subreg narrower than a word,
9246 and we don't dare clobber the rest of the word. */
9249 /* Stabilize any component ref that might need to be
9250 evaluated more than once below. */
9252 || TREE_CODE (incremented) == BIT_FIELD_REF
9253 || (TREE_CODE (incremented) == COMPONENT_REF
9254 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9255 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9256 incremented = stabilize_reference (incremented);
9257 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9258 ones into save exprs so that they don't accidentally get evaluated
9259 more than once by the code below. */
9260 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9261 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9262 incremented = save_expr (incremented);
9264 /* Compute the operands as RTX.
9265 Note whether OP0 is the actual lvalue or a copy of it:
9266 I believe it is a copy iff it is a register or subreg
9267 and insns were generated in computing it. */
9269 temp = get_last_insn ();
9270 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9272 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9273 in place but instead must do sign- or zero-extension during assignment,
9274 so we copy it into a new register and let the code below use it as a copy.
9277 Note that we can safely modify this SUBREG since it is known not to be
9278 shared (it was made by the expand_expr call above). */
9280 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9283 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9287 else if (GET_CODE (op0) == SUBREG
9288 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9290 /* We cannot increment this SUBREG in place. If we are
9291 post-incrementing, get a copy of the old value. Otherwise,
9292 just mark that we cannot increment in place. */
9294 op0 = copy_to_reg (op0);
9299 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9300 && temp != get_last_insn ());
9301 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9302 EXPAND_MEMORY_USE_BAD);
9304 /* Decide whether incrementing or decrementing. */
9305 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9306 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9307 this_optab = sub_optab;
9309 /* Convert decrement by a constant into a negative increment. */
9310 if (this_optab == sub_optab
9311 && GET_CODE (op1) == CONST_INT)
9313 op1 = GEN_INT (-INTVAL (op1));
9314 this_optab = add_optab;
9317 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9318 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9320 /* For a preincrement, see if we can do this with a single instruction. */
9323 icode = (int) this_optab->handlers[(int) mode].insn_code;
9324 if (icode != (int) CODE_FOR_nothing
9325 /* Make sure that OP0 is valid for operands 0 and 1
9326 of the insn we want to queue. */
9327 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9328 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9329 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9333 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9334 then we cannot just increment OP0. We must therefore contrive to
9335 increment the original value. Then, for postincrement, we can return
9336 OP0 since it is a copy of the old value. For preincrement, expand here
9337 unless we can do it with a single insn.
9339 Likewise if storing directly into OP0 would clobber high bits
9340 we need to preserve (bad_subreg). */
9341 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9343 /* This is the easiest way to increment the value wherever it is.
9344 Problems with multiple evaluation of INCREMENTED are prevented
9345 because either (1) it is a component_ref or preincrement,
9346 in which case it was stabilized above, or (2) it is an array_ref
9347 with constant index in an array in a register, which is
9348 safe to reevaluate. */
9349 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9350 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9351 ? MINUS_EXPR : PLUS_EXPR),
9354 TREE_OPERAND (exp, 1));
9356 while (TREE_CODE (incremented) == NOP_EXPR
9357 || TREE_CODE (incremented) == CONVERT_EXPR)
9359 newexp = convert (TREE_TYPE (incremented), newexp);
9360 incremented = TREE_OPERAND (incremented, 0);
9363 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9364 return post ? op0 : temp;
9369 /* We have a true reference to the value in OP0.
9370 If there is an insn to add or subtract in this mode, queue it.
9371 Queueing the increment insn avoids the register shuffling
9372 that often results if we must increment now and first save
9373 the old value for subsequent use. */
9375 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9376 op0 = stabilize (op0);
9379 icode = (int) this_optab->handlers[(int) mode].insn_code;
9380 if (icode != (int) CODE_FOR_nothing
9381 /* Make sure that OP0 is valid for operands 0 and 1
9382 of the insn we want to queue. */
9383 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9384 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9386 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9387 op1 = force_reg (mode, op1);
9389 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9391 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9393 rtx addr = (general_operand (XEXP (op0, 0), mode)
9394 ? force_reg (Pmode, XEXP (op0, 0))
9395 : copy_to_reg (XEXP (op0, 0)));
9398 op0 = replace_equiv_address (op0, addr);
9399 temp = force_reg (GET_MODE (op0), op0);
9400 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9401 op1 = force_reg (mode, op1);
9403 /* The increment queue is LIFO, thus we have to `queue'
9404 the instructions in reverse order. */
9405 enqueue_insn (op0, gen_move_insn (op0, temp));
9406 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9411 /* Preincrement, or we can't increment with one simple insn. */
9413 /* Save a copy of the value before inc or dec, to return it later. */
9414 temp = value = copy_to_reg (op0);
9416 /* Arrange to return the incremented value. */
9417 /* Copy the rtx because expand_binop will protect from the queue,
9418 and the results of that would be invalid for us to return
9419 if our caller does emit_queue before using our result. */
9420 temp = copy_rtx (value = op0);
9422 /* Increment however we can. */
9423 op1 = expand_binop (mode, this_optab, value, op1,
9424 current_function_check_memory_usage ? NULL_RTX : op0,
9425 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9426 /* Make sure the value is stored into OP0. */
9428 emit_move_insn (op0, op1);
9433 /* At the start of a function, record that we have no previously-pushed
9434 arguments waiting to be popped. */
9437 init_pending_stack_adjust ()
9439 pending_stack_adjust = 0;
9442 /* When exiting from a function, if safe, clear out any pending stack adjust
9443 so the adjustment won't get done.
9445 Note, if the current function calls alloca, then it must have a
9446 frame pointer regardless of the value of flag_omit_frame_pointer. */
9449 clear_pending_stack_adjust ()
9451 #ifdef EXIT_IGNORE_STACK
9453 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9454 && EXIT_IGNORE_STACK
9455 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9456 && ! flag_inline_functions)
9458 stack_pointer_delta -= pending_stack_adjust,
9459 pending_stack_adjust = 0;
9464 /* Pop any previously-pushed arguments that have not been popped yet. */
9467 do_pending_stack_adjust ()
9469 if (inhibit_defer_pop == 0)
9471 if (pending_stack_adjust != 0)
9472 adjust_stack (GEN_INT (pending_stack_adjust));
9473 pending_stack_adjust = 0;
9477 /* Expand conditional expressions. */
9479 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9480 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here.  */
9484 jumpifnot (exp, label)
9488 do_jump (exp, label, NULL_RTX);
9491 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9498 do_jump (exp, NULL_RTX, label);
9501 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9502 the result is zero, or IF_TRUE_LABEL if the result is one.
9503 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9504 meaning fall through in that case.
9506 do_jump always does any pending stack adjust except when it does not
9507 actually perform a jump. An example where there is no jump
9508 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9510 This function is responsible for optimizing cases such as
9511 &&, || and comparison operators in EXP. */
9514 do_jump (exp, if_false_label, if_true_label)
9516 rtx if_false_label, if_true_label;
9518 register enum tree_code code = TREE_CODE (exp);
9519 /* Some cases need to create a label to jump to
9520 in order to properly fall through.
9521 These cases set DROP_THROUGH_LABEL nonzero. */
9522 rtx drop_through_label = 0;
9526 enum machine_mode mode;
9528 #ifdef MAX_INTEGER_COMPUTATION_MODE
9529 check_max_integer_computation_mode (exp);
9540 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9546 /* This is not true with #pragma weak */
9548 /* The address of something can never be zero. */
9550 emit_jump (if_true_label);
9555 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9556 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9557 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9558 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9561 /* If we are narrowing the operand, we have to do the compare in the narrower mode.  */
9563 if ((TYPE_PRECISION (TREE_TYPE (exp))
9564 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9566 case NON_LVALUE_EXPR:
9567 case REFERENCE_EXPR:
9572 /* These cannot change zero->non-zero or vice versa. */
9573 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9576 case WITH_RECORD_EXPR:
9577 /* Put the object on the placeholder list, recurse through our first
9578 operand, and pop the list. */
9579 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9581 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9582 placeholder_list = TREE_CHAIN (placeholder_list);
9586 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9587 a test, and can be longer if the test is eliminated.  */
9589 /* Reduce to minus. */
9590 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9591 TREE_OPERAND (exp, 0),
9592 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9593 TREE_OPERAND (exp, 1))));
9594 /* Process as MINUS. */
9598 /* Non-zero iff operands of minus differ. */
9599 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9600 TREE_OPERAND (exp, 0),
9601 TREE_OPERAND (exp, 1)),
9602 NE, NE, if_false_label, if_true_label);
9606 /* If we are AND'ing with a small constant, do this comparison in the
9607 smallest type that fits. If the machine doesn't have comparisons
9608 that small, it will be converted back to the wider comparison.
9609 This helps if we are testing the sign bit of a narrower object.
9610 combine can't do this for us because it can't know whether a
9611 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
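/* E.g. (hypothetical source): if C has type char, the promoted test
   "(c & 0x80) != 0" only needs bit 7, so we can do the comparison
   in QImode instead of the full int mode.  */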
9613 if (! SLOW_BYTE_ACCESS
9614 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9615 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9616 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9617 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9618 && (type = type_for_mode (mode, 1)) != 0
9619 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9620 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9621 != CODE_FOR_nothing))
9623 do_jump (convert (type, exp), if_false_label, if_true_label);
9628 case TRUTH_NOT_EXPR:
9629 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9632 case TRUTH_ANDIF_EXPR:
9633 if (if_false_label == 0)
9634 if_false_label = drop_through_label = gen_label_rtx ();
9635 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9636 start_cleanup_deferral ();
9637 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9638 end_cleanup_deferral ();
9641 case TRUTH_ORIF_EXPR:
9642 if (if_true_label == 0)
9643 if_true_label = drop_through_label = gen_label_rtx ();
9644 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9645 start_cleanup_deferral ();
9646 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9647 end_cleanup_deferral ();
9652 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9653 preserve_temp_slots (NULL_RTX);
9657 do_pending_stack_adjust ();
9658 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9664 case ARRAY_RANGE_REF:
9666 HOST_WIDE_INT bitsize, bitpos;
9668 enum machine_mode mode;
9672 unsigned int alignment;
9674 /* Get description of this reference. We don't actually care
9675 about the underlying object here. */
9676 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9677 &unsignedp, &volatilep, &alignment);
9679 type = type_for_size (bitsize, unsignedp);
9680 if (! SLOW_BYTE_ACCESS
9681 && type != 0 && bitsize >= 0
9682 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9683 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9684 != CODE_FOR_nothing))
9686 do_jump (convert (type, exp), if_false_label, if_true_label);
9693 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9694 if (integer_onep (TREE_OPERAND (exp, 1))
9695 && integer_zerop (TREE_OPERAND (exp, 2)))
9696 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9698 else if (integer_zerop (TREE_OPERAND (exp, 1))
9699 && integer_onep (TREE_OPERAND (exp, 2)))
9700 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9704 register rtx label1 = gen_label_rtx ();
9705 drop_through_label = gen_label_rtx ();
9707 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9709 start_cleanup_deferral ();
9710 /* Now the THEN-expression. */
9711 do_jump (TREE_OPERAND (exp, 1),
9712 if_false_label ? if_false_label : drop_through_label,
9713 if_true_label ? if_true_label : drop_through_label);
9714 /* In case the do_jump just above never jumps. */
9715 do_pending_stack_adjust ();
9716 emit_label (label1);
9718 /* Now the ELSE-expression. */
9719 do_jump (TREE_OPERAND (exp, 2),
9720 if_false_label ? if_false_label : drop_through_label,
9721 if_true_label ? if_true_label : drop_through_label);
9722 end_cleanup_deferral ();
9728 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9730 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9731 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9733 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9734 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9737 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9738 fold (build (EQ_EXPR, TREE_TYPE (exp),
9739 fold (build1 (REALPART_EXPR,
9740 TREE_TYPE (inner_type),
9742 fold (build1 (REALPART_EXPR,
9743 TREE_TYPE (inner_type),
9745 fold (build (EQ_EXPR, TREE_TYPE (exp),
9746 fold (build1 (IMAGPART_EXPR,
9747 TREE_TYPE (inner_type),
9749 fold (build1 (IMAGPART_EXPR,
9750 TREE_TYPE (inner_type),
9752 if_false_label, if_true_label);
9755 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9756 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9758 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9759 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9760 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9762 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9768 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9770 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9771 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9773 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9774 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9777 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9778 fold (build (NE_EXPR, TREE_TYPE (exp),
9779 fold (build1 (REALPART_EXPR,
9780 TREE_TYPE (inner_type),
9782 fold (build1 (REALPART_EXPR,
9783 TREE_TYPE (inner_type),
9785 fold (build (NE_EXPR, TREE_TYPE (exp),
9786 fold (build1 (IMAGPART_EXPR,
9787 TREE_TYPE (inner_type),
9789 fold (build1 (IMAGPART_EXPR,
9790 TREE_TYPE (inner_type),
9792 if_false_label, if_true_label);
9795 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9796 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9798 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9799 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9800 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9802 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9807 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9808 if (GET_MODE_CLASS (mode) == MODE_INT
9809 && ! can_compare_p (LT, mode, ccp_jump))
9810 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9812 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9816 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9817 if (GET_MODE_CLASS (mode) == MODE_INT
9818 && ! can_compare_p (LE, mode, ccp_jump))
9819 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9821 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9825 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9826 if (GET_MODE_CLASS (mode) == MODE_INT
9827 && ! can_compare_p (GT, mode, ccp_jump))
9828 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9830 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9834 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9835 if (GET_MODE_CLASS (mode) == MODE_INT
9836 && ! can_compare_p (GE, mode, ccp_jump))
9837 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9839 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9842 case UNORDERED_EXPR:
9845 enum rtx_code cmp, rcmp;
9848 if (code == UNORDERED_EXPR)
9849 cmp = UNORDERED, rcmp = ORDERED;
9851 cmp = ORDERED, rcmp = UNORDERED;
9852 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9855 if (! can_compare_p (cmp, mode, ccp_jump)
9856 && (can_compare_p (rcmp, mode, ccp_jump)
9857 /* If the target doesn't provide either UNORDERED or ORDERED
9858 comparisons, canonicalize on UNORDERED for the library. */
9859 || rcmp == UNORDERED))
9863 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9865 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9870 enum rtx_code rcode1;
9871 enum tree_code tcode2;
9895 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9896 if (can_compare_p (rcode1, mode, ccp_jump))
9897 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9901 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9902 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9905 /* If the target doesn't support combined unordered
9906 compares, decompose into UNORDERED + comparison. */
9907 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9908 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9909 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9910 do_jump (exp, if_false_label, if_true_label);
9916 __builtin_expect (<test>, 0) and
9917 __builtin_expect (<test>, 1)
9919 We need to do this here, so that <test> is not converted to a SCC
9920 operation on machines that use condition code registers and COMPARE
9921 like the PowerPC, and then the jump is done based on whether the SCC
9922 operation produced a 1 or 0. */
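/* E.g. (hypothetical source): for
       if (__builtin_expect (x > 0, 0)) ...
   we want the conditional jump emitted directly from the comparison
   "x > 0", with the not-taken expectation recorded, rather than from
   a store-flag value materialized in a register.  */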
9924 /* Check for a built-in function. */
9925 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9927 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9928 tree arglist = TREE_OPERAND (exp, 1);
9930 if (TREE_CODE (fndecl) == FUNCTION_DECL
9931 && DECL_BUILT_IN (fndecl)
9932 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9933 && arglist != NULL_TREE
9934 && TREE_CHAIN (arglist) != NULL_TREE)
9936 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9939 if (seq != NULL_RTX)
9946 /* fall through and generate the normal code. */
9950 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9952 /* This is not needed any more and causes poor code since it causes
9953 comparisons and tests from non-SI objects to have different code sequences.  */
9955 /* Copy to register to avoid generating bad insns by cse
9956 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9957 if (!cse_not_expected && GET_CODE (temp) == MEM)
9958 temp = copy_to_reg (temp);
9960 do_pending_stack_adjust ();
9961 /* Do any postincrements in the expression that was tested. */
9964 if (GET_CODE (temp) == CONST_INT
9965 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9966 || GET_CODE (temp) == LABEL_REF)
9968 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9972 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9973 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9974 /* Note swapping the labels gives us not-equal. */
9975 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9976 else if (GET_MODE (temp) != VOIDmode)
9977 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9978 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9979 GET_MODE (temp), NULL_RTX, 0,
9980 if_false_label, if_true_label);
9985 if (drop_through_label)
9987 /* If do_jump produces code that might be jumped around,
9988 do any stack adjusts from that code, before the place
9989 where control merges in. */
9990 do_pending_stack_adjust ();
9991 emit_label (drop_through_label);
9995 /* Given a comparison expression EXP for values too wide to be compared
9996 with one insn, test the comparison and jump to the appropriate label.
9997 The code of EXP is ignored; we always test GT if SWAP is 0,
9998 and LT if SWAP is 1. */
10001 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10004 rtx if_false_label, if_true_label;
10006 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10007 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10008 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10009 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10011 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10014 /* Compare OP0 with OP1, word at a time, in mode MODE.
10015 UNSIGNEDP says to do unsigned comparison.
10016 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10019 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10020 enum machine_mode mode;
10023 rtx if_false_label, if_true_label;
10025 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10026 rtx drop_through_label = 0;
10029 if (! if_true_label || ! if_false_label)
10030 drop_through_label = gen_label_rtx ();
10031 if (! if_true_label)
10032 if_true_label = drop_through_label;
10033 if (! if_false_label)
10034 if_false_label = drop_through_label;
10036 /* Compare a word at a time, high order first. */
10037 for (i = 0; i < nwords; i++)
10039 rtx op0_word, op1_word;
10041 if (WORDS_BIG_ENDIAN)
10043 op0_word = operand_subword_force (op0, i, mode);
10044 op1_word = operand_subword_force (op1, i, mode);
10048 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10049 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10052 /* All but high-order word must be compared as unsigned. */
10053 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10054 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
10055 NULL_RTX, if_true_label);
10057 /* Consider lower words only if these are equal. */
10058 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10059 NULL_RTX, 0, NULL_RTX, if_false_label);
10062 if (if_false_label)
10063 emit_jump (if_false_label);
10064 if (drop_through_label)
10065 emit_label (drop_through_label);
10068 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10069 with one insn, test the comparison and jump to the appropriate label. */
10072 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10074 rtx if_false_label, if_true_label;
10076 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10077 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10078 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10079 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10081 rtx drop_through_label = 0;
10083 if (! if_false_label)
10084 drop_through_label = if_false_label = gen_label_rtx ();
10086 for (i = 0; i < nwords; i++)
10087 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10088 operand_subword_force (op1, i, mode),
10089 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10090 word_mode, NULL_RTX, 0, if_false_label,
10094 emit_jump (if_true_label);
10095 if (drop_through_label)
10096 emit_label (drop_through_label);
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
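
/* Illustrative sketch only, not compiled: the "or all the words" zero
   test above, restated as ordinary C over an invented array of words.  */
#if 0
static int
example_wide_is_zero (const unsigned long *words, int nwords)
{
  unsigned long part = words[0];
  int i;

  /* IOR the words together; the result is zero iff every word is zero,
     so a single compare against zero suffices at the end.  */
  for (i = 1; i < nwords; i++)
    part |= words[i];
  return part == 0;
}
#endif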

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
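
/* Illustrative sketch only, not compiled: the constant adjustment the
   disabled block above describes.  A signed equality test against a
   narrow constant can be done unsigned once the constant is reduced to
   its zero-extended image; the QImode value and the constant -1 here
   are invented for the example.  */
#if 0
static int
example_eq_as_unsigned (signed char x)
{
  /* x == -1 in QImode: mask the constant with GET_MODE_MASK (QImode),
     i.e. 0xff, and compare the zero-extended operand unsigned.  */
  return (unsigned char) x == 0xff;
}
#endif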

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
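
/* Illustrative sketch only, not compiled: the condition reversal above
   turns "fall through when the condition holds, jump when it fails" into
   a single branch on the opposite code.  This is what makes the reversal
   unsafe for IEEE floating point, where ! (a < b) is not the same as
   a >= b in the presence of NaNs, hence the FLOAT_MODE_P test.  The
   names here are invented for the example.  */
#if 0
static void
example_reverse_branch (int a, int b)
{
  if (a >= b)			/* reverse_condition (LT) == GE */
    goto if_false_label;
  /* ... fall-through (condition-true) path ...  */
 if_false_label:
  return;
}
#endif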

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE
				  (TREE_TYPE (TREE_OPERAND (exp, 1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
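
/* Illustrative sketch only, not compiled: the single-bit store-flag
   trick handled early in do_store_flag, restated as ordinary C.  The
   function and parameter names are invented for the example.  */
#if 0
static int
example_bit_test (unsigned int x, int bitnum, int want_eq)
{
  /* (x & (1 << bitnum)) != 0 becomes a shift and an AND; the EQ form
     additionally XORs the result with 1.  No scc insn is needed, and
     the real code emits the AND last so it can combine with later
     operations.  */
  int ne = (x >> bitnum) & 1;
  return want_eq ? ne ^ 1 : ne;
}
#endif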

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
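
/* Illustrative sketch only, not compiled: the dispatch do_tablejump
   builds, as ordinary C over an invented array of code addresses.  */
#if 0
static void *
example_table_dispatch (void *const *table, unsigned long index,
			unsigned long range, void *default_target)
{
  /* One unsigned GTU compare checks both table bounds at once, because
     the low bound has already been subtracted from INDEX.  */
  if (index > range)
    return default_target;
  /* The vector entry lives at  table + index * entry_size,  which is
     the PLUS (MULT (index, size), LABEL_REF) address built above.  */
  return table[index];
}
#endif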